From 81af496d7cbe7f4ebee2186f9e458beedeb997e5 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Sun, 3 Dec 2023 16:02:42 +0100 Subject: [PATCH 001/127] fix simd_bitmask docs --- crates/core_simd/src/intrinsics.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/core_simd/src/intrinsics.rs b/crates/core_simd/src/intrinsics.rs index b27893bc7294..5260de93354e 100644 --- a/crates/core_simd/src/intrinsics.rs +++ b/crates/core_simd/src/intrinsics.rs @@ -132,9 +132,10 @@ extern "platform-intrinsic" { // `fn simd_bitmask(vector) -> unsigned integer` takes a vector of integers and // returns either an unsigned integer or array of `u8`. // Every element in the vector becomes a single bit in the returned bitmask. - // If the vector has less than 8 lanes, a u8 is returned with zeroed trailing bits. // The bit order of the result depends on the byte endianness. LSB-first for little // endian and MSB-first for big endian. + // If the vector has less than 8 lanes, the mask lives in the least-significant bits + // (e.g., [true, false] becomes `0b01` on little endian and `0b10` on big endian). // // UB if called on a vector with values other than 0 and -1. 
#[allow(unused)] From 289c1d14f0dfd80d5e94141a3b9b59bed41c3539 Mon Sep 17 00:00:00 2001 From: Caleb Zulawski Date: Sun, 3 Dec 2023 11:27:57 -0500 Subject: [PATCH 002/127] Fix bitmask vector bit order --- crates/core_simd/src/masks/full_masks.rs | 6 ++++ crates/core_simd/tests/masks.rs | 42 ++++++++++++++++++++++++ 2 files changed, 48 insertions(+) diff --git a/crates/core_simd/src/masks/full_masks.rs b/crates/core_simd/src/masks/full_masks.rs index 63964f455e05..b184b98a1476 100644 --- a/crates/core_simd/src/masks/full_masks.rs +++ b/crates/core_simd/src/masks/full_masks.rs @@ -157,6 +157,9 @@ where for x in bytes.as_mut() { *x = x.reverse_bits() } + if N % 8 > 0 { + bytes.as_mut()[N / 8] >>= 8 - N % 8; + } } bitmask.as_mut_array()[..bytes.as_ref().len()].copy_from_slice(bytes.as_ref()); @@ -180,6 +183,9 @@ where for x in bytes.as_mut() { *x = x.reverse_bits(); } + if N % 8 > 0 { + bytes.as_mut()[N / 8] >>= 8 - N % 8; + } } // Compute the regular mask diff --git a/crates/core_simd/tests/masks.rs b/crates/core_simd/tests/masks.rs index 00fc2a24e27a..fc6a3476b7c6 100644 --- a/crates/core_simd/tests/masks.rs +++ b/crates/core_simd/tests/masks.rs @@ -99,6 +99,19 @@ macro_rules! test_mask_api { assert_eq!(Mask::<$type, 2>::from_bitmask(bitmask), mask); } + #[cfg(feature = "all_lane_counts")] + #[test] + fn roundtrip_bitmask_conversion_odd() { + let values = [ + true, false, true, false, true, true, false, false, false, true, true, + ]; + let mask = Mask::<$type, 11>::from_array(values); + let bitmask = mask.to_bitmask(); + assert_eq!(bitmask, 0b11000110101); + assert_eq!(Mask::<$type, 11>::from_bitmask(bitmask), mask); + } + + #[test] fn cast() { fn cast_impl() @@ -134,6 +147,35 @@ macro_rules! 
test_mask_api { assert_eq!(bitmask.resize::<2>(0).to_ne_bytes()[..2], [0b01001001, 0b10000011]); assert_eq!(Mask::<$type, 16>::from_bitmask_vector(bitmask), mask); } + + // rust-lang/portable-simd#379 + #[test] + fn roundtrip_bitmask_vector_conversion_small() { + use core_simd::simd::ToBytes; + let values = [ + true, false, true, true + ]; + let mask = Mask::<$type, 4>::from_array(values); + let bitmask = mask.to_bitmask_vector(); + assert_eq!(bitmask.resize::<1>(0).to_ne_bytes()[0], 0b00001101); + assert_eq!(Mask::<$type, 4>::from_bitmask_vector(bitmask), mask); + } + + /* FIXME doesn't work with non-powers-of-two, yet + // rust-lang/portable-simd#379 + #[cfg(feature = "all_lane_counts")] + #[test] + fn roundtrip_bitmask_vector_conversion_odd() { + use core_simd::simd::ToBytes; + let values = [ + true, false, true, false, true, true, false, false, false, true, true, + ]; + let mask = Mask::<$type, 11>::from_array(values); + let bitmask = mask.to_bitmask_vector(); + assert_eq!(bitmask.resize::<2>(0).to_ne_bytes()[..2], [0b00110101, 0b00000110]); + assert_eq!(Mask::<$type, 11>::from_bitmask_vector(bitmask), mask); + } + */ } } } From c7057757607ab7a6fe460ac18b3f0c3ae2b4dc68 Mon Sep 17 00:00:00 2001 From: Jacob Lifshay Date: Mon, 11 Dec 2023 12:17:10 -0800 Subject: [PATCH 003/127] Fix load/store safety comments to require aligned `T` Fixes: #382 --- crates/core_simd/src/vector.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/core_simd/src/vector.rs b/crates/core_simd/src/vector.rs index 105c06741c58..881406d0eac8 100644 --- a/crates/core_simd/src/vector.rs +++ b/crates/core_simd/src/vector.rs @@ -194,7 +194,7 @@ where /// With padding, `read_unaligned` will read past the end of an array of N elements. /// /// # Safety - /// Reading `ptr` must be safe, as if by `<*const [T; N]>::read_unaligned`. + /// Reading `ptr` must be safe, as if by `<*const [T; N]>::read`. 
#[inline] const unsafe fn load(ptr: *const [T; N]) -> Self { // There are potentially simpler ways to write this function, but this should result in @@ -215,7 +215,7 @@ where /// See `load` as to why this function is necessary. /// /// # Safety - /// Writing to `ptr` must be safe, as if by `<*mut [T; N]>::write_unaligned`. + /// Writing to `ptr` must be safe, as if by `<*mut [T; N]>::write`. #[inline] const unsafe fn store(self, ptr: *mut [T; N]) { // There are potentially simpler ways to write this function, but this should result in From b6eeb4ee90e31f7846e94e4f639f44bc5f623ca0 Mon Sep 17 00:00:00 2001 From: Jubilee Young Date: Wed, 13 Dec 2023 17:46:46 -0800 Subject: [PATCH 004/127] Assume masks are correct This allows miri to detect when they are not, and may be exploited by LLVM during optimization. --- crates/core_simd/src/lib.rs | 1 + crates/core_simd/src/masks.rs | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/crates/core_simd/src/lib.rs b/crates/core_simd/src/lib.rs index 64ba9705ef52..e974e7aa25a4 100644 --- a/crates/core_simd/src/lib.rs +++ b/crates/core_simd/src/lib.rs @@ -4,6 +4,7 @@ const_maybe_uninit_as_mut_ptr, const_mut_refs, convert_float_to_int, + core_intrinsics, decl_macro, inline_const, intra_doc_pointers, diff --git a/crates/core_simd/src/masks.rs b/crates/core_simd/src/masks.rs index 0623d2bf3d12..b95c070d09c3 100644 --- a/crates/core_simd/src/masks.rs +++ b/crates/core_simd/src/masks.rs @@ -175,7 +175,10 @@ where #[must_use = "method returns a new mask and does not mutate the original value"] pub unsafe fn from_int_unchecked(value: Simd) -> Self { // Safety: the caller must confirm this invariant - unsafe { Self(mask_impl::Mask::from_int_unchecked(value)) } + unsafe { + core::intrinsics::assume(::valid(value)); + Self(mask_impl::Mask::from_int_unchecked(value)) + } } /// Converts a vector of integers to a mask, where 0 represents `false` and -1 From bb50fa23252a82e6829308a2d771d9eb26226547 Mon Sep 17 00:00:00 2001 
From: Caleb Zulawski Date: Fri, 29 Dec 2023 18:30:53 -0500 Subject: [PATCH 005/127] Use core::intrinsics --- crates/core_simd/src/intrinsics.rs | 170 --------------------- crates/core_simd/src/lib.rs | 2 +- crates/core_simd/src/masks.rs | 19 +-- crates/core_simd/src/masks/bitmask.rs | 9 +- crates/core_simd/src/masks/full_masks.rs | 21 ++- crates/core_simd/src/mod.rs | 4 - crates/core_simd/src/ops.rs | 6 +- crates/core_simd/src/ops/unary.rs | 3 +- crates/core_simd/src/select.rs | 3 +- crates/core_simd/src/simd/cmp/eq.rs | 9 +- crates/core_simd/src/simd/cmp/ord.rs | 25 ++- crates/core_simd/src/simd/num/float.rs | 20 +-- crates/core_simd/src/simd/num/int.rs | 26 ++-- crates/core_simd/src/simd/num/uint.rs | 30 ++-- crates/core_simd/src/simd/ptr/const_ptr.rs | 14 +- crates/core_simd/src/simd/ptr/mut_ptr.rs | 14 +- crates/core_simd/src/swizzle.rs | 5 +- crates/core_simd/src/vector.rs | 11 +- 18 files changed, 106 insertions(+), 285 deletions(-) delete mode 100644 crates/core_simd/src/intrinsics.rs diff --git a/crates/core_simd/src/intrinsics.rs b/crates/core_simd/src/intrinsics.rs deleted file mode 100644 index 5260de93354e..000000000000 --- a/crates/core_simd/src/intrinsics.rs +++ /dev/null @@ -1,170 +0,0 @@ -//! This module contains the LLVM intrinsics bindings that provide the functionality for this -//! crate. -//! -//! The LLVM assembly language is documented here: -//! -//! A quick glossary of jargon that may appear in this module, mostly paraphrasing LLVM's LangRef: -//! - poison: "undefined behavior as a value". specifically, it is like uninit memory (such as padding bytes). it is "safe" to create poison, BUT -//! poison MUST NOT be observed from safe code, as operations on poison return poison, like NaN. unlike NaN, which has defined comparisons, -//! poison is neither true nor false, and LLVM may also convert it to undef (at which point it is both). so, it can't be conditioned on, either. -//! - undef: "a value that is every value". 
functionally like poison, insofar as Rust is concerned. poison may become this. note: -//! this means that division by poison or undef is like division by zero, which means it inflicts... -//! - "UB": poison and undef cover most of what people call "UB". "UB" means this operation immediately invalidates the program: -//! LLVM is allowed to lower it to `ud2` or other opcodes that may cause an illegal instruction exception, and this is the "good end". -//! The "bad end" is that LLVM may reverse time to the moment control flow diverged on a path towards undefined behavior, -//! and destroy the other branch, potentially deleting safe code and violating Rust's `unsafe` contract. -//! -//! Note that according to LLVM, vectors are not arrays, but they are equivalent when stored to and loaded from memory. -//! -//! Unless stated otherwise, all intrinsics for binary operations require SIMD vectors of equal types and lengths. - -// These intrinsics aren't linked directly from LLVM and are mostly undocumented, however they are -// mostly lowered to the matching LLVM instructions by the compiler in a fairly straightforward manner. -// The associated LLVM instruction or intrinsic is documented alongside each Rust intrinsic function. -extern "platform-intrinsic" { - /// add/fadd - pub(crate) fn simd_add(x: T, y: T) -> T; - - /// sub/fsub - pub(crate) fn simd_sub(lhs: T, rhs: T) -> T; - - /// mul/fmul - pub(crate) fn simd_mul(x: T, y: T) -> T; - - /// udiv/sdiv/fdiv - /// ints and uints: {s,u}div incur UB if division by zero occurs. - /// ints: sdiv is UB for int::MIN / -1. - /// floats: fdiv is never UB, but may create NaNs or infinities. - pub(crate) fn simd_div(lhs: T, rhs: T) -> T; - - /// urem/srem/frem - /// ints and uints: {s,u}rem incur UB if division by zero occurs. - /// ints: srem is UB for int::MIN / -1. - /// floats: frem is equivalent to libm::fmod in the "default" floating point environment, sans errno. 
- pub(crate) fn simd_rem(lhs: T, rhs: T) -> T; - - /// shl - /// for (u)ints. poison if rhs >= lhs::BITS - pub(crate) fn simd_shl(lhs: T, rhs: T) -> T; - - /// ints: ashr - /// uints: lshr - /// poison if rhs >= lhs::BITS - pub(crate) fn simd_shr(lhs: T, rhs: T) -> T; - - /// and - pub(crate) fn simd_and(x: T, y: T) -> T; - - /// or - pub(crate) fn simd_or(x: T, y: T) -> T; - - /// xor - pub(crate) fn simd_xor(x: T, y: T) -> T; - - /// fptoui/fptosi/uitofp/sitofp - /// casting floats to integers is truncating, so it is safe to convert values like e.g. 1.5 - /// but the truncated value must fit in the target type or the result is poison. - /// use `simd_as` instead for a cast that performs a saturating conversion. - pub(crate) fn simd_cast(x: T) -> U; - /// follows Rust's `T as U` semantics, including saturating float casts - /// which amounts to the same as `simd_cast` for many cases - pub(crate) fn simd_as(x: T) -> U; - - /// neg/fneg - /// ints: ultimately becomes a call to cg_ssa's BuilderMethods::neg. cg_llvm equates this to `simd_sub(Simd::splat(0), x)`. - /// floats: LLVM's fneg, which changes the floating point sign bit. Some arches have instructions for it. - /// Rust panics for Neg::neg(int::MIN) due to overflow, but it is not UB in LLVM without `nsw`. 
- pub(crate) fn simd_neg(x: T) -> T; - - /// fabs - pub(crate) fn simd_fabs(x: T) -> T; - - // minnum/maxnum - pub(crate) fn simd_fmin(x: T, y: T) -> T; - pub(crate) fn simd_fmax(x: T, y: T) -> T; - - // these return Simd with the same BITS size as the inputs - pub(crate) fn simd_eq(x: T, y: T) -> U; - pub(crate) fn simd_ne(x: T, y: T) -> U; - pub(crate) fn simd_lt(x: T, y: T) -> U; - pub(crate) fn simd_le(x: T, y: T) -> U; - pub(crate) fn simd_gt(x: T, y: T) -> U; - pub(crate) fn simd_ge(x: T, y: T) -> U; - - // shufflevector - // idx: LLVM calls it a "shuffle mask vector constant", a vector of i32s - pub(crate) fn simd_shuffle(x: T, y: T, idx: U) -> V; - - /// llvm.masked.gather - /// like a loop of pointer reads - /// val: vector of values to select if a lane is masked - /// ptr: vector of pointers to read from - /// mask: a "wide" mask of integers, selects as if simd_select(mask, read(ptr), val) - /// note, the LLVM intrinsic accepts a mask vector of `` - /// FIXME: review this if/when we fix up our mask story in general? 
- pub(crate) fn simd_gather(val: T, ptr: U, mask: V) -> T; - /// llvm.masked.scatter - /// like gather, but more spicy, as it writes instead of reads - pub(crate) fn simd_scatter(val: T, ptr: U, mask: V); - - // {s,u}add.sat - pub(crate) fn simd_saturating_add(x: T, y: T) -> T; - - // {s,u}sub.sat - pub(crate) fn simd_saturating_sub(lhs: T, rhs: T) -> T; - - // reductions - // llvm.vector.reduce.{add,fadd} - pub(crate) fn simd_reduce_add_ordered(x: T, y: U) -> U; - // llvm.vector.reduce.{mul,fmul} - pub(crate) fn simd_reduce_mul_ordered(x: T, y: U) -> U; - #[allow(unused)] - pub(crate) fn simd_reduce_all(x: T) -> bool; - #[allow(unused)] - pub(crate) fn simd_reduce_any(x: T) -> bool; - pub(crate) fn simd_reduce_max(x: T) -> U; - pub(crate) fn simd_reduce_min(x: T) -> U; - pub(crate) fn simd_reduce_and(x: T) -> U; - pub(crate) fn simd_reduce_or(x: T) -> U; - pub(crate) fn simd_reduce_xor(x: T) -> U; - - // truncate integer vector to bitmask - // `fn simd_bitmask(vector) -> unsigned integer` takes a vector of integers and - // returns either an unsigned integer or array of `u8`. - // Every element in the vector becomes a single bit in the returned bitmask. - // The bit order of the result depends on the byte endianness. LSB-first for little - // endian and MSB-first for big endian. - // If the vector has less than 8 lanes, the mask lives in the least-significant bits - // (e.g., [true, false] becomes `0b01` on little endian and `0b10` on big endian). - // - // UB if called on a vector with values other than 0 and -1. - #[allow(unused)] - pub(crate) fn simd_bitmask(x: T) -> U; - - // select - // first argument is a vector of integers, -1 (all bits 1) is "true" - // logically equivalent to (yes & m) | (no & (m^-1), - // but you can use it on floats. 
- pub(crate) fn simd_select(m: M, yes: T, no: T) -> T; - #[allow(unused)] - pub(crate) fn simd_select_bitmask(m: M, yes: T, no: T) -> T; - - /// getelementptr (without inbounds) - /// equivalent to wrapping_offset - pub(crate) fn simd_arith_offset(ptr: T, offset: U) -> T; - - /// equivalent to `T as U` semantics, specifically for pointers - pub(crate) fn simd_cast_ptr(ptr: T) -> U; - - /// expose a pointer as an address - pub(crate) fn simd_expose_addr(ptr: T) -> U; - - /// convert an exposed address back to a pointer - pub(crate) fn simd_from_exposed_addr(addr: T) -> U; - - // Integer operations - pub(crate) fn simd_bswap(x: T) -> T; - pub(crate) fn simd_bitreverse(x: T) -> T; - pub(crate) fn simd_ctlz(x: T) -> T; - pub(crate) fn simd_cttz(x: T) -> T; -} diff --git a/crates/core_simd/src/lib.rs b/crates/core_simd/src/lib.rs index 64ba9705ef52..faec64c6344b 100644 --- a/crates/core_simd/src/lib.rs +++ b/crates/core_simd/src/lib.rs @@ -1,5 +1,6 @@ #![no_std] #![feature( + core_intrinsics, const_refs_to_cell, const_maybe_uninit_as_mut_ptr, const_mut_refs, @@ -7,7 +8,6 @@ decl_macro, inline_const, intra_doc_pointers, - platform_intrinsics, repr_simd, simd_ffi, staged_api, diff --git a/crates/core_simd/src/masks.rs b/crates/core_simd/src/masks.rs index 0623d2bf3d12..32fd9acbaea9 100644 --- a/crates/core_simd/src/masks.rs +++ b/crates/core_simd/src/masks.rs @@ -12,9 +12,7 @@ )] mod mask_impl; -use crate::simd::{ - cmp::SimdPartialEq, intrinsics, LaneCount, Simd, SimdCast, SimdElement, SupportedLaneCount, -}; +use crate::simd::{cmp::SimdPartialEq, LaneCount, Simd, SimdCast, SimdElement, SupportedLaneCount}; use core::cmp::Ordering; use core::{fmt, mem}; @@ -141,8 +139,9 @@ where // but these are "dependently-sized" types, so copy elision it is! 
unsafe { let bytes: [u8; N] = mem::transmute_copy(&array); - let bools: Simd = intrinsics::simd_ne(Simd::from_array(bytes), Simd::splat(0u8)); - Mask::from_int_unchecked(intrinsics::simd_cast(bools)) + let bools: Simd = + core::intrinsics::simd::simd_ne(Simd::from_array(bytes), Simd::splat(0u8)); + Mask::from_int_unchecked(core::intrinsics::simd::simd_cast(bools)) } } @@ -160,7 +159,7 @@ where // This would be hypothetically valid as an "in-place" transmute, // but these are "dependently-sized" types, so copy elision it is! unsafe { - let mut bytes: Simd = intrinsics::simd_cast(self.to_int()); + let mut bytes: Simd = core::intrinsics::simd::simd_cast(self.to_int()); bytes &= Simd::splat(1i8); mem::transmute_copy(&bytes) } @@ -374,15 +373,17 @@ where ); // Safety: the input and output are integer vectors - let index: Simd = unsafe { intrinsics::simd_cast(index) }; + let index: Simd = unsafe { core::intrinsics::simd::simd_cast(index) }; let masked_index = self.select(index, Self::splat(true).to_int()); // Safety: the input and output are integer vectors - let masked_index: Simd = unsafe { intrinsics::simd_cast(masked_index) }; + let masked_index: Simd = + unsafe { core::intrinsics::simd::simd_cast(masked_index) }; // Safety: the input is an integer vector - let min_index: T::Unsigned = unsafe { intrinsics::simd_reduce_min(masked_index) }; + let min_index: T::Unsigned = + unsafe { core::intrinsics::simd::simd_reduce_min(masked_index) }; // Safety: the return value is the unsigned version of T let min_index: T = unsafe { core::mem::transmute_copy(&min_index) }; diff --git a/crates/core_simd/src/masks/bitmask.rs b/crates/core_simd/src/masks/bitmask.rs index 6ddff07fea25..96c553426ee7 100644 --- a/crates/core_simd/src/masks/bitmask.rs +++ b/crates/core_simd/src/masks/bitmask.rs @@ -1,6 +1,5 @@ #![allow(unused_imports)] use super::MaskElement; -use crate::simd::intrinsics; use crate::simd::{LaneCount, Simd, SupportedLaneCount}; use core::marker::PhantomData; @@ -109,14 
+108,18 @@ where #[must_use = "method returns a new vector and does not mutate the original value"] pub fn to_int(self) -> Simd { unsafe { - intrinsics::simd_select_bitmask(self.0, Simd::splat(T::TRUE), Simd::splat(T::FALSE)) + core::intrinsics::simd::simd_select_bitmask( + self.0, + Simd::splat(T::TRUE), + Simd::splat(T::FALSE), + ) } } #[inline] #[must_use = "method returns a new mask and does not mutate the original value"] pub unsafe fn from_int_unchecked(value: Simd) -> Self { - unsafe { Self(intrinsics::simd_bitmask(value), PhantomData) } + unsafe { Self(core::intrinsics::simd::simd_bitmask(value), PhantomData) } } #[inline] diff --git a/crates/core_simd/src/masks/full_masks.rs b/crates/core_simd/src/masks/full_masks.rs index 63964f455e05..333e449e4388 100644 --- a/crates/core_simd/src/masks/full_masks.rs +++ b/crates/core_simd/src/masks/full_masks.rs @@ -1,6 +1,5 @@ //! Masks that take up full SIMD vector registers. -use crate::simd::intrinsics; use crate::simd::{LaneCount, MaskElement, Simd, SupportedLaneCount}; #[repr(transparent)] @@ -138,7 +137,7 @@ where U: MaskElement, { // Safety: masks are simply integer vectors of 0 and -1, and we can cast the element type. 
- unsafe { Mask(intrinsics::simd_cast(self.0)) } + unsafe { Mask(core::intrinsics::simd::simd_cast(self.0)) } } #[inline] @@ -150,7 +149,7 @@ where unsafe { // Compute the bitmask let mut bytes: as SupportedLaneCount>::BitMask = - intrinsics::simd_bitmask(self.0); + core::intrinsics::simd::simd_bitmask(self.0); // LLVM assumes bit order should match endianness if cfg!(target_endian = "big") { @@ -183,7 +182,7 @@ where } // Compute the regular mask - Self::from_int_unchecked(intrinsics::simd_select_bitmask( + Self::from_int_unchecked(core::intrinsics::simd::simd_select_bitmask( bytes, Self::splat(true).to_int(), Self::splat(false).to_int(), @@ -199,7 +198,7 @@ where let resized = self.to_int().resize::(T::FALSE); // Safety: `resized` is an integer vector with length M, which must match T - let bitmask: U = unsafe { intrinsics::simd_bitmask(resized) }; + let bitmask: U = unsafe { core::intrinsics::simd::simd_bitmask(resized) }; // LLVM assumes bit order should match endianness if cfg!(target_endian = "big") { @@ -223,7 +222,7 @@ where // SAFETY: `mask` is the correct bitmask type for a u64 bitmask let mask: Simd = unsafe { - intrinsics::simd_select_bitmask( + core::intrinsics::simd::simd_select_bitmask( bitmask, Simd::::splat(T::TRUE), Simd::::splat(T::FALSE), @@ -274,14 +273,14 @@ where #[must_use = "method returns a new bool and does not mutate the original value"] pub fn any(self) -> bool { // Safety: use `self` as an integer vector - unsafe { intrinsics::simd_reduce_any(self.to_int()) } + unsafe { core::intrinsics::simd::simd_reduce_any(self.to_int()) } } #[inline] #[must_use = "method returns a new vector and does not mutate the original value"] pub fn all(self) -> bool { // Safety: use `self` as an integer vector - unsafe { intrinsics::simd_reduce_all(self.to_int()) } + unsafe { core::intrinsics::simd::simd_reduce_all(self.to_int()) } } } @@ -306,7 +305,7 @@ where #[must_use = "method returns a new mask and does not mutate the original value"] fn bitand(self, 
rhs: Self) -> Self { // Safety: `self` is an integer vector - unsafe { Self(intrinsics::simd_and(self.0, rhs.0)) } + unsafe { Self(core::intrinsics::simd::simd_and(self.0, rhs.0)) } } } @@ -320,7 +319,7 @@ where #[must_use = "method returns a new mask and does not mutate the original value"] fn bitor(self, rhs: Self) -> Self { // Safety: `self` is an integer vector - unsafe { Self(intrinsics::simd_or(self.0, rhs.0)) } + unsafe { Self(core::intrinsics::simd::simd_or(self.0, rhs.0)) } } } @@ -334,7 +333,7 @@ where #[must_use = "method returns a new mask and does not mutate the original value"] fn bitxor(self, rhs: Self) -> Self { // Safety: `self` is an integer vector - unsafe { Self(intrinsics::simd_xor(self.0, rhs.0)) } + unsafe { Self(core::intrinsics::simd::simd_xor(self.0, rhs.0)) } } } diff --git a/crates/core_simd/src/mod.rs b/crates/core_simd/src/mod.rs index fd016f1c6f7a..45b1a0f97514 100644 --- a/crates/core_simd/src/mod.rs +++ b/crates/core_simd/src/mod.rs @@ -1,8 +1,6 @@ #[macro_use] mod swizzle; -pub(crate) mod intrinsics; - mod alias; mod cast; mod fmt; @@ -27,8 +25,6 @@ pub mod simd { pub mod cmp; - pub(crate) use crate::core_simd::intrinsics; - pub use crate::core_simd::alias::*; pub use crate::core_simd::cast::*; pub use crate::core_simd::lane_count::{LaneCount, SupportedLaneCount}; diff --git a/crates/core_simd/src/ops.rs b/crates/core_simd/src/ops.rs index 8a1b083f0398..d8e10eeaa1a2 100644 --- a/crates/core_simd/src/ops.rs +++ b/crates/core_simd/src/ops.rs @@ -37,7 +37,7 @@ where macro_rules! unsafe_base { ($lhs:ident, $rhs:ident, {$simd_call:ident}, $($_:tt)*) => { // Safety: $lhs and $rhs are vectors - unsafe { $crate::simd::intrinsics::$simd_call($lhs, $rhs) } + unsafe { core::intrinsics::simd::$simd_call($lhs, $rhs) } }; } @@ -55,7 +55,7 @@ macro_rules! 
wrap_bitshift { #[allow(clippy::suspicious_arithmetic_impl)] // Safety: $lhs and the bitand result are vectors unsafe { - $crate::simd::intrinsics::$simd_call( + core::intrinsics::simd::$simd_call( $lhs, $rhs.bitand(Simd::splat(<$int>::BITS as $int - 1)), ) @@ -97,7 +97,7 @@ macro_rules! int_divrem_guard { $rhs }; // Safety: $lhs and rhs are vectors - unsafe { $crate::simd::intrinsics::$simd_call($lhs, rhs) } + unsafe { core::intrinsics::simd::$simd_call($lhs, rhs) } } }; } diff --git a/crates/core_simd/src/ops/unary.rs b/crates/core_simd/src/ops/unary.rs index a651aa73e952..bdae96332a3a 100644 --- a/crates/core_simd/src/ops/unary.rs +++ b/crates/core_simd/src/ops/unary.rs @@ -1,4 +1,3 @@ -use crate::simd::intrinsics; use crate::simd::{LaneCount, Simd, SimdElement, SupportedLaneCount}; use core::ops::{Neg, Not}; // unary ops @@ -15,7 +14,7 @@ macro_rules! neg { #[must_use = "operator returns a new vector without mutating the input"] fn neg(self) -> Self::Output { // Safety: `self` is a signed vector - unsafe { intrinsics::simd_neg(self) } + unsafe { core::intrinsics::simd::simd_neg(self) } } })* } diff --git a/crates/core_simd/src/select.rs b/crates/core_simd/src/select.rs index cdcf8eeec815..f33aa261a928 100644 --- a/crates/core_simd/src/select.rs +++ b/crates/core_simd/src/select.rs @@ -1,4 +1,3 @@ -use crate::simd::intrinsics; use crate::simd::{LaneCount, Mask, MaskElement, Simd, SimdElement, SupportedLaneCount}; impl Mask @@ -29,7 +28,7 @@ where { // Safety: The mask has been cast to a vector of integers, // and the operands to select between are vectors of the same type and length. - unsafe { intrinsics::simd_select(self.to_int(), true_values, false_values) } + unsafe { core::intrinsics::simd::simd_select(self.to_int(), true_values, false_values) } } /// Choose elements from two masks. 
diff --git a/crates/core_simd/src/simd/cmp/eq.rs b/crates/core_simd/src/simd/cmp/eq.rs index f132fa2cc0ca..5b4615ce51d7 100644 --- a/crates/core_simd/src/simd/cmp/eq.rs +++ b/crates/core_simd/src/simd/cmp/eq.rs @@ -1,5 +1,4 @@ use crate::simd::{ - intrinsics, ptr::{SimdConstPtr, SimdMutPtr}, LaneCount, Mask, Simd, SimdElement, SupportedLaneCount, }; @@ -31,14 +30,14 @@ macro_rules! impl_number { fn simd_eq(self, other: Self) -> Self::Mask { // Safety: `self` is a vector, and the result of the comparison // is always a valid mask. - unsafe { Mask::from_int_unchecked(intrinsics::simd_eq(self, other)) } + unsafe { Mask::from_int_unchecked(core::intrinsics::simd::simd_eq(self, other)) } } #[inline] fn simd_ne(self, other: Self) -> Self::Mask { // Safety: `self` is a vector, and the result of the comparison // is always a valid mask. - unsafe { Mask::from_int_unchecked(intrinsics::simd_ne(self, other)) } + unsafe { Mask::from_int_unchecked(core::intrinsics::simd::simd_ne(self, other)) } } } )* @@ -60,14 +59,14 @@ macro_rules! impl_mask { fn simd_eq(self, other: Self) -> Self::Mask { // Safety: `self` is a vector, and the result of the comparison // is always a valid mask. - unsafe { Self::from_int_unchecked(intrinsics::simd_eq(self.to_int(), other.to_int())) } + unsafe { Self::from_int_unchecked(core::intrinsics::simd::simd_eq(self.to_int(), other.to_int())) } } #[inline] fn simd_ne(self, other: Self) -> Self::Mask { // Safety: `self` is a vector, and the result of the comparison // is always a valid mask. 
- unsafe { Self::from_int_unchecked(intrinsics::simd_ne(self.to_int(), other.to_int())) } + unsafe { Self::from_int_unchecked(core::intrinsics::simd::simd_ne(self.to_int(), other.to_int())) } } } )* diff --git a/crates/core_simd/src/simd/cmp/ord.rs b/crates/core_simd/src/simd/cmp/ord.rs index 4e9d49ea2211..899f00a83164 100644 --- a/crates/core_simd/src/simd/cmp/ord.rs +++ b/crates/core_simd/src/simd/cmp/ord.rs @@ -1,6 +1,5 @@ use crate::simd::{ cmp::SimdPartialEq, - intrinsics, ptr::{SimdConstPtr, SimdMutPtr}, LaneCount, Mask, Simd, SupportedLaneCount, }; @@ -57,28 +56,28 @@ macro_rules! impl_integer { fn simd_lt(self, other: Self) -> Self::Mask { // Safety: `self` is a vector, and the result of the comparison // is always a valid mask. - unsafe { Mask::from_int_unchecked(intrinsics::simd_lt(self, other)) } + unsafe { Mask::from_int_unchecked(core::intrinsics::simd::simd_lt(self, other)) } } #[inline] fn simd_le(self, other: Self) -> Self::Mask { // Safety: `self` is a vector, and the result of the comparison // is always a valid mask. - unsafe { Mask::from_int_unchecked(intrinsics::simd_le(self, other)) } + unsafe { Mask::from_int_unchecked(core::intrinsics::simd::simd_le(self, other)) } } #[inline] fn simd_gt(self, other: Self) -> Self::Mask { // Safety: `self` is a vector, and the result of the comparison // is always a valid mask. - unsafe { Mask::from_int_unchecked(intrinsics::simd_gt(self, other)) } + unsafe { Mask::from_int_unchecked(core::intrinsics::simd::simd_gt(self, other)) } } #[inline] fn simd_ge(self, other: Self) -> Self::Mask { // Safety: `self` is a vector, and the result of the comparison // is always a valid mask. - unsafe { Mask::from_int_unchecked(intrinsics::simd_ge(self, other)) } + unsafe { Mask::from_int_unchecked(core::intrinsics::simd::simd_ge(self, other)) } } } @@ -123,28 +122,28 @@ macro_rules! 
impl_float { fn simd_lt(self, other: Self) -> Self::Mask { // Safety: `self` is a vector, and the result of the comparison // is always a valid mask. - unsafe { Mask::from_int_unchecked(intrinsics::simd_lt(self, other)) } + unsafe { Mask::from_int_unchecked(core::intrinsics::simd::simd_lt(self, other)) } } #[inline] fn simd_le(self, other: Self) -> Self::Mask { // Safety: `self` is a vector, and the result of the comparison // is always a valid mask. - unsafe { Mask::from_int_unchecked(intrinsics::simd_le(self, other)) } + unsafe { Mask::from_int_unchecked(core::intrinsics::simd::simd_le(self, other)) } } #[inline] fn simd_gt(self, other: Self) -> Self::Mask { // Safety: `self` is a vector, and the result of the comparison // is always a valid mask. - unsafe { Mask::from_int_unchecked(intrinsics::simd_gt(self, other)) } + unsafe { Mask::from_int_unchecked(core::intrinsics::simd::simd_gt(self, other)) } } #[inline] fn simd_ge(self, other: Self) -> Self::Mask { // Safety: `self` is a vector, and the result of the comparison // is always a valid mask. - unsafe { Mask::from_int_unchecked(intrinsics::simd_ge(self, other)) } + unsafe { Mask::from_int_unchecked(core::intrinsics::simd::simd_ge(self, other)) } } } )* @@ -164,28 +163,28 @@ macro_rules! impl_mask { fn simd_lt(self, other: Self) -> Self::Mask { // Safety: `self` is a vector, and the result of the comparison // is always a valid mask. - unsafe { Self::from_int_unchecked(intrinsics::simd_lt(self.to_int(), other.to_int())) } + unsafe { Self::from_int_unchecked(core::intrinsics::simd::simd_lt(self.to_int(), other.to_int())) } } #[inline] fn simd_le(self, other: Self) -> Self::Mask { // Safety: `self` is a vector, and the result of the comparison // is always a valid mask. 
- unsafe { Self::from_int_unchecked(intrinsics::simd_le(self.to_int(), other.to_int())) } + unsafe { Self::from_int_unchecked(core::intrinsics::simd::simd_le(self.to_int(), other.to_int())) } } #[inline] fn simd_gt(self, other: Self) -> Self::Mask { // Safety: `self` is a vector, and the result of the comparison // is always a valid mask. - unsafe { Self::from_int_unchecked(intrinsics::simd_gt(self.to_int(), other.to_int())) } + unsafe { Self::from_int_unchecked(core::intrinsics::simd::simd_gt(self.to_int(), other.to_int())) } } #[inline] fn simd_ge(self, other: Self) -> Self::Mask { // Safety: `self` is a vector, and the result of the comparison // is always a valid mask. - unsafe { Self::from_int_unchecked(intrinsics::simd_ge(self.to_int(), other.to_int())) } + unsafe { Self::from_int_unchecked(core::intrinsics::simd::simd_ge(self.to_int(), other.to_int())) } } } diff --git a/crates/core_simd/src/simd/num/float.rs b/crates/core_simd/src/simd/num/float.rs index fc0b99e87a68..59e43851ea8d 100644 --- a/crates/core_simd/src/simd/num/float.rs +++ b/crates/core_simd/src/simd/num/float.rs @@ -1,7 +1,7 @@ use super::sealed::Sealed; use crate::simd::{ cmp::{SimdPartialEq, SimdPartialOrd}, - intrinsics, LaneCount, Mask, Simd, SimdCast, SimdElement, SupportedLaneCount, + LaneCount, Mask, Simd, SimdCast, SimdElement, SupportedLaneCount, }; /// Operations on SIMD vectors of floats. @@ -259,7 +259,7 @@ macro_rules! impl_trait { fn cast(self) -> Self::Cast { // Safety: supported types are guaranteed by SimdCast - unsafe { intrinsics::simd_as(self) } + unsafe { core::intrinsics::simd::simd_as(self) } } #[inline] @@ -269,7 +269,7 @@ macro_rules! impl_trait { Self::Scalar: core::convert::FloatToInt, { // Safety: supported types are guaranteed by SimdCast, the caller is responsible for the extra invariants - unsafe { intrinsics::simd_cast(self) } + unsafe { core::intrinsics::simd::simd_cast(self) } } #[inline] @@ -289,7 +289,7 @@ macro_rules! 
impl_trait { #[inline] fn abs(self) -> Self { // Safety: `self` is a float vector - unsafe { intrinsics::simd_fabs(self) } + unsafe { core::intrinsics::simd::simd_fabs(self) } } #[inline] @@ -363,13 +363,13 @@ macro_rules! impl_trait { #[inline] fn simd_min(self, other: Self) -> Self { // Safety: `self` and `other` are float vectors - unsafe { intrinsics::simd_fmin(self, other) } + unsafe { core::intrinsics::simd::simd_fmin(self, other) } } #[inline] fn simd_max(self, other: Self) -> Self { // Safety: `self` and `other` are floating point vectors - unsafe { intrinsics::simd_fmax(self, other) } + unsafe { core::intrinsics::simd::simd_fmax(self, other) } } #[inline] @@ -391,7 +391,7 @@ macro_rules! impl_trait { self.as_array().iter().sum() } else { // Safety: `self` is a float vector - unsafe { intrinsics::simd_reduce_add_ordered(self, 0.) } + unsafe { core::intrinsics::simd::simd_reduce_add_ordered(self, 0.) } } } @@ -402,20 +402,20 @@ macro_rules! impl_trait { self.as_array().iter().product() } else { // Safety: `self` is a float vector - unsafe { intrinsics::simd_reduce_mul_ordered(self, 1.) } + unsafe { core::intrinsics::simd::simd_reduce_mul_ordered(self, 1.) 
} } } #[inline] fn reduce_max(self) -> Self::Scalar { // Safety: `self` is a float vector - unsafe { intrinsics::simd_reduce_max(self) } + unsafe { core::intrinsics::simd::simd_reduce_max(self) } } #[inline] fn reduce_min(self) -> Self::Scalar { // Safety: `self` is a float vector - unsafe { intrinsics::simd_reduce_min(self) } + unsafe { core::intrinsics::simd::simd_reduce_min(self) } } } )* diff --git a/crates/core_simd/src/simd/num/int.rs b/crates/core_simd/src/simd/num/int.rs index 1f1aa2727829..d7598d9ceaf9 100644 --- a/crates/core_simd/src/simd/num/int.rs +++ b/crates/core_simd/src/simd/num/int.rs @@ -1,6 +1,6 @@ use super::sealed::Sealed; use crate::simd::{ - cmp::SimdPartialOrd, intrinsics, num::SimdUint, LaneCount, Mask, Simd, SimdCast, SimdElement, + cmp::SimdPartialOrd, num::SimdUint, LaneCount, Mask, Simd, SimdCast, SimdElement, SupportedLaneCount, }; @@ -237,19 +237,19 @@ macro_rules! impl_trait { #[inline] fn cast(self) -> Self::Cast { // Safety: supported types are guaranteed by SimdCast - unsafe { intrinsics::simd_as(self) } + unsafe { core::intrinsics::simd::simd_as(self) } } #[inline] fn saturating_add(self, second: Self) -> Self { // Safety: `self` is a vector - unsafe { intrinsics::simd_saturating_add(self, second) } + unsafe { core::intrinsics::simd::simd_saturating_add(self, second) } } #[inline] fn saturating_sub(self, second: Self) -> Self { // Safety: `self` is a vector - unsafe { intrinsics::simd_saturating_sub(self, second) } + unsafe { core::intrinsics::simd::simd_saturating_sub(self, second) } } #[inline] @@ -293,55 +293,55 @@ macro_rules! 
impl_trait { #[inline] fn reduce_sum(self) -> Self::Scalar { // Safety: `self` is an integer vector - unsafe { intrinsics::simd_reduce_add_ordered(self, 0) } + unsafe { core::intrinsics::simd::simd_reduce_add_ordered(self, 0) } } #[inline] fn reduce_product(self) -> Self::Scalar { // Safety: `self` is an integer vector - unsafe { intrinsics::simd_reduce_mul_ordered(self, 1) } + unsafe { core::intrinsics::simd::simd_reduce_mul_ordered(self, 1) } } #[inline] fn reduce_max(self) -> Self::Scalar { // Safety: `self` is an integer vector - unsafe { intrinsics::simd_reduce_max(self) } + unsafe { core::intrinsics::simd::simd_reduce_max(self) } } #[inline] fn reduce_min(self) -> Self::Scalar { // Safety: `self` is an integer vector - unsafe { intrinsics::simd_reduce_min(self) } + unsafe { core::intrinsics::simd::simd_reduce_min(self) } } #[inline] fn reduce_and(self) -> Self::Scalar { // Safety: `self` is an integer vector - unsafe { intrinsics::simd_reduce_and(self) } + unsafe { core::intrinsics::simd::simd_reduce_and(self) } } #[inline] fn reduce_or(self) -> Self::Scalar { // Safety: `self` is an integer vector - unsafe { intrinsics::simd_reduce_or(self) } + unsafe { core::intrinsics::simd::simd_reduce_or(self) } } #[inline] fn reduce_xor(self) -> Self::Scalar { // Safety: `self` is an integer vector - unsafe { intrinsics::simd_reduce_xor(self) } + unsafe { core::intrinsics::simd::simd_reduce_xor(self) } } #[inline] fn swap_bytes(self) -> Self { // Safety: `self` is an integer vector - unsafe { intrinsics::simd_bswap(self) } + unsafe { core::intrinsics::simd::simd_bswap(self) } } #[inline] fn reverse_bits(self) -> Self { // Safety: `self` is an integer vector - unsafe { intrinsics::simd_bitreverse(self) } + unsafe { core::intrinsics::simd::simd_bitreverse(self) } } #[inline] diff --git a/crates/core_simd/src/simd/num/uint.rs b/crates/core_simd/src/simd/num/uint.rs index c955ee8fe8bd..53dd97f501c6 100644 --- a/crates/core_simd/src/simd/num/uint.rs +++ 
b/crates/core_simd/src/simd/num/uint.rs @@ -1,5 +1,5 @@ use super::sealed::Sealed; -use crate::simd::{intrinsics, LaneCount, Simd, SimdCast, SimdElement, SupportedLaneCount}; +use crate::simd::{LaneCount, Simd, SimdCast, SimdElement, SupportedLaneCount}; /// Operations on SIMD vectors of unsigned integers. pub trait SimdUint: Copy + Sealed { @@ -117,7 +117,7 @@ macro_rules! impl_trait { #[inline] fn cast(self) -> Self::Cast { // Safety: supported types are guaranteed by SimdCast - unsafe { intrinsics::simd_as(self) } + unsafe { core::intrinsics::simd::simd_as(self) } } #[inline] @@ -129,79 +129,79 @@ macro_rules! impl_trait { #[inline] fn saturating_add(self, second: Self) -> Self { // Safety: `self` is a vector - unsafe { intrinsics::simd_saturating_add(self, second) } + unsafe { core::intrinsics::simd::simd_saturating_add(self, second) } } #[inline] fn saturating_sub(self, second: Self) -> Self { // Safety: `self` is a vector - unsafe { intrinsics::simd_saturating_sub(self, second) } + unsafe { core::intrinsics::simd::simd_saturating_sub(self, second) } } #[inline] fn reduce_sum(self) -> Self::Scalar { // Safety: `self` is an integer vector - unsafe { intrinsics::simd_reduce_add_ordered(self, 0) } + unsafe { core::intrinsics::simd::simd_reduce_add_ordered(self, 0) } } #[inline] fn reduce_product(self) -> Self::Scalar { // Safety: `self` is an integer vector - unsafe { intrinsics::simd_reduce_mul_ordered(self, 1) } + unsafe { core::intrinsics::simd::simd_reduce_mul_ordered(self, 1) } } #[inline] fn reduce_max(self) -> Self::Scalar { // Safety: `self` is an integer vector - unsafe { intrinsics::simd_reduce_max(self) } + unsafe { core::intrinsics::simd::simd_reduce_max(self) } } #[inline] fn reduce_min(self) -> Self::Scalar { // Safety: `self` is an integer vector - unsafe { intrinsics::simd_reduce_min(self) } + unsafe { core::intrinsics::simd::simd_reduce_min(self) } } #[inline] fn reduce_and(self) -> Self::Scalar { // Safety: `self` is an integer vector - unsafe { 
intrinsics::simd_reduce_and(self) } + unsafe { core::intrinsics::simd::simd_reduce_and(self) } } #[inline] fn reduce_or(self) -> Self::Scalar { // Safety: `self` is an integer vector - unsafe { intrinsics::simd_reduce_or(self) } + unsafe { core::intrinsics::simd::simd_reduce_or(self) } } #[inline] fn reduce_xor(self) -> Self::Scalar { // Safety: `self` is an integer vector - unsafe { intrinsics::simd_reduce_xor(self) } + unsafe { core::intrinsics::simd::simd_reduce_xor(self) } } #[inline] fn swap_bytes(self) -> Self { // Safety: `self` is an integer vector - unsafe { intrinsics::simd_bswap(self) } + unsafe { core::intrinsics::simd::simd_bswap(self) } } #[inline] fn reverse_bits(self) -> Self { // Safety: `self` is an integer vector - unsafe { intrinsics::simd_bitreverse(self) } + unsafe { core::intrinsics::simd::simd_bitreverse(self) } } #[inline] fn leading_zeros(self) -> Self { // Safety: `self` is an integer vector - unsafe { intrinsics::simd_ctlz(self) } + unsafe { core::intrinsics::simd::simd_ctlz(self) } } #[inline] fn trailing_zeros(self) -> Self { // Safety: `self` is an integer vector - unsafe { intrinsics::simd_cttz(self) } + unsafe { core::intrinsics::simd::simd_cttz(self) } } #[inline] diff --git a/crates/core_simd/src/simd/ptr/const_ptr.rs b/crates/core_simd/src/simd/ptr/const_ptr.rs index 97fe3fb600df..e217d1c8c87c 100644 --- a/crates/core_simd/src/simd/ptr/const_ptr.rs +++ b/crates/core_simd/src/simd/ptr/const_ptr.rs @@ -1,7 +1,5 @@ use super::sealed::Sealed; -use crate::simd::{ - cmp::SimdPartialEq, intrinsics, num::SimdUint, LaneCount, Mask, Simd, SupportedLaneCount, -}; +use crate::simd::{cmp::SimdPartialEq, num::SimdUint, LaneCount, Mask, Simd, SupportedLaneCount}; /// Operations on SIMD vectors of constant pointers. 
pub trait SimdConstPtr: Copy + Sealed { @@ -103,13 +101,13 @@ where assert_eq!(size_of::<::Metadata>(), 0); // Safety: pointers can be cast - unsafe { intrinsics::simd_cast_ptr(self) } + unsafe { core::intrinsics::simd::simd_cast_ptr(self) } } #[inline] fn cast_mut(self) -> Self::MutPtr { // Safety: pointers can be cast - unsafe { intrinsics::simd_cast_ptr(self) } + unsafe { core::intrinsics::simd::simd_cast_ptr(self) } } #[inline] @@ -135,19 +133,19 @@ where #[inline] fn expose_addr(self) -> Self::Usize { // Safety: `self` is a pointer vector - unsafe { intrinsics::simd_expose_addr(self) } + unsafe { core::intrinsics::simd::simd_expose_addr(self) } } #[inline] fn from_exposed_addr(addr: Self::Usize) -> Self { // Safety: `self` is a pointer vector - unsafe { intrinsics::simd_from_exposed_addr(addr) } + unsafe { core::intrinsics::simd::simd_from_exposed_addr(addr) } } #[inline] fn wrapping_offset(self, count: Self::Isize) -> Self { // Safety: simd_arith_offset takes a vector of pointers and a vector of offsets - unsafe { intrinsics::simd_arith_offset(self, count) } + unsafe { core::intrinsics::simd::simd_arith_offset(self, count) } } #[inline] diff --git a/crates/core_simd/src/simd/ptr/mut_ptr.rs b/crates/core_simd/src/simd/ptr/mut_ptr.rs index e35633d0433d..5cb27af4fdeb 100644 --- a/crates/core_simd/src/simd/ptr/mut_ptr.rs +++ b/crates/core_simd/src/simd/ptr/mut_ptr.rs @@ -1,7 +1,5 @@ use super::sealed::Sealed; -use crate::simd::{ - cmp::SimdPartialEq, intrinsics, num::SimdUint, LaneCount, Mask, Simd, SupportedLaneCount, -}; +use crate::simd::{cmp::SimdPartialEq, num::SimdUint, LaneCount, Mask, Simd, SupportedLaneCount}; /// Operations on SIMD vectors of mutable pointers. 
pub trait SimdMutPtr: Copy + Sealed { @@ -100,13 +98,13 @@ where assert_eq!(size_of::<::Metadata>(), 0); // Safety: pointers can be cast - unsafe { intrinsics::simd_cast_ptr(self) } + unsafe { core::intrinsics::simd::simd_cast_ptr(self) } } #[inline] fn cast_const(self) -> Self::ConstPtr { // Safety: pointers can be cast - unsafe { intrinsics::simd_cast_ptr(self) } + unsafe { core::intrinsics::simd::simd_cast_ptr(self) } } #[inline] @@ -132,19 +130,19 @@ where #[inline] fn expose_addr(self) -> Self::Usize { // Safety: `self` is a pointer vector - unsafe { intrinsics::simd_expose_addr(self) } + unsafe { core::intrinsics::simd::simd_expose_addr(self) } } #[inline] fn from_exposed_addr(addr: Self::Usize) -> Self { // Safety: `self` is a pointer vector - unsafe { intrinsics::simd_from_exposed_addr(addr) } + unsafe { core::intrinsics::simd::simd_from_exposed_addr(addr) } } #[inline] fn wrapping_offset(self, count: Self::Isize) -> Self { // Safety: simd_arith_offset takes a vector of pointers and a vector of offsets - unsafe { intrinsics::simd_arith_offset(self, count) } + unsafe { core::intrinsics::simd::simd_arith_offset(self, count) } } #[inline] diff --git a/crates/core_simd/src/swizzle.rs b/crates/core_simd/src/swizzle.rs index ec8548d55745..71110bb28201 100644 --- a/crates/core_simd/src/swizzle.rs +++ b/crates/core_simd/src/swizzle.rs @@ -1,4 +1,3 @@ -use crate::simd::intrinsics; use crate::simd::{LaneCount, Mask, MaskElement, Simd, SimdElement, SupportedLaneCount}; /// Constructs a new SIMD vector by copying elements from selected elements in other vectors. @@ -88,7 +87,7 @@ pub trait Swizzle { { // Safety: `vector` is a vector, and the index is a const array of u32. unsafe { - intrinsics::simd_shuffle( + core::intrinsics::simd::simd_shuffle( vector, vector, const { @@ -124,7 +123,7 @@ pub trait Swizzle { { // Safety: `first` and `second` are vectors, and the index is a const array of u32. 
unsafe { - intrinsics::simd_shuffle( + core::intrinsics::simd::simd_shuffle( first, second, const { diff --git a/crates/core_simd/src/vector.rs b/crates/core_simd/src/vector.rs index 881406d0eac8..9e97a3161bb2 100644 --- a/crates/core_simd/src/vector.rs +++ b/crates/core_simd/src/vector.rs @@ -1,6 +1,5 @@ use crate::simd::{ cmp::SimdPartialOrd, - intrinsics, ptr::{SimdConstPtr, SimdMutPtr}, LaneCount, Mask, MaskElement, SupportedLaneCount, Swizzle, }; @@ -491,7 +490,7 @@ where or: Self, ) -> Self { // Safety: The caller is responsible for upholding all invariants - unsafe { intrinsics::simd_gather(or, source, enable.to_int()) } + unsafe { core::intrinsics::simd::simd_gather(or, source, enable.to_int()) } } /// Writes the values in a SIMD vector to potentially discontiguous indices in `slice`. @@ -650,7 +649,7 @@ where #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub unsafe fn scatter_select_ptr(self, dest: Simd<*mut T, N>, enable: Mask) { // Safety: The caller is responsible for upholding all invariants - unsafe { intrinsics::simd_scatter(self, dest, enable.to_int()) } + unsafe { core::intrinsics::simd::simd_scatter(self, dest, enable.to_int()) } } } @@ -692,7 +691,8 @@ where fn eq(&self, other: &Self) -> bool { // Safety: All SIMD vectors are SimdPartialEq, and the comparison produces a valid mask. let mask = unsafe { - let tfvec: Simd<::Mask, N> = intrinsics::simd_eq(*self, *other); + let tfvec: Simd<::Mask, N> = + core::intrinsics::simd::simd_eq(*self, *other); Mask::from_int_unchecked(tfvec) }; @@ -705,7 +705,8 @@ where fn ne(&self, other: &Self) -> bool { // Safety: All SIMD vectors are SimdPartialEq, and the comparison produces a valid mask. 
let mask = unsafe { - let tfvec: Simd<::Mask, N> = intrinsics::simd_ne(*self, *other); + let tfvec: Simd<::Mask, N> = + core::intrinsics::simd::simd_ne(*self, *other); Mask::from_int_unchecked(tfvec) }; From e7130ec093a761ee61e22a784ffac32aa1c49d65 Mon Sep 17 00:00:00 2001 From: Caleb Zulawski Date: Fri, 5 Jan 2024 17:59:47 -0500 Subject: [PATCH 006/127] Add exposed_provenance for rust-lang/rust#118487 --- crates/core_simd/tests/pointers.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/core_simd/tests/pointers.rs b/crates/core_simd/tests/pointers.rs index a90ff928cedc..b9f32d16e01d 100644 --- a/crates/core_simd/tests/pointers.rs +++ b/crates/core_simd/tests/pointers.rs @@ -1,4 +1,4 @@ -#![feature(portable_simd, strict_provenance)] +#![feature(portable_simd, strict_provenance, exposed_provenance)] use core_simd::simd::{ ptr::{SimdConstPtr, SimdMutPtr}, From 0380d982f9cf6cd49de3ea009f37bdc18e30e81f Mon Sep 17 00:00:00 2001 From: YouWei Zhao Date: Wed, 10 Jan 2024 21:55:41 +0800 Subject: [PATCH 007/127] highlight rustdoc --- .../code/language-configuration-rustdoc.json | 54 +++++++++++ editors/code/package.json | 28 ++++++ editors/code/rustdoc-inject.json | 93 +++++++++++++++++++ editors/code/rustdoc.json | 82 ++++++++++++++++ 4 files changed, 257 insertions(+) create mode 100644 editors/code/language-configuration-rustdoc.json create mode 100644 editors/code/rustdoc-inject.json create mode 100644 editors/code/rustdoc.json diff --git a/editors/code/language-configuration-rustdoc.json b/editors/code/language-configuration-rustdoc.json new file mode 100644 index 000000000000..d7cbf2a0d24c --- /dev/null +++ b/editors/code/language-configuration-rustdoc.json @@ -0,0 +1,54 @@ +{ + "comments": { + "blockComment": [ + "" + ] + }, + "brackets": [ + ["{", "}"], + ["[", "]"], + ["(", ")"] + ], + "colorizedBracketPairs": [ + ], + "autoClosingPairs": [ + { + "open": "{", + "close": "}" + }, + { + "open": "[", + "close": "]" + }, + { + "open": "(", + 
"close": ")" + }, + { + "open": "<", + "close": ">", + "notIn": [ + "string" + ] + } + ], + "surroundingPairs": [ + ["(", ")"], + ["[", "]"], + ["`", "`"], + ["_", "_"], + ["*", "*"], + ["{", "}"], + ["'", "'"], + ["\"", "\""] + ], + "folding": { + "offSide": true, + "markers": { + "start": "^\\s*", + "end": "^\\s*" + } + }, + "wordPattern": { "pattern": "(\\p{Alphabetic}|\\p{Number}|\\p{Nonspacing_Mark})(((\\p{Alphabetic}|\\p{Number}|\\p{Nonspacing_Mark})|[_])?(\\p{Alphabetic}|\\p{Number}|\\p{Nonspacing_Mark}))*", "flags": "ug" }, +} diff --git a/editors/code/package.json b/editors/code/package.json index 8307f6833e6f..351370a09673 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -1729,6 +1729,13 @@ "rs" ], "configuration": "language-configuration.json" + }, + { + "id": "rustdoc", + "extensions": [ + ".rustdoc" + ], + "configuration": "./language-configuration-rustdoc.json" } ], "grammars": [ @@ -1736,6 +1743,27 @@ "language": "ra_syntax_tree", "scopeName": "source.ra_syntax_tree", "path": "ra_syntax_tree.tmGrammar.json" + }, + { + "language": "rustdoc", + "scopeName": "text.html.markdown.rustdoc", + "path": "rustdoc.json", + "embeddedLanguages": { + "meta.embedded.block.html": "html", + "meta.embedded.block.markdown": "markdown", + "meta.embedded.block.rust": "rust" + } + }, + { + "injectTo": [ + "source.rust" + ], + "scopeName": "comment.markdown-cell-inject.rustdoc", + "path": "rustdoc-inject.json", + "embeddedLanguages": { + "meta.embedded.block.rustdoc": "rustdoc", + "meta.embedded.block.rust": "rust" + } } ], "problemMatchers": [ diff --git a/editors/code/rustdoc-inject.json b/editors/code/rustdoc-inject.json new file mode 100644 index 000000000000..45ca2fa96bc2 --- /dev/null +++ b/editors/code/rustdoc-inject.json @@ -0,0 +1,93 @@ +{ + "injectionSelector": "L:source.rust -string -comment -meta.embedded.block.rustdoc.md", + "patterns": [ + { + "include": "#triple-slash" + }, + { + "include": "#double-slash-exclamation" + }, + { + 
"include": "#slash-start-exclamation" + }, + { + "include": "#slash-double-start" + } + ], + "repository": { + "triple-slash": { + "begin": "(^|\\G)\\s*(///) ?", + "captures": { + "2": { + "name": "comment.line.double-slash.rust" + } + }, + "name": "comment.quote_code.triple-slash.rust", + "contentName": "meta.embedded.block.rustdoc", + "patterns": [ + { + "include": "text.html.markdown.rustdoc" + } + ], + "while": "(^|\\G)\\s*(///) ?" + }, + "double-slash-exclamation": { + "begin": "(^|\\G)\\s*(//!) ?", + "captures": { + "2": { + "name": "comment.line.double-slash.rust" + } + }, + "name": "comment.quote_code.double-slash-exclamation.rust", + "contentName": "meta.embedded.block.rustdoc", + "patterns": [ + { + "include": "text.html.markdown.rustdoc" + } + ], + "while": "(^|\\G)\\s*(//!) ?" + }, + "slash-start-exclamation": { + "begin": "(^)(/\\*!) ?$", + "captures": { + "2": { + "name": "comment.block.rust" + } + }, + "name": "comment.quote_code.slash-start-exclamation.rust", + "contentName": "meta.embedded.block.rustdoc", + "patterns": [ + { + "include": "text.html.markdown.rustdoc" + } + ], + "end": "( ?)(\\*/)" + }, + "slash-double-start": { + "name": "comment.quote_code.slash-double-start-quote-star.rust", + "begin": "(?:^)\\s*/\\*\\* ?$", + "end": "\\*/", + "patterns": [ + { + "include": "#quote-star" + } + ] + }, + "quote-star": { + "begin": "(^|\\G)\\s*(\\*(?!/)) ?", + "captures": { + "2": { + "name": "comment.punctuation.definition.quote_code.slash-star.MR" + } + }, + "contentName": "meta.embedded.block.rustdoc", + "patterns": [ + { + "include": "text.html.markdown.rustdoc" + } + ], + "while": "(^|\\G)\\s*(\\*(?!/)) ?" 
+ } + }, + "scopeName": "comment.markdown-cell-inject.rustdoc" +} \ No newline at end of file diff --git a/editors/code/rustdoc.json b/editors/code/rustdoc.json new file mode 100644 index 000000000000..230d2de1cfac --- /dev/null +++ b/editors/code/rustdoc.json @@ -0,0 +1,82 @@ +{ + "name": "rustdoc", + "patterns": [ + { + "include": "#fenced_code_block" + }, + { + "include": "#markdown" + } + ], + "scopeName": "text.html.markdown.rustdoc", + "repository": { + "markdown":{ + "patterns": [ + { + "include": "text.html.markdown" + } + ] + }, + "fenced_code_block":{ + "patterns": [ + { + "include": "#fenced_code_block_rust" + }, + { + "include": "#fenced_code_block_unknown" + } + ] + }, + "fenced_code_block_rust": { + "begin": "(^|\\G)(\\s*)(`{3,}|~{3,})\\s*(?i:(rust|not run|not_run)?((\\s+|:|,|\\{|\\?)[^`~]*)?$)", + "name": "markup.fenced_code.block.markdown", + "end": "(^|\\G)(\\2|\\s{0,3})(\\3)\\s*$", + "beginCaptures": { + "3": { + "name": "punctuation.definition.markdown" + }, + "4": { + "name": "fenced_code.block.language.markdown" + }, + "5": { + "name": "fenced_code.block.language.attributes.markdown" + } + }, + "endCaptures": { + "3": { + "name": "punctuation.definition.markdown" + } + }, + "patterns": [ + { + "begin": "(^|\\G)(\\s*)(.*)", + "while": "(^|\\G)(?!\\s*([`~]{3,})\\s*$)", + "contentName": "meta.embedded.block.rust", + "patterns": [ + { + "include": "source.rust" + } + ] + } + ] + }, + "fenced_code_block_unknown": { + "begin": "(^|\\G)(\\s*)(`{3,}|~{3,})\\s*(?=([^`~]+)?$)", + "beginCaptures": { + "3": { + "name": "punctuation.definition.markdown" + }, + "4": { + "name": "fenced_code.block.language" + } + }, + "end": "(^|\\G)(\\2|\\s{0,3})(\\3)\\s*$", + "endCaptures": { + "3": { + "name": "punctuation.definition.markdown" + } + }, + "name": "markup.fenced_code.block.markdown" + } + } +} \ No newline at end of file From 1040c0091e40e1e2e9131ceda62ad885dbbcf55f Mon Sep 17 00:00:00 2001 From: YouWei Zhao Date: Tue, 16 Jan 2024 21:20:22 +0800 Subject: 
[PATCH 008/127] reformat --- .../code/language-configuration-rustdoc.json | 30 ++++--------------- editors/code/rustdoc.json | 4 +-- 2 files changed, 8 insertions(+), 26 deletions(-) diff --git a/editors/code/language-configuration-rustdoc.json b/editors/code/language-configuration-rustdoc.json index d7cbf2a0d24c..a1d2a90aeacb 100644 --- a/editors/code/language-configuration-rustdoc.json +++ b/editors/code/language-configuration-rustdoc.json @@ -1,9 +1,6 @@ { "comments": { - "blockComment": [ - "" - ] + "blockComment": [""] }, "brackets": [ ["{", "}"], @@ -13,25 +10,10 @@ "colorizedBracketPairs": [ ], "autoClosingPairs": [ - { - "open": "{", - "close": "}" - }, - { - "open": "[", - "close": "]" - }, - { - "open": "(", - "close": ")" - }, - { - "open": "<", - "close": ">", - "notIn": [ - "string" - ] - } + { "open": "{", "close": "}" }, + { "open": "[", "close": "]" }, + { "open": "(", "close": ")" }, + { "open": "<", "close": ">", "notIn": [ "string" ] } ], "surroundingPairs": [ ["(", ")"], @@ -50,5 +32,5 @@ "end": "^\\s*" } }, - "wordPattern": { "pattern": "(\\p{Alphabetic}|\\p{Number}|\\p{Nonspacing_Mark})(((\\p{Alphabetic}|\\p{Number}|\\p{Nonspacing_Mark})|[_])?(\\p{Alphabetic}|\\p{Number}|\\p{Nonspacing_Mark}))*", "flags": "ug" }, + "wordPattern": { "pattern": "(\\p{Alphabetic}|\\p{Number}|\\p{Nonspacing_Mark})(((\\p{Alphabetic}|\\p{Number}|\\p{Nonspacing_Mark})|[_])?(\\p{Alphabetic}|\\p{Number}|\\p{Nonspacing_Mark}))*", "flags": "ug" } } diff --git a/editors/code/rustdoc.json b/editors/code/rustdoc.json index 230d2de1cfac..a149af47efdc 100644 --- a/editors/code/rustdoc.json +++ b/editors/code/rustdoc.json @@ -10,14 +10,14 @@ ], "scopeName": "text.html.markdown.rustdoc", "repository": { - "markdown":{ + "markdown": { "patterns": [ { "include": "text.html.markdown" } ] }, - "fenced_code_block":{ + "fenced_code_block": { "patterns": [ { "include": "#fenced_code_block_rust" From 32a140944f60671d60a2959ee991274ea8c6d145 Mon Sep 17 00:00:00 2001 From: YouWei Zhao 
Date: Tue, 16 Jan 2024 23:46:59 +0800 Subject: [PATCH 009/127] add files to .vscodeignore --- editors/code/.vscodeignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/editors/code/.vscodeignore b/editors/code/.vscodeignore index 09dc27056b37..5c48205694fe 100644 --- a/editors/code/.vscodeignore +++ b/editors/code/.vscodeignore @@ -12,3 +12,6 @@ !ra_syntax_tree.tmGrammar.json !server !README.md +!language-configuration-rustdoc.json +!rustdoc-inject.json +!rustdoc.json From 895bcce1baedcd8294c3997770df6cde8ee33f85 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=C3=A1s=20Vallotton?= Date: Fri, 15 Dec 2023 10:20:33 -0300 Subject: [PATCH 010/127] fix: change issue number of waker_getters from #87021 to #96992. --- crates/ide-db/src/generated/lints.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/ide-db/src/generated/lints.rs b/crates/ide-db/src/generated/lints.rs index 1cb6ff8627a2..c3b806dec4dd 100644 --- a/crates/ide-db/src/generated/lints.rs +++ b/crates/ide-db/src/generated/lints.rs @@ -9630,9 +9630,9 @@ The tracking issue for this feature is: [#81944] label: "waker_getters", description: r##"# `waker_getters` -The tracking issue for this feature is: [#87021] +The tracking issue for this feature is: [#96992] -[#87021]: https://github.com/rust-lang/rust/issues/87021 +[#96992]: https://github.com/rust-lang/rust/issues/96992 ------------------------ "##, From 51a7396ad3d78d9326ee1537b9ff29ab3919556f Mon Sep 17 00:00:00 2001 From: Jan Verbeek Date: Sun, 3 Dec 2023 12:25:11 +0100 Subject: [PATCH 011/127] Move `OsStr::slice_encoded_bytes` validation to platform modules On Windows and UEFI this improves performance and error messaging. On other platforms we optimize the fast path a bit more. This also prepares for later relaxing the checks on certain platforms. 
--- library/std/src/ffi/mod.rs | 7 +++ library/std/src/ffi/os_str.rs | 43 +++------------ library/std/src/ffi/os_str/tests.rs | 68 +++++++++++++++++++++--- library/std/src/sys/os_str/bytes.rs | 43 +++++++++++++++ library/std/src/sys/os_str/wtf8.rs | 7 ++- library/std/src/sys_common/wtf8.rs | 36 +++++++++++-- library/std/src/sys_common/wtf8/tests.rs | 62 +++++++++++++++++++++ 7 files changed, 219 insertions(+), 47 deletions(-) diff --git a/library/std/src/ffi/mod.rs b/library/std/src/ffi/mod.rs index 97e78d177867..818571ddaaa1 100644 --- a/library/std/src/ffi/mod.rs +++ b/library/std/src/ffi/mod.rs @@ -127,6 +127,11 @@ //! trait, which provides a [`from_wide`] method to convert a native Windows //! string (without the terminating nul character) to an [`OsString`]. //! +//! ## Other platforms +//! +//! Many other platforms provide their own extension traits in a +//! `std::os::*::ffi` module. +//! //! ## On all platforms //! //! On all platforms, [`OsStr`] consists of a sequence of bytes that is encoded as a superset of @@ -135,6 +140,8 @@ //! For limited, inexpensive conversions from and to bytes, see [`OsStr::as_encoded_bytes`] and //! [`OsStr::from_encoded_bytes_unchecked`]. //! +//! For basic string processing, see [`OsStr::slice_encoded_bytes`]. +//! //! [Unicode scalar value]: https://www.unicode.org/glossary/#unicode_scalar_value //! [Unicode code point]: https://www.unicode.org/glossary/#code_point //! 
[`env::set_var()`]: crate::env::set_var "env::set_var" diff --git a/library/std/src/ffi/os_str.rs b/library/std/src/ffi/os_str.rs index 81973182148e..28747ad8f340 100644 --- a/library/std/src/ffi/os_str.rs +++ b/library/std/src/ffi/os_str.rs @@ -9,7 +9,7 @@ use crate::hash::{Hash, Hasher}; use crate::ops::{self, Range}; use crate::rc::Rc; use crate::slice; -use crate::str::{from_utf8 as str_from_utf8, FromStr}; +use crate::str::FromStr; use crate::sync::Arc; use crate::sys::os_str::{Buf, Slice}; @@ -997,42 +997,15 @@ impl OsStr { /// ``` #[unstable(feature = "os_str_slice", issue = "118485")] pub fn slice_encoded_bytes>(&self, range: R) -> &Self { - #[track_caller] - fn check_valid_boundary(bytes: &[u8], index: usize) { - if index == 0 || index == bytes.len() { - return; - } - - // Fast path - if bytes[index - 1].is_ascii() || bytes[index].is_ascii() { - return; - } - - let (before, after) = bytes.split_at(index); - - // UTF-8 takes at most 4 bytes per codepoint, so we don't - // need to check more than that. - let after = after.get(..4).unwrap_or(after); - match str_from_utf8(after) { - Ok(_) => return, - Err(err) if err.valid_up_to() != 0 => return, - Err(_) => (), - } - - for len in 2..=4.min(index) { - let before = &before[index - len..]; - if str_from_utf8(before).is_ok() { - return; - } - } - - panic!("byte index {index} is not an OsStr boundary"); - } - let encoded_bytes = self.as_encoded_bytes(); let Range { start, end } = slice::range(range, ..encoded_bytes.len()); - check_valid_boundary(encoded_bytes, start); - check_valid_boundary(encoded_bytes, end); + + // `check_public_boundary` should panic if the index does not lie on an + // `OsStr` boundary as described above. It's possible to do this in an + // encoding-agnostic way, but details of the internal encoding might + // permit a more efficient implementation. 
+ self.inner.check_public_boundary(start); + self.inner.check_public_boundary(end); // SAFETY: `slice::range` ensures that `start` and `end` are valid let slice = unsafe { encoded_bytes.get_unchecked(start..end) }; diff --git a/library/std/src/ffi/os_str/tests.rs b/library/std/src/ffi/os_str/tests.rs index 60cde376d326..b020e05eaab2 100644 --- a/library/std/src/ffi/os_str/tests.rs +++ b/library/std/src/ffi/os_str/tests.rs @@ -194,15 +194,65 @@ fn slice_encoded_bytes() { } #[test] -#[should_panic(expected = "byte index 2 is not an OsStr boundary")] +#[should_panic] +fn slice_out_of_bounds() { + let crab = OsStr::new("🦀"); + let _ = crab.slice_encoded_bytes(..5); +} + +#[test] +#[should_panic] fn slice_mid_char() { let crab = OsStr::new("🦀"); let _ = crab.slice_encoded_bytes(..2); } +#[cfg(unix)] +#[test] +#[should_panic(expected = "byte index 1 is not an OsStr boundary")] +fn slice_invalid_data() { + use crate::os::unix::ffi::OsStrExt; + + let os_string = OsStr::from_bytes(b"\xFF\xFF"); + let _ = os_string.slice_encoded_bytes(1..); +} + +#[cfg(unix)] +#[test] +#[should_panic(expected = "byte index 1 is not an OsStr boundary")] +fn slice_partial_utf8() { + use crate::os::unix::ffi::{OsStrExt, OsStringExt}; + + let part_crab = OsStr::from_bytes(&"🦀".as_bytes()[..3]); + let mut os_string = OsString::from_vec(vec![0xFF]); + os_string.push(part_crab); + let _ = os_string.slice_encoded_bytes(1..); +} + +#[cfg(unix)] +#[test] +fn slice_invalid_edge() { + use crate::os::unix::ffi::{OsStrExt, OsStringExt}; + + let os_string = OsStr::from_bytes(b"a\xFFa"); + assert_eq!(os_string.slice_encoded_bytes(..1), "a"); + assert_eq!(os_string.slice_encoded_bytes(1..), OsStr::from_bytes(b"\xFFa")); + assert_eq!(os_string.slice_encoded_bytes(..2), OsStr::from_bytes(b"a\xFF")); + assert_eq!(os_string.slice_encoded_bytes(2..), "a"); + + let os_string = OsStr::from_bytes(&"abc🦀".as_bytes()[..6]); + assert_eq!(os_string.slice_encoded_bytes(..3), "abc"); + 
assert_eq!(os_string.slice_encoded_bytes(3..), OsStr::from_bytes(b"\xF0\x9F\xA6")); + + let mut os_string = OsString::from_vec(vec![0xFF]); + os_string.push("🦀"); + assert_eq!(os_string.slice_encoded_bytes(..1), OsStr::from_bytes(b"\xFF")); + assert_eq!(os_string.slice_encoded_bytes(1..), "🦀"); +} + #[cfg(windows)] #[test] -#[should_panic(expected = "byte index 3 is not an OsStr boundary")] +#[should_panic(expected = "byte index 3 lies between surrogate codepoints")] fn slice_between_surrogates() { use crate::os::windows::ffi::OsStringExt; @@ -216,10 +266,14 @@ fn slice_between_surrogates() { fn slice_surrogate_edge() { use crate::os::windows::ffi::OsStringExt; - let os_string = OsString::from_wide(&[0xD800]); - let mut with_crab = os_string.clone(); - with_crab.push("🦀"); + let surrogate = OsString::from_wide(&[0xD800]); + let mut pre_crab = surrogate.clone(); + pre_crab.push("🦀"); + assert_eq!(pre_crab.slice_encoded_bytes(..3), surrogate); + assert_eq!(pre_crab.slice_encoded_bytes(3..), "🦀"); - assert_eq!(with_crab.slice_encoded_bytes(..3), os_string); - assert_eq!(with_crab.slice_encoded_bytes(3..), "🦀"); + let mut post_crab = OsString::from("🦀"); + post_crab.push(&surrogate); + assert_eq!(post_crab.slice_encoded_bytes(..4), "🦀"); + assert_eq!(post_crab.slice_encoded_bytes(4..), surrogate); } diff --git a/library/std/src/sys/os_str/bytes.rs b/library/std/src/sys/os_str/bytes.rs index 3a75ce9ebb78..4ca3f1cd1853 100644 --- a/library/std/src/sys/os_str/bytes.rs +++ b/library/std/src/sys/os_str/bytes.rs @@ -211,6 +211,49 @@ impl Slice { unsafe { mem::transmute(s) } } + #[track_caller] + #[inline] + pub fn check_public_boundary(&self, index: usize) { + if index == 0 || index == self.inner.len() { + return; + } + if index < self.inner.len() + && (self.inner[index - 1].is_ascii() || self.inner[index].is_ascii()) + { + return; + } + + slow_path(&self.inner, index); + + /// We're betting that typical splits will involve an ASCII character. 
+ /// + /// Putting the expensive checks in a separate function generates notably + /// better assembly. + #[track_caller] + #[inline(never)] + fn slow_path(bytes: &[u8], index: usize) { + let (before, after) = bytes.split_at(index); + + // UTF-8 takes at most 4 bytes per codepoint, so we don't + // need to check more than that. + let after = after.get(..4).unwrap_or(after); + match str::from_utf8(after) { + Ok(_) => return, + Err(err) if err.valid_up_to() != 0 => return, + Err(_) => (), + } + + for len in 2..=4.min(index) { + let before = &before[index - len..]; + if str::from_utf8(before).is_ok() { + return; + } + } + + panic!("byte index {index} is not an OsStr boundary"); + } + } + #[inline] pub fn from_str(s: &str) -> &Slice { unsafe { Slice::from_encoded_bytes_unchecked(s.as_bytes()) } diff --git a/library/std/src/sys/os_str/wtf8.rs b/library/std/src/sys/os_str/wtf8.rs index 237854fac4e2..352bd7359033 100644 --- a/library/std/src/sys/os_str/wtf8.rs +++ b/library/std/src/sys/os_str/wtf8.rs @@ -6,7 +6,7 @@ use crate::fmt; use crate::mem; use crate::rc::Rc; use crate::sync::Arc; -use crate::sys_common::wtf8::{Wtf8, Wtf8Buf}; +use crate::sys_common::wtf8::{check_utf8_boundary, Wtf8, Wtf8Buf}; use crate::sys_common::{AsInner, FromInner, IntoInner}; #[derive(Clone, Hash)] @@ -171,6 +171,11 @@ impl Slice { mem::transmute(Wtf8::from_bytes_unchecked(s)) } + #[track_caller] + pub fn check_public_boundary(&self, index: usize) { + check_utf8_boundary(&self.inner, index); + } + #[inline] pub fn from_str(s: &str) -> &Slice { unsafe { mem::transmute(Wtf8::from_str(s)) } diff --git a/library/std/src/sys_common/wtf8.rs b/library/std/src/sys_common/wtf8.rs index 67db5ebd89cf..2dbd19d71719 100644 --- a/library/std/src/sys_common/wtf8.rs +++ b/library/std/src/sys_common/wtf8.rs @@ -885,15 +885,43 @@ fn decode_surrogate_pair(lead: u16, trail: u16) -> char { unsafe { char::from_u32_unchecked(code_point) } } -/// Copied from core::str::StrPrelude::is_char_boundary +/// Copied from 
str::is_char_boundary #[inline] pub fn is_code_point_boundary(slice: &Wtf8, index: usize) -> bool { - if index == slice.len() { + if index == 0 { return true; } match slice.bytes.get(index) { - None => false, - Some(&b) => b < 128 || b >= 192, + None => index == slice.len(), + Some(&b) => (b as i8) >= -0x40, + } +} + +/// Verify that `index` is at the edge of either a valid UTF-8 codepoint +/// (i.e. a codepoint that's not a surrogate) or of the whole string. +/// +/// These are the cases currently permitted by `OsStr::slice_encoded_bytes`. +/// Splitting between surrogates is valid as far as WTF-8 is concerned, but +/// we do not permit it in the public API because WTF-8 is considered an +/// implementation detail. +#[track_caller] +#[inline] +pub fn check_utf8_boundary(slice: &Wtf8, index: usize) { + if index == 0 { + return; + } + match slice.bytes.get(index) { + Some(0xED) => (), // Might be a surrogate + Some(&b) if (b as i8) >= -0x40 => return, + Some(_) => panic!("byte index {index} is not a codepoint boundary"), + None if index == slice.len() => return, + None => panic!("byte index {index} is out of bounds"), + } + if slice.bytes[index + 1] >= 0xA0 { + // There's a surrogate after index. Now check before index. 
+ if index >= 3 && slice.bytes[index - 3] == 0xED && slice.bytes[index - 2] >= 0xA0 { + panic!("byte index {index} lies between surrogate codepoints"); + } } } diff --git a/library/std/src/sys_common/wtf8/tests.rs b/library/std/src/sys_common/wtf8/tests.rs index 28a426648e50..6a1cc41a8fb0 100644 --- a/library/std/src/sys_common/wtf8/tests.rs +++ b/library/std/src/sys_common/wtf8/tests.rs @@ -663,3 +663,65 @@ fn wtf8_to_owned() { assert_eq!(string.bytes, b"\xED\xA0\x80"); assert!(!string.is_known_utf8); } + +#[test] +fn wtf8_valid_utf8_boundaries() { + let mut string = Wtf8Buf::from_str("aé 💩"); + string.push(CodePoint::from_u32(0xD800).unwrap()); + string.push(CodePoint::from_u32(0xD800).unwrap()); + check_utf8_boundary(&string, 0); + check_utf8_boundary(&string, 1); + check_utf8_boundary(&string, 3); + check_utf8_boundary(&string, 4); + check_utf8_boundary(&string, 8); + check_utf8_boundary(&string, 14); + assert_eq!(string.len(), 14); + + string.push_char('a'); + check_utf8_boundary(&string, 14); + check_utf8_boundary(&string, 15); + + let mut string = Wtf8Buf::from_str("a"); + string.push(CodePoint::from_u32(0xD800).unwrap()); + check_utf8_boundary(&string, 1); + + let mut string = Wtf8Buf::from_str("\u{D7FF}"); + string.push(CodePoint::from_u32(0xD800).unwrap()); + check_utf8_boundary(&string, 3); + + let mut string = Wtf8Buf::new(); + string.push(CodePoint::from_u32(0xD800).unwrap()); + string.push_char('\u{D7FF}'); + check_utf8_boundary(&string, 3); +} + +#[test] +#[should_panic(expected = "byte index 4 is out of bounds")] +fn wtf8_utf8_boundary_out_of_bounds() { + let string = Wtf8::from_str("aé"); + check_utf8_boundary(&string, 4); +} + +#[test] +#[should_panic(expected = "byte index 1 is not a codepoint boundary")] +fn wtf8_utf8_boundary_inside_codepoint() { + let string = Wtf8::from_str("é"); + check_utf8_boundary(&string, 1); +} + +#[test] +#[should_panic(expected = "byte index 1 is not a codepoint boundary")] +fn wtf8_utf8_boundary_inside_surrogate() { 
+ let mut string = Wtf8Buf::new(); + string.push(CodePoint::from_u32(0xD800).unwrap()); + check_utf8_boundary(&string, 1); +} + +#[test] +#[should_panic(expected = "byte index 3 lies between surrogate codepoints")] +fn wtf8_utf8_boundary_between_surrogates() { + let mut string = Wtf8Buf::new(); + string.push(CodePoint::from_u32(0xD800).unwrap()); + string.push(CodePoint::from_u32(0xD800).unwrap()); + check_utf8_boundary(&string, 3); +} From d907a6b8ed33f0cc332992ef330e099c9474faba Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Wed, 31 Jan 2024 14:34:29 +0100 Subject: [PATCH 012/127] rustdoc: Correctly handle long crate names on mobile --- src/librustdoc/html/static/css/rustdoc.css | 7 +------ src/librustdoc/html/static/js/main.js | 2 +- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/src/librustdoc/html/static/css/rustdoc.css b/src/librustdoc/html/static/css/rustdoc.css index 9c593aa85d98..899c4383986d 100644 --- a/src/librustdoc/html/static/css/rustdoc.css +++ b/src/librustdoc/html/static/css/rustdoc.css @@ -1943,13 +1943,8 @@ in src-script.js and main.js pixels to avoid overflowing the topbar when the user sets a bigger font size. 
*/ font-size: 24px; - } - - .mobile-topbar h2 a { - display: block; - text-overflow: ellipsis; - overflow: hidden; white-space: nowrap; + text-overflow: ellipsis; } .mobile-topbar .logo-container > img { diff --git a/src/librustdoc/html/static/js/main.js b/src/librustdoc/html/static/js/main.js index b46701b55ea7..b26efb75ff65 100644 --- a/src/librustdoc/html/static/js/main.js +++ b/src/librustdoc/html/static/js/main.js @@ -45,7 +45,7 @@ function setMobileTopbar() { const mobileTitle = document.createElement("h2"); mobileTitle.className = "location"; if (hasClass(document.querySelector(".rustdoc"), "crate")) { - mobileTitle.innerText = `Crate ${window.currentCrate}`; + mobileTitle.innerHTML = `Crate ${window.currentCrate}`; } else if (locationTitle) { mobileTitle.innerHTML = locationTitle.innerHTML; } From 5d29f5a6de0cac97c69fd33d81e9063187102741 Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Wed, 31 Jan 2024 14:35:03 +0100 Subject: [PATCH 013/127] Add regression test for #120471 to ensure that long crate name are handled as expected on mobile --- tests/rustdoc-gui/mobile-crate-name.goml | 22 +++++++++++++++++++ .../rustdoc-gui/type-declation-overflow.goml | 4 ++-- 2 files changed, 24 insertions(+), 2 deletions(-) create mode 100644 tests/rustdoc-gui/mobile-crate-name.goml diff --git a/tests/rustdoc-gui/mobile-crate-name.goml b/tests/rustdoc-gui/mobile-crate-name.goml new file mode 100644 index 000000000000..a0c96eec8a5a --- /dev/null +++ b/tests/rustdoc-gui/mobile-crate-name.goml @@ -0,0 +1,22 @@ +// Checks that if the crate name is too long on mobile, it will not grow and overflow its parent +// (thanks to text overflow ellipsis). + +go-to: "file://" + |DOC_PATH| + "/test_docs/index.html" +// First we change the title to make it big. +set-window-size: (350, 800) +// We ensure that the "format" of the title is the same as the one we'll use. +assert-text: (".mobile-topbar .location a", "test_docs") +// We store the height we know is correct. 
+store-property: (".mobile-topbar .location", {"offsetHeight": height}) +// We change the crate name to something longer. +set-text: (".mobile-topbar .location a", "cargo_packager_resource_resolver") +// And we check that the size remained the same. +assert-property: (".mobile-topbar .location", {"offsetHeight": |height|}) + +// Now we check if it works for the non-crate pages as well. +go-to: "file://" + |DOC_PATH| + "/test_docs/struct.Foo.html" +// We store the height we know is correct. +store-property: (".mobile-topbar .location", {"offsetHeight": height}) +set-text: (".mobile-topbar .location a", "Something_incredibly_long_because") +// And we check that the size remained the same. +assert-property: (".mobile-topbar .location", {"offsetHeight": |height|}) diff --git a/tests/rustdoc-gui/type-declation-overflow.goml b/tests/rustdoc-gui/type-declation-overflow.goml index 5780f5c88f82..a97cc98897ae 100644 --- a/tests/rustdoc-gui/type-declation-overflow.goml +++ b/tests/rustdoc-gui/type-declation-overflow.goml @@ -39,8 +39,8 @@ assert-property: ("pre.item-decl", {"scrollWidth": "950"}) set-window-size: (600, 600) go-to: "file://" + |DOC_PATH| + "/lib2/too_long/struct.SuperIncrediblyLongLongLongLongLongLongLongGigaGigaGigaMegaLongLongLongStructName.html" // It shouldn't have an overflow in the topbar either. -store-property: (".mobile-topbar h2", {"scrollWidth": scrollWidth}) -assert-property: (".mobile-topbar h2", {"clientWidth": |scrollWidth|}) +store-property: (".mobile-topbar", {"scrollWidth": scrollWidth}) +assert-property: (".mobile-topbar", {"clientWidth": |scrollWidth|}) assert-css: (".mobile-topbar h2", {"overflow-x": "hidden"}) // Check wrapping for top main-heading h1 and out-of-band. 
From 4e36929e1728a62d47b32946c0d236d9395f9dba Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Fri, 9 Feb 2024 21:56:19 +0100 Subject: [PATCH 014/127] re-add const_intrinsic_copy feature --- crates/core_simd/src/lib.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/core_simd/src/lib.rs b/crates/core_simd/src/lib.rs index faec64c6344b..9180e5c7dfda 100644 --- a/crates/core_simd/src/lib.rs +++ b/crates/core_simd/src/lib.rs @@ -1,6 +1,7 @@ #![no_std] #![feature( core_intrinsics, + const_intrinsic_copy, const_refs_to_cell, const_maybe_uninit_as_mut_ptr, const_mut_refs, From 047ba0a280a5d23f52c717af053b3171b0f78de8 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Fri, 9 Feb 2024 22:04:03 +0100 Subject: [PATCH 015/127] stdsimd feature got split up --- crates/core_simd/src/lib.rs | 19 ++++++++++++++++++- crates/test_helpers/src/lib.rs | 6 +++++- 2 files changed, 23 insertions(+), 2 deletions(-) diff --git a/crates/core_simd/src/lib.rs b/crates/core_simd/src/lib.rs index 9180e5c7dfda..dfb36d8c5138 100644 --- a/crates/core_simd/src/lib.rs +++ b/crates/core_simd/src/lib.rs @@ -12,10 +12,27 @@ repr_simd, simd_ffi, staged_api, - stdsimd, strict_provenance, ptr_metadata )] +#![cfg_attr( + all( + any(target_arch = "aarch64", target_arch = "arm",), + any( + all(target_feature = "v6", not(target_feature = "mclass")), + all(target_feature = "mclass", target_feature = "dsp"), + ) + ), + feature(stdarch_arm_dsp) +)] +#![cfg_attr( + all(target_arch = "arm", target_feature = "v7"), + feature(stdarch_arm_neon_intrinsics) +)] +#![cfg_attr( + any(target_arch = "powerpc", target_arch = "powerpc64"), + feature(stdarch_powerpc) +)] #![warn(missing_docs, clippy::missing_inline_in_public_items)] // basically all items, really #![deny(unsafe_op_in_unsafe_fn, clippy::undocumented_unsafe_blocks)] #![allow(internal_features)] diff --git a/crates/test_helpers/src/lib.rs b/crates/test_helpers/src/lib.rs index b80c745aaf2f..51b860a86356 100644 --- a/crates/test_helpers/src/lib.rs +++ 
b/crates/test_helpers/src/lib.rs @@ -1,4 +1,8 @@ -#![feature(stdsimd, powerpc_target_feature)] +#![feature(powerpc_target_feature)] +#![cfg_attr( + any(target_arch = "powerpc", target_arch = "powerpc64"), + feature(stdarch_powerpc) +)] pub mod array; From e41ab350d660ba135dd43c914aacf454f7839d2c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sun, 11 Feb 2024 08:40:19 +0200 Subject: [PATCH 016/127] Merge commit 'ddf105b646c6749a2de2451c9a499a354eec79c2' into sync-from-ra --- .github/workflows/ci.yaml | 6 +- Cargo.lock | 228 +++-- Cargo.toml | 29 +- clippy.toml | 5 + crates/base-db/Cargo.toml | 2 +- crates/base-db/src/input.rs | 2 +- crates/flycheck/src/lib.rs | 32 +- crates/hir-def/src/attr.rs | 54 +- crates/hir-def/src/attr/builtin.rs | 3 - crates/hir-def/src/body/lower.rs | 5 +- crates/hir-def/src/body/pretty.rs | 6 +- crates/hir-def/src/child_by_source.rs | 175 ++-- crates/hir-def/src/data/adt.rs | 48 +- crates/hir-def/src/find_path.rs | 2 + crates/hir-def/src/generics.rs | 191 +--- crates/hir-def/src/import_map.rs | 18 +- crates/hir-def/src/item_scope.rs | 10 +- crates/hir-def/src/item_tree.rs | 78 +- crates/hir-def/src/item_tree/lower.rs | 6 +- crates/hir-def/src/item_tree/pretty.rs | 4 +- crates/hir-def/src/lib.rs | 834 +++++++++++------- .../builtin_derive_macro.rs | 2 +- .../hir-def/src/macro_expansion_tests/mbe.rs | 10 +- .../hir-def/src/macro_expansion_tests/mod.rs | 2 +- crates/hir-def/src/nameres/collector.rs | 6 +- crates/hir-def/src/resolver.rs | 41 +- crates/hir-def/src/src.rs | 193 ++-- crates/hir-def/src/trace.rs | 2 + crates/hir-def/src/visibility.rs | 120 +-- crates/hir-expand/src/ast_id_map.rs | 2 +- crates/hir-expand/src/attrs.rs | 2 +- crates/hir-expand/src/builtin_attr_macro.rs | 5 +- crates/hir-expand/src/builtin_derive_macro.rs | 22 +- crates/hir-expand/src/builtin_fn_macro.rs | 28 +- crates/hir-expand/src/db.rs | 10 +- crates/hir-expand/src/fixup.rs | 6 +- crates/hir-expand/src/hygiene.rs | 71 +- 
crates/hir-expand/src/lib.rs | 6 +- crates/hir-expand/src/quote.rs | 9 +- crates/hir-ty/src/autoderef.rs | 1 + crates/hir-ty/src/builder.rs | 2 + crates/hir-ty/src/chalk_db.rs | 6 +- crates/hir-ty/src/consteval/tests.rs | 4 +- crates/hir-ty/src/db.rs | 18 +- crates/hir-ty/src/diagnostics/decl_check.rs | 725 +++++++-------- crates/hir-ty/src/diagnostics/expr.rs | 83 +- .../diagnostics/match_check/pat_analysis.rs | 113 ++- crates/hir-ty/src/display.rs | 13 +- crates/hir-ty/src/infer/closure.rs | 16 +- crates/hir-ty/src/infer/expr.rs | 9 +- crates/hir-ty/src/infer/unify.rs | 2 + crates/hir-ty/src/layout.rs | 10 +- crates/hir-ty/src/layout/tests.rs | 5 +- crates/hir-ty/src/lib.rs | 8 +- crates/hir-ty/src/lower.rs | 15 +- crates/hir-ty/src/method_resolution.rs | 27 +- crates/hir-ty/src/mir/borrowck.rs | 9 +- crates/hir-ty/src/mir/eval.rs | 12 +- crates/hir-ty/src/mir/eval/shim.rs | 4 +- crates/hir-ty/src/mir/lower.rs | 14 +- crates/hir-ty/src/mir/lower/as_place.rs | 2 +- crates/hir-ty/src/mir/monomorphization.rs | 4 +- crates/hir-ty/src/tests.rs | 15 +- crates/hir-ty/src/tests/macros.rs | 31 + crates/hir-ty/src/traits.rs | 6 +- crates/hir/src/attrs.rs | 2 +- crates/hir/src/diagnostics.rs | 39 +- crates/hir/src/display.rs | 4 +- crates/hir/src/lib.rs | 89 +- .../src/handlers/apply_demorgan.rs | 4 +- .../src/handlers/convert_bool_then.rs | 5 +- .../src/handlers/convert_comment_block.rs | 2 +- .../src/handlers/convert_to_guarded_return.rs | 175 +++- .../src/handlers/desugar_doc_comment.rs | 4 +- .../src/handlers/extract_function.rs | 296 ++++--- .../src/handlers/extract_module.rs | 14 +- .../src/handlers/extract_variable.rs | 2 +- .../src/handlers/flip_trait_bound.rs | 5 + .../src/handlers/generate_delegate_methods.rs | 12 +- .../src/handlers/generate_delegate_trait.rs | 10 +- .../generate_documentation_template.rs | 8 +- .../src/handlers/generate_enum_variant.rs | 2 +- .../src/handlers/generate_function.rs | 8 +- .../src/handlers/generate_getter_or_setter.rs | 188 ++-- 
.../ide-assists/src/handlers/generate_impl.rs | 126 ++- .../handlers/generate_is_empty_from_len.rs | 2 +- .../ide-assists/src/handlers/generate_new.rs | 190 ++-- .../src/handlers/generate_trait_from_impl.rs | 2 +- .../ide-assists/src/handlers/inline_call.rs | 62 +- .../src/handlers/inline_const_as_literal.rs | 2 +- .../ide-assists/src/handlers/inline_macro.rs | 2 +- .../src/handlers/inline_type_alias.rs | 10 +- .../src/handlers/into_to_qualified_from.rs | 2 +- .../src/handlers/introduce_named_lifetime.rs | 2 +- .../src/handlers/merge_match_arms.rs | 11 +- .../src/handlers/move_module_to_file.rs | 2 +- .../src/handlers/number_representation.rs | 2 +- .../ide-assists/src/handlers/qualify_path.rs | 2 +- .../src/handlers/remove_unused_imports.rs | 6 +- .../replace_derive_with_manual_impl.rs | 131 +-- .../src/handlers/unwrap_result_return_type.rs | 4 +- crates/ide-assists/src/tests.rs | 17 +- crates/ide-assists/src/tests/generated.rs | 30 +- crates/ide-assists/src/tests/sourcegen.rs | 4 +- crates/ide-assists/src/utils.rs | 217 +++-- crates/ide-assists/src/utils/suggest_name.rs | 10 +- crates/ide-completion/src/completions/dot.rs | 17 +- crates/ide-completion/src/completions/expr.rs | 10 +- .../src/completions/extern_crate.rs | 4 +- .../src/completions/flyimport.rs | 8 + .../src/completions/item_list.rs | 4 +- .../ide-completion/src/completions/keyword.rs | 6 + .../ide-completion/src/completions/postfix.rs | 63 +- .../ide-completion/src/completions/record.rs | 7 +- .../ide-completion/src/completions/snippet.rs | 4 +- crates/ide-completion/src/context.rs | 148 ++-- crates/ide-completion/src/context/analysis.rs | 55 +- crates/ide-completion/src/item.rs | 2 +- crates/ide-completion/src/lib.rs | 1 + crates/ide-completion/src/render.rs | 12 +- crates/ide-completion/src/render/const_.rs | 2 +- crates/ide-completion/src/render/function.rs | 8 +- crates/ide-completion/src/render/pattern.rs | 2 +- .../ide-completion/src/render/type_alias.rs | 2 +- 
crates/ide-completion/src/render/variant.rs | 4 +- crates/ide-completion/src/tests/expression.rs | 33 + .../ide-completion/src/tests/proc_macros.rs | 8 + crates/ide-completion/src/tests/special.rs | 2 + crates/ide-db/src/defs.rs | 2 +- crates/ide-db/src/famous_defs.rs | 13 +- crates/ide-db/src/generated/lints.rs | 11 - crates/ide-db/src/imports/import_assets.rs | 9 +- crates/ide-db/src/imports/insert_use.rs | 2 +- .../src/syntax_helpers/format_string_exprs.rs | 2 +- crates/ide-db/src/tests/sourcegen_lints.rs | 5 +- crates/ide-db/src/traits.rs | 2 +- .../src/handlers/inactive_code.rs | 2 +- .../src/handlers/incoherent_impl.rs | 2 +- .../src/handlers/incorrect_case.rs | 75 +- .../src/handlers/json_is_not_rust.rs | 6 +- .../src/handlers/macro_error.rs | 6 +- .../src/handlers/mismatched_arg_count.rs | 2 +- .../src/handlers/missing_fields.rs | 29 +- .../src/handlers/missing_match_arms.rs | 2 +- .../src/handlers/mutability_errors.rs | 10 +- .../src/handlers/private_field.rs | 26 + .../src/handlers/remove_trailing_return.rs | 375 ++++++++ .../src/handlers/remove_unnecessary_else.rs | 390 ++++++++ .../replace_filter_map_next_with_find_map.rs | 4 +- .../src/handlers/trait_impl_orphan.rs | 2 +- .../trait_impl_redundant_assoc_item.rs | 2 +- .../src/handlers/type_mismatch.rs | 11 +- .../handlers/unimplemented_builtin_macro.rs | 2 +- .../src/handlers/unresolved_field.rs | 2 +- .../src/handlers/unresolved_method.rs | 2 +- .../src/handlers/unresolved_module.rs | 4 +- .../src/handlers/unresolved_proc_macro.rs | 4 +- .../src/handlers/useless_braces.rs | 4 +- crates/ide-diagnostics/src/lib.rs | 16 +- crates/ide-diagnostics/src/tests.rs | 44 +- crates/ide-ssr/src/matching.rs | 6 +- crates/ide-ssr/src/parsing.rs | 2 +- crates/ide-ssr/src/tests.rs | 1 + crates/ide/src/doc_links.rs | 12 +- crates/ide/src/doc_links/tests.rs | 3 - crates/ide/src/file_structure.rs | 2 +- crates/ide/src/goto_declaration.rs | 2 +- crates/ide/src/goto_definition.rs | 6 +- crates/ide/src/goto_implementation.rs 
| 4 +- crates/ide/src/highlight_related.rs | 2 +- crates/ide/src/hover/render.rs | 10 +- crates/ide/src/hover/tests.rs | 87 +- crates/ide/src/inlay_hints/implicit_drop.rs | 10 +- crates/ide/src/inlay_hints/range_exclusive.rs | 2 +- crates/ide/src/interpret_function.rs | 4 +- crates/ide/src/join_lines.rs | 8 +- crates/ide/src/lib.rs | 2 +- crates/ide/src/moniker.rs | 8 +- crates/ide/src/navigation_target.rs | 4 +- crates/ide/src/prime_caches.rs | 2 +- crates/ide/src/references.rs | 4 +- crates/ide/src/runnables.rs | 5 +- crates/ide/src/ssr.rs | 2 +- crates/ide/src/static_index.rs | 15 +- crates/ide/src/status.rs | 2 +- .../ide/src/syntax_highlighting/highlight.rs | 2 +- crates/ide/src/syntax_tree.rs | 2 +- crates/ide/src/typing.rs | 15 +- crates/ide/src/view_hir.rs | 2 +- crates/ide/src/view_mir.rs | 2 +- crates/limit/src/lib.rs | 13 +- crates/load-cargo/src/lib.rs | 4 +- crates/mbe/src/benchmark.rs | 72 +- crates/mbe/src/expander.rs | 2 +- crates/mbe/src/expander/matcher.rs | 8 +- crates/mbe/src/expander/transcriber.rs | 14 +- crates/mbe/src/syntax_bridge.rs | 16 +- crates/mbe/src/syntax_bridge/tests.rs | 5 +- crates/mbe/src/tt_iter.rs | 2 +- crates/parser/src/grammar.rs | 15 + crates/parser/src/grammar/expressions.rs | 13 +- crates/parser/src/grammar/generic_args.rs | 12 +- crates/parser/src/grammar/generic_params.rs | 25 +- crates/parser/src/grammar/items/adt.rs | 47 +- crates/parser/src/grammar/items/use_item.rs | 13 +- crates/parser/src/lexed_str.rs | 2 +- .../src/tests/sourcegen_inline_tests.rs | 5 +- .../inline/err/0015_arg_list_recovery.rast | 28 + .../inline/err/0015_arg_list_recovery.rs | 1 + .../err/0026_use_tree_list_err_recovery.rast | 25 + .../err/0026_use_tree_list_err_recovery.rs | 1 + ...028_method_call_missing_argument_list.rast | 56 ++ .../0028_method_call_missing_argument_list.rs | 4 + .../err/0029_tuple_field_list_recovery.rast | 44 + .../err/0029_tuple_field_list_recovery.rs | 2 + .../err/0030_generic_arg_list_recover.rast | 33 + 
.../err/0030_generic_arg_list_recover.rs | 1 + .../err/0031_generic_param_list_recover.rast | 45 + .../err/0031_generic_param_list_recover.rs | 1 + crates/proc-macro-api/Cargo.toml | 3 +- crates/proc-macro-api/src/lib.rs | 2 +- crates/proc-macro-api/src/msg.rs | 123 +-- crates/proc-macro-api/src/msg/flat.rs | 13 +- crates/proc-macro-api/src/process.rs | 6 +- crates/proc-macro-api/src/version.rs | 2 +- crates/proc-macro-srv-cli/src/main.rs | 2 + .../proc-macro-test/imp/src/lib.rs | 1 + .../src/server/rust_analyzer_span.rs | 4 +- crates/proc-macro-srv/src/server/token_id.rs | 10 +- .../proc-macro-srv/src/server/token_stream.rs | 8 +- crates/profile/src/lib.rs | 3 +- crates/profile/src/stop_watch.rs | 3 + crates/project-model/src/build_scripts.rs | 8 +- crates/project-model/src/cargo_workspace.rs | 8 +- crates/project-model/src/cfg_flag.rs | 2 +- crates/project-model/src/lib.rs | 2 +- crates/project-model/src/rustc_cfg.rs | 2 +- crates/project-model/src/sysroot.rs | 2 +- crates/project-model/src/tests.rs | 2 +- crates/project-model/src/workspace.rs | 12 +- crates/rust-analyzer/src/bin/main.rs | 6 +- crates/rust-analyzer/src/caps.rs | 16 +- crates/rust-analyzer/src/cli.rs | 2 + .../rust-analyzer/src/cli/analysis_stats.rs | 8 +- crates/rust-analyzer/src/cli/diagnostics.rs | 2 +- crates/rust-analyzer/src/cli/lsif.rs | 30 +- .../rust-analyzer/src/cli/progress_report.rs | 2 +- crates/rust-analyzer/src/cli/run_tests.rs | 2 +- crates/rust-analyzer/src/cli/rustc_tests.rs | 15 +- crates/rust-analyzer/src/cli/scip.rs | 28 +- crates/rust-analyzer/src/config.rs | 20 +- .../src/config/patch_old_style.rs | 2 +- crates/rust-analyzer/src/diagnostics.rs | 4 +- .../rust-analyzer/src/diagnostics/to_proto.rs | 16 +- crates/rust-analyzer/src/dispatch.rs | 6 +- crates/rust-analyzer/src/global_state.rs | 24 +- .../src/handlers/notification.rs | 61 +- crates/rust-analyzer/src/handlers/request.rs | 20 +- crates/rust-analyzer/src/lsp/ext.rs | 28 +- crates/rust-analyzer/src/lsp/from_proto.rs | 
2 +- crates/rust-analyzer/src/lsp/to_proto.rs | 8 +- crates/rust-analyzer/src/lsp/utils.rs | 27 +- crates/rust-analyzer/src/main_loop.rs | 73 +- crates/rust-analyzer/src/reload.rs | 16 +- crates/rust-analyzer/src/task_pool.rs | 11 + crates/rust-analyzer/src/tracing/hprof.rs | 1 + crates/rust-analyzer/tests/slow-tests/main.rs | 89 +- .../rust-analyzer/tests/slow-tests/support.rs | 46 +- crates/rust-analyzer/tests/slow-tests/tidy.rs | 73 +- crates/salsa/Cargo.toml | 35 + crates/salsa/FAQ.md | 34 + crates/salsa/LICENSE-APACHE | 201 +++++ crates/salsa/LICENSE-MIT | 23 + crates/salsa/README.md | 42 + crates/salsa/salsa-macros/Cargo.toml | 23 + crates/salsa/salsa-macros/LICENSE-APACHE | 1 + crates/salsa/salsa-macros/LICENSE-MIT | 1 + crates/salsa/salsa-macros/README.md | 1 + .../salsa-macros/src/database_storage.rs | 250 ++++++ crates/salsa/salsa-macros/src/lib.rs | 146 +++ .../salsa/salsa-macros/src/parenthesized.rs | 13 + crates/salsa/salsa-macros/src/query_group.rs | 737 ++++++++++++++++ crates/salsa/src/debug.rs | 66 ++ crates/salsa/src/derived.rs | 233 +++++ crates/salsa/src/derived/slot.rs | 833 +++++++++++++++++ crates/salsa/src/doctest.rs | 115 +++ crates/salsa/src/durability.rs | 50 ++ crates/salsa/src/hash.rs | 4 + crates/salsa/src/input.rs | 384 ++++++++ crates/salsa/src/intern_id.rs | 131 +++ crates/salsa/src/interned.rs | 508 +++++++++++ crates/salsa/src/lib.rs | 742 ++++++++++++++++ crates/salsa/src/lru.rs | 325 +++++++ crates/salsa/src/plumbing.rs | 238 +++++ crates/salsa/src/revision.rs | 67 ++ crates/salsa/src/runtime.rs | 667 ++++++++++++++ crates/salsa/src/runtime/dependency_graph.rs | 251 ++++++ crates/salsa/src/runtime/local_state.rs | 214 +++++ crates/salsa/src/storage.rs | 54 ++ crates/salsa/tests/cycles.rs | 493 +++++++++++ crates/salsa/tests/dyn_trait.rs | 28 + crates/salsa/tests/incremental/constants.rs | 145 +++ crates/salsa/tests/incremental/counter.rs | 14 + .../salsa/tests/incremental/implementation.rs | 59 ++ 
crates/salsa/tests/incremental/log.rs | 16 + crates/salsa/tests/incremental/main.rs | 9 + .../tests/incremental/memoized_dep_inputs.rs | 60 ++ .../tests/incremental/memoized_inputs.rs | 76 ++ .../tests/incremental/memoized_volatile.rs | 77 ++ crates/salsa/tests/interned.rs | 90 ++ crates/salsa/tests/lru.rs | 102 +++ crates/salsa/tests/macros.rs | 11 + crates/salsa/tests/no_send_sync.rs | 31 + crates/salsa/tests/on_demand_inputs.rs | 147 +++ crates/salsa/tests/panic_safely.rs | 93 ++ crates/salsa/tests/parallel/cancellation.rs | 132 +++ crates/salsa/tests/parallel/frozen.rs | 57 ++ crates/salsa/tests/parallel/independent.rs | 29 + crates/salsa/tests/parallel/main.rs | 13 + .../parallel/parallel_cycle_all_recover.rs | 110 +++ .../parallel/parallel_cycle_mid_recover.rs | 110 +++ .../parallel/parallel_cycle_none_recover.rs | 69 ++ .../parallel/parallel_cycle_one_recovers.rs | 95 ++ crates/salsa/tests/parallel/race.rs | 37 + crates/salsa/tests/parallel/setup.rs | 197 +++++ crates/salsa/tests/parallel/signal.rs | 40 + crates/salsa/tests/parallel/stress.rs | 168 ++++ crates/salsa/tests/parallel/true_parallel.rs | 125 +++ .../tests/storage_varieties/implementation.rs | 19 + crates/salsa/tests/storage_varieties/main.rs | 5 + .../salsa/tests/storage_varieties/queries.rs | 22 + crates/salsa/tests/storage_varieties/tests.rs | 49 + crates/salsa/tests/transparent.rs | 39 + crates/salsa/tests/variadic.rs | 51 ++ crates/sourcegen/src/lib.rs | 5 +- crates/span/Cargo.toml | 2 +- crates/span/src/lib.rs | 23 +- crates/stdx/Cargo.toml | 2 +- crates/stdx/src/anymap.rs | 5 +- crates/stdx/src/lib.rs | 2 + crates/stdx/src/panic_context.rs | 3 +- crates/stdx/src/rand.rs | 5 +- crates/syntax/src/ast/edit_in_place.rs | 42 + crates/syntax/src/ast/make.rs | 60 +- crates/syntax/src/fuzz.rs | 3 +- crates/syntax/src/lib.rs | 2 +- crates/syntax/src/parsing.rs | 2 +- crates/syntax/src/parsing/reparsing.rs | 2 +- crates/syntax/src/ptr.rs | 2 +- crates/syntax/src/tests/sourcegen_ast.rs | 12 +- 
crates/test-fixture/src/lib.rs | 10 +- crates/test-utils/src/bench_fixture.rs | 2 +- crates/test-utils/src/fixture.rs | 34 +- crates/test-utils/src/lib.rs | 7 +- crates/test-utils/src/minicore.rs | 2 +- crates/text-edit/src/lib.rs | 6 +- crates/tt/src/buffer.rs | 2 +- crates/tt/src/lib.rs | 52 +- crates/vfs/src/loader.rs | 2 +- crates/vfs/src/vfs_path/tests.rs | 14 +- docs/dev/lsp-extensions.md | 21 +- docs/user/generated_config.adoc | 11 + editors/code/package.json | 10 + editors/code/src/ctx.ts | 17 + editors/code/src/lsp_ext.ts | 6 + editors/code/src/main.ts | 2 +- lib/la-arena/src/lib.rs | 12 +- lib/la-arena/src/map.rs | 6 +- lib/lsp-server/examples/goto_def.rs | 3 + lib/lsp-server/src/lib.rs | 11 +- lib/lsp-server/src/msg.rs | 2 +- lib/lsp-server/src/req_queue.rs | 2 +- xtask/src/dist.rs | 8 +- xtask/src/flags.rs | 2 +- xtask/src/install.rs | 2 +- xtask/src/main.rs | 1 + xtask/src/metrics.rs | 6 +- xtask/src/release/changelog.rs | 2 +- 378 files changed, 14720 insertions(+), 3111 deletions(-) create mode 100644 clippy.toml create mode 100644 crates/ide-diagnostics/src/handlers/remove_trailing_return.rs create mode 100644 crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs create mode 100644 crates/parser/test_data/parser/inline/err/0028_method_call_missing_argument_list.rast create mode 100644 crates/parser/test_data/parser/inline/err/0028_method_call_missing_argument_list.rs create mode 100644 crates/parser/test_data/parser/inline/err/0029_tuple_field_list_recovery.rast create mode 100644 crates/parser/test_data/parser/inline/err/0029_tuple_field_list_recovery.rs create mode 100644 crates/parser/test_data/parser/inline/err/0030_generic_arg_list_recover.rast create mode 100644 crates/parser/test_data/parser/inline/err/0030_generic_arg_list_recover.rs create mode 100644 crates/parser/test_data/parser/inline/err/0031_generic_param_list_recover.rast create mode 100644 crates/parser/test_data/parser/inline/err/0031_generic_param_list_recover.rs create 
mode 100644 crates/salsa/Cargo.toml create mode 100644 crates/salsa/FAQ.md create mode 100644 crates/salsa/LICENSE-APACHE create mode 100644 crates/salsa/LICENSE-MIT create mode 100644 crates/salsa/README.md create mode 100644 crates/salsa/salsa-macros/Cargo.toml create mode 100644 crates/salsa/salsa-macros/LICENSE-APACHE create mode 100644 crates/salsa/salsa-macros/LICENSE-MIT create mode 100644 crates/salsa/salsa-macros/README.md create mode 100644 crates/salsa/salsa-macros/src/database_storage.rs create mode 100644 crates/salsa/salsa-macros/src/lib.rs create mode 100644 crates/salsa/salsa-macros/src/parenthesized.rs create mode 100644 crates/salsa/salsa-macros/src/query_group.rs create mode 100644 crates/salsa/src/debug.rs create mode 100644 crates/salsa/src/derived.rs create mode 100644 crates/salsa/src/derived/slot.rs create mode 100644 crates/salsa/src/doctest.rs create mode 100644 crates/salsa/src/durability.rs create mode 100644 crates/salsa/src/hash.rs create mode 100644 crates/salsa/src/input.rs create mode 100644 crates/salsa/src/intern_id.rs create mode 100644 crates/salsa/src/interned.rs create mode 100644 crates/salsa/src/lib.rs create mode 100644 crates/salsa/src/lru.rs create mode 100644 crates/salsa/src/plumbing.rs create mode 100644 crates/salsa/src/revision.rs create mode 100644 crates/salsa/src/runtime.rs create mode 100644 crates/salsa/src/runtime/dependency_graph.rs create mode 100644 crates/salsa/src/runtime/local_state.rs create mode 100644 crates/salsa/src/storage.rs create mode 100644 crates/salsa/tests/cycles.rs create mode 100644 crates/salsa/tests/dyn_trait.rs create mode 100644 crates/salsa/tests/incremental/constants.rs create mode 100644 crates/salsa/tests/incremental/counter.rs create mode 100644 crates/salsa/tests/incremental/implementation.rs create mode 100644 crates/salsa/tests/incremental/log.rs create mode 100644 crates/salsa/tests/incremental/main.rs create mode 100644 crates/salsa/tests/incremental/memoized_dep_inputs.rs 
create mode 100644 crates/salsa/tests/incremental/memoized_inputs.rs create mode 100644 crates/salsa/tests/incremental/memoized_volatile.rs create mode 100644 crates/salsa/tests/interned.rs create mode 100644 crates/salsa/tests/lru.rs create mode 100644 crates/salsa/tests/macros.rs create mode 100644 crates/salsa/tests/no_send_sync.rs create mode 100644 crates/salsa/tests/on_demand_inputs.rs create mode 100644 crates/salsa/tests/panic_safely.rs create mode 100644 crates/salsa/tests/parallel/cancellation.rs create mode 100644 crates/salsa/tests/parallel/frozen.rs create mode 100644 crates/salsa/tests/parallel/independent.rs create mode 100644 crates/salsa/tests/parallel/main.rs create mode 100644 crates/salsa/tests/parallel/parallel_cycle_all_recover.rs create mode 100644 crates/salsa/tests/parallel/parallel_cycle_mid_recover.rs create mode 100644 crates/salsa/tests/parallel/parallel_cycle_none_recover.rs create mode 100644 crates/salsa/tests/parallel/parallel_cycle_one_recovers.rs create mode 100644 crates/salsa/tests/parallel/race.rs create mode 100644 crates/salsa/tests/parallel/setup.rs create mode 100644 crates/salsa/tests/parallel/signal.rs create mode 100644 crates/salsa/tests/parallel/stress.rs create mode 100644 crates/salsa/tests/parallel/true_parallel.rs create mode 100644 crates/salsa/tests/storage_varieties/implementation.rs create mode 100644 crates/salsa/tests/storage_varieties/main.rs create mode 100644 crates/salsa/tests/storage_varieties/queries.rs create mode 100644 crates/salsa/tests/storage_varieties/tests.rs create mode 100644 crates/salsa/tests/transparent.rs create mode 100644 crates/salsa/tests/variadic.rs diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 07fa85a61cbd..964be478fa3a 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -28,7 +28,7 @@ jobs: proc_macros: ${{ steps.filter.outputs.proc_macros }} steps: - uses: actions/checkout@v3 - - uses: 
dorny/paths-filter@4067d885736b84de7c414f582ac45897079b0a78 + - uses: dorny/paths-filter@1441771bbfdd59dcd748680ee64ebd8faab1a242 id: filter with: filters: | @@ -104,8 +104,8 @@ jobs: run: target/${{ matrix.target }}/debug/rust-analyzer analysis-stats --with-deps $(rustc --print sysroot)/lib/rustlib/src/rust/library/std - name: clippy - if: matrix.os == 'ubuntu-latest' - run: cargo clippy --all-targets + if: matrix.os == 'windows-latest' + run: cargo clippy --all-targets -- -D clippy::disallowed_macros -D clippy::dbg_macro -D clippy::todo -D clippy::print_stdout -D clippy::print_stderr # Weird targets to catch non-portable code rust-cross: diff --git a/Cargo.lock b/Cargo.lock index e9492ce0202b..dc2bf3a76943 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -72,8 +72,8 @@ dependencies = [ "cfg", "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "profile", - "rust-analyzer-salsa", "rustc-hash", + "salsa", "semver", "span", "stdx", @@ -167,7 +167,7 @@ checksum = "5676cea088c32290fe65c82895be9d06dd21e0fa49bb97ca840529e9417ab71a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn", "synstructure", ] @@ -313,17 +313,6 @@ dependencies = [ "parking_lot_core", ] -[[package]] -name = "derivative" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "derive_arbitrary" version = "1.3.2" @@ -332,7 +321,7 @@ checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn", ] [[package]] @@ -368,6 +357,15 @@ dependencies = [ "log", ] +[[package]] +name = "env_logger" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd405aab171cb85d6735e5c8d9db038c17d3ca007a4d2c25f337935c3d90580" +dependencies = [ + "log", +] + [[package]] name 
= "equivalent" version = "1.0.0" @@ -452,6 +450,17 @@ version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ab85b9b05e3978cc9a9cf8fea7f01b494e1a09ed3037e16ba39edc7a29eb61a" +[[package]] +name = "getrandom" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + [[package]] name = "gimli" version = "0.27.3" @@ -929,6 +938,12 @@ dependencies = [ "text-size", ] +[[package]] +name = "linked-hash-map" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" + [[package]] name = "load-cargo" version = "0.0.0" @@ -1272,6 +1287,12 @@ version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + [[package]] name = "proc-macro-api" version = "0.0.0" @@ -1283,6 +1304,7 @@ dependencies = [ "object 0.32.0", "paths", "profile", + "rustc-hash", "serde", "serde_json", "snap", @@ -1435,17 +1457,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "ra-ap-rustc_index" -version = "0.33.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e5313d7f243b63ef9e58d94355b11aa8499f1328055f1f58adf0a5ea7d2faca" -dependencies = [ - "arrayvec", - "ra-ap-rustc_index_macros 0.33.0", - "smallvec", -] - [[package]] name = "ra-ap-rustc_index" version = "0.35.0" @@ -1458,15 +1469,14 @@ dependencies = [ ] [[package]] -name = "ra-ap-rustc_index_macros" -version = "0.33.0" +name = "ra-ap-rustc_index" +version = "0.36.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a83108ebf3e73dde205b9c25706209bcd7736480820f90ded28eabaf8b469f25" +checksum = "f8a41dee58608b1fc93779ea365edaa70ac9927e3335ae914b675be0fa063cd7" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.39", - "synstructure", + "arrayvec", + "ra-ap-rustc_index_macros 0.36.0", + "smallvec", ] [[package]] @@ -1477,7 +1487,19 @@ checksum = "054e25eac52f0506c1309ca4317c11ad4925d7b99eb897f71aa7c3cbafb46c2b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn", + "synstructure", +] + +[[package]] +name = "ra-ap-rustc_index_macros" +version = "0.36.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbfe98def54c4337a2f7d8233850bd5d5349972b185fe8a0db2b979164b30ed8" +dependencies = [ + "proc-macro2", + "quote", + "syn", "synstructure", ] @@ -1503,18 +1525,47 @@ dependencies = [ [[package]] name = "ra-ap-rustc_pattern_analysis" -version = "0.33.0" +version = "0.36.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c4085e0c771fd4b883930b599ef42966b855762bbe4052c17673b3253421a6d" +checksum = "b5529bffec7530b4a3425640bfdfd9b95d87c4c620f740266c0de6572561aab4" dependencies = [ - "derivative", - "ra-ap-rustc_index 0.33.0", + "ra-ap-rustc_index 0.36.0", "rustc-hash", "rustc_apfloat", "smallvec", "tracing", ] +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + [[package]] name = "rayon" version = "1.8.0" @@ -1622,35 +1673,6 @@ dependencies = [ "xshell", ] -[[package]] -name = "rust-analyzer-salsa" -version = "0.17.0-pre.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "719825638c59fd26a55412a24561c7c5bcf54364c88b9a7a04ba08a6eafaba8d" -dependencies = [ - "indexmap", - "lock_api", - "oorandom", - "parking_lot", - "rust-analyzer-salsa-macros", - "rustc-hash", - "smallvec", - "tracing", - "triomphe", -] - -[[package]] -name = "rust-analyzer-salsa-macros" -version = "0.17.0-pre.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d96498e9684848c6676c399032ebc37c52da95ecbefa83d71ccc53b9f8a4a8e" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "syn 2.0.39", -] - [[package]] name = "rustc-demangle" version = "0.1.23" @@ -1679,6 +1701,36 @@ version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041" +[[package]] +name = "salsa" +version = "0.0.0" +dependencies = [ + "dissimilar", + "expect-test", + "indexmap", + "linked-hash-map", + "lock_api", + "oorandom", + "parking_lot", + "rand", + "rustc-hash", + "salsa-macros", + "smallvec", + "test-log", + "tracing", + "triomphe", +] + +[[package]] +name = "salsa-macros" +version = "0.0.0" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "same-file" version = "1.0.6" @@ -1735,7 +1787,7 @@ checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn", ] [[package]] @@ -1758,7 +1810,7 @@ checksum = "bcec881020c684085e55a25f7fd888954d56609ef363479dc5a1305eb0d40cab" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn", ] [[package]] @@ -1803,7 +1855,7 @@ name = "span" version = "0.0.0" 
dependencies = [ "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rust-analyzer-salsa", + "salsa", "stdx", "syntax", "vfs", @@ -1835,17 +1887,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "syn" -version = "1.0.109" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - [[package]] name = "syn" version = "2.0.39" @@ -1865,7 +1906,7 @@ checksum = "285ba80e733fac80aa4270fbcdf83772a79b80aa35c97075320abfee4a915b06" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn", "unicode-xid", ] @@ -1911,6 +1952,27 @@ dependencies = [ "tt", ] +[[package]] +name = "test-log" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6159ab4116165c99fc88cce31f99fa2c9dbe08d3691cb38da02fc3b45f357d2b" +dependencies = [ + "env_logger", + "test-log-macros", +] + +[[package]] +name = "test-log-macros" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ba277e77219e9eea169e8508942db1bf5d8a41ff2db9b20aab5a5aadc9fa25d" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "test-utils" version = "0.0.0" @@ -1954,7 +2016,7 @@ checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn", ] [[package]] @@ -2055,7 +2117,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 3fb5d9aa7a86..2b81f7b11b23 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -70,6 +70,7 @@ proc-macro-srv = { path = "./crates/proc-macro-srv", version = "0.0.0" } proc-macro-srv-cli = { path = "./crates/proc-macro-srv-cli", version = "0.0.0" } profile = { path = "./crates/profile", 
version = "0.0.0" } project-model = { path = "./crates/project-model", version = "0.0.0" } +salsa = { path = "./crates/salsa", version = "0.0.0" } span = { path = "./crates/span", version = "0.0.0" } stdx = { path = "./crates/stdx", version = "0.0.0" } syntax = { path = "./crates/syntax", version = "0.0.0" } @@ -83,7 +84,7 @@ ra-ap-rustc_lexer = { version = "0.35.0", default-features = false } ra-ap-rustc_parse_format = { version = "0.35.0", default-features = false } ra-ap-rustc_index = { version = "0.35.0", default-features = false } ra-ap-rustc_abi = { version = "0.35.0", default-features = false } -ra-ap-rustc_pattern_analysis = { version = "0.33.0", default-features = false } +ra-ap-rustc_pattern_analysis = { version = "0.36.0", default-features = false } # local crates that aren't published to crates.io. These should not have versions. sourcegen = { path = "./crates/sourcegen" } @@ -106,22 +107,21 @@ dissimilar = "1.0.7" either = "1.9.0" expect-test = "1.4.0" hashbrown = { version = "0.14", features = [ - "inline-more", + "inline-more", ], default-features = false } indexmap = "2.1.0" itertools = "0.12.0" libc = "0.2.150" nohash-hasher = "0.2.0" rayon = "1.8.0" -rust-analyzer-salsa = "0.17.0-pre.6" rustc-hash = "1.1.0" semver = "1.0.14" serde = { version = "1.0.192", features = ["derive"] } serde_json = "1.0.108" smallvec = { version = "1.10.0", features = [ - "const_new", - "union", - "const_generics", + "const_new", + "union", + "const_generics", ] } smol_str = "0.2.1" text-size = "1.1.1" @@ -164,23 +164,20 @@ len_without_is_empty = "allow" enum_variant_names = "allow" # Builder pattern disagrees new_ret_no_self = "allow" +# Has a bunch of false positives +useless_asref = "allow" ## Following lints should be tackled at some point -borrowed_box = "allow" -derived_hash_with_manual_eq = "allow" -forget_non_drop = "allow" -needless_doctest_main = "allow" -non_canonical_clone_impl = "allow" -non_canonical_partial_ord_impl = "allow" -self_named_constructors = 
"allow" too_many_arguments = "allow" type_complexity = "allow" wrong_self_convention = "allow" ## warn at following lints +# CI raises these to deny dbg_macro = "warn" todo = "warn" -unimplemented = "allow" +print_stdout = "warn" +print_stderr = "warn" + rc_buffer = "warn" -# FIXME enable this, we use this pattern a lot so its annoying work ... -# str_to_string = "warn" +str_to_string = "warn" diff --git a/clippy.toml b/clippy.toml new file mode 100644 index 000000000000..8032c775ab0a --- /dev/null +++ b/clippy.toml @@ -0,0 +1,5 @@ +disallowed-types = [ + { path = "std::collections::HashMap", reason = "use FxHashMap" }, + { path = "std::collections::HashSet", reason = "use FxHashSet" }, + { path = "std::collections::hash_map::RandomState", reason = "use BuildHasherDefault"} +] diff --git a/crates/base-db/Cargo.toml b/crates/base-db/Cargo.toml index 485ba78846a8..801ba2d1f6c8 100644 --- a/crates/base-db/Cargo.toml +++ b/crates/base-db/Cargo.toml @@ -13,7 +13,7 @@ doctest = false [dependencies] la-arena.workspace = true -rust-analyzer-salsa.workspace = true +salsa.workspace = true rustc-hash.workspace = true triomphe.workspace = true semver.workspace = true diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index 51e6fdb9510a..9560826e373e 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -782,7 +782,7 @@ impl FromStr for Edition { "2018" => Edition::Edition2018, "2021" => Edition::Edition2021, "2024" => Edition::Edition2024, - _ => return Err(ParseEditionError { invalid_input: s.to_string() }), + _ => return Err(ParseEditionError { invalid_input: s.to_owned() }), }; Ok(res) } diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index 22603842a1b6..c59aff2a8bbf 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -100,9 +100,14 @@ impl FlycheckHandle { FlycheckHandle { id, sender, _thread: thread } } - /// Schedule a re-start of the cargo check worker. 
- pub fn restart(&self) { - self.sender.send(StateChange::Restart).unwrap(); + /// Schedule a re-start of the cargo check worker to do a workspace wide check. + pub fn restart_workspace(&self) { + self.sender.send(StateChange::Restart(None)).unwrap(); + } + + /// Schedule a re-start of the cargo check worker to do a package wide check. + pub fn restart_for_package(&self, package: String) { + self.sender.send(StateChange::Restart(Some(package))).unwrap(); } /// Stop this cargo check worker. @@ -153,7 +158,7 @@ pub enum Progress { } enum StateChange { - Restart, + Restart(Option), Cancel, } @@ -213,7 +218,7 @@ impl FlycheckActor { tracing::debug!(flycheck_id = self.id, "flycheck cancelled"); self.cancel_check_process(); } - Event::RequestStateChange(StateChange::Restart) => { + Event::RequestStateChange(StateChange::Restart(package)) => { // Cancel the previously spawned process self.cancel_check_process(); while let Ok(restart) = inbox.recv_timeout(Duration::from_millis(50)) { @@ -223,7 +228,7 @@ impl FlycheckActor { } } - let command = self.check_command(); + let command = self.check_command(package.as_deref()); let formatted_command = format!("{:?}", command); tracing::debug!(?command, "will restart flycheck"); @@ -297,7 +302,7 @@ impl FlycheckActor { } } - fn check_command(&self) -> Command { + fn check_command(&self, package: Option<&str>) -> Command { let (mut cmd, args) = match &self.config { FlycheckConfig::CargoCommand { command, @@ -314,7 +319,11 @@ impl FlycheckActor { let mut cmd = Command::new(toolchain::cargo()); cmd.arg(command); cmd.current_dir(&self.root); - cmd.arg("--workspace"); + + match package { + Some(pkg) => cmd.arg("-p").arg(pkg), + None => cmd.arg("--workspace"), + }; cmd.arg(if *ansi_color_output { "--message-format=json-diagnostic-rendered-ansi" @@ -493,9 +502,7 @@ impl CargoActor { // Skip certain kinds of messages to only spend time on what's useful JsonMessage::Cargo(message) => match message { 
cargo_metadata::Message::CompilerArtifact(artifact) if !artifact.fresh => { - self.sender - .send(CargoMessage::CompilerArtifact(Box::new(artifact))) - .unwrap(); + self.sender.send(CargoMessage::CompilerArtifact(artifact)).unwrap(); } cargo_metadata::Message::CompilerMessage(msg) => { self.sender.send(CargoMessage::Diagnostic(msg.message)).unwrap(); @@ -539,8 +546,9 @@ impl CargoActor { } } +#[allow(clippy::large_enum_variant)] enum CargoMessage { - CompilerArtifact(Box), + CompilerArtifact(cargo_metadata::Artifact), Diagnostic(Diagnostic), } diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index bee6f0083b1e..c91a5497262b 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -24,11 +24,11 @@ use triomphe::Arc; use crate::{ db::DefDatabase, - item_tree::{AttrOwner, Fields, ItemTreeId, ItemTreeModItemNode}, + item_tree::{AttrOwner, Fields, ItemTreeNode}, lang_item::LangItem, nameres::{ModuleOrigin, ModuleSource}, src::{HasChildSource, HasSource}, - AdtId, AssocItemLoc, AttrDefId, GenericParamId, ItemLoc, LocalFieldId, Lookup, MacroId, + AdtId, AttrDefId, GenericParamId, HasModule, ItemTreeLoc, LocalFieldId, Lookup, MacroId, VariantId, }; @@ -317,7 +317,7 @@ fn parse_comma_sep(subtree: &tt::Subtree) -> Vec { } impl AttrsWithOwner { - pub fn attrs_with_owner(db: &dyn DefDatabase, owner: AttrDefId) -> Self { + pub fn new(db: &dyn DefDatabase, owner: AttrDefId) -> Self { Self { attrs: db.attrs(owner), owner } } @@ -356,12 +356,7 @@ impl AttrsWithOwner { AttrDefId::FieldId(it) => { return db.fields_attrs(it.parent)[it.local_id].clone(); } - // FIXME: DRY this up - AttrDefId::EnumVariantId(it) => { - let id = it.lookup(db).id; - let tree = id.item_tree(db); - tree.raw_attrs(id.value.into()).clone() - } + AttrDefId::EnumVariantId(it) => attrs_from_item_tree_loc(db, it), AttrDefId::AdtId(it) => match it { AdtId::StructId(it) => attrs_from_item_tree_loc(db, it), AdtId::EnumId(it) => attrs_from_item_tree_loc(db, it), @@ -370,15 
+365,15 @@ impl AttrsWithOwner { AttrDefId::TraitId(it) => attrs_from_item_tree_loc(db, it), AttrDefId::TraitAliasId(it) => attrs_from_item_tree_loc(db, it), AttrDefId::MacroId(it) => match it { - MacroId::Macro2Id(it) => attrs_from_item_tree(db, it.lookup(db).id), - MacroId::MacroRulesId(it) => attrs_from_item_tree(db, it.lookup(db).id), - MacroId::ProcMacroId(it) => attrs_from_item_tree(db, it.lookup(db).id), + MacroId::Macro2Id(it) => attrs_from_item_tree_loc(db, it), + MacroId::MacroRulesId(it) => attrs_from_item_tree_loc(db, it), + MacroId::ProcMacroId(it) => attrs_from_item_tree_loc(db, it), }, AttrDefId::ImplId(it) => attrs_from_item_tree_loc(db, it), - AttrDefId::ConstId(it) => attrs_from_item_tree_assoc(db, it), - AttrDefId::StaticId(it) => attrs_from_item_tree_assoc(db, it), - AttrDefId::FunctionId(it) => attrs_from_item_tree_assoc(db, it), - AttrDefId::TypeAliasId(it) => attrs_from_item_tree_assoc(db, it), + AttrDefId::ConstId(it) => attrs_from_item_tree_loc(db, it), + AttrDefId::StaticId(it) => attrs_from_item_tree_loc(db, it), + AttrDefId::FunctionId(it) => attrs_from_item_tree_loc(db, it), + AttrDefId::TypeAliasId(it) => attrs_from_item_tree_loc(db, it), AttrDefId::GenericParamId(it) => match it { GenericParamId::ConstParamId(it) => { let src = it.parent().child_source(db); @@ -603,29 +598,14 @@ fn any_has_attrs<'db>( id.lookup(db).source(db).map(ast::AnyHasAttrs::new) } -fn attrs_from_item_tree( - db: &dyn DefDatabase, - id: ItemTreeId, +fn attrs_from_item_tree_loc<'db, N: ItemTreeNode>( + db: &(dyn DefDatabase + 'db), + lookup: impl Lookup = dyn DefDatabase + 'db, Data = impl ItemTreeLoc>, ) -> RawAttrs { + let id = lookup.lookup(db).item_tree_id(); let tree = id.item_tree(db); - let mod_item = N::id_to_mod_item(id.value); - tree.raw_attrs(mod_item.into()).clone() -} - -fn attrs_from_item_tree_loc<'db, N: ItemTreeModItemNode>( - db: &(dyn DefDatabase + 'db), - lookup: impl Lookup = dyn DefDatabase + 'db, Data = ItemLoc>, -) -> RawAttrs { - let id = 
lookup.lookup(db).id; - attrs_from_item_tree(db, id) -} - -fn attrs_from_item_tree_assoc<'db, N: ItemTreeModItemNode>( - db: &(dyn DefDatabase + 'db), - lookup: impl Lookup = dyn DefDatabase + 'db, Data = AssocItemLoc>, -) -> RawAttrs { - let id = lookup.lookup(db).id; - attrs_from_item_tree(db, id) + let attr_owner = N::attr_owner(id.value); + tree.raw_attrs(attr_owner).clone() } pub(crate) fn fields_attrs_source_map( diff --git a/crates/hir-def/src/attr/builtin.rs b/crates/hir-def/src/attr/builtin.rs index 48a596f7f53a..b20ee9e5bf6c 100644 --- a/crates/hir-def/src/attr/builtin.rs +++ b/crates/hir-def/src/attr/builtin.rs @@ -283,9 +283,6 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[ experimental!(optimize), ), - gated!( - ffi_returns_twice, Normal, template!(Word), WarnFollowing, experimental!(ffi_returns_twice) - ), gated!(ffi_pure, Normal, template!(Word), WarnFollowing, experimental!(ffi_pure)), gated!(ffi_const, Normal, template!(Word), WarnFollowing, experimental!(ffi_const)), gated!( diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index 492ea6d5c591..29ac666277d0 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -1980,10 +1980,7 @@ fn pat_literal_to_hir(lit: &ast::LiteralPat) -> Option<(Literal, ast::Literal)> let ast_lit = lit.literal()?; let mut hir_lit: Literal = ast_lit.kind().into(); if lit.minus_token().is_some() { - let Some(h) = hir_lit.negate() else { - return None; - }; - hir_lit = h; + hir_lit = hir_lit.negate()?; } Some((hir_lit, ast_lit)) } diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs index b821b91b8959..4afb40865170 100644 --- a/crates/hir-def/src/body/pretty.rs +++ b/crates/hir-def/src/body/pretty.rs @@ -29,11 +29,11 @@ pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBo "const {} = ", match &it.name { Some(name) => name.display(db.upcast()).to_string(), - None => "_".to_string(), + None => 
"_".to_owned(), } ) }), - DefWithBodyId::InTypeConstId(_) => "In type const = ".to_string(), + DefWithBodyId::InTypeConstId(_) => "In type const = ".to_owned(), DefWithBodyId::VariantId(it) => { let loc = it.lookup(db); let enum_loc = loc.parent.lookup(db); @@ -123,7 +123,7 @@ impl Printer<'_> { wln!(self); f(self); self.indent_level -= 1; - self.buf = self.buf.trim_end_matches('\n').to_string(); + self.buf = self.buf.trim_end_matches('\n').to_owned(); } fn whitespace(&mut self) { diff --git a/crates/hir-def/src/child_by_source.rs b/crates/hir-def/src/child_by_source.rs index b3bb3355f123..ba7d06272af1 100644 --- a/crates/hir-def/src/child_by_source.rs +++ b/crates/hir-def/src/child_by_source.rs @@ -6,15 +6,21 @@ use either::Either; use hir_expand::{attrs::collect_attrs, HirFileId}; +use syntax::ast; use crate::{ db::DefDatabase, - dyn_map::{keys, DynMap}, + dyn_map::{ + keys::{self, Key}, + DynMap, + }, item_scope::ItemScope, + item_tree::ItemTreeNode, nameres::DefMap, src::{HasChildSource, HasSource}, - AdtId, AssocItemId, DefWithBodyId, EnumId, ExternCrateId, FieldId, ImplId, Lookup, MacroId, - ModuleDefId, ModuleId, TraitId, UseId, VariantId, + AdtId, AssocItemId, DefWithBodyId, EnumId, FieldId, GenericDefId, ImplId, ItemTreeLoc, + LifetimeParamId, Lookup, MacroId, ModuleDefId, ModuleId, TraitId, TypeOrConstParamId, + VariantId, }; pub trait ChildBySource { @@ -55,29 +61,6 @@ impl ChildBySource for ImplId { } } -fn add_assoc_item(db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId, item: AssocItemId) { - match item { - AssocItemId::FunctionId(func) => { - let loc = func.lookup(db); - if loc.id.file_id() == file_id { - res[keys::FUNCTION].insert(loc.source(db).value, func) - } - } - AssocItemId::ConstId(konst) => { - let loc = konst.lookup(db); - if loc.id.file_id() == file_id { - res[keys::CONST].insert(loc.source(db).value, konst) - } - } - AssocItemId::TypeAliasId(ty) => { - let loc = ty.lookup(db); - if loc.id.file_id() == file_id { - 
res[keys::TYPE_ALIAS].insert(loc.source(db).value, ty) - } - } - } -} - impl ChildBySource for ModuleId { fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) { let def_map = self.def_map(db); @@ -89,15 +72,12 @@ impl ChildBySource for ModuleId { impl ChildBySource for ItemScope { fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) { self.declarations().for_each(|item| add_module_def(db, res, file_id, item)); - self.impls().for_each(|imp| add_impl(db, res, file_id, imp)); - self.extern_crate_decls().for_each(|ext| add_extern_crate(db, res, file_id, ext)); - self.use_decls().for_each(|ext| add_use(db, res, file_id, ext)); - self.unnamed_consts(db).for_each(|konst| { - let loc = konst.lookup(db); - if loc.id.file_id() == file_id { - res[keys::CONST].insert(loc.source(db).value, konst); - } - }); + self.impls().for_each(|imp| insert_item_loc(db, res, file_id, imp, keys::IMPL)); + self.extern_crate_decls() + .for_each(|ext| insert_item_loc(db, res, file_id, ext, keys::EXTERN_CRATE)); + self.use_decls().for_each(|ext| insert_item_loc(db, res, file_id, ext, keys::USE)); + self.unnamed_consts(db) + .for_each(|konst| insert_item_loc(db, res, file_id, konst, keys::CONST)); self.attr_macro_invocs().filter(|(id, _)| id.file_id == file_id).for_each( |(ast_id, call_id)| { res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id); @@ -132,59 +112,38 @@ impl ChildBySource for ItemScope { file_id: HirFileId, item: ModuleDefId, ) { - macro_rules! 
insert { - ($map:ident[$key:path].$insert:ident($id:ident)) => {{ - let loc = $id.lookup(db); - if loc.id.file_id() == file_id { - $map[$key].$insert(loc.source(db).value, $id) - } - }}; - } match item { - ModuleDefId::FunctionId(id) => insert!(map[keys::FUNCTION].insert(id)), - ModuleDefId::ConstId(id) => insert!(map[keys::CONST].insert(id)), - ModuleDefId::StaticId(id) => insert!(map[keys::STATIC].insert(id)), - ModuleDefId::TypeAliasId(id) => insert!(map[keys::TYPE_ALIAS].insert(id)), - ModuleDefId::TraitId(id) => insert!(map[keys::TRAIT].insert(id)), - ModuleDefId::TraitAliasId(id) => insert!(map[keys::TRAIT_ALIAS].insert(id)), + ModuleDefId::FunctionId(id) => { + insert_item_loc(db, map, file_id, id, keys::FUNCTION) + } + ModuleDefId::ConstId(id) => insert_item_loc(db, map, file_id, id, keys::CONST), + ModuleDefId::TypeAliasId(id) => { + insert_item_loc(db, map, file_id, id, keys::TYPE_ALIAS) + } + ModuleDefId::StaticId(id) => insert_item_loc(db, map, file_id, id, keys::STATIC), + ModuleDefId::TraitId(id) => insert_item_loc(db, map, file_id, id, keys::TRAIT), + ModuleDefId::TraitAliasId(id) => { + insert_item_loc(db, map, file_id, id, keys::TRAIT_ALIAS) + } ModuleDefId::AdtId(adt) => match adt { - AdtId::StructId(id) => insert!(map[keys::STRUCT].insert(id)), - AdtId::UnionId(id) => insert!(map[keys::UNION].insert(id)), - AdtId::EnumId(id) => insert!(map[keys::ENUM].insert(id)), + AdtId::StructId(id) => insert_item_loc(db, map, file_id, id, keys::STRUCT), + AdtId::UnionId(id) => insert_item_loc(db, map, file_id, id, keys::UNION), + AdtId::EnumId(id) => insert_item_loc(db, map, file_id, id, keys::ENUM), }, ModuleDefId::MacroId(id) => match id { - MacroId::Macro2Id(id) => insert!(map[keys::MACRO2].insert(id)), - MacroId::MacroRulesId(id) => insert!(map[keys::MACRO_RULES].insert(id)), - MacroId::ProcMacroId(id) => insert!(map[keys::PROC_MACRO].insert(id)), + MacroId::Macro2Id(id) => insert_item_loc(db, map, file_id, id, keys::MACRO2), + MacroId::MacroRulesId(id) 
=> { + insert_item_loc(db, map, file_id, id, keys::MACRO_RULES) + } + MacroId::ProcMacroId(id) => { + insert_item_loc(db, map, file_id, id, keys::PROC_MACRO) + } }, ModuleDefId::ModuleId(_) | ModuleDefId::EnumVariantId(_) | ModuleDefId::BuiltinType(_) => (), } } - fn add_impl(db: &dyn DefDatabase, map: &mut DynMap, file_id: HirFileId, imp: ImplId) { - let loc = imp.lookup(db); - if loc.id.file_id() == file_id { - map[keys::IMPL].insert(loc.source(db).value, imp) - } - } - fn add_extern_crate( - db: &dyn DefDatabase, - map: &mut DynMap, - file_id: HirFileId, - ext: ExternCrateId, - ) { - let loc = ext.lookup(db); - if loc.id.file_id() == file_id { - map[keys::EXTERN_CRATE].insert(loc.source(db).value, ext) - } - } - fn add_use(db: &dyn DefDatabase, map: &mut DynMap, file_id: HirFileId, ext: UseId) { - let loc = ext.lookup(db); - if loc.id.file_id() == file_id { - map[keys::USE].insert(loc.source(db).value, ext) - } - } } } @@ -237,3 +196,63 @@ impl ChildBySource for DefWithBodyId { } } } + +impl ChildBySource for GenericDefId { + fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) { + let (gfile_id, generic_params_list) = self.file_id_and_params_of(db); + if gfile_id != file_id { + return; + } + + let generic_params = db.generic_params(*self); + let mut toc_idx_iter = generic_params.type_or_consts.iter().map(|(idx, _)| idx); + let lts_idx_iter = generic_params.lifetimes.iter().map(|(idx, _)| idx); + + // For traits the first type index is `Self`, skip it. 
+ if let GenericDefId::TraitId(_) = *self { + toc_idx_iter.next().unwrap(); // advance_by(1); + } + + if let Some(generic_params_list) = generic_params_list { + for (local_id, ast_param) in + toc_idx_iter.zip(generic_params_list.type_or_const_params()) + { + let id = TypeOrConstParamId { parent: *self, local_id }; + match ast_param { + ast::TypeOrConstParam::Type(a) => res[keys::TYPE_PARAM].insert(a, id), + ast::TypeOrConstParam::Const(a) => res[keys::CONST_PARAM].insert(a, id), + } + } + for (local_id, ast_param) in lts_idx_iter.zip(generic_params_list.lifetime_params()) { + let id = LifetimeParamId { parent: *self, local_id }; + res[keys::LIFETIME_PARAM].insert(ast_param, id); + } + } + } +} + +fn insert_item_loc( + db: &dyn DefDatabase, + res: &mut DynMap, + file_id: HirFileId, + id: ID, + key: Key, +) where + ID: for<'db> Lookup = dyn DefDatabase + 'db, Data = Data> + 'static, + Data: ItemTreeLoc, + N: ItemTreeNode, + N::Source: 'static, +{ + let loc = id.lookup(db); + if loc.item_tree_id().file_id() == file_id { + res[key].insert(loc.source(db).value, id) + } +} + +fn add_assoc_item(db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId, item: AssocItemId) { + match item { + AssocItemId::FunctionId(func) => insert_item_loc(db, res, file_id, func, keys::FUNCTION), + AssocItemId::ConstId(konst) => insert_item_loc(db, res, file_id, konst, keys::CONST), + AssocItemId::TypeAliasId(ty) => insert_item_loc(db, res, file_id, ty, keys::TYPE_ALIAS), + } +} diff --git a/crates/hir-def/src/data/adt.rs b/crates/hir-def/src/data/adt.rs index 5986b7df3d93..540f643ae7d9 100644 --- a/crates/hir-def/src/data/adt.rs +++ b/crates/hir-def/src/data/adt.rs @@ -10,7 +10,7 @@ use hir_expand::{ HirFileId, InFile, }; use intern::Interned; -use la_arena::{Arena, ArenaMap}; +use la_arena::Arena; use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions}; use syntax::ast::{self, HasName, HasVisibility}; use triomphe::Arc; @@ -22,13 +22,11 @@ use crate::{ lang_item::LangItem, 
lower::LowerCtx, nameres::diagnostics::{DefDiagnostic, DefDiagnostics}, - src::HasChildSource, - src::HasSource, trace::Trace, tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree}, type_ref::TypeRef, visibility::RawVisibility, - EnumId, EnumVariantId, LocalFieldId, LocalModuleId, Lookup, StructId, UnionId, VariantId, + EnumId, EnumVariantId, LocalFieldId, LocalModuleId, Lookup, StructId, UnionId, }; /// Note that we use `StructData` for unions as well! @@ -387,46 +385,6 @@ impl VariantData { } } -impl HasChildSource for VariantId { - type Value = Either; - - fn child_source(&self, db: &dyn DefDatabase) -> InFile> { - let item_tree; - let (src, fields, container) = match *self { - VariantId::EnumVariantId(it) => { - let lookup = it.lookup(db); - item_tree = lookup.id.item_tree(db); - ( - lookup.source(db).map(|it| it.kind()), - &item_tree[lookup.id.value].fields, - lookup.parent.lookup(db).container, - ) - } - VariantId::StructId(it) => { - let lookup = it.lookup(db); - item_tree = lookup.id.item_tree(db); - ( - lookup.source(db).map(|it| it.kind()), - &item_tree[lookup.id.value].fields, - lookup.container, - ) - } - VariantId::UnionId(it) => { - let lookup = it.lookup(db); - item_tree = lookup.id.item_tree(db); - ( - lookup.source(db).map(|it| it.kind()), - &item_tree[lookup.id.value].fields, - lookup.container, - ) - } - }; - let mut trace = Trace::new_for_map(); - lower_struct(db, &mut trace, &src, container.krate, &item_tree, fields); - src.with_value(trace.into_map()) - } -} - #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum StructKind { Tuple, @@ -434,7 +392,7 @@ pub enum StructKind { Unit, } -fn lower_struct( +pub(crate) fn lower_struct( db: &dyn DefDatabase, trace: &mut Trace>, ast: &InFile, diff --git a/crates/hir-def/src/find_path.rs b/crates/hir-def/src/find_path.rs index 1a44c319de58..2e137f67b4c2 100644 --- a/crates/hir-def/src/find_path.rs +++ b/crates/hir-def/src/find_path.rs @@ -171,6 +171,7 @@ fn find_path_inner(ctx: FindPathCtx<'_>, item: 
ItemInNs, from: ModuleId) -> Opti .map(|(item, _)| item) } +#[tracing::instrument(skip_all)] fn find_path_for_module( ctx: FindPathCtx<'_>, def_map: &DefMap, @@ -312,6 +313,7 @@ fn find_self_super(def_map: &DefMap, item: ModuleId, from: ModuleId) -> Option, def_map: &DefMap, diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs index 349d327aaae9..1d2c7c3a55fd 100644 --- a/crates/hir-def/src/generics.rs +++ b/crates/hir-def/src/generics.rs @@ -3,31 +3,27 @@ //! generic parameters. See also the `Generics` type and the `generics_of` query //! in rustc. -use base_db::FileId; use either::Either; use hir_expand::{ name::{AsName, Name}, - ExpandResult, HirFileId, InFile, + ExpandResult, }; use intern::Interned; -use la_arena::{Arena, ArenaMap, Idx}; +use la_arena::{Arena, Idx}; use once_cell::unsync::Lazy; use stdx::impl_from; use syntax::ast::{self, HasGenericParams, HasName, HasTypeBounds}; use triomphe::Arc; use crate::{ - child_by_source::ChildBySource, db::DefDatabase, - dyn_map::{keys, DynMap}, expander::Expander, - item_tree::ItemTree, + item_tree::{GenericsItemTreeNode, ItemTree}, lower::LowerCtx, nameres::{DefMap, MacroSubNs}, - src::{HasChildSource, HasSource}, type_ref::{ConstRef, LifetimeRef, TypeBound, TypeRef}, - AdtId, ConstParamId, GenericDefId, HasModule, LifetimeParamId, LocalLifetimeParamId, - LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId, + AdtId, ConstParamId, GenericDefId, HasModule, ItemTreeLoc, LocalTypeOrConstParamId, Lookup, + TypeOrConstParamId, TypeParamId, }; /// Data about a generic type parameter (to a function, struct, impl, ...). 
@@ -264,7 +260,7 @@ impl GenericParamsCollector { self.add_where_predicate_from_bound( lower_ctx, bound, - lifetimes.as_ref(), + lifetimes.as_deref(), target.clone(), ); } @@ -275,14 +271,14 @@ impl GenericParamsCollector { &mut self, lower_ctx: &LowerCtx<'_>, bound: ast::TypeBound, - hrtb_lifetimes: Option<&Box<[Name]>>, + hrtb_lifetimes: Option<&[Name]>, target: Either, ) { let bound = TypeBound::from_ast(lower_ctx, bound); let predicate = match (target, bound) { (Either::Left(type_ref), bound) => match hrtb_lifetimes { Some(hrtb_lifetimes) => WherePredicate::ForLifetime { - lifetimes: hrtb_lifetimes.clone(), + lifetimes: hrtb_lifetimes.to_vec().into_boxed_slice(), target: WherePredicateTypeTarget::TypeRef(Interned::new(type_ref)), bound: Interned::new(bound), }, @@ -418,13 +414,18 @@ impl GenericParams { }) } }; - macro_rules! id_to_generics { - ($id:ident) => {{ - let id = $id.lookup(db).id; - let tree = id.item_tree(db); - let item = &tree[id.value]; - enabled_params(&item.generic_params, &tree) - }}; + fn id_to_generics( + db: &dyn DefDatabase, + id: impl for<'db> Lookup< + Database<'db> = dyn DefDatabase + 'db, + Data = impl ItemTreeLoc, + >, + enabled_params: impl Fn(&Interned, &ItemTree) -> Interned, + ) -> Interned { + let id = id.lookup(db).item_tree_id(); + let tree = id.item_tree(db); + let item = &tree[id.value]; + enabled_params(item.generic_params(), &tree) } match def { @@ -457,13 +458,13 @@ impl GenericParams { Interned::new(generic_params.finish()) } } - GenericDefId::AdtId(AdtId::StructId(id)) => id_to_generics!(id), - GenericDefId::AdtId(AdtId::EnumId(id)) => id_to_generics!(id), - GenericDefId::AdtId(AdtId::UnionId(id)) => id_to_generics!(id), - GenericDefId::TraitId(id) => id_to_generics!(id), - GenericDefId::TraitAliasId(id) => id_to_generics!(id), - GenericDefId::TypeAliasId(id) => id_to_generics!(id), - GenericDefId::ImplId(id) => id_to_generics!(id), + GenericDefId::AdtId(AdtId::StructId(id)) => id_to_generics(db, id, enabled_params), + 
GenericDefId::AdtId(AdtId::EnumId(id)) => id_to_generics(db, id, enabled_params), + GenericDefId::AdtId(AdtId::UnionId(id)) => id_to_generics(db, id, enabled_params), + GenericDefId::TraitId(id) => id_to_generics(db, id, enabled_params), + GenericDefId::TraitAliasId(id) => id_to_generics(db, id, enabled_params), + GenericDefId::TypeAliasId(id) => id_to_generics(db, id, enabled_params), + GenericDefId::ImplId(id) => id_to_generics(db, id, enabled_params), GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => { Interned::new(GenericParams { type_or_consts: Default::default(), @@ -507,141 +508,3 @@ impl GenericParams { }) } } - -fn file_id_and_params_of( - def: GenericDefId, - db: &dyn DefDatabase, -) -> (HirFileId, Option) { - match def { - GenericDefId::FunctionId(it) => { - let src = it.lookup(db).source(db); - (src.file_id, src.value.generic_param_list()) - } - GenericDefId::AdtId(AdtId::StructId(it)) => { - let src = it.lookup(db).source(db); - (src.file_id, src.value.generic_param_list()) - } - GenericDefId::AdtId(AdtId::UnionId(it)) => { - let src = it.lookup(db).source(db); - (src.file_id, src.value.generic_param_list()) - } - GenericDefId::AdtId(AdtId::EnumId(it)) => { - let src = it.lookup(db).source(db); - (src.file_id, src.value.generic_param_list()) - } - GenericDefId::TraitId(it) => { - let src = it.lookup(db).source(db); - (src.file_id, src.value.generic_param_list()) - } - GenericDefId::TraitAliasId(it) => { - let src = it.lookup(db).source(db); - (src.file_id, src.value.generic_param_list()) - } - GenericDefId::TypeAliasId(it) => { - let src = it.lookup(db).source(db); - (src.file_id, src.value.generic_param_list()) - } - GenericDefId::ImplId(it) => { - let src = it.lookup(db).source(db); - (src.file_id, src.value.generic_param_list()) - } - // We won't be using this ID anyway - GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => (FileId::BOGUS.into(), None), - } -} - -impl HasChildSource for GenericDefId { - type Value = Either; - fn 
child_source( - &self, - db: &dyn DefDatabase, - ) -> InFile> { - let generic_params = db.generic_params(*self); - let mut idx_iter = generic_params.type_or_consts.iter().map(|(idx, _)| idx); - - let (file_id, generic_params_list) = file_id_and_params_of(*self, db); - - let mut params = ArenaMap::default(); - - // For traits and trait aliases the first type index is `Self`, we need to add it before - // the other params. - match *self { - GenericDefId::TraitId(id) => { - let trait_ref = id.lookup(db).source(db).value; - let idx = idx_iter.next().unwrap(); - params.insert(idx, Either::Right(ast::TraitOrAlias::Trait(trait_ref))); - } - GenericDefId::TraitAliasId(id) => { - let alias = id.lookup(db).source(db).value; - let idx = idx_iter.next().unwrap(); - params.insert(idx, Either::Right(ast::TraitOrAlias::TraitAlias(alias))); - } - _ => {} - } - - if let Some(generic_params_list) = generic_params_list { - for (idx, ast_param) in idx_iter.zip(generic_params_list.type_or_const_params()) { - params.insert(idx, Either::Left(ast_param)); - } - } - - InFile::new(file_id, params) - } -} - -impl HasChildSource for GenericDefId { - type Value = ast::LifetimeParam; - fn child_source( - &self, - db: &dyn DefDatabase, - ) -> InFile> { - let generic_params = db.generic_params(*self); - let idx_iter = generic_params.lifetimes.iter().map(|(idx, _)| idx); - - let (file_id, generic_params_list) = file_id_and_params_of(*self, db); - - let mut params = ArenaMap::default(); - - if let Some(generic_params_list) = generic_params_list { - for (idx, ast_param) in idx_iter.zip(generic_params_list.lifetime_params()) { - params.insert(idx, ast_param); - } - } - - InFile::new(file_id, params) - } -} - -impl ChildBySource for GenericDefId { - fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) { - let (gfile_id, generic_params_list) = file_id_and_params_of(*self, db); - if gfile_id != file_id { - return; - } - - let generic_params = db.generic_params(*self); 
- let mut toc_idx_iter = generic_params.type_or_consts.iter().map(|(idx, _)| idx); - let lts_idx_iter = generic_params.lifetimes.iter().map(|(idx, _)| idx); - - // For traits the first type index is `Self`, skip it. - if let GenericDefId::TraitId(_) = *self { - toc_idx_iter.next().unwrap(); // advance_by(1); - } - - if let Some(generic_params_list) = generic_params_list { - for (local_id, ast_param) in - toc_idx_iter.zip(generic_params_list.type_or_const_params()) - { - let id = TypeOrConstParamId { parent: *self, local_id }; - match ast_param { - ast::TypeOrConstParam::Type(a) => res[keys::TYPE_PARAM].insert(a, id), - ast::TypeOrConstParam::Const(a) => res[keys::CONST_PARAM].insert(a, id), - } - } - for (local_id, ast_param) in lts_idx_iter.zip(generic_params_list.lifetime_params()) { - let id = LifetimeParamId { parent: *self, local_id }; - res[keys::LIFETIME_PARAM].insert(ast_param, id); - } - } - } -} diff --git a/crates/hir-def/src/import_map.rs b/crates/hir-def/src/import_map.rs index c698510ca99b..98982c7db840 100644 --- a/crates/hir-def/src/import_map.rs +++ b/crates/hir-def/src/import_map.rs @@ -859,7 +859,7 @@ mod tests { check_search( ra_fixture, "main", - Query::new("fmt".to_string()).fuzzy(), + Query::new("fmt".to_owned()).fuzzy(), expect![[r#" dep::fmt (t) dep::fmt::Display::FMT_CONST (a) @@ -888,9 +888,7 @@ mod tests { check_search( ra_fixture, "main", - Query::new("fmt".to_string()) - .fuzzy() - .assoc_search_mode(AssocSearchMode::AssocItemsOnly), + Query::new("fmt".to_owned()).fuzzy().assoc_search_mode(AssocSearchMode::AssocItemsOnly), expect![[r#" dep::fmt::Display::FMT_CONST (a) dep::fmt::Display::format_function (a) @@ -901,7 +899,7 @@ mod tests { check_search( ra_fixture, "main", - Query::new("fmt".to_string()).fuzzy().assoc_search_mode(AssocSearchMode::Exclude), + Query::new("fmt".to_owned()).fuzzy().assoc_search_mode(AssocSearchMode::Exclude), expect![[r#" dep::fmt (t) "#]], @@ -937,7 +935,7 @@ pub mod fmt { check_search( ra_fixture, "main", 
- Query::new("fmt".to_string()).fuzzy(), + Query::new("fmt".to_owned()).fuzzy(), expect![[r#" dep::Fmt (m) dep::Fmt (t) @@ -951,7 +949,7 @@ pub mod fmt { check_search( ra_fixture, "main", - Query::new("fmt".to_string()), + Query::new("fmt".to_owned()), expect![[r#" dep::Fmt (m) dep::Fmt (t) @@ -991,7 +989,7 @@ pub mod fmt { check_search( ra_fixture, "main", - Query::new("fmt".to_string()), + Query::new("fmt".to_owned()), expect![[r#" dep::Fmt (m) dep::Fmt (t) @@ -1015,7 +1013,7 @@ pub mod fmt { check_search( ra_fixture, "main", - Query::new("FMT".to_string()), + Query::new("FMT".to_owned()), expect![[r#" dep::FMT (t) dep::FMT (v) @@ -1027,7 +1025,7 @@ pub mod fmt { check_search( ra_fixture, "main", - Query::new("FMT".to_string()).case_sensitive(), + Query::new("FMT".to_owned()).case_sensitive(), expect![[r#" dep::FMT (t) dep::FMT (v) diff --git a/crates/hir-def/src/item_scope.rs b/crates/hir-def/src/item_scope.rs index 0b0c838bedb5..0e6826a75a6c 100644 --- a/crates/hir-def/src/item_scope.rs +++ b/crates/hir-def/src/item_scope.rs @@ -222,17 +222,15 @@ impl ItemScope { self.declarations.iter().copied() } - pub fn extern_crate_decls( - &self, - ) -> impl Iterator + ExactSizeIterator + '_ { + pub fn extern_crate_decls(&self) -> impl ExactSizeIterator + '_ { self.extern_crate_decls.iter().copied() } - pub fn use_decls(&self) -> impl Iterator + ExactSizeIterator + '_ { + pub fn use_decls(&self) -> impl ExactSizeIterator + '_ { self.use_decls.iter().copied() } - pub fn impls(&self) -> impl Iterator + ExactSizeIterator + '_ { + pub fn impls(&self) -> impl ExactSizeIterator + '_ { self.impls.iter().copied() } @@ -674,7 +672,7 @@ impl ItemScope { format_to!( buf, "{}:", - name.map_or("_".to_string(), |name| name.display(db).to_string()) + name.map_or("_".to_owned(), |name| name.display(db).to_string()) ); if let Some((.., i)) = def.types { diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs index 336e0de7fd66..be16a5e31a23 100644 --- 
a/crates/hir-def/src/item_tree.rs +++ b/crates/hir-def/src/item_tree.rs @@ -337,20 +337,18 @@ from_attrs!( LifetimeParamData(Idx), ); -/// Trait implemented by all item nodes in the item tree. -pub trait ItemTreeModItemNode: Clone { - type Source: AstIdNode + Into; +/// Trait implemented by all nodes in the item tree. +pub trait ItemTreeNode: Clone { + type Source: AstIdNode; fn ast_id(&self) -> FileAstId; /// Looks up an instance of `Self` in an item tree. fn lookup(tree: &ItemTree, index: Idx) -> &Self; - - /// Downcasts a `ModItem` to a `FileItemTreeId` specific to this type. - fn id_from_mod_item(mod_item: ModItem) -> Option>; - - /// Upcasts a `FileItemTreeId` to a generic `ModItem`. - fn id_to_mod_item(id: FileItemTreeId) -> ModItem; + fn attr_owner(id: FileItemTreeId) -> AttrOwner; +} +pub trait GenericsItemTreeNode: ItemTreeNode { + fn generic_params(&self) -> &Interned; } pub struct FileItemTreeId(Idx); @@ -372,7 +370,7 @@ impl FileItemTreeId { impl Clone for FileItemTreeId { fn clone(&self) -> Self { - Self(self.0) + *self } } impl Copy for FileItemTreeId {} @@ -478,7 +476,7 @@ impl Hash for ItemTreeId { } macro_rules! mod_items { - ( $( $typ:ident in $fld:ident -> $ast:ty ),+ $(,)? ) => { + ( $( $typ:ident $(<$generic_params:ident>)? in $fld:ident -> $ast:ty ),+ $(,)? ) => { #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] pub enum ModItem { $( @@ -495,7 +493,7 @@ macro_rules! mod_items { )+ $( - impl ItemTreeModItemNode for $typ { + impl ItemTreeNode for $typ { type Source = $ast; fn ast_id(&self) -> FileAstId { @@ -506,15 +504,8 @@ macro_rules! mod_items { &tree.data().$fld[index] } - fn id_from_mod_item(mod_item: ModItem) -> Option> { - match mod_item { - ModItem::$typ(id) => Some(id), - _ => None, - } - } - - fn id_to_mod_item(id: FileItemTreeId) -> ModItem { - ModItem::$typ(id) + fn attr_owner(id: FileItemTreeId) -> AttrOwner { + AttrOwner::ModItem(ModItem::$typ(id)) } } @@ -525,6 +516,14 @@ macro_rules! 
mod_items { &self.data().$fld[index] } } + + $( + impl GenericsItemTreeNode for $typ { + fn generic_params(&self) -> &Interned { + &self.$generic_params + } + } + )? )+ }; } @@ -533,16 +532,16 @@ mod_items! { Use in uses -> ast::Use, ExternCrate in extern_crates -> ast::ExternCrate, ExternBlock in extern_blocks -> ast::ExternBlock, - Function in functions -> ast::Fn, - Struct in structs -> ast::Struct, - Union in unions -> ast::Union, - Enum in enums -> ast::Enum, + Function in functions -> ast::Fn, + Struct in structs -> ast::Struct, + Union in unions -> ast::Union, + Enum in enums -> ast::Enum, Const in consts -> ast::Const, Static in statics -> ast::Static, - Trait in traits -> ast::Trait, - TraitAlias in trait_aliases -> ast::TraitAlias, - Impl in impls -> ast::Impl, - TypeAlias in type_aliases -> ast::TypeAlias, + Trait in traits -> ast::Trait, + TraitAlias in trait_aliases -> ast::TraitAlias, + Impl in impls -> ast::Impl, + TypeAlias in type_aliases -> ast::TypeAlias, Mod in mods -> ast::Module, MacroCall in macro_calls -> ast::MacroCall, MacroRules in macro_rules -> ast::MacroRules, @@ -578,17 +577,26 @@ impl Index for ItemTree { } } -impl Index> for ItemTree { +impl Index> for ItemTree { type Output = N; fn index(&self, id: FileItemTreeId) -> &N { N::lookup(self, id.index()) } } -impl Index> for ItemTree { - type Output = Variant; - fn index(&self, id: FileItemTreeId) -> &Variant { - &self[id.index()] +impl ItemTreeNode for Variant { + type Source = ast::Variant; + + fn ast_id(&self) -> FileAstId { + self.ast_id + } + + fn lookup(tree: &ItemTree, index: Idx) -> &Self { + &tree.data().variants[index] + } + + fn attr_owner(id: FileItemTreeId) -> AttrOwner { + AttrOwner::Variant(id) } } @@ -1027,7 +1035,7 @@ impl AssocItem { } } -#[derive(Debug, Eq, PartialEq)] +#[derive(Debug, Clone, PartialEq, Eq)] pub struct Variant { pub name: Name, pub fields: Fields, diff --git a/crates/hir-def/src/item_tree/lower.rs b/crates/hir-def/src/item_tree/lower.rs index 
b500f56b6c14..e0aa3ae61235 100644 --- a/crates/hir-def/src/item_tree/lower.rs +++ b/crates/hir-def/src/item_tree/lower.rs @@ -13,7 +13,7 @@ use crate::{ use super::*; -fn id(index: Idx) -> FileItemTreeId { +fn id(index: Idx) -> FileItemTreeId { FileItemTreeId(index) } @@ -267,7 +267,7 @@ impl<'a> Ctx<'a> { if let Some(data) = self.lower_variant(&variant) { let idx = self.data().variants.alloc(data); self.add_attrs( - FileItemTreeId(idx).into(), + id(idx).into(), RawAttrs::new(self.db.upcast(), &variant, self.span_map()), ); } @@ -658,7 +658,7 @@ impl<'a> Ctx<'a> { fn lower_visibility(&mut self, item: &dyn ast::HasVisibility) -> RawVisibilityId { let vis = - RawVisibility::from_ast_with_span_map(self.db, item.visibility(), self.span_map()); + RawVisibility::from_opt_ast_with_span_map(self.db, item.visibility(), self.span_map()); self.data().vis.alloc(vis) } diff --git a/crates/hir-def/src/item_tree/pretty.rs b/crates/hir-def/src/item_tree/pretty.rs index 520034d213ce..0086b7180b2b 100644 --- a/crates/hir-def/src/item_tree/pretty.rs +++ b/crates/hir-def/src/item_tree/pretty.rs @@ -24,7 +24,7 @@ pub(super) fn print_item_tree(db: &dyn DefDatabase, tree: &ItemTree) -> String { p.print_mod_item(*item); } - let mut s = p.buf.trim_end_matches('\n').to_string(); + let mut s = p.buf.trim_end_matches('\n').to_owned(); s.push('\n'); s } @@ -58,7 +58,7 @@ impl Printer<'_> { wln!(self); f(self); self.indent_level -= 1; - self.buf = self.buf.trim_end_matches('\n').to_string(); + self.buf = self.buf.trim_end_matches('\n').to_owned(); } /// Ensures that a blank line is output before the next text. 
diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index 71bc52133361..5670ebfa17f2 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -70,7 +70,11 @@ use std::{ panic::{RefUnwindSafe, UnwindSafe}, }; -use base_db::{impl_intern_key, salsa, CrateId, Edition}; +use base_db::{ + impl_intern_key, + salsa::{self, impl_intern_value_trivial}, + CrateId, Edition, +}; use hir_expand::{ ast_id_map::{AstIdNode, FileAstId}, builtin_attr_macro::BuiltinAttrExpander, @@ -87,7 +91,7 @@ use hir_expand::{ use item_tree::ExternBlock; use la_arena::Idx; use nameres::DefMap; -use span::Span; +use span::{FileId, Span}; use stdx::impl_from; use syntax::{ast, AstNode}; @@ -98,11 +102,268 @@ use crate::{ data::adt::VariantData, db::DefDatabase, item_tree::{ - Const, Enum, ExternCrate, Function, Impl, ItemTreeId, ItemTreeModItemNode, Macro2, - MacroRules, Static, Struct, Trait, TraitAlias, TypeAlias, Union, Use, Variant, + Const, Enum, ExternCrate, Function, Impl, ItemTreeId, ItemTreeNode, Macro2, MacroRules, + Static, Struct, Trait, TraitAlias, TypeAlias, Union, Use, Variant, }, }; +#[derive(Debug)] +pub struct ItemLoc { + pub container: ModuleId, + pub id: ItemTreeId, +} + +impl Clone for ItemLoc { + fn clone(&self) -> Self { + *self + } +} + +impl Copy for ItemLoc {} + +impl PartialEq for ItemLoc { + fn eq(&self, other: &Self) -> bool { + self.container == other.container && self.id == other.id + } +} + +impl Eq for ItemLoc {} + +impl Hash for ItemLoc { + fn hash(&self, state: &mut H) { + self.container.hash(state); + self.id.hash(state); + } +} + +#[derive(Debug)] +pub struct AssocItemLoc { + pub container: ItemContainerId, + pub id: ItemTreeId, +} + +impl Clone for AssocItemLoc { + fn clone(&self) -> Self { + *self + } +} + +impl Copy for AssocItemLoc {} + +impl PartialEq for AssocItemLoc { + fn eq(&self, other: &Self) -> bool { + self.container == other.container && self.id == other.id + } +} + +impl Eq for AssocItemLoc {} + +impl Hash for 
AssocItemLoc { + fn hash(&self, state: &mut H) { + self.container.hash(state); + self.id.hash(state); + } +} + +pub trait ItemTreeLoc { + type Container; + type Id; + fn item_tree_id(&self) -> ItemTreeId; + fn container(&self) -> Self::Container; +} + +macro_rules! impl_intern { + ($id:ident, $loc:ident, $intern:ident, $lookup:ident) => { + impl_intern_key!($id); + impl_intern_value_trivial!($loc); + impl_intern_lookup!(DefDatabase, $id, $loc, $intern, $lookup); + }; +} + +macro_rules! impl_loc { + ($loc:ident, $id:ident: $id_ty:ident, $container:ident: $container_type:ident) => { + impl ItemTreeLoc for $loc { + type Container = $container_type; + type Id = $id_ty; + fn item_tree_id(&self) -> ItemTreeId { + self.$id + } + fn container(&self) -> Self::Container { + self.$container + } + } + }; +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct FunctionId(salsa::InternId); +type FunctionLoc = AssocItemLoc; +impl_intern!(FunctionId, FunctionLoc, intern_function, lookup_intern_function); +impl_loc!(FunctionLoc, id: Function, container: ItemContainerId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct StructId(salsa::InternId); +type StructLoc = ItemLoc; +impl_intern!(StructId, StructLoc, intern_struct, lookup_intern_struct); +impl_loc!(StructLoc, id: Struct, container: ModuleId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct UnionId(salsa::InternId); +pub type UnionLoc = ItemLoc; +impl_intern!(UnionId, UnionLoc, intern_union, lookup_intern_union); +impl_loc!(UnionLoc, id: Union, container: ModuleId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct EnumId(salsa::InternId); +pub type EnumLoc = ItemLoc; +impl_intern!(EnumId, EnumLoc, intern_enum, lookup_intern_enum); +impl_loc!(EnumLoc, id: Enum, container: ModuleId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct ConstId(salsa::InternId); +type ConstLoc = AssocItemLoc; 
+impl_intern!(ConstId, ConstLoc, intern_const, lookup_intern_const); +impl_loc!(ConstLoc, id: Const, container: ItemContainerId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct StaticId(salsa::InternId); +pub type StaticLoc = AssocItemLoc; +impl_intern!(StaticId, StaticLoc, intern_static, lookup_intern_static); +impl_loc!(StaticLoc, id: Static, container: ItemContainerId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct TraitId(salsa::InternId); +pub type TraitLoc = ItemLoc; +impl_intern!(TraitId, TraitLoc, intern_trait, lookup_intern_trait); +impl_loc!(TraitLoc, id: Trait, container: ModuleId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct TraitAliasId(salsa::InternId); +pub type TraitAliasLoc = ItemLoc; +impl_intern!(TraitAliasId, TraitAliasLoc, intern_trait_alias, lookup_intern_trait_alias); +impl_loc!(TraitAliasLoc, id: TraitAlias, container: ModuleId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct TypeAliasId(salsa::InternId); +type TypeAliasLoc = AssocItemLoc; +impl_intern!(TypeAliasId, TypeAliasLoc, intern_type_alias, lookup_intern_type_alias); +impl_loc!(TypeAliasLoc, id: TypeAlias, container: ItemContainerId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] +pub struct ImplId(salsa::InternId); +type ImplLoc = ItemLoc; +impl_intern!(ImplId, ImplLoc, intern_impl, lookup_intern_impl); +impl_loc!(ImplLoc, id: Impl, container: ModuleId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] +pub struct UseId(salsa::InternId); +type UseLoc = ItemLoc; +impl_intern!(UseId, UseLoc, intern_use, lookup_intern_use); +impl_loc!(UseLoc, id: Use, container: ModuleId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] +pub struct ExternCrateId(salsa::InternId); +type ExternCrateLoc = ItemLoc; +impl_intern!(ExternCrateId, ExternCrateLoc, intern_extern_crate, lookup_intern_extern_crate); +impl_loc!(ExternCrateLoc, id: ExternCrate, 
container: ModuleId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] +pub struct ExternBlockId(salsa::InternId); +type ExternBlockLoc = ItemLoc; +impl_intern!(ExternBlockId, ExternBlockLoc, intern_extern_block, lookup_intern_extern_block); +impl_loc!(ExternBlockLoc, id: ExternBlock, container: ModuleId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct EnumVariantId(salsa::InternId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct EnumVariantLoc { + pub id: ItemTreeId, + pub parent: EnumId, + pub index: u32, +} +impl_intern!(EnumVariantId, EnumVariantLoc, intern_enum_variant, lookup_intern_enum_variant); +impl_loc!(EnumVariantLoc, id: Variant, parent: EnumId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] +pub struct Macro2Id(salsa::InternId); +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Macro2Loc { + pub container: ModuleId, + pub id: ItemTreeId, + pub expander: MacroExpander, + pub allow_internal_unsafe: bool, + pub edition: Edition, +} +impl_intern!(Macro2Id, Macro2Loc, intern_macro2, lookup_intern_macro2); +impl_loc!(Macro2Loc, id: Macro2, container: ModuleId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] +pub struct MacroRulesId(salsa::InternId); +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct MacroRulesLoc { + pub container: ModuleId, + pub id: ItemTreeId, + pub expander: MacroExpander, + pub flags: MacroRulesLocFlags, + pub edition: Edition, +} +impl_intern!(MacroRulesId, MacroRulesLoc, intern_macro_rules, lookup_intern_macro_rules); +impl_loc!(MacroRulesLoc, id: MacroRules, container: ModuleId); + +bitflags::bitflags! 
{ + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] + pub struct MacroRulesLocFlags: u8 { + const ALLOW_INTERNAL_UNSAFE = 1 << 0; + const LOCAL_INNER = 1 << 1; + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum MacroExpander { + Declarative, + BuiltIn(BuiltinFnLikeExpander), + BuiltInAttr(BuiltinAttrExpander), + BuiltInDerive(BuiltinDeriveExpander), + BuiltInEager(EagerExpander), +} +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] +pub struct ProcMacroId(salsa::InternId); +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct ProcMacroLoc { + pub container: CrateRootModuleId, + pub id: ItemTreeId, + pub expander: CustomProcMacroExpander, + pub kind: ProcMacroKind, + pub edition: Edition, +} +impl_intern!(ProcMacroId, ProcMacroLoc, intern_proc_macro, lookup_intern_proc_macro); +impl_loc!(ProcMacroLoc, id: Function, container: CrateRootModuleId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] +pub struct BlockId(salsa::InternId); +#[derive(Debug, Hash, PartialEq, Eq, Clone)] +pub struct BlockLoc { + ast_id: AstId, + /// The containing module. + module: ModuleId, +} +impl_intern!(BlockId, BlockLoc, intern_block, lookup_intern_block); + +/// Id of the anonymous const block expression and patterns. This is very similar to `ClosureId` and +/// shouldn't be a `DefWithBodyId` since its type inference is dependent on its parent. +#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] +pub struct ConstBlockId(salsa::InternId); +impl_intern!(ConstBlockId, ConstBlockLoc, intern_anonymous_const, lookup_intern_anonymous_const); + +#[derive(Debug, Hash, PartialEq, Eq, Clone)] +pub struct ConstBlockLoc { + /// The parent of the anonymous const block. + pub parent: DefWithBodyId, + /// The root expression of this const block in the parent body. + pub root: hir::ExprId, +} + /// A `ModuleId` that is always a crate's root module. 
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct CrateRootModuleId { @@ -124,23 +385,6 @@ impl PartialEq for CrateRootModuleId { other.block.is_none() && other.local_id == DefMap::ROOT && self.krate == other.krate } } -impl PartialEq for ModuleId { - fn eq(&self, other: &CrateRootModuleId) -> bool { - other == self - } -} - -impl From for ModuleId { - fn from(CrateRootModuleId { krate }: CrateRootModuleId) -> Self { - ModuleId { krate, block: None, local_id: DefMap::ROOT } - } -} - -impl From for ModuleDefId { - fn from(value: CrateRootModuleId) -> Self { - ModuleDefId::ModuleId(value.into()) - } -} impl From for CrateRootModuleId { fn from(krate: CrateId) -> Self { @@ -208,105 +452,27 @@ impl ModuleId { } } +impl PartialEq for ModuleId { + fn eq(&self, other: &CrateRootModuleId) -> bool { + other == self + } +} + +impl From for ModuleId { + fn from(CrateRootModuleId { krate }: CrateRootModuleId) -> Self { + ModuleId { krate, block: None, local_id: DefMap::ROOT } + } +} + +impl From for ModuleDefId { + fn from(value: CrateRootModuleId) -> Self { + ModuleDefId::ModuleId(value.into()) + } +} + /// An ID of a module, **local** to a `DefMap`. 
pub type LocalModuleId = Idx; -#[derive(Debug)] -pub struct ItemLoc { - pub container: ModuleId, - pub id: ItemTreeId, -} - -impl Clone for ItemLoc { - fn clone(&self) -> Self { - Self { container: self.container, id: self.id } - } -} - -impl Copy for ItemLoc {} - -impl PartialEq for ItemLoc { - fn eq(&self, other: &Self) -> bool { - self.container == other.container && self.id == other.id - } -} - -impl Eq for ItemLoc {} - -impl Hash for ItemLoc { - fn hash(&self, state: &mut H) { - self.container.hash(state); - self.id.hash(state); - } -} - -#[derive(Debug)] -pub struct AssocItemLoc { - pub container: ItemContainerId, - pub id: ItemTreeId, -} - -impl Clone for AssocItemLoc { - fn clone(&self) -> Self { - Self { container: self.container, id: self.id } - } -} - -impl Copy for AssocItemLoc {} - -impl PartialEq for AssocItemLoc { - fn eq(&self, other: &Self) -> bool { - self.container == other.container && self.id == other.id - } -} - -impl Eq for AssocItemLoc {} - -impl Hash for AssocItemLoc { - fn hash(&self, state: &mut H) { - self.container.hash(state); - self.id.hash(state); - } -} - -macro_rules! 
impl_intern { - ($id:ident, $loc:ident, $intern:ident, $lookup:ident) => { - impl_intern_key!($id); - impl_intern_lookup!(DefDatabase, $id, $loc, $intern, $lookup); - }; -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct FunctionId(salsa::InternId); -type FunctionLoc = AssocItemLoc; -impl_intern!(FunctionId, FunctionLoc, intern_function, lookup_intern_function); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct StructId(salsa::InternId); -type StructLoc = ItemLoc; -impl_intern!(StructId, StructLoc, intern_struct, lookup_intern_struct); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct UnionId(salsa::InternId); -pub type UnionLoc = ItemLoc; -impl_intern!(UnionId, UnionLoc, intern_union, lookup_intern_union); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct EnumId(salsa::InternId); -pub type EnumLoc = ItemLoc; -impl_intern!(EnumId, EnumLoc, intern_enum, lookup_intern_enum); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct EnumVariantId(salsa::InternId); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct EnumVariantLoc { - pub id: ItemTreeId, - pub parent: EnumId, - pub index: u32, -} -impl_intern!(EnumVariantId, EnumVariantLoc, intern_enum_variant, lookup_intern_enum_variant); - #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct FieldId { pub parent: VariantId, @@ -324,119 +490,12 @@ pub struct TupleFieldId { pub index: u32, } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct ConstId(salsa::InternId); -type ConstLoc = AssocItemLoc; -impl_intern!(ConstId, ConstLoc, intern_const, lookup_intern_const); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct StaticId(salsa::InternId); -pub type StaticLoc = AssocItemLoc; -impl_intern!(StaticId, StaticLoc, intern_static, lookup_intern_static); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct TraitId(salsa::InternId); -pub 
type TraitLoc = ItemLoc; -impl_intern!(TraitId, TraitLoc, intern_trait, lookup_intern_trait); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct TraitAliasId(salsa::InternId); -pub type TraitAliasLoc = ItemLoc; -impl_intern!(TraitAliasId, TraitAliasLoc, intern_trait_alias, lookup_intern_trait_alias); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct TypeAliasId(salsa::InternId); -type TypeAliasLoc = AssocItemLoc; -impl_intern!(TypeAliasId, TypeAliasLoc, intern_type_alias, lookup_intern_type_alias); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] -pub struct ImplId(salsa::InternId); -type ImplLoc = ItemLoc; -impl_intern!(ImplId, ImplLoc, intern_impl, lookup_intern_impl); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] -pub struct UseId(salsa::InternId); -type UseLoc = ItemLoc; -impl_intern!(UseId, UseLoc, intern_use, lookup_intern_use); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] -pub struct ExternCrateId(salsa::InternId); -type ExternCrateLoc = ItemLoc; -impl_intern!(ExternCrateId, ExternCrateLoc, intern_extern_crate, lookup_intern_extern_crate); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] -pub struct ExternBlockId(salsa::InternId); -type ExternBlockLoc = ItemLoc; -impl_intern!(ExternBlockId, ExternBlockLoc, intern_extern_block, lookup_intern_extern_block); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum MacroExpander { - Declarative, - BuiltIn(BuiltinFnLikeExpander), - BuiltInAttr(BuiltinAttrExpander), - BuiltInDerive(BuiltinDeriveExpander), - BuiltInEager(EagerExpander), -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] -pub struct Macro2Id(salsa::InternId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct Macro2Loc { - pub container: ModuleId, - pub id: ItemTreeId, - pub expander: MacroExpander, - pub allow_internal_unsafe: bool, - pub edition: Edition, -} -impl_intern!(Macro2Id, 
Macro2Loc, intern_macro2, lookup_intern_macro2); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] -pub struct MacroRulesId(salsa::InternId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct MacroRulesLoc { - pub container: ModuleId, - pub id: ItemTreeId, - pub expander: MacroExpander, - pub flags: MacroRulesLocFlags, - pub edition: Edition, -} -impl_intern!(MacroRulesId, MacroRulesLoc, intern_macro_rules, lookup_intern_macro_rules); - -bitflags::bitflags! { - #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] - pub struct MacroRulesLocFlags: u8 { - const ALLOW_INTERNAL_UNSAFE = 1 << 0; - const LOCAL_INNER = 1 << 1; - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] -pub struct ProcMacroId(salsa::InternId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct ProcMacroLoc { - pub container: CrateRootModuleId, - pub id: ItemTreeId, - pub expander: CustomProcMacroExpander, - pub kind: ProcMacroKind, - pub edition: Edition, -} -impl_intern!(ProcMacroId, ProcMacroLoc, intern_proc_macro, lookup_intern_proc_macro); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] -pub struct BlockId(salsa::InternId); -#[derive(Debug, Hash, PartialEq, Eq, Clone)] -pub struct BlockLoc { - ast_id: AstId, - /// The containing module. 
- module: ModuleId, -} -impl_intern!(BlockId, BlockLoc, intern_block, lookup_intern_block); - #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct TypeOrConstParamId { pub parent: GenericDefId, pub local_id: LocalTypeOrConstParamId, } +impl_intern_value_trivial!(TypeOrConstParamId); /// A TypeOrConstParamId with an invariant that it actually belongs to a type #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -498,6 +557,7 @@ pub struct LifetimeParamId { pub local_id: LocalLifetimeParamId, } pub type LocalLifetimeParamId = Idx; +impl_intern_value_trivial!(LifetimeParamId); #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum ItemContainerId { @@ -572,20 +632,6 @@ impl_from!( for ModuleDefId ); -/// Id of the anonymous const block expression and patterns. This is very similar to `ClosureId` and -/// shouldn't be a `DefWithBodyId` since its type inference is dependent on its parent. -#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] -pub struct ConstBlockId(salsa::InternId); -impl_intern!(ConstBlockId, ConstBlockLoc, intern_anonymous_const, lookup_intern_anonymous_const); - -#[derive(Debug, Hash, PartialEq, Eq, Clone)] -pub struct ConstBlockLoc { - /// The parent of the anonymous const block. - pub parent: DefWithBodyId, - /// The root expression of this const block in the parent body. - pub root: hir::ExprId, -} - /// Something that holds types, required for the current const arg lowering implementation as they /// need to be able to query where they are defined. #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] @@ -721,6 +767,9 @@ impl Clone for Box { pub struct InTypeConstId(salsa::InternId); impl_intern!(InTypeConstId, InTypeConstLoc, intern_in_type_const, lookup_intern_in_type_const); +// We would like to set `derive(PartialEq)` +// but the compiler complains about that `.expected_ty` does not implement the `Copy` trait. 
+#[allow(clippy::derived_hash_with_manual_eq)] #[derive(Debug, Hash, Eq, Clone)] pub struct InTypeConstLoc { pub id: AstId, @@ -850,6 +899,39 @@ impl_from!( for GenericDefId ); +impl GenericDefId { + fn file_id_and_params_of( + self, + db: &dyn DefDatabase, + ) -> (HirFileId, Option) { + fn file_id_and_params_of_item_loc( + db: &dyn DefDatabase, + def: impl for<'db> Lookup = dyn DefDatabase + 'db, Data = Loc>, + ) -> (HirFileId, Option) + where + Loc: src::HasSource, + Loc::Value: ast::HasGenericParams, + { + let src = def.lookup(db).source(db); + (src.file_id, ast::HasGenericParams::generic_param_list(&src.value)) + } + + match self { + GenericDefId::FunctionId(it) => file_id_and_params_of_item_loc(db, it), + GenericDefId::TypeAliasId(it) => file_id_and_params_of_item_loc(db, it), + GenericDefId::ConstId(_) => (FileId::BOGUS.into(), None), + GenericDefId::AdtId(AdtId::StructId(it)) => file_id_and_params_of_item_loc(db, it), + GenericDefId::AdtId(AdtId::UnionId(it)) => file_id_and_params_of_item_loc(db, it), + GenericDefId::AdtId(AdtId::EnumId(it)) => file_id_and_params_of_item_loc(db, it), + GenericDefId::TraitId(it) => file_id_and_params_of_item_loc(db, it), + GenericDefId::TraitAliasId(it) => file_id_and_params_of_item_loc(db, it), + GenericDefId::ImplId(it) => file_id_and_params_of_item_loc(db, it), + // We won't be using this ID anyway + GenericDefId::EnumVariantId(_) => (FileId::BOGUS.into(), None), + } + } +} + impl From for GenericDefId { fn from(item: AssocItemId) -> Self { match item { @@ -983,44 +1065,92 @@ impl VariantId { } pub trait HasModule { + /// Returns the enclosing module this thing is defined within. 
fn module(&self, db: &dyn DefDatabase) -> ModuleId; -} - -impl HasModule for ItemContainerId { - fn module(&self, db: &dyn DefDatabase) -> ModuleId { - match *self { - ItemContainerId::ModuleId(it) => it, - ItemContainerId::ImplId(it) => it.lookup(db).container, - ItemContainerId::TraitId(it) => it.lookup(db).container, - ItemContainerId::ExternBlockId(it) => it.lookup(db).container, - } - } -} - -impl HasModule for AssocItemLoc { + /// Returns the crate this thing is defined within. #[inline] - fn module(&self, db: &dyn DefDatabase) -> ModuleId { - self.container.module(db) + #[doc(alias = "crate")] + fn krate(&self, db: &dyn DefDatabase) -> CrateId { + self.module(db).krate } } -impl HasModule for AdtId { - fn module(&self, db: &dyn DefDatabase) -> ModuleId { - match self { - AdtId::StructId(it) => it.lookup(db).container, - AdtId::UnionId(it) => it.lookup(db).container, - AdtId::EnumId(it) => it.lookup(db).container, - } - } -} +// In theory this impl should work out for us, but rustc thinks it collides with all the other +// manual impls that do not have a ModuleId container... 
+// impl HasModule for ItemId +// where +// N: ItemTreeNode, +// ItemId: for<'db> Lookup = dyn DefDatabase + 'db, Data = Data> + Copy, +// Data: ItemTreeLoc, +// { +// #[inline] +// fn module(&self, db: &dyn DefDatabase) -> ModuleId { +// self.lookup(db).container() +// } +// } -impl HasModule for EnumId { +impl HasModule for ItemId +where + N: ItemTreeNode, + ItemId: for<'db> Lookup = dyn DefDatabase + 'db, Data = ItemLoc> + Copy, +{ #[inline] fn module(&self, db: &dyn DefDatabase) -> ModuleId { self.lookup(db).container } } +// Technically this does not overlap with the above, but rustc currently forbids this, hence why we +// need to write the 3 impls manually instead +// impl HasModule for ItemId +// where +// N: ItemTreeModItemNode, +// ItemId: for<'db> Lookup = dyn DefDatabase + 'db, Data = AssocItemLoc> + Copy, +// { +// #[inline] +// fn module(&self, db: &dyn DefDatabase) -> ModuleId { +// self.lookup(db).container.module(db) +// } +// } + +// region: manual-assoc-has-module-impls +#[inline] +fn module_for_assoc_item_loc<'db>( + db: &(dyn 'db + DefDatabase), + id: impl Lookup = dyn DefDatabase + 'db, Data = AssocItemLoc>, +) -> ModuleId { + id.lookup(db).container.module(db) +} + +impl HasModule for FunctionId { + #[inline] + fn module(&self, db: &dyn DefDatabase) -> ModuleId { + module_for_assoc_item_loc(db, *self) + } +} + +impl HasModule for ConstId { + #[inline] + fn module(&self, db: &dyn DefDatabase) -> ModuleId { + module_for_assoc_item_loc(db, *self) + } +} + +impl HasModule for StaticId { + #[inline] + fn module(&self, db: &dyn DefDatabase) -> ModuleId { + module_for_assoc_item_loc(db, *self) + } +} + +impl HasModule for TypeAliasId { + #[inline] + fn module(&self, db: &dyn DefDatabase) -> ModuleId { + module_for_assoc_item_loc(db, *self) + } +} +// endregion: manual-assoc-has-module-impls + impl HasModule for EnumVariantId { #[inline] fn module(&self, db: &dyn DefDatabase) -> ModuleId { @@ -1028,46 +1158,81 @@ impl HasModule for EnumVariantId { } 
} -impl HasModule for ExternCrateId { +impl HasModule for MacroRulesId { #[inline] fn module(&self, db: &dyn DefDatabase) -> ModuleId { self.lookup(db).container } } +impl HasModule for Macro2Id { + #[inline] + fn module(&self, db: &dyn DefDatabase) -> ModuleId { + self.lookup(db).container + } +} + +impl HasModule for ProcMacroId { + #[inline] + fn module(&self, db: &dyn DefDatabase) -> ModuleId { + self.lookup(db).container.into() + } +} + +impl HasModule for ItemContainerId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { + match *self { + ItemContainerId::ModuleId(it) => it, + ItemContainerId::ImplId(it) => it.module(db), + ItemContainerId::TraitId(it) => it.module(db), + ItemContainerId::ExternBlockId(it) => it.module(db), + } + } +} + +impl HasModule for AdtId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { + match *self { + AdtId::StructId(it) => it.module(db), + AdtId::UnionId(it) => it.module(db), + AdtId::EnumId(it) => it.module(db), + } + } +} + impl HasModule for VariantId { fn module(&self, db: &dyn DefDatabase) -> ModuleId { - match self { - VariantId::EnumVariantId(it) => it.lookup(db).parent.module(db), - VariantId::StructId(it) => it.lookup(db).container, - VariantId::UnionId(it) => it.lookup(db).container, + match *self { + VariantId::EnumVariantId(it) => it.module(db), + VariantId::StructId(it) => it.module(db), + VariantId::UnionId(it) => it.module(db), } } } impl HasModule for MacroId { fn module(&self, db: &dyn DefDatabase) -> ModuleId { - match self { - MacroId::MacroRulesId(it) => it.lookup(db).container, - MacroId::Macro2Id(it) => it.lookup(db).container, - MacroId::ProcMacroId(it) => it.lookup(db).container.into(), + match *self { + MacroId::MacroRulesId(it) => it.module(db), + MacroId::Macro2Id(it) => it.module(db), + MacroId::ProcMacroId(it) => it.module(db), } } } impl HasModule for TypeOwnerId { fn module(&self, db: &dyn DefDatabase) -> ModuleId { - match self { - TypeOwnerId::FunctionId(it) => 
it.lookup(db).module(db), - TypeOwnerId::StaticId(it) => it.lookup(db).module(db), - TypeOwnerId::ConstId(it) => it.lookup(db).module(db), - TypeOwnerId::InTypeConstId(it) => it.lookup(db).owner.module(db), + match *self { + TypeOwnerId::FunctionId(it) => it.module(db), + TypeOwnerId::StaticId(it) => it.module(db), + TypeOwnerId::ConstId(it) => it.module(db), TypeOwnerId::AdtId(it) => it.module(db), - TypeOwnerId::TraitId(it) => it.lookup(db).container, - TypeOwnerId::TraitAliasId(it) => it.lookup(db).container, - TypeOwnerId::TypeAliasId(it) => it.lookup(db).module(db), - TypeOwnerId::ImplId(it) => it.lookup(db).container, - TypeOwnerId::EnumVariantId(it) => it.lookup(db).parent.module(db), + TypeOwnerId::TraitId(it) => it.module(db), + TypeOwnerId::TraitAliasId(it) => it.module(db), + TypeOwnerId::TypeAliasId(it) => it.module(db), + TypeOwnerId::ImplId(it) => it.module(db), + TypeOwnerId::EnumVariantId(it) => it.module(db), + TypeOwnerId::InTypeConstId(it) => it.lookup(db).owner.module(db), } } } @@ -1075,10 +1240,10 @@ impl HasModule for TypeOwnerId { impl HasModule for DefWithBodyId { fn module(&self, db: &dyn DefDatabase) -> ModuleId { match self { - DefWithBodyId::FunctionId(it) => it.lookup(db).module(db), - DefWithBodyId::StaticId(it) => it.lookup(db).module(db), - DefWithBodyId::ConstId(it) => it.lookup(db).module(db), - DefWithBodyId::VariantId(it) => it.lookup(db).parent.module(db), + DefWithBodyId::FunctionId(it) => it.module(db), + DefWithBodyId::StaticId(it) => it.module(db), + DefWithBodyId::ConstId(it) => it.module(db), + DefWithBodyId::VariantId(it) => it.module(db), DefWithBodyId::InTypeConstId(it) => it.lookup(db).owner.module(db), } } @@ -1087,29 +1252,43 @@ impl HasModule for DefWithBodyId { impl HasModule for GenericDefId { fn module(&self, db: &dyn DefDatabase) -> ModuleId { match self { - GenericDefId::FunctionId(it) => it.lookup(db).module(db), + GenericDefId::FunctionId(it) => it.module(db), GenericDefId::AdtId(it) => it.module(db), - 
GenericDefId::TraitId(it) => it.lookup(db).container, - GenericDefId::TraitAliasId(it) => it.lookup(db).container, - GenericDefId::TypeAliasId(it) => it.lookup(db).module(db), - GenericDefId::ImplId(it) => it.lookup(db).container, - GenericDefId::EnumVariantId(it) => it.lookup(db).parent.lookup(db).container, - GenericDefId::ConstId(it) => it.lookup(db).module(db), + GenericDefId::TraitId(it) => it.module(db), + GenericDefId::TraitAliasId(it) => it.module(db), + GenericDefId::TypeAliasId(it) => it.module(db), + GenericDefId::ImplId(it) => it.module(db), + GenericDefId::EnumVariantId(it) => it.module(db), + GenericDefId::ConstId(it) => it.module(db), } } } -impl HasModule for TypeAliasId { - #[inline] +impl HasModule for AttrDefId { fn module(&self, db: &dyn DefDatabase) -> ModuleId { - self.lookup(db).module(db) - } -} - -impl HasModule for TraitId { - #[inline] - fn module(&self, db: &dyn DefDatabase) -> ModuleId { - self.lookup(db).container + match self { + AttrDefId::ModuleId(it) => *it, + AttrDefId::FieldId(it) => it.parent.module(db), + AttrDefId::AdtId(it) => it.module(db), + AttrDefId::FunctionId(it) => it.module(db), + AttrDefId::EnumVariantId(it) => it.module(db), + AttrDefId::StaticId(it) => it.module(db), + AttrDefId::ConstId(it) => it.module(db), + AttrDefId::TraitId(it) => it.module(db), + AttrDefId::TraitAliasId(it) => it.module(db), + AttrDefId::TypeAliasId(it) => it.module(db), + AttrDefId::ImplId(it) => it.module(db), + AttrDefId::ExternBlockId(it) => it.module(db), + AttrDefId::GenericParamId(it) => match it { + GenericParamId::TypeParamId(it) => it.parent(), + GenericParamId::ConstParamId(it) => it.parent(), + GenericParamId::LifetimeParamId(it) => it.parent, + } + .module(db), + AttrDefId::MacroId(it) => it.module(db), + AttrDefId::ExternCrateId(it) => it.module(db), + AttrDefId::UseId(it) => it.module(db), + } } } @@ -1120,51 +1299,20 @@ impl ModuleDefId { pub fn module(&self, db: &dyn DefDatabase) -> Option { Some(match self { 
ModuleDefId::ModuleId(id) => *id, - ModuleDefId::FunctionId(id) => id.lookup(db).module(db), + ModuleDefId::FunctionId(id) => id.module(db), ModuleDefId::AdtId(id) => id.module(db), - ModuleDefId::EnumVariantId(id) => id.lookup(db).parent.module(db), - ModuleDefId::ConstId(id) => id.lookup(db).container.module(db), - ModuleDefId::StaticId(id) => id.lookup(db).module(db), - ModuleDefId::TraitId(id) => id.lookup(db).container, - ModuleDefId::TraitAliasId(id) => id.lookup(db).container, - ModuleDefId::TypeAliasId(id) => id.lookup(db).module(db), + ModuleDefId::EnumVariantId(id) => id.module(db), + ModuleDefId::ConstId(id) => id.module(db), + ModuleDefId::StaticId(id) => id.module(db), + ModuleDefId::TraitId(id) => id.module(db), + ModuleDefId::TraitAliasId(id) => id.module(db), + ModuleDefId::TypeAliasId(id) => id.module(db), ModuleDefId::MacroId(id) => id.module(db), ModuleDefId::BuiltinType(_) => return None, }) } } -impl AttrDefId { - pub fn krate(&self, db: &dyn DefDatabase) -> CrateId { - match self { - AttrDefId::ModuleId(it) => it.krate, - AttrDefId::FieldId(it) => it.parent.module(db).krate, - AttrDefId::AdtId(it) => it.module(db).krate, - AttrDefId::FunctionId(it) => it.lookup(db).module(db).krate, - AttrDefId::EnumVariantId(it) => it.lookup(db).parent.module(db).krate, - AttrDefId::StaticId(it) => it.lookup(db).module(db).krate, - AttrDefId::ConstId(it) => it.lookup(db).module(db).krate, - AttrDefId::TraitId(it) => it.lookup(db).container.krate, - AttrDefId::TraitAliasId(it) => it.lookup(db).container.krate, - AttrDefId::TypeAliasId(it) => it.lookup(db).module(db).krate, - AttrDefId::ImplId(it) => it.lookup(db).container.krate, - AttrDefId::ExternBlockId(it) => it.lookup(db).container.krate, - AttrDefId::GenericParamId(it) => { - match it { - GenericParamId::TypeParamId(it) => it.parent(), - GenericParamId::ConstParamId(it) => it.parent(), - GenericParamId::LifetimeParamId(it) => it.parent, - } - .module(db) - .krate - } - AttrDefId::MacroId(it) => 
it.module(db).krate, - AttrDefId::ExternCrateId(it) => it.lookup(db).container.krate, - AttrDefId::UseId(it) => it.lookup(db).container.krate, - } - } -} - /// A helper trait for converting to MacroCallId pub trait AsMacroCall { fn as_call_id( diff --git a/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs b/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs index 553c0b795336..86b4466153a3 100644 --- a/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs +++ b/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs @@ -157,7 +157,7 @@ where generic: Vec, } -impl $crate::clone::Clone for Foo where T: Trait, T::InFieldShorthand: $crate::clone::Clone, T::InGenericArg: $crate::clone::Clone, { +impl $crate::clone::Clone for Foo where ::InWc: Marker, T: Trait, T::InFieldShorthand: $crate::clone::Clone, T::InGenericArg: $crate::clone::Clone, { fn clone(&self ) -> Self { match self { Foo { diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs index d0ae1f59f7ca..edc8247f166d 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -35,9 +35,9 @@ macro_rules! 
f { }; } -struct#0:1@58..64#2# MyTraitMap2#0:2@31..42#0# {#0:1@72..73#2# - map#0:1@86..89#2#:#0:1@89..90#2# #0:1@89..90#2#::#0:1@91..92#2#std#0:1@93..96#2#::#0:1@96..97#2#collections#0:1@98..109#2#::#0:1@109..110#2#HashSet#0:1@111..118#2#<#0:1@118..119#2#(#0:1@119..120#2#)#0:1@120..121#2#>#0:1@121..122#2#,#0:1@122..123#2# -}#0:1@132..133#2# +struct#0:1@58..64#1# MyTraitMap2#0:2@31..42#0# {#0:1@72..73#1# + map#0:1@86..89#1#:#0:1@89..90#1# #0:1@89..90#1#::#0:1@91..92#1#std#0:1@93..96#1#::#0:1@96..97#1#collections#0:1@98..109#1#::#0:1@109..110#1#HashSet#0:1@111..118#1#<#0:1@118..119#1#(#0:1@119..120#1#)#0:1@120..121#1#>#0:1@121..122#1#,#0:1@122..123#1# +}#0:1@132..133#1# "#]], ); } @@ -171,7 +171,7 @@ fn main(foo: ()) { } fn main(foo: ()) { - /* error: unresolved macro unresolved */"helloworld!"#0:3@207..323#6#; + /* error: unresolved macro unresolved */"helloworld!"#0:3@207..323#2#; } } @@ -197,7 +197,7 @@ macro_rules! mk_struct { #[macro_use] mod foo; -struct#1:1@59..65#2# Foo#0:2@32..35#0#(#1:1@70..71#2#u32#0:2@41..44#0#)#1:1@74..75#2#;#1:1@75..76#2# +struct#1:1@59..65#1# Foo#0:2@32..35#0#(#1:1@70..71#1#u32#0:2@41..44#0#)#1:1@74..75#1#;#1:1@75..76#1# "#]], ); } diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs index e315414e9bdd..fc5a6e80a427 100644 --- a/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -224,7 +224,7 @@ fn reindent(indent: IndentLevel, pp: String) -> String { return pp; } let mut lines = pp.split_inclusive('\n'); - let mut res = lines.next().unwrap().to_string(); + let mut res = lines.next().unwrap().to_owned(); for line in lines { if line.trim().is_empty() { res.push_str(line) diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index fb6fd867a161..21cc28f1b3d0 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -33,8 +33,8 @@ use crate::{ 
db::DefDatabase, item_scope::{ImportId, ImportOrExternCrate, ImportType, PerNsGlobImports}, item_tree::{ - self, ExternCrate, Fields, FileItemTreeId, ImportKind, ItemTree, ItemTreeId, - ItemTreeModItemNode, Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, TreeId, + self, ExternCrate, Fields, FileItemTreeId, ImportKind, ItemTree, ItemTreeId, ItemTreeNode, + Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, TreeId, }, macro_call_as_call_id, macro_call_as_call_id_with_eager, nameres::{ @@ -2125,7 +2125,7 @@ impl ModCollector<'_, '_> { let is_export = export_attr.exists(); let local_inner = if is_export { - export_attr.tt_values().flat_map(|it| &it.token_trees).any(|it| match it { + export_attr.tt_values().flat_map(|it| it.token_trees.iter()).any(|it| match it { tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => { ident.text.contains("local_inner_macros") } diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs index 7a9c4ea01699..db47d743c5a4 100644 --- a/crates/hir-def/src/resolver.rs +++ b/crates/hir-def/src/resolver.rs @@ -27,9 +27,9 @@ use crate::{ visibility::{RawVisibility, Visibility}, AdtId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, - ItemContainerId, LifetimeParamId, LocalModuleId, Lookup, Macro2Id, MacroId, MacroRulesId, - ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, - TypeOrConstParamId, TypeOwnerId, TypeParamId, UseId, VariantId, + ItemContainerId, ItemTreeLoc, LifetimeParamId, LocalModuleId, Lookup, Macro2Id, MacroId, + MacroRulesId, ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId, + TypeAliasId, TypeOrConstParamId, TypeOwnerId, TypeParamId, UseId, VariantId, }; #[derive(Debug, Clone)] @@ -248,6 +248,7 @@ impl Resolver { RawVisibility::Public => Some(Visibility::Public), } } + pub fn resolve_path_in_value_ns( &self, db: &dyn 
DefDatabase, @@ -1014,13 +1015,13 @@ impl HasResolver for CrateRootModuleId { impl HasResolver for TraitId { fn resolver(self, db: &dyn DefDatabase) -> Resolver { - self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into()) + lookup_resolver(db, self).push_generic_params_scope(db, self.into()) } } impl HasResolver for TraitAliasId { fn resolver(self, db: &dyn DefDatabase) -> Resolver { - self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into()) + lookup_resolver(db, self).push_generic_params_scope(db, self.into()) } } @@ -1036,25 +1037,25 @@ impl + Copy> HasResolver for T { impl HasResolver for FunctionId { fn resolver(self, db: &dyn DefDatabase) -> Resolver { - self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into()) + lookup_resolver(db, self).push_generic_params_scope(db, self.into()) } } impl HasResolver for ConstId { fn resolver(self, db: &dyn DefDatabase) -> Resolver { - self.lookup(db).container.resolver(db) + lookup_resolver(db, self) } } impl HasResolver for StaticId { fn resolver(self, db: &dyn DefDatabase) -> Resolver { - self.lookup(db).container.resolver(db) + lookup_resolver(db, self) } } impl HasResolver for TypeAliasId { fn resolver(self, db: &dyn DefDatabase) -> Resolver { - self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into()) + lookup_resolver(db, self).push_generic_params_scope(db, self.into()) } } @@ -1071,19 +1072,19 @@ impl HasResolver for ImplId { impl HasResolver for ExternBlockId { fn resolver(self, db: &dyn DefDatabase) -> Resolver { // Same as parent's - self.lookup(db).container.resolver(db) + lookup_resolver(db, self) } } impl HasResolver for ExternCrateId { fn resolver(self, db: &dyn DefDatabase) -> Resolver { - self.lookup(db).container.resolver(db) + lookup_resolver(db, self) } } impl HasResolver for UseId { fn resolver(self, db: &dyn DefDatabase) -> Resolver { - self.lookup(db).container.resolver(db) + lookup_resolver(db, self) } } 
@@ -1170,18 +1171,28 @@ impl HasResolver for MacroId { impl HasResolver for Macro2Id { fn resolver(self, db: &dyn DefDatabase) -> Resolver { - self.lookup(db).container.resolver(db) + lookup_resolver(db, self) } } impl HasResolver for ProcMacroId { fn resolver(self, db: &dyn DefDatabase) -> Resolver { - self.lookup(db).container.resolver(db) + lookup_resolver(db, self) } } impl HasResolver for MacroRulesId { fn resolver(self, db: &dyn DefDatabase) -> Resolver { - self.lookup(db).container.resolver(db) + lookup_resolver(db, self) } } + +fn lookup_resolver<'db>( + db: &(dyn DefDatabase + 'db), + lookup: impl Lookup< + Database<'db> = dyn DefDatabase + 'db, + Data = impl ItemTreeLoc, + >, +) -> Resolver { + lookup.lookup(db).container().resolver(db) +} diff --git a/crates/hir-def/src/src.rs b/crates/hir-def/src/src.rs index 9bd8c8d2215e..4283f003f897 100644 --- a/crates/hir-def/src/src.rs +++ b/crates/hir-def/src/src.rs @@ -1,12 +1,14 @@ //! Utilities for mapping between hir IDs and the surface syntax. 
+use either::Either; use hir_expand::InFile; use la_arena::ArenaMap; use syntax::ast; use crate::{ - db::DefDatabase, item_tree::ItemTreeModItemNode, AssocItemLoc, EnumVariantLoc, ItemLoc, Lookup, - Macro2Loc, MacroRulesLoc, ProcMacroLoc, UseId, + data::adt::lower_struct, db::DefDatabase, item_tree::ItemTreeNode, trace::Trace, GenericDefId, + ItemTreeLoc, LocalFieldId, LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, UseId, + VariantId, }; pub trait HasSource { @@ -14,81 +16,22 @@ pub trait HasSource { fn source(&self, db: &dyn DefDatabase) -> InFile; } -impl HasSource for AssocItemLoc { - type Value = N::Source; - - fn source(&self, db: &dyn DefDatabase) -> InFile { - let tree = self.id.item_tree(db); - let ast_id_map = db.ast_id_map(self.id.file_id()); - let root = db.parse_or_expand(self.id.file_id()); - let node = &tree[self.id.value]; - - InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root)) - } -} - -impl HasSource for ItemLoc { - type Value = N::Source; - - fn source(&self, db: &dyn DefDatabase) -> InFile { - let tree = self.id.item_tree(db); - let ast_id_map = db.ast_id_map(self.id.file_id()); - let root = db.parse_or_expand(self.id.file_id()); - let node = &tree[self.id.value]; - - InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root)) - } -} - -impl HasSource for EnumVariantLoc { - type Value = ast::Variant; - - fn source(&self, db: &dyn DefDatabase) -> InFile { - let tree = self.id.item_tree(db); - let ast_id_map = db.ast_id_map(self.id.file_id()); - let root = db.parse_or_expand(self.id.file_id()); - let node = &tree[self.id.value]; - - InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id).to_node(&root)) - } -} - -impl HasSource for Macro2Loc { - type Value = ast::MacroDef; +impl HasSource for T +where + T: ItemTreeLoc, + T::Id: ItemTreeNode, +{ + type Value = ::Source; fn source(&self, db: &dyn DefDatabase) -> InFile { - let tree = self.id.item_tree(db); - let ast_id_map = 
db.ast_id_map(self.id.file_id()); - let root = db.parse_or_expand(self.id.file_id()); - let node = &tree[self.id.value]; + let id = self.item_tree_id(); + let file_id = id.file_id(); + let tree = id.item_tree(db); + let ast_id_map = db.ast_id_map(file_id); + let root = db.parse_or_expand(file_id); + let node = &tree[id.value]; - InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root)) - } -} - -impl HasSource for MacroRulesLoc { - type Value = ast::MacroRules; - - fn source(&self, db: &dyn DefDatabase) -> InFile { - let tree = self.id.item_tree(db); - let ast_id_map = db.ast_id_map(self.id.file_id()); - let root = db.parse_or_expand(self.id.file_id()); - let node = &tree[self.id.value]; - - InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root)) - } -} - -impl HasSource for ProcMacroLoc { - type Value = ast::Fn; - - fn source(&self, db: &dyn DefDatabase) -> InFile { - let tree = self.id.item_tree(db); - let ast_id_map = db.ast_id_map(self.id.file_id()); - let root = db.parse_or_expand(self.id.file_id()); - let node = &tree[self.id.value]; - - InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root)) + InFile::new(file_id, ast_id_map.get(node.ast_id()).to_node(&root)) } } @@ -111,3 +54,105 @@ impl HasChildSource> for UseId { ) } } + +impl HasChildSource for GenericDefId { + type Value = Either; + fn child_source( + &self, + db: &dyn DefDatabase, + ) -> InFile> { + let generic_params = db.generic_params(*self); + let mut idx_iter = generic_params.type_or_consts.iter().map(|(idx, _)| idx); + + let (file_id, generic_params_list) = self.file_id_and_params_of(db); + + let mut params = ArenaMap::default(); + + // For traits and trait aliases the first type index is `Self`, we need to add it before + // the other params. 
+ match *self { + GenericDefId::TraitId(id) => { + let trait_ref = id.lookup(db).source(db).value; + let idx = idx_iter.next().unwrap(); + params.insert(idx, Either::Right(ast::TraitOrAlias::Trait(trait_ref))); + } + GenericDefId::TraitAliasId(id) => { + let alias = id.lookup(db).source(db).value; + let idx = idx_iter.next().unwrap(); + params.insert(idx, Either::Right(ast::TraitOrAlias::TraitAlias(alias))); + } + _ => {} + } + + if let Some(generic_params_list) = generic_params_list { + for (idx, ast_param) in idx_iter.zip(generic_params_list.type_or_const_params()) { + params.insert(idx, Either::Left(ast_param)); + } + } + + InFile::new(file_id, params) + } +} + +impl HasChildSource for GenericDefId { + type Value = ast::LifetimeParam; + fn child_source( + &self, + db: &dyn DefDatabase, + ) -> InFile> { + let generic_params = db.generic_params(*self); + let idx_iter = generic_params.lifetimes.iter().map(|(idx, _)| idx); + + let (file_id, generic_params_list) = self.file_id_and_params_of(db); + + let mut params = ArenaMap::default(); + + if let Some(generic_params_list) = generic_params_list { + for (idx, ast_param) in idx_iter.zip(generic_params_list.lifetime_params()) { + params.insert(idx, ast_param); + } + } + + InFile::new(file_id, params) + } +} + +impl HasChildSource for VariantId { + type Value = Either; + + fn child_source(&self, db: &dyn DefDatabase) -> InFile> { + let item_tree; + let (src, fields, container) = match *self { + VariantId::EnumVariantId(it) => { + let lookup = it.lookup(db); + item_tree = lookup.id.item_tree(db); + ( + lookup.source(db).map(|it| it.kind()), + &item_tree[lookup.id.value].fields, + lookup.parent.lookup(db).container, + ) + } + VariantId::StructId(it) => { + let lookup = it.lookup(db); + item_tree = lookup.id.item_tree(db); + ( + lookup.source(db).map(|it| it.kind()), + &item_tree[lookup.id.value].fields, + lookup.container, + ) + } + VariantId::UnionId(it) => { + let lookup = it.lookup(db); + item_tree = 
lookup.id.item_tree(db); + ( + lookup.source(db).map(|it| it.kind()), + &item_tree[lookup.id.value].fields, + lookup.container, + ) + } + }; + let mut trace = Trace::new_for_map(); + lower_struct(db, &mut trace, &src, container.krate, &item_tree, fields); + src.with_value(trace.into_map()) + } +} diff --git a/crates/hir-def/src/trace.rs b/crates/hir-def/src/trace.rs index 04d5b266194e..da50ee8dc7a3 100644 --- a/crates/hir-def/src/trace.rs +++ b/crates/hir-def/src/trace.rs @@ -11,6 +11,8 @@ //! projections. use la_arena::{Arena, ArenaMap, Idx, RawIdx}; +// FIXME: This isn't really used anymore, at least not in a way where it does anything useful. +// Check if we should get rid of this or make proper use of it instead. pub(crate) struct Trace { arena: Option>, map: Option, V>>, diff --git a/crates/hir-def/src/visibility.rs b/crates/hir-def/src/visibility.rs index b9676179a5fd..0f3fac1cecd0 100644 --- a/crates/hir-def/src/visibility.rs +++ b/crates/hir-def/src/visibility.rs @@ -37,10 +37,14 @@ impl RawVisibility { db: &dyn DefDatabase, node: InFile>, ) -> RawVisibility { + let node = match node.transpose() { + None => return RawVisibility::private(), + Some(node) => node, + }; Self::from_ast_with_span_map(db, node.value, db.span_map(node.file_id).as_ref()) } - pub(crate) fn from_ast_with_span_map( + pub(crate) fn from_opt_ast_with_span_map( db: &dyn DefDatabase, node: Option, span_map: SpanMapRef<'_>, @@ -49,29 +53,28 @@ impl RawVisibility { None => return RawVisibility::private(), Some(node) => node, }; - match node.kind() { + Self::from_ast_with_span_map(db, node, span_map) + } + + fn from_ast_with_span_map( + db: &dyn DefDatabase, + node: ast::Visibility, + span_map: SpanMapRef<'_>, + ) -> RawVisibility { + let path = match node.kind() { ast::VisibilityKind::In(path) => { let path = ModPath::from_src(db.upcast(), path, span_map); - let path = match path { + match path { None => return RawVisibility::private(), Some(path) => path, - }; - RawVisibility::Module(path, 
VisibilityExplicitness::Explicit) + } } - ast::VisibilityKind::PubCrate => { - let path = ModPath::from_kind(PathKind::Crate); - RawVisibility::Module(path, VisibilityExplicitness::Explicit) - } - ast::VisibilityKind::PubSuper => { - let path = ModPath::from_kind(PathKind::Super(1)); - RawVisibility::Module(path, VisibilityExplicitness::Explicit) - } - ast::VisibilityKind::PubSelf => { - let path = ModPath::from_kind(PathKind::Super(0)); - RawVisibility::Module(path, VisibilityExplicitness::Explicit) - } - ast::VisibilityKind::Pub => RawVisibility::Public, - } + ast::VisibilityKind::PubCrate => ModPath::from_kind(PathKind::Crate), + ast::VisibilityKind::PubSuper => ModPath::from_kind(PathKind::Super(1)), + ast::VisibilityKind::PubSelf => ModPath::from_kind(PathKind::Super(0)), + ast::VisibilityKind::Pub => return RawVisibility::Public, + }; + RawVisibility::Module(path, VisibilityExplicitness::Explicit) } pub fn resolve( @@ -94,6 +97,11 @@ pub enum Visibility { } impl Visibility { + pub(crate) fn is_visible_from_other_crate(self) -> bool { + matches!(self, Visibility::Public) + } + + #[tracing::instrument(skip_all)] pub fn is_visible_from(self, db: &dyn DefDatabase, from_module: ModuleId) -> bool { let to_module = match self { Visibility::Module(m, _) => m, @@ -104,24 +112,33 @@ impl Visibility { return false; } let def_map = from_module.def_map(db); - self.is_visible_from_def_map(db, &def_map, from_module.local_id) - } - - pub(crate) fn is_visible_from_other_crate(self) -> bool { - matches!(self, Visibility::Public) + Self::is_visible_from_def_map_(db, &def_map, to_module, from_module.local_id) } pub(crate) fn is_visible_from_def_map( self, db: &dyn DefDatabase, def_map: &DefMap, - mut from_module: LocalModuleId, + from_module: LocalModuleId, ) -> bool { - let mut to_module = match self { + let to_module = match self { Visibility::Module(m, _) => m, Visibility::Public => return true, }; + // if they're not in the same crate, it can't be visible + if 
def_map.krate() != to_module.krate { + return false; + } + Self::is_visible_from_def_map_(db, def_map, to_module, from_module) + } + fn is_visible_from_def_map_( + db: &dyn DefDatabase, + def_map: &DefMap, + mut to_module: ModuleId, + mut from_module: LocalModuleId, + ) -> bool { + debug_assert_eq!(to_module.krate, def_map.krate()); // `to_module` might be the root module of a block expression. Those have the same // visibility as the containing module (even though no items are directly nameable from // there, getting this right is important for method resolution). @@ -129,20 +146,25 @@ impl Visibility { // Additional complication: `to_module` might be in `from_module`'s `DefMap`, which we're // currently computing, so we must not call the `def_map` query for it. - let mut arc; + let def_map_block = def_map.block_id(); loop { - let to_module_def_map = - if to_module.krate == def_map.krate() && to_module.block == def_map.block_id() { + match (to_module.block, def_map_block) { + // to_module is not a block, so there is no parent def map to use + (None, _) => (), + (Some(a), Some(b)) if a == b => { cov_mark::hit!(is_visible_from_same_block_def_map); - def_map - } else { - arc = to_module.def_map(db); - &arc - }; - match to_module_def_map.parent() { - Some(parent) => to_module = parent, - None => break, + if let Some(parent) = def_map.parent() { + to_module = parent; + } + } + _ => { + if let Some(parent) = to_module.def_map(db).parent() { + to_module = parent; + continue; + } + } } + break; } // from_module needs to be a descendant of to_module @@ -175,30 +197,25 @@ impl Visibility { /// visible in unrelated modules). 
pub(crate) fn max(self, other: Visibility, def_map: &DefMap) -> Option { match (self, other) { - (Visibility::Module(_, _) | Visibility::Public, Visibility::Public) - | (Visibility::Public, Visibility::Module(_, _)) => Some(Visibility::Public), - (Visibility::Module(mod_a, vis_a), Visibility::Module(mod_b, vis_b)) => { + (_, Visibility::Public) | (Visibility::Public, _) => Some(Visibility::Public), + (Visibility::Module(mod_a, expl_a), Visibility::Module(mod_b, expl_b)) => { if mod_a.krate != mod_b.krate { return None; } - let mut a_ancestors = iter::successors(Some(mod_a.local_id), |&m| { - let parent_id = def_map[m].parent?; - Some(parent_id) - }); - let mut b_ancestors = iter::successors(Some(mod_b.local_id), |&m| { - let parent_id = def_map[m].parent?; - Some(parent_id) - }); + let mut a_ancestors = + iter::successors(Some(mod_a.local_id), |&m| def_map[m].parent); + let mut b_ancestors = + iter::successors(Some(mod_b.local_id), |&m| def_map[m].parent); if a_ancestors.any(|m| m == mod_b.local_id) { // B is above A - return Some(Visibility::Module(mod_b, vis_b)); + return Some(Visibility::Module(mod_b, expl_b)); } if b_ancestors.any(|m| m == mod_a.local_id) { // A is above B - return Some(Visibility::Module(mod_a, vis_a)); + return Some(Visibility::Module(mod_a, expl_a)); } None @@ -207,7 +224,8 @@ impl Visibility { } } -/// Whether the item was imported through `pub(crate) use` or just `use`. +/// Whether the item was imported through an explicit `pub(crate) use` or just a `use` without +/// visibility. 
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum VisibilityExplicitness { Explicit, diff --git a/crates/hir-expand/src/ast_id_map.rs b/crates/hir-expand/src/ast_id_map.rs index 530f10a06847..ab582741f5b8 100644 --- a/crates/hir-expand/src/ast_id_map.rs +++ b/crates/hir-expand/src/ast_id_map.rs @@ -155,7 +155,7 @@ impl PartialEq for AstIdMap { impl Eq for AstIdMap {} impl AstIdMap { - pub(crate) fn ast_id_map( + pub(crate) fn new( db: &dyn ExpandDatabase, file_id: span::HirFileId, ) -> triomphe::Arc { diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs index c20c1639e1a4..1c92dea38e6d 100644 --- a/crates/hir-expand/src/attrs.rs +++ b/crates/hir-expand/src/attrs.rs @@ -123,7 +123,7 @@ impl RawAttrs { .filter_map(|(idx, attr)| Attr::from_tt(db, attr, index.with_cfg_attr(idx))); let cfg_options = &crate_graph[krate].cfg_options; - let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() }; + let cfg = Subtree { delimiter: subtree.delimiter, token_trees: Box::from(cfg) }; let cfg = CfgExpr::parse(&cfg); if cfg_options.check(&cfg) == Some(false) { smallvec![] diff --git a/crates/hir-expand/src/builtin_attr_macro.rs b/crates/hir-expand/src/builtin_attr_macro.rs index dd2aa94ad01d..903b0d480700 100644 --- a/crates/hir-expand/src/builtin_attr_macro.rs +++ b/crates/hir-expand/src/builtin_attr_macro.rs @@ -137,5 +137,8 @@ pub fn pseudo_derive_attr_expansion( token_trees.extend(tt.iter().cloned()); token_trees.push(mk_leaf(']')); } - ExpandResult::ok(tt::Subtree { delimiter: tt.delimiter, token_trees }) + ExpandResult::ok(tt::Subtree { + delimiter: tt.delimiter, + token_trees: token_trees.into_boxed_slice(), + }) } diff --git a/crates/hir-expand/src/builtin_derive_macro.rs b/crates/hir-expand/src/builtin_derive_macro.rs index 024fb8c1f61a..279548751434 100644 --- a/crates/hir-expand/src/builtin_derive_macro.rs +++ b/crates/hir-expand/src/builtin_derive_macro.rs @@ -194,6 +194,7 @@ struct BasicAdtInfo { /// second field is 
`Some(ty)` if it's a const param of type `ty`, `None` if it's a type param. /// third fields is where bounds, if any param_types: Vec<(tt::Subtree, Option, Option)>, + where_clause: Vec, associated_types: Vec, } @@ -202,10 +203,11 @@ fn parse_adt( adt: &ast::Adt, call_site: Span, ) -> Result { - let (name, generic_param_list, shape) = match adt { + let (name, generic_param_list, where_clause, shape) = match adt { ast::Adt::Struct(it) => ( it.name(), it.generic_param_list(), + it.where_clause(), AdtShape::Struct(VariantShape::from(tm, it.field_list())?), ), ast::Adt::Enum(it) => { @@ -217,6 +219,7 @@ fn parse_adt( ( it.name(), it.generic_param_list(), + it.where_clause(), AdtShape::Enum { default_variant, variants: it @@ -233,7 +236,9 @@ fn parse_adt( }, ) } - ast::Adt::Union(it) => (it.name(), it.generic_param_list(), AdtShape::Union), + ast::Adt::Union(it) => { + (it.name(), it.generic_param_list(), it.where_clause(), AdtShape::Union) + } }; let mut param_type_set: FxHashSet = FxHashSet::default(); @@ -274,6 +279,14 @@ fn parse_adt( }) .collect(); + let where_clause = if let Some(w) = where_clause { + w.predicates() + .map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm, call_site)) + .collect() + } else { + vec![] + }; + // For a generic parameter `T`, when shorthand associated type `T::Assoc` appears in field // types (of any variant for enums), we generate trait bound for it. 
It sounds reasonable to // also generate trait bound for qualified associated type `::Assoc`, but rustc @@ -301,7 +314,7 @@ fn parse_adt( .map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm, call_site)) .collect(); let name_token = name_to_token(tm, name)?; - Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types }) + Ok(BasicAdtInfo { name: name_token, shape, param_types, where_clause, associated_types }) } fn name_to_token( @@ -366,7 +379,8 @@ fn expand_simple_derive( } }; let trait_body = make_trait_body(&info); - let mut where_block = vec![]; + let mut where_block: Vec<_> = + info.where_clause.into_iter().map(|w| quote! {invoc_span => #w , }).collect(); let (params, args): (Vec<_>, Vec<_>) = info .param_types .into_iter() diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index 29d389f656f1..6d3de0e55d24 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -155,10 +155,10 @@ fn line_expand( // not incremental ExpandResult::ok(tt::Subtree { delimiter: tt::Delimiter::invisible_spanned(span), - token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { + token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: "0u32".into(), span, - }))], + }))]), }) } @@ -208,11 +208,11 @@ fn assert_expand( [cond, panic_args @ ..] 
=> { let comma = tt::Subtree { delimiter: tt::Delimiter::invisible_spanned(call_site_span), - token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { + token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', spacing: tt::Spacing::Alone, span: call_site_span, - }))], + }))]), }; let cond = cond.clone(); let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma); @@ -359,7 +359,10 @@ fn panic_expand( close: call_site_span, kind: tt::DelimiterKind::Parenthesis, }; - call.token_trees.push(tt::TokenTree::Subtree(subtree)); + + // FIXME(slow): quote! have a way to expand to builder to make this a vec! + call.push(tt::TokenTree::Subtree(subtree)); + ExpandResult::ok(call) } @@ -388,7 +391,10 @@ fn unreachable_expand( close: call_site_span, kind: tt::DelimiterKind::Parenthesis, }; - call.token_trees.push(tt::TokenTree::Subtree(subtree)); + + // FIXME(slow): quote! have a way to expand to builder to make this a vec! + call.push(tt::TokenTree::Subtree(subtree)); + ExpandResult::ok(call) } @@ -509,7 +515,7 @@ fn concat_bytes_expand( tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { let token = ast::make::tokens::literal(&lit.to_string()); match token.kind() { - syntax::SyntaxKind::BYTE => bytes.push(token.text().to_string()), + syntax::SyntaxKind::BYTE => bytes.push(token.text().to_owned()), syntax::SyntaxKind::BYTE_STRING => { let components = unquote_byte_string(lit).unwrap_or_default(); components.into_iter().for_each(|it| bytes.push(it.to_string())); @@ -564,7 +570,7 @@ fn concat_bytes_expand_subtree( let lit = ast::make::tokens::literal(&lit.to_string()); match lit.kind() { syntax::SyntaxKind::BYTE | syntax::SyntaxKind::INT_NUMBER => { - bytes.push(lit.text().to_string()) + bytes.push(lit.text().to_owned()) } _ => { return Err(mbe::ExpandError::UnexpectedToken.into()); @@ -675,10 +681,10 @@ fn include_bytes_expand( // FIXME: actually read the file here if the user asked for macro expansion let res = 
tt::Subtree { delimiter: tt::Delimiter::invisible_spanned(span), - token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { + token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: r#"b"""#.into(), span, - }))], + }))]), }; ExpandResult::ok(res) } @@ -743,7 +749,7 @@ fn env_expand( // We cannot use an empty string here, because for // `include!(concat!(env!("OUT_DIR"), "/foo.rs"))` will become // `include!("foo.rs"), which might go to infinite loop - "UNRESOLVED_ENV_VAR".to_string() + "UNRESOLVED_ENV_VAR".to_owned() }); let expanded = quote! {span => #s }; diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index f220284fae7c..6a288cf91979 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -61,7 +61,7 @@ pub trait ExpandDatabase: SourceDatabase { #[salsa::input] fn proc_macros(&self) -> Arc; - #[salsa::invoke(AstIdMap::ast_id_map)] + #[salsa::invoke(AstIdMap::new)] fn ast_id_map(&self, file_id: HirFileId) -> Arc; /// Main public API -- parses a hir file, not caring whether it's a real @@ -524,7 +524,7 @@ fn macro_expand( return ExpandResult { value: CowArc::Owned(tt::Subtree { delimiter: tt::Delimiter::invisible_spanned(loc.call_site), - token_trees: Vec::new(), + token_trees: Box::new([]), }), // FIXME: We should make sure to enforce an invariant that invalid macro // calls do not reach this call path! 
@@ -586,7 +586,7 @@ fn macro_expand( return value.map(|()| { CowArc::Owned(tt::Subtree { delimiter: tt::Delimiter::invisible_spanned(loc.call_site), - token_trees: vec![], + token_trees: Box::new([]), }) }); } @@ -601,7 +601,7 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult ExpandResult bool { a.delimiter.kind == b.delimiter.kind && a.token_trees.len() == b.token_trees.len() - && a.token_trees.iter().zip(&b.token_trees).all(|(a, b)| check_tt_eq(a, b)) + && a.token_trees.iter().zip(b.token_trees.iter()).all(|(a, b)| check_tt_eq(a, b)) } fn check_tt_eq(a: &tt::TokenTree, b: &tt::TokenTree) -> bool { diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs index 8ddaa3f3039e..65b834d7a81c 100644 --- a/crates/hir-expand/src/hygiene.rs +++ b/crates/hir-expand/src/hygiene.rs @@ -7,9 +7,10 @@ use std::iter; +use base_db::salsa::{self, InternValue}; use span::{MacroCallId, Span, SyntaxContextId}; -use crate::db::ExpandDatabase; +use crate::db::{ExpandDatabase, InternSyntaxContextQuery}; #[derive(Copy, Clone, Hash, PartialEq, Eq)] pub struct SyntaxContextData { @@ -22,6 +23,14 @@ pub struct SyntaxContextData { pub opaque_and_semitransparent: SyntaxContextId, } +impl InternValue for SyntaxContextData { + type Key = (SyntaxContextId, Option, Transparency); + + fn into_key(&self) -> Self::Key { + (self.parent, self.outer_expn, self.outer_transparency) + } +} + impl std::fmt::Debug for SyntaxContextData { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("SyntaxContextData") @@ -149,38 +158,36 @@ fn apply_mark_internal( transparency: Transparency, ) -> SyntaxContextId { let syntax_context_data = db.lookup_intern_syntax_context(ctxt); - let mut opaque = handle_self_ref(ctxt, syntax_context_data.opaque); - let mut opaque_and_semitransparent = - handle_self_ref(ctxt, syntax_context_data.opaque_and_semitransparent); + let mut opaque = syntax_context_data.opaque; + let mut 
opaque_and_semitransparent = syntax_context_data.opaque_and_semitransparent; if transparency >= Transparency::Opaque { let parent = opaque; - // Unlike rustc, with salsa we can't prefetch the to be allocated ID to create cycles with - // salsa when interning, so we use a sentinel value that effectively means the current - // syntax context. - let new_opaque = SyntaxContextId::SELF_REF; - opaque = db.intern_syntax_context(SyntaxContextData { - outer_expn: call_id, - outer_transparency: transparency, - parent, - opaque: new_opaque, - opaque_and_semitransparent: new_opaque, - }); + opaque = salsa::plumbing::get_query_table::(db).get_or_insert( + (parent, call_id, transparency), + |new_opaque| SyntaxContextData { + outer_expn: call_id, + outer_transparency: transparency, + parent, + opaque: new_opaque, + opaque_and_semitransparent: new_opaque, + }, + ); } if transparency >= Transparency::SemiTransparent { let parent = opaque_and_semitransparent; - // Unlike rustc, with salsa we can't prefetch the to be allocated ID to create cycles with - // salsa when interning, so we use a sentinel value that effectively means the current - // syntax context. 
- let new_opaque_and_semitransparent = SyntaxContextId::SELF_REF; - opaque_and_semitransparent = db.intern_syntax_context(SyntaxContextData { - outer_expn: call_id, - outer_transparency: transparency, - parent, - opaque, - opaque_and_semitransparent: new_opaque_and_semitransparent, - }); + opaque_and_semitransparent = + salsa::plumbing::get_query_table::(db).get_or_insert( + (parent, call_id, transparency), + |new_opaque_and_semitransparent| SyntaxContextData { + outer_expn: call_id, + outer_transparency: transparency, + parent, + opaque, + opaque_and_semitransparent: new_opaque_and_semitransparent, + }, + ); } let parent = ctxt; @@ -201,20 +208,12 @@ pub trait SyntaxContextExt { fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option, Transparency)>; } -#[inline(always)] -fn handle_self_ref(p: SyntaxContextId, n: SyntaxContextId) -> SyntaxContextId { - match n { - SyntaxContextId::SELF_REF => p, - _ => n, - } -} - impl SyntaxContextExt for SyntaxContextId { fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self { - handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque_and_semitransparent) + db.lookup_intern_syntax_context(self).opaque_and_semitransparent } fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self { - handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque) + db.lookup_intern_syntax_context(self).opaque } fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self { db.lookup_intern_syntax_context(self).parent diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index bd2505249036..fd028182faf6 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -30,7 +30,7 @@ use triomphe::Arc; use std::{fmt, hash::Hash}; -use base_db::{CrateId, Edition, FileId}; +use base_db::{salsa::impl_intern_value_trivial, CrateId, Edition, FileId}; use either::Either; use span::{FileRange, HirFileIdRepr, Span, SyntaxContextId}; use syntax::{ @@ -66,6 +66,7 @@ pub mod tt { pub type Delimiter 
= ::tt::Delimiter; pub type DelimSpan = ::tt::DelimSpan; pub type Subtree = ::tt::Subtree; + pub type SubtreeBuilder = ::tt::SubtreeBuilder; pub type Leaf = ::tt::Leaf; pub type Literal = ::tt::Literal; pub type Punct = ::tt::Punct; @@ -175,6 +176,7 @@ pub struct MacroCallLoc { pub kind: MacroCallKind, pub call_site: Span, } +impl_intern_value_trivial!(MacroCallLoc); #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct MacroDefId { @@ -760,7 +762,7 @@ impl ExpansionInfo { ( Arc::new(tt::Subtree { delimiter: tt::Delimiter::invisible_spanned(loc.call_site), - token_trees: Vec::new(), + token_trees: Box::new([]), }), SyntaxFixupUndoInfo::NONE, ) diff --git a/crates/hir-expand/src/quote.rs b/crates/hir-expand/src/quote.rs index 824f3c3e8f21..c1930c94f5c9 100644 --- a/crates/hir-expand/src/quote.rs +++ b/crates/hir-expand/src/quote.rs @@ -31,7 +31,7 @@ macro_rules! __quote { open: $span, close: $span, }, - token_trees: $crate::quote::IntoTt::to_tokens(children), + token_trees: $crate::quote::IntoTt::to_tokens(children).into_boxed_slice(), } } }; @@ -146,7 +146,7 @@ impl IntoTt for Vec { fn to_subtree(self, span: Span) -> crate::tt::Subtree { crate::tt::Subtree { delimiter: crate::tt::Delimiter::invisible_spanned(span), - token_trees: self, + token_trees: self.into_boxed_slice(), } } @@ -296,8 +296,9 @@ mod tests { // } let struct_name = mk_ident("Foo"); let fields = [mk_ident("name"), mk_ident("id")]; - let fields = - fields.iter().flat_map(|it| quote!(DUMMY =>#it: self.#it.clone(), ).token_trees); + let fields = fields + .iter() + .flat_map(|it| quote!(DUMMY =>#it: self.#it.clone(), ).token_trees.into_vec()); let list = crate::tt::Subtree { delimiter: crate::tt::Delimiter { diff --git a/crates/hir-ty/src/autoderef.rs b/crates/hir-ty/src/autoderef.rs index 991fd2f91df8..8d819e41aa2c 100644 --- a/crates/hir-ty/src/autoderef.rs +++ b/crates/hir-ty/src/autoderef.rs @@ -88,6 +88,7 @@ impl<'a, 'db> Autoderef<'a, 'db> { impl Iterator for Autoderef<'_, '_> { type Item 
= (Ty, usize); + #[tracing::instrument(skip_all)] fn next(&mut self) -> Option { if self.at_start { self.at_start = false; diff --git a/crates/hir-ty/src/builder.rs b/crates/hir-ty/src/builder.rs index 24a7eb3ff0a4..c485c9b2e80b 100644 --- a/crates/hir-ty/src/builder.rs +++ b/crates/hir-ty/src/builder.rs @@ -125,6 +125,7 @@ impl TyBuilder { this } + #[tracing::instrument(skip_all)] pub(crate) fn fill_with_inference_vars(self, table: &mut InferenceTable<'_>) -> Self { self.fill(|x| match x { ParamKind::Type => table.new_type_var().cast(Interner), @@ -208,6 +209,7 @@ impl TyBuilder<()> { ) } + #[tracing::instrument(skip_all)] pub fn subst_for_def( db: &dyn HirDatabase, def: impl Into, diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs index 7e460f9f867a..bd243518fc60 100644 --- a/crates/hir-ty/src/chalk_db.rs +++ b/crates/hir-ty/src/chalk_db.rs @@ -17,7 +17,7 @@ use hir_def::{ use hir_expand::name::name; use crate::{ - db::HirDatabase, + db::{HirDatabase, InternedCoroutine}, display::HirDisplay, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, make_binders, make_single_type_binders, @@ -428,7 +428,7 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { &self, id: chalk_ir::CoroutineId, ) -> Arc> { - let (parent, expr) = self.db.lookup_intern_coroutine(id.into()); + let InternedCoroutine(parent, expr) = self.db.lookup_intern_coroutine(id.into()); // We fill substitution with unknown type, because we only need to know whether the generic // params are types or consts to build `Binders` and those being filled up are for @@ -473,7 +473,7 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { let inner_types = rust_ir::CoroutineWitnessExistential { types: wrap_empty_binders(vec![]) }; - let (parent, _) = self.db.lookup_intern_coroutine(id.into()); + let InternedCoroutine(parent, _) = self.db.lookup_intern_coroutine(id.into()); // See the comment in `coroutine_datum()` for unknown types. 
let subst = TyBuilder::subst_for_coroutine(self.db, parent).fill_with_unknown().build(); let it = subst diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs index ac82208708ae..98384c47490a 100644 --- a/crates/hir-ty/src/consteval/tests.rs +++ b/crates/hir-ty/src/consteval/tests.rs @@ -133,7 +133,7 @@ fn bit_op() { check_number(r#"const GOAL: i8 = 1 << 7"#, (1i8 << 7) as i128); check_number(r#"const GOAL: i8 = -1 << 2"#, (-1i8 << 2) as i128); check_fail(r#"const GOAL: i8 = 1 << 8"#, |e| { - e == ConstEvalError::MirEvalError(MirEvalError::Panic("Overflow in Shl".to_string())) + e == ConstEvalError::MirEvalError(MirEvalError::Panic("Overflow in Shl".to_owned())) }); check_number(r#"const GOAL: i32 = 100000000i32 << 11"#, (100000000i32 << 11) as i128); } @@ -2756,7 +2756,7 @@ fn memory_limit() { "#, |e| { e == ConstEvalError::MirEvalError(MirEvalError::Panic( - "Memory allocation of 30000000000 bytes failed".to_string(), + "Memory allocation of 30000000000 bytes failed".to_owned(), )) }, ); diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs index 21679150b342..fbd366864a43 100644 --- a/crates/hir-ty/src/db.rs +++ b/crates/hir-ty/src/db.rs @@ -3,7 +3,11 @@ use std::sync; -use base_db::{impl_intern_key, salsa, CrateId, Upcast}; +use base_db::{ + impl_intern_key, + salsa::{self, impl_intern_value_trivial}, + CrateId, Upcast, +}; use hir_def::{ db::DefDatabase, hir::ExprId, layout::TargetDataLayout, AdtId, BlockId, ConstParamId, DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId, ImplId, @@ -199,9 +203,9 @@ pub trait HirDatabase: DefDatabase + Upcast { #[salsa::interned] fn intern_impl_trait_id(&self, id: ImplTraitId) -> InternedOpaqueTyId; #[salsa::interned] - fn intern_closure(&self, id: (DefWithBodyId, ExprId)) -> InternedClosureId; + fn intern_closure(&self, id: InternedClosure) -> InternedClosureId; #[salsa::interned] - fn intern_coroutine(&self, id: (DefWithBodyId, ExprId)) -> InternedCoroutineId; + 
fn intern_coroutine(&self, id: InternedCoroutine) -> InternedCoroutineId; #[salsa::invoke(chalk_db::associated_ty_data_query)] fn associated_ty_data( @@ -337,10 +341,18 @@ impl_intern_key!(InternedOpaqueTyId); pub struct InternedClosureId(salsa::InternId); impl_intern_key!(InternedClosureId); +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct InternedClosure(pub DefWithBodyId, pub ExprId); +impl_intern_value_trivial!(InternedClosure); + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct InternedCoroutineId(salsa::InternId); impl_intern_key!(InternedCoroutineId); +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct InternedCoroutine(pub DefWithBodyId, pub ExprId); +impl_intern_value_trivial!(InternedCoroutine); + /// This exists just for Chalk, because Chalk just has a single `FnDefId` where /// we have different IDs for struct and enum variant constructors. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] diff --git a/crates/hir-ty/src/diagnostics/decl_check.rs b/crates/hir-ty/src/diagnostics/decl_check.rs index 78f2005e676c..38eb3371e3ce 100644 --- a/crates/hir-ty/src/diagnostics/decl_check.rs +++ b/crates/hir-ty/src/diagnostics/decl_check.rs @@ -16,11 +16,9 @@ mod case_conv; use std::fmt; use hir_def::{ - data::adt::VariantData, - hir::{Pat, PatId}, - src::HasSource, - AdtId, AttrDefId, ConstId, EnumId, FunctionId, ItemContainerId, Lookup, ModuleDefId, ModuleId, - StaticId, StructId, + data::adt::VariantData, db::DefDatabase, hir::Pat, src::HasSource, AdtId, AttrDefId, ConstId, + EnumId, FunctionId, ItemContainerId, Lookup, ModuleDefId, ModuleId, StaticId, StructId, + TraitId, TypeAliasId, }; use hir_expand::{ name::{AsName, Name}, @@ -79,12 +77,14 @@ pub enum IdentType { Enum, Field, Function, + Module, Parameter, StaticVariable, Structure, + Trait, + TypeAlias, Variable, Variant, - Module, } impl fmt::Display for IdentType { @@ -94,12 +94,14 @@ impl fmt::Display for IdentType { IdentType::Enum => "Enum", 
IdentType::Field => "Field", IdentType::Function => "Function", + IdentType::Module => "Module", IdentType::Parameter => "Parameter", IdentType::StaticVariable => "Static variable", IdentType::Structure => "Structure", + IdentType::Trait => "Trait", + IdentType::TypeAlias => "Type alias", IdentType::Variable => "Variable", IdentType::Variant => "Variant", - IdentType::Module => "Module", }; repr.fmt(f) @@ -136,10 +138,12 @@ impl<'a> DeclValidator<'a> { pub(super) fn validate_item(&mut self, item: ModuleDefId) { match item { ModuleDefId::ModuleId(module_id) => self.validate_module(module_id), + ModuleDefId::TraitId(trait_id) => self.validate_trait(trait_id), ModuleDefId::FunctionId(func) => self.validate_func(func), ModuleDefId::AdtId(adt) => self.validate_adt(adt), ModuleDefId::ConstId(const_id) => self.validate_const(const_id), ModuleDefId::StaticId(static_id) => self.validate_static(static_id), + ModuleDefId::TypeAliasId(type_alias_id) => self.validate_type_alias(type_alias_id), _ => (), } } @@ -242,50 +246,46 @@ impl<'a> DeclValidator<'a> { // Check the module name. 
let Some(module_name) = module_id.name(self.db.upcast()) else { return }; - let module_name_replacement = + let Some(module_name_replacement) = module_name.as_str().and_then(to_lower_snake_case).map(|new_name| Replacement { current_name: module_name, suggested_text: new_name, expected_case: CaseType::LowerSnakeCase, - }); + }) + else { + return; + }; + let module_data = &module_id.def_map(self.db.upcast())[module_id.local_id]; + let Some(module_src) = module_data.declaration_source(self.db.upcast()) else { + return; + }; + self.create_incorrect_case_diagnostic_for_ast_node( + module_name_replacement, + module_src.file_id, + &module_src.value, + IdentType::Module, + ); + } - if let Some(module_name_replacement) = module_name_replacement { - let module_data = &module_id.def_map(self.db.upcast())[module_id.local_id]; - let module_src = module_data.declaration_source(self.db.upcast()); - - if let Some(module_src) = module_src { - let ast_ptr = match module_src.value.name() { - Some(name) => name, - None => { - never!( - "Replacement ({:?}) was generated for a module without a name: {:?}", - module_name_replacement, - module_src - ); - return; - } - }; - - let diagnostic = IncorrectCase { - file: module_src.file_id, - ident_type: IdentType::Module, - ident: AstPtr::new(&ast_ptr), - expected_case: module_name_replacement.expected_case, - ident_text: module_name_replacement - .current_name - .display(self.db.upcast()) - .to_string(), - suggested_text: module_name_replacement.suggested_text, - }; - - self.sink.push(diagnostic); - } + fn validate_trait(&mut self, trait_id: TraitId) { + // Check whether non-snake case identifiers are allowed for this trait. + if self.allowed(trait_id.into(), allow::NON_CAMEL_CASE_TYPES, false) { + return; } + + // Check the trait name. 
+ let data = self.db.trait_data(trait_id); + self.create_incorrect_case_diagnostic_for_item_name( + trait_id, + &data.name, + CaseType::UpperCamelCase, + IdentType::Trait, + ); } fn validate_func(&mut self, func: FunctionId) { - let data = self.db.function_data(func); - if matches!(func.lookup(self.db.upcast()).container, ItemContainerId::ExternBlockId(_)) { + let container = func.lookup(self.db.upcast()).container; + if matches!(container, ItemContainerId::ExternBlockId(_)) { cov_mark::hit!(extern_func_incorrect_case_ignored); return; } @@ -296,270 +296,173 @@ impl<'a> DeclValidator<'a> { } // Check the function name. - let function_name = data.name.display(self.db.upcast()).to_string(); - let fn_name_replacement = to_lower_snake_case(&function_name).map(|new_name| Replacement { - current_name: data.name.clone(), - suggested_text: new_name, - expected_case: CaseType::LowerSnakeCase, - }); - - let body = self.db.body(func.into()); + // Skipped if function is an associated item of a trait implementation. + if !self.is_trait_impl_container(container) { + let data = self.db.function_data(func); + self.create_incorrect_case_diagnostic_for_item_name( + func, + &data.name, + CaseType::LowerSnakeCase, + IdentType::Function, + ); + } else { + cov_mark::hit!(trait_impl_assoc_func_name_incorrect_case_ignored); + } // Check the patterns inside the function body. - // This includes function parameters. - let pats_replacements = body + self.validate_func_body(func); + } + + /// Check incorrect names for patterns inside the function body. + /// This includes function parameters except for trait implementation associated functions. + fn validate_func_body(&mut self, func: FunctionId) { + let body = self.db.body(func.into()); + let mut pats_replacements = body .pats .iter() .filter_map(|(pat_id, pat)| match pat { - Pat::Bind { id, .. } => Some((pat_id, &body.bindings[*id].name)), + Pat::Bind { id, .. 
} => { + let bind_name = &body.bindings[*id].name; + let replacement = Replacement { + current_name: bind_name.clone(), + suggested_text: to_lower_snake_case(&bind_name.to_smol_str())?, + expected_case: CaseType::LowerSnakeCase, + }; + Some((pat_id, replacement)) + } _ => None, }) - .filter_map(|(id, bind_name)| { - Some(( - id, - Replacement { - current_name: bind_name.clone(), - suggested_text: to_lower_snake_case( - &bind_name.display(self.db.upcast()).to_string(), - )?, - expected_case: CaseType::LowerSnakeCase, - }, - )) - }) - .collect(); + .peekable(); - // If there is at least one element to spawn a warning on, go to the source map and generate a warning. - if let Some(fn_name_replacement) = fn_name_replacement { - self.create_incorrect_case_diagnostic_for_func(func, fn_name_replacement); - } - - self.create_incorrect_case_diagnostic_for_variables(func, pats_replacements); - } - - /// Given the information about incorrect names in the function declaration, looks up into the source code - /// for exact locations and adds diagnostics into the sink. - fn create_incorrect_case_diagnostic_for_func( - &mut self, - func: FunctionId, - fn_name_replacement: Replacement, - ) { - let fn_loc = func.lookup(self.db.upcast()); - let fn_src = fn_loc.source(self.db.upcast()); - - // Diagnostic for function name. 
- let ast_ptr = match fn_src.value.name() { - Some(name) => name, - None => { - never!( - "Replacement ({:?}) was generated for a function without a name: {:?}", - fn_name_replacement, - fn_src - ); - return; - } - }; - - let diagnostic = IncorrectCase { - file: fn_src.file_id, - ident_type: IdentType::Function, - ident: AstPtr::new(&ast_ptr), - expected_case: fn_name_replacement.expected_case, - ident_text: fn_name_replacement.current_name.display(self.db.upcast()).to_string(), - suggested_text: fn_name_replacement.suggested_text, - }; - - self.sink.push(diagnostic); - } - - /// Given the information about incorrect variable names, looks up into the source code - /// for exact locations and adds diagnostics into the sink. - fn create_incorrect_case_diagnostic_for_variables( - &mut self, - func: FunctionId, - pats_replacements: Vec<(PatId, Replacement)>, - ) { // XXX: only look at source_map if we do have missing fields - if pats_replacements.is_empty() { + if pats_replacements.peek().is_none() { return; } let (_, source_map) = self.db.body_with_source_map(func.into()); - for (id, replacement) in pats_replacements { - if let Ok(source_ptr) = source_map.pat_syntax(id) { - if let Some(ptr) = source_ptr.value.cast::() { - let root = source_ptr.file_syntax(self.db.upcast()); - let ident_pat = ptr.to_node(&root); - let parent = match ident_pat.syntax().parent() { - Some(parent) => parent, - None => continue, - }; - let name_ast = match ident_pat.name() { - Some(name_ast) => name_ast, - None => continue, - }; + let Ok(source_ptr) = source_map.pat_syntax(id) else { + continue; + }; + let Some(ptr) = source_ptr.value.cast::() else { + continue; + }; + let root = source_ptr.file_syntax(self.db.upcast()); + let ident_pat = ptr.to_node(&root); + let Some(parent) = ident_pat.syntax().parent() else { + continue; + }; - let is_param = ast::Param::can_cast(parent.kind()); - - // We have to check that it's either `let var = ...` or `var @ Variant(_)` statement, - // because e.g. 
match arms are patterns as well. - // In other words, we check that it's a named variable binding. - let is_binding = ast::LetStmt::can_cast(parent.kind()) - || (ast::MatchArm::can_cast(parent.kind()) - && ident_pat.at_token().is_some()); - if !(is_param || is_binding) { - // This pattern is not an actual variable declaration, e.g. `Some(val) => {..}` match arm. - continue; - } - - let ident_type = - if is_param { IdentType::Parameter } else { IdentType::Variable }; - - let diagnostic = IncorrectCase { - file: source_ptr.file_id, - ident_type, - ident: AstPtr::new(&name_ast), - expected_case: replacement.expected_case, - ident_text: replacement.current_name.display(self.db.upcast()).to_string(), - suggested_text: replacement.suggested_text, - }; - - self.sink.push(diagnostic); - } + let is_param = ast::Param::can_cast(parent.kind()); + // We have to check that it's either `let var = ...` or `var @ Variant(_)` statement, + // because e.g. match arms are patterns as well. + // In other words, we check that it's a named variable binding. + let is_binding = ast::LetStmt::can_cast(parent.kind()) + || (ast::MatchArm::can_cast(parent.kind()) && ident_pat.at_token().is_some()); + if !(is_param || is_binding) { + // This pattern is not an actual variable declaration, e.g. `Some(val) => {..}` match arm. + continue; } + + let ident_type = if is_param { IdentType::Parameter } else { IdentType::Variable }; + + self.create_incorrect_case_diagnostic_for_ast_node( + replacement, + source_ptr.file_id, + &ident_pat, + ident_type, + ); } } fn validate_struct(&mut self, struct_id: StructId) { - let data = self.db.struct_data(struct_id); - + // Check the structure name. let non_camel_case_allowed = self.allowed(struct_id.into(), allow::NON_CAMEL_CASE_TYPES, false); - let non_snake_case_allowed = self.allowed(struct_id.into(), allow::NON_SNAKE_CASE, false); - - // Check the structure name. 
- let struct_name = data.name.display(self.db.upcast()).to_string(); - let struct_name_replacement = if !non_camel_case_allowed { - to_camel_case(&struct_name).map(|new_name| Replacement { - current_name: data.name.clone(), - suggested_text: new_name, - expected_case: CaseType::UpperCamelCase, - }) - } else { - None - }; - - // Check the field names. - let mut struct_fields_replacements = Vec::new(); - - if !non_snake_case_allowed { - if let VariantData::Record(fields) = data.variant_data.as_ref() { - for (_, field) in fields.iter() { - let field_name = field.name.display(self.db.upcast()).to_string(); - if let Some(new_name) = to_lower_snake_case(&field_name) { - let replacement = Replacement { - current_name: field.name.clone(), - suggested_text: new_name, - expected_case: CaseType::LowerSnakeCase, - }; - struct_fields_replacements.push(replacement); - } - } - } + if !non_camel_case_allowed { + let data = self.db.struct_data(struct_id); + self.create_incorrect_case_diagnostic_for_item_name( + struct_id, + &data.name, + CaseType::UpperCamelCase, + IdentType::Structure, + ); } - // If there is at least one element to spawn a warning on, go to the source map and generate a warning. - self.create_incorrect_case_diagnostic_for_struct( - struct_id, - struct_name_replacement, - struct_fields_replacements, - ); + // Check the field names. + self.validate_struct_fields(struct_id); } - /// Given the information about incorrect names in the struct declaration, looks up into the source code - /// for exact locations and adds diagnostics into the sink. - fn create_incorrect_case_diagnostic_for_struct( - &mut self, - struct_id: StructId, - struct_name_replacement: Option, - struct_fields_replacements: Vec, - ) { + /// Check incorrect names for struct fields. 
+ fn validate_struct_fields(&mut self, struct_id: StructId) { + if self.allowed(struct_id.into(), allow::NON_SNAKE_CASE, false) { + return; + } + + let data = self.db.struct_data(struct_id); + let VariantData::Record(fields) = data.variant_data.as_ref() else { + return; + }; + let mut struct_fields_replacements = fields + .iter() + .filter_map(|(_, field)| { + to_lower_snake_case(&field.name.to_smol_str()).map(|new_name| Replacement { + current_name: field.name.clone(), + suggested_text: new_name, + expected_case: CaseType::LowerSnakeCase, + }) + }) + .peekable(); + // XXX: Only look at sources if we do have incorrect names. - if struct_name_replacement.is_none() && struct_fields_replacements.is_empty() { + if struct_fields_replacements.peek().is_none() { return; } let struct_loc = struct_id.lookup(self.db.upcast()); let struct_src = struct_loc.source(self.db.upcast()); - if let Some(replacement) = struct_name_replacement { - let ast_ptr = match struct_src.value.name() { - Some(name) => name, - None => { + let Some(ast::FieldList::RecordFieldList(struct_fields_list)) = + struct_src.value.field_list() + else { + always!( + struct_fields_replacements.peek().is_none(), + "Replacements ({:?}) were generated for a structure fields \ + which had no fields list: {:?}", + struct_fields_replacements.collect::>(), + struct_src + ); + return; + }; + let mut struct_fields_iter = struct_fields_list.fields(); + for field_replacement in struct_fields_replacements { + // We assume that parameters in replacement are in the same order as in the + // actual params list, but just some of them (ones that named correctly) are skipped. 
+ let field = loop { + if let Some(field) = struct_fields_iter.next() { + let Some(field_name) = field.name() else { + continue; + }; + if field_name.as_name() == field_replacement.current_name { + break field; + } + } else { never!( - "Replacement ({:?}) was generated for a structure without a name: {:?}", - replacement, + "Replacement ({:?}) was generated for a structure field \ + which was not found: {:?}", + field_replacement, struct_src ); return; } }; - let diagnostic = IncorrectCase { - file: struct_src.file_id, - ident_type: IdentType::Structure, - ident: AstPtr::new(&ast_ptr), - expected_case: replacement.expected_case, - ident_text: replacement.current_name.display(self.db.upcast()).to_string(), - suggested_text: replacement.suggested_text, - }; - - self.sink.push(diagnostic); - } - - let struct_fields_list = match struct_src.value.field_list() { - Some(ast::FieldList::RecordFieldList(fields)) => fields, - _ => { - always!( - struct_fields_replacements.is_empty(), - "Replacements ({:?}) were generated for a structure fields which had no fields list: {:?}", - struct_fields_replacements, - struct_src - ); - return; - } - }; - let mut struct_fields_iter = struct_fields_list.fields(); - for field_to_rename in struct_fields_replacements { - // We assume that parameters in replacement are in the same order as in the - // actual params list, but just some of them (ones that named correctly) are skipped. 
- let ast_ptr = loop { - match struct_fields_iter.next().and_then(|field| field.name()) { - Some(field_name) => { - if field_name.as_name() == field_to_rename.current_name { - break field_name; - } - } - None => { - never!( - "Replacement ({:?}) was generated for a structure field which was not found: {:?}", - field_to_rename, struct_src - ); - return; - } - } - }; - - let diagnostic = IncorrectCase { - file: struct_src.file_id, - ident_type: IdentType::Field, - ident: AstPtr::new(&ast_ptr), - expected_case: field_to_rename.expected_case, - ident_text: field_to_rename.current_name.display(self.db.upcast()).to_string(), - suggested_text: field_to_rename.suggested_text, - }; - - self.sink.push(diagnostic); + self.create_incorrect_case_diagnostic_for_ast_node( + field_replacement, + struct_src.file_id, + &field, + IdentType::Field, + ); } } @@ -572,163 +475,103 @@ impl<'a> DeclValidator<'a> { } // Check the enum name. - let enum_name = data.name.display(self.db.upcast()).to_string(); - let enum_name_replacement = to_camel_case(&enum_name).map(|new_name| Replacement { - current_name: data.name.clone(), - suggested_text: new_name, - expected_case: CaseType::UpperCamelCase, - }); + self.create_incorrect_case_diagnostic_for_item_name( + enum_id, + &data.name, + CaseType::UpperCamelCase, + IdentType::Enum, + ); - // Check the field names. - let enum_fields_replacements = data + // Check the variant names. + self.validate_enum_variants(enum_id) + } + + /// Check incorrect names for enum variants. 
+ fn validate_enum_variants(&mut self, enum_id: EnumId) { + let data = self.db.enum_data(enum_id); + let mut enum_variants_replacements = data .variants .iter() .filter_map(|(_, name)| { - Some(Replacement { + to_camel_case(&name.to_smol_str()).map(|new_name| Replacement { current_name: name.clone(), - suggested_text: to_camel_case(&name.to_smol_str())?, + suggested_text: new_name, expected_case: CaseType::UpperCamelCase, }) }) - .collect(); + .peekable(); - // If there is at least one element to spawn a warning on, go to the source map and generate a warning. - self.create_incorrect_case_diagnostic_for_enum( - enum_id, - enum_name_replacement, - enum_fields_replacements, - ) - } - - /// Given the information about incorrect names in the struct declaration, looks up into the source code - /// for exact locations and adds diagnostics into the sink. - fn create_incorrect_case_diagnostic_for_enum( - &mut self, - enum_id: EnumId, - enum_name_replacement: Option, - enum_variants_replacements: Vec, - ) { // XXX: only look at sources if we do have incorrect names - if enum_name_replacement.is_none() && enum_variants_replacements.is_empty() { + if enum_variants_replacements.peek().is_none() { return; } let enum_loc = enum_id.lookup(self.db.upcast()); let enum_src = enum_loc.source(self.db.upcast()); - if let Some(replacement) = enum_name_replacement { - let ast_ptr = match enum_src.value.name() { - Some(name) => name, - None => { + let Some(enum_variants_list) = enum_src.value.variant_list() else { + always!( + enum_variants_replacements.peek().is_none(), + "Replacements ({:?}) were generated for enum variants \ + which had no fields list: {:?}", + enum_variants_replacements, + enum_src + ); + return; + }; + let mut enum_variants_iter = enum_variants_list.variants(); + for variant_replacement in enum_variants_replacements { + // We assume that parameters in replacement are in the same order as in the + // actual params list, but just some of them (ones that named 
correctly) are skipped. + let variant = loop { + if let Some(variant) = enum_variants_iter.next() { + let Some(variant_name) = variant.name() else { + continue; + }; + if variant_name.as_name() == variant_replacement.current_name { + break variant; + } + } else { never!( - "Replacement ({:?}) was generated for a enum without a name: {:?}", - replacement, + "Replacement ({:?}) was generated for an enum variant \ + which was not found: {:?}", + variant_replacement, enum_src ); return; } }; - let diagnostic = IncorrectCase { - file: enum_src.file_id, - ident_type: IdentType::Enum, - ident: AstPtr::new(&ast_ptr), - expected_case: replacement.expected_case, - ident_text: replacement.current_name.display(self.db.upcast()).to_string(), - suggested_text: replacement.suggested_text, - }; - - self.sink.push(diagnostic); - } - - let enum_variants_list = match enum_src.value.variant_list() { - Some(variants) => variants, - _ => { - always!( - enum_variants_replacements.is_empty(), - "Replacements ({:?}) were generated for a enum variants which had no fields list: {:?}", - enum_variants_replacements, - enum_src - ); - return; - } - }; - let mut enum_variants_iter = enum_variants_list.variants(); - for variant_to_rename in enum_variants_replacements { - // We assume that parameters in replacement are in the same order as in the - // actual params list, but just some of them (ones that named correctly) are skipped. 
- let ast_ptr = loop { - match enum_variants_iter.next().and_then(|v| v.name()) { - Some(variant_name) => { - if variant_name.as_name() == variant_to_rename.current_name { - break variant_name; - } - } - None => { - never!( - "Replacement ({:?}) was generated for a enum variant which was not found: {:?}", - variant_to_rename, enum_src - ); - return; - } - } - }; - - let diagnostic = IncorrectCase { - file: enum_src.file_id, - ident_type: IdentType::Variant, - ident: AstPtr::new(&ast_ptr), - expected_case: variant_to_rename.expected_case, - ident_text: variant_to_rename.current_name.display(self.db.upcast()).to_string(), - suggested_text: variant_to_rename.suggested_text, - }; - - self.sink.push(diagnostic); + self.create_incorrect_case_diagnostic_for_ast_node( + variant_replacement, + enum_src.file_id, + &variant, + IdentType::Variant, + ); } } fn validate_const(&mut self, const_id: ConstId) { - let data = self.db.const_data(const_id); + let container = const_id.lookup(self.db.upcast()).container; + if self.is_trait_impl_container(container) { + cov_mark::hit!(trait_impl_assoc_const_incorrect_case_ignored); + return; + } if self.allowed(const_id.into(), allow::NON_UPPER_CASE_GLOBAL, false) { return; } - let name = match &data.name { - Some(name) => name, - None => return, - }; - - let const_name = name.to_smol_str(); - let replacement = if let Some(new_name) = to_upper_snake_case(&const_name) { - Replacement { - current_name: name.clone(), - suggested_text: new_name, - expected_case: CaseType::UpperSnakeCase, - } - } else { - // Nothing to do here. 
+ let data = self.db.const_data(const_id); + let Some(name) = &data.name else { return; }; - - let const_loc = const_id.lookup(self.db.upcast()); - let const_src = const_loc.source(self.db.upcast()); - - let ast_ptr = match const_src.value.name() { - Some(name) => name, - None => return, - }; - - let diagnostic = IncorrectCase { - file: const_src.file_id, - ident_type: IdentType::Constant, - ident: AstPtr::new(&ast_ptr), - expected_case: replacement.expected_case, - ident_text: replacement.current_name.display(self.db.upcast()).to_string(), - suggested_text: replacement.suggested_text, - }; - - self.sink.push(diagnostic); + self.create_incorrect_case_diagnostic_for_item_name( + const_id, + name, + CaseType::UpperSnakeCase, + IdentType::Constant, + ); } fn validate_static(&mut self, static_id: StaticId) { @@ -742,32 +585,91 @@ impl<'a> DeclValidator<'a> { return; } - let name = &data.name; + self.create_incorrect_case_diagnostic_for_item_name( + static_id, + &data.name, + CaseType::UpperSnakeCase, + IdentType::StaticVariable, + ); + } - let static_name = name.to_smol_str(); - let replacement = if let Some(new_name) = to_upper_snake_case(&static_name) { - Replacement { - current_name: name.clone(), - suggested_text: new_name, - expected_case: CaseType::UpperSnakeCase, - } - } else { - // Nothing to do here. + fn validate_type_alias(&mut self, type_alias_id: TypeAliasId) { + let container = type_alias_id.lookup(self.db.upcast()).container; + if self.is_trait_impl_container(container) { + cov_mark::hit!(trait_impl_assoc_type_incorrect_case_ignored); + return; + } + + // Check whether non-snake case identifiers are allowed for this type alias. + if self.allowed(type_alias_id.into(), allow::NON_CAMEL_CASE_TYPES, false) { + return; + } + + // Check the type alias name. 
+ let data = self.db.type_alias_data(type_alias_id); + self.create_incorrect_case_diagnostic_for_item_name( + type_alias_id, + &data.name, + CaseType::UpperCamelCase, + IdentType::TypeAlias, + ); + } + + fn create_incorrect_case_diagnostic_for_item_name( + &mut self, + item_id: L, + name: &Name, + expected_case: CaseType, + ident_type: IdentType, + ) where + N: AstNode + HasName + fmt::Debug, + S: HasSource, + L: Lookup = dyn DefDatabase + 'a>, + { + let to_expected_case_type = match expected_case { + CaseType::LowerSnakeCase => to_lower_snake_case, + CaseType::UpperSnakeCase => to_upper_snake_case, + CaseType::UpperCamelCase => to_camel_case, + }; + let Some(replacement) = to_expected_case_type(&name.to_smol_str()).map(|new_name| { + Replacement { current_name: name.clone(), suggested_text: new_name, expected_case } + }) else { return; }; - let static_loc = static_id.lookup(self.db.upcast()); - let static_src = static_loc.source(self.db.upcast()); + let item_loc = item_id.lookup(self.db.upcast()); + let item_src = item_loc.source(self.db.upcast()); + self.create_incorrect_case_diagnostic_for_ast_node( + replacement, + item_src.file_id, + &item_src.value, + ident_type, + ); + } - let ast_ptr = match static_src.value.name() { - Some(name) => name, - None => return, + fn create_incorrect_case_diagnostic_for_ast_node( + &mut self, + replacement: Replacement, + file_id: HirFileId, + node: &T, + ident_type: IdentType, + ) where + T: AstNode + HasName + fmt::Debug, + { + let Some(name_ast) = node.name() else { + never!( + "Replacement ({:?}) was generated for a {:?} without a name: {:?}", + replacement, + ident_type, + node + ); + return; }; let diagnostic = IncorrectCase { - file: static_src.file_id, - ident_type: IdentType::StaticVariable, - ident: AstPtr::new(&ast_ptr), + file: file_id, + ident_type, + ident: AstPtr::new(&name_ast), expected_case: replacement.expected_case, ident_text: replacement.current_name.display(self.db.upcast()).to_string(), suggested_text: 
replacement.suggested_text, @@ -775,4 +677,13 @@ impl<'a> DeclValidator<'a> { self.sink.push(diagnostic); } + + fn is_trait_impl_container(&self, container_id: ItemContainerId) -> bool { + if let ItemContainerId::ImplId(impl_id) = container_id { + if self.db.impl_trait(impl_id).is_some() { + return true; + } + } + false + } } diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index eda8f2371c9c..7f8fb7f4b521 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -27,7 +27,7 @@ use crate::{ pub(crate) use hir_def::{ body::Body, - hir::{Expr, ExprId, MatchArm, Pat, PatId}, + hir::{Expr, ExprId, MatchArm, Pat, PatId, Statement}, LocalFieldId, VariantId, }; @@ -44,6 +44,12 @@ pub enum BodyValidationDiagnostic { match_expr: ExprId, uncovered_patterns: String, }, + RemoveTrailingReturn { + return_expr: ExprId, + }, + RemoveUnnecessaryElse { + if_expr: ExprId, + }, } impl BodyValidationDiagnostic { @@ -72,6 +78,10 @@ impl ExprValidator { let body = db.body(self.owner); let mut filter_map_next_checker = None; + if matches!(self.owner, DefWithBodyId::FunctionId(_)) { + self.check_for_trailing_return(body.body_expr, &body); + } + for (id, expr) in body.exprs.iter() { if let Some((variant, missed_fields, true)) = record_literal_missing_fields(db, &self.infer, id, expr) @@ -90,9 +100,16 @@ impl ExprValidator { Expr::Call { .. } | Expr::MethodCall { .. } => { self.validate_call(db, id, expr, &mut filter_map_next_checker); } + Expr::Closure { body: body_expr, .. } => { + self.check_for_trailing_return(*body_expr, &body); + } + Expr::If { .. 
} => { + self.check_for_unnecessary_else(id, expr, &body); + } _ => {} } } + for (id, pat) in body.pats.iter() { if let Some((variant, missed_fields, true)) = record_pattern_missing_fields(db, &self.infer, id, pat) @@ -153,14 +170,7 @@ impl ExprValidator { } let pattern_arena = Arena::new(); - let ty_arena = Arena::new(); - let cx = MatchCheckCtx::new( - self.owner.module(db.upcast()), - self.owner, - db, - &pattern_arena, - &ty_arena, - ); + let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db, &pattern_arena); let mut m_arms = Vec::with_capacity(arms.len()); let mut has_lowering_errors = false; @@ -207,7 +217,7 @@ impl ExprValidator { } let report = match compute_match_usefulness( - rustc_pattern_analysis::MatchCtxt { tycx: &cx }, + &cx, m_arms.as_slice(), scrut_ty.clone(), ValidityConstraint::ValidOnly, @@ -244,6 +254,59 @@ impl ExprValidator { } pattern } + + fn check_for_trailing_return(&mut self, body_expr: ExprId, body: &Body) { + match &body.exprs[body_expr] { + Expr::Block { statements, tail, .. } => { + let last_stmt = tail.or_else(|| match statements.last()? { + Statement::Expr { expr, .. } => Some(*expr), + _ => None, + }); + if let Some(last_stmt) = last_stmt { + self.check_for_trailing_return(last_stmt, body); + } + } + Expr::If { then_branch, else_branch, .. } => { + self.check_for_trailing_return(*then_branch, body); + if let Some(else_branch) = else_branch { + self.check_for_trailing_return(*else_branch, body); + } + } + Expr::Match { arms, .. } => { + for arm in arms.iter() { + let MatchArm { expr, .. } = arm; + self.check_for_trailing_return(*expr, body); + } + } + Expr::Return { .. 
} => { + self.diagnostics.push(BodyValidationDiagnostic::RemoveTrailingReturn { + return_expr: body_expr, + }); + } + _ => (), + } + } + + fn check_for_unnecessary_else(&mut self, id: ExprId, expr: &Expr, body: &Body) { + if let Expr::If { condition: _, then_branch, else_branch } = expr { + if else_branch.is_none() { + return; + } + if let Expr::Block { statements, tail, .. } = &body.exprs[*then_branch] { + let last_then_expr = tail.or_else(|| match statements.last()? { + Statement::Expr { expr, .. } => Some(*expr), + _ => None, + }); + if let Some(last_then_expr) = last_then_expr { + let last_then_expr_ty = &self.infer[last_then_expr]; + if last_then_expr_ty.is_never() { + self.diagnostics + .push(BodyValidationDiagnostic::RemoveUnnecessaryElse { if_expr: id }) + } + } + } + } + } } struct FilterMapNextChecker { diff --git a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs index 0b595042cd56..712842372b62 100644 --- a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs +++ b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs @@ -9,7 +9,7 @@ use rustc_pattern_analysis::{ index::IdxContainer, Captures, TypeCx, }; -use smallvec::SmallVec; +use smallvec::{smallvec, SmallVec}; use stdx::never; use typed_arena::Arena; @@ -41,8 +41,14 @@ pub(crate) struct MatchCheckCtx<'p> { body: DefWithBodyId, pub(crate) db: &'p dyn HirDatabase, pub(crate) pattern_arena: &'p Arena>, - ty_arena: &'p Arena, exhaustive_patterns: bool, + min_exhaustive_patterns: bool, +} + +#[derive(Clone)] +pub(crate) struct PatData<'p> { + /// Keep db around so that we can print variant names in `Debug`. 
+ pub(crate) db: &'p dyn HirDatabase, } impl<'p> MatchCheckCtx<'p> { @@ -51,11 +57,12 @@ impl<'p> MatchCheckCtx<'p> { body: DefWithBodyId, db: &'p dyn HirDatabase, pattern_arena: &'p Arena>, - ty_arena: &'p Arena, ) -> Self { let def_map = db.crate_def_map(module.krate()); let exhaustive_patterns = def_map.is_unstable_feature_enabled("exhaustive_patterns"); - Self { module, body, db, pattern_arena, exhaustive_patterns, ty_arena } + let min_exhaustive_patterns = + def_map.is_unstable_feature_enabled("min_exhaustive_patterns"); + Self { module, body, db, pattern_arena, exhaustive_patterns, min_exhaustive_patterns } } fn is_uninhabited(&self, ty: &Ty) -> bool { @@ -75,18 +82,15 @@ impl<'p> MatchCheckCtx<'p> { } } - fn variant_id_for_adt(&self, ctor: &Constructor, adt: hir_def::AdtId) -> VariantId { + fn variant_id_for_adt(ctor: &Constructor, adt: hir_def::AdtId) -> Option { match ctor { - &Variant(id) => id.into(), - Struct | UnionField => { - assert!(!matches!(adt, hir_def::AdtId::EnumId(_))); - match adt { - hir_def::AdtId::EnumId(_) => unreachable!(), - hir_def::AdtId::StructId(id) => id.into(), - hir_def::AdtId::UnionId(id) => id.into(), - } - } - _ => panic!("bad constructor {self:?} for adt {adt:?}"), + &Variant(id) => Some(id.into()), + Struct | UnionField => match adt { + hir_def::AdtId::EnumId(_) => None, + hir_def::AdtId::StructId(id) => Some(id.into()), + hir_def::AdtId::UnionId(id) => Some(id.into()), + }, + _ => panic!("bad constructor {ctor:?} for adt {adt:?}"), } } @@ -200,7 +204,7 @@ impl<'p> MatchCheckCtx<'p> { Wildcard } }; - let variant = self.variant_id_for_adt(&ctor, adt.0); + let variant = Self::variant_id_for_adt(&ctor, adt.0).unwrap(); let fields_len = variant.variant_data(self.db.upcast()).fields().len(); // For each field in the variant, we store the relevant index into `self.fields` if any. 
let mut field_id_to_id: Vec> = vec![None; fields_len]; @@ -241,7 +245,8 @@ impl<'p> MatchCheckCtx<'p> { fields = self.pattern_arena.alloc_extend(subpats); } } - DeconstructedPat::new(ctor, fields, pat.ty.clone(), ()) + let data = PatData { db: self.db }; + DeconstructedPat::new(ctor, fields, pat.ty.clone(), data) } pub(crate) fn hoist_witness_pat(&self, pat: &WitnessPat<'p>) -> Pat { @@ -266,7 +271,7 @@ impl<'p> MatchCheckCtx<'p> { PatKind::Deref { subpattern: subpatterns.next().unwrap() } } TyKind::Adt(adt, substs) => { - let variant = self.variant_id_for_adt(pat.ctor(), adt.0); + let variant = Self::variant_id_for_adt(pat.ctor(), adt.0).unwrap(); let subpatterns = self .list_variant_nonhidden_fields(pat.ty(), variant) .zip(subpatterns) @@ -307,11 +312,14 @@ impl<'p> TypeCx for MatchCheckCtx<'p> { type VariantIdx = EnumVariantId; type StrLit = Void; type ArmData = (); - type PatData = (); + type PatData = PatData<'p>; fn is_exhaustive_patterns_feature_on(&self) -> bool { self.exhaustive_patterns } + fn is_min_exhaustive_patterns_feature_on(&self) -> bool { + self.min_exhaustive_patterns + } fn ctor_arity( &self, @@ -327,7 +335,7 @@ impl<'p> TypeCx for MatchCheckCtx<'p> { // patterns. If we're here we can assume this is a box pattern. 
1 } else { - let variant = self.variant_id_for_adt(ctor, adt); + let variant = Self::variant_id_for_adt(ctor, adt).unwrap(); self.list_variant_nonhidden_fields(ty, variant).count() } } @@ -347,54 +355,51 @@ impl<'p> TypeCx for MatchCheckCtx<'p> { } } - fn ctor_sub_tys( - &self, - ctor: &rustc_pattern_analysis::constructor::Constructor, - ty: &Self::Ty, - ) -> &[Self::Ty] { - use std::iter::once; - fn alloc<'a>(cx: &'a MatchCheckCtx<'_>, iter: impl Iterator) -> &'a [Ty] { - cx.ty_arena.alloc_extend(iter) - } - match ctor { + fn ctor_sub_tys<'a>( + &'a self, + ctor: &'a rustc_pattern_analysis::constructor::Constructor, + ty: &'a Self::Ty, + ) -> impl ExactSizeIterator + Captures<'a> { + let single = |ty| smallvec![ty]; + let tys: SmallVec<[_; 2]> = match ctor { Struct | Variant(_) | UnionField => match ty.kind(Interner) { TyKind::Tuple(_, substs) => { let tys = substs.iter(Interner).map(|ty| ty.assert_ty_ref(Interner)); - alloc(self, tys.cloned()) + tys.cloned().collect() } - TyKind::Ref(.., rty) => alloc(self, once(rty.clone())), + TyKind::Ref(.., rty) => single(rty.clone()), &TyKind::Adt(AdtId(adt), ref substs) => { if is_box(self.db, adt) { // The only legal patterns of type `Box` (outside `std`) are `_` and box // patterns. If we're here we can assume this is a box pattern. 
let subst_ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone(); - alloc(self, once(subst_ty)) + single(subst_ty) } else { - let variant = self.variant_id_for_adt(ctor, adt); - let tys = self.list_variant_nonhidden_fields(ty, variant).map(|(_, ty)| ty); - alloc(self, tys) + let variant = Self::variant_id_for_adt(ctor, adt).unwrap(); + self.list_variant_nonhidden_fields(ty, variant).map(|(_, ty)| ty).collect() } } ty_kind => { never!("Unexpected type for `{:?}` constructor: {:?}", ctor, ty_kind); - alloc(self, once(ty.clone())) + single(ty.clone()) } }, Ref => match ty.kind(Interner) { - TyKind::Ref(.., rty) => alloc(self, once(rty.clone())), + TyKind::Ref(.., rty) => single(rty.clone()), ty_kind => { never!("Unexpected type for `{:?}` constructor: {:?}", ctor, ty_kind); - alloc(self, once(ty.clone())) + single(ty.clone()) } }, Slice(_) => unreachable!("Found a `Slice` constructor in match checking"), Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..) | Opaque(..) - | NonExhaustive | Hidden | Missing | Wildcard => &[], + | NonExhaustive | Hidden | Missing | Wildcard => smallvec![], Or => { never!("called `Fields::wildcards` on an `Or` ctor"); - &[] + smallvec![] } - } + }; + tys.into_iter() } fn ctors_for_ty( @@ -456,11 +461,27 @@ impl<'p> TypeCx for MatchCheckCtx<'p> { }) } - fn debug_pat( - _f: &mut fmt::Formatter<'_>, - _pat: &rustc_pattern_analysis::pat::DeconstructedPat<'_, Self>, + fn write_variant_name( + f: &mut fmt::Formatter<'_>, + pat: &rustc_pattern_analysis::pat::DeconstructedPat<'_, Self>, ) -> fmt::Result { - // FIXME: implement this, as using `unimplemented!()` causes panics in `tracing`. 
+ let variant = + pat.ty().as_adt().and_then(|(adt, _)| Self::variant_id_for_adt(pat.ctor(), adt)); + + let db = pat.data().unwrap().db; + if let Some(variant) = variant { + match variant { + VariantId::EnumVariantId(v) => { + write!(f, "{}", db.enum_variant_data(v).name.display(db.upcast()))?; + } + VariantId::StructId(s) => { + write!(f, "{}", db.struct_data(s).name.display(db.upcast()))? + } + VariantId::UnionId(u) => { + write!(f, "{}", db.union_data(u).name.display(db.upcast()))? + } + } + } Ok(()) } diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index 2327c8df1b44..fe51ec3f8210 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -32,7 +32,7 @@ use triomphe::Arc; use crate::{ consteval::try_const_usize, - db::HirDatabase, + db::{HirDatabase, InternedClosure}, from_assoc_type_id, from_foreign_def_id, from_placeholder_idx, layout::Layout, lt_from_placeholder_idx, @@ -814,9 +814,8 @@ impl HirDisplay for Ty { // Don't count Sized but count when it absent // (i.e. when explicit ?Sized bound is set). 
- let default_sized = SizedByDefault::Sized { - anchor: func.lookup(db.upcast()).module(db.upcast()).krate(), - }; + let default_sized = + SizedByDefault::Sized { anchor: func.krate(db.upcast()) }; let sized_bounds = bounds .skip_binders() .iter() @@ -1025,7 +1024,7 @@ impl HirDisplay for Ty { let data = (*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone()); let bounds = data.substitute(Interner, ¶meters); - let krate = func.lookup(db.upcast()).module(db.upcast()).krate(); + let krate = func.krate(db.upcast()); write_bounds_like_dyn_trait_with_prefix( f, "impl", @@ -1086,7 +1085,7 @@ impl HirDisplay for Ty { } let sig = ClosureSubst(substs).sig_ty().callable_sig(db); if let Some(sig) = sig { - let (def, _) = db.lookup_intern_closure((*id).into()); + let InternedClosure(def, _) = db.lookup_intern_closure((*id).into()); let infer = db.infer(def); let (_, kind) = infer.closure_info(id); match f.closure_style { @@ -1191,7 +1190,7 @@ impl HirDisplay for Ty { let data = (*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone()); let bounds = data.substitute(Interner, &opaque_ty.substitution); - let krate = func.lookup(db.upcast()).module(db.upcast()).krate(); + let krate = func.krate(db.upcast()); write_bounds_like_dyn_trait_with_prefix( f, "impl", diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs index 572df8f71374..c3746f787067 100644 --- a/crates/hir-ty/src/infer/closure.rs +++ b/crates/hir-ty/src/infer/closure.rs @@ -21,7 +21,7 @@ use smallvec::SmallVec; use stdx::never; use crate::{ - db::HirDatabase, + db::{HirDatabase, InternedClosure}, from_placeholder_idx, make_binders, mir::{BorrowKind, MirSpan, ProjectionElem}, static_lifetime, to_chalk_trait_id, @@ -194,17 +194,15 @@ impl CapturedItem { } let variant_data = f.parent.variant_data(db.upcast()); let field = match &*variant_data { - VariantData::Record(fields) => fields[f.local_id] - .name - .as_str() - .unwrap_or("[missing field]") - .to_string(), + 
VariantData::Record(fields) => { + fields[f.local_id].name.as_str().unwrap_or("[missing field]").to_owned() + } VariantData::Tuple(fields) => fields .iter() .position(|it| it.0 == f.local_id) .unwrap_or_default() .to_string(), - VariantData::Unit => "[missing field]".to_string(), + VariantData::Unit => "[missing field]".to_owned(), }; result = format!("{result}.{field}"); field_need_paren = false; @@ -718,7 +716,7 @@ impl InferenceContext<'_> { fn is_upvar(&self, place: &HirPlace) -> bool { if let Some(c) = self.current_closure { - let (_, root) = self.db.lookup_intern_closure(c.into()); + let InternedClosure(_, root) = self.db.lookup_intern_closure(c.into()); return self.body.is_binding_upvar(place.local, root); } false @@ -940,7 +938,7 @@ impl InferenceContext<'_> { } fn analyze_closure(&mut self, closure: ClosureId) -> FnTrait { - let (_, root) = self.db.lookup_intern_closure(closure.into()); + let InternedClosure(_, root) = self.db.lookup_intern_closure(closure.into()); self.current_closure = Some(closure); let Expr::Closure { body, capture_by, .. 
} = &self.body[root] else { unreachable!("Closure expression id is always closure"); diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 842f7bdafe7a..8b8e97b0081c 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -23,6 +23,7 @@ use syntax::ast::RangeOp; use crate::{ autoderef::{builtin_deref, deref_by_trait, Autoderef}, consteval, + db::{InternedClosure, InternedCoroutine}, infer::{ coerce::{CoerceMany, CoercionCause}, find_continuable, @@ -253,13 +254,17 @@ impl InferenceContext<'_> { .push(ret_ty.clone()) .build(); - let coroutine_id = self.db.intern_coroutine((self.owner, tgt_expr)).into(); + let coroutine_id = self + .db + .intern_coroutine(InternedCoroutine(self.owner, tgt_expr)) + .into(); let coroutine_ty = TyKind::Coroutine(coroutine_id, subst).intern(Interner); (None, coroutine_ty, Some((resume_ty, yield_ty))) } ClosureKind::Closure | ClosureKind::Async => { - let closure_id = self.db.intern_closure((self.owner, tgt_expr)).into(); + let closure_id = + self.db.intern_closure(InternedClosure(self.owner, tgt_expr)).into(); let closure_ty = TyKind::Closure( closure_id, TyBuilder::subst_for_closure(self.db, self.owner, sig_ty.clone()), diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs index 61c82339508d..de23ca34990b 100644 --- a/crates/hir-ty/src/infer/unify.rs +++ b/crates/hir-ty/src/infer/unify.rs @@ -469,12 +469,14 @@ impl<'a> InferenceTable<'a> { } } + #[tracing::instrument(skip_all)] pub(crate) fn rollback_to(&mut self, snapshot: InferenceTableSnapshot) { self.var_unification_table.rollback_to(snapshot.var_table_snapshot); self.type_variable_table = snapshot.type_variable_table_snapshot; self.pending_obligations = snapshot.pending_obligations; } + #[tracing::instrument(skip_all)] pub(crate) fn run_in_snapshot(&mut self, f: impl FnOnce(&mut InferenceTable<'_>) -> T) -> T { let snapshot = self.snapshot(); let result = f(self); diff --git 
a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs index 310c4cc9ffec..be1c8d9094bf 100644 --- a/crates/hir-ty/src/layout.rs +++ b/crates/hir-ty/src/layout.rs @@ -19,8 +19,12 @@ use stdx::never; use triomphe::Arc; use crate::{ - consteval::try_const_usize, db::HirDatabase, infer::normalize, layout::adt::struct_variant_idx, - utils::ClosureSubst, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, + consteval::try_const_usize, + db::{HirDatabase, InternedClosure}, + infer::normalize, + layout::adt::struct_variant_idx, + utils::ClosureSubst, + Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, }; pub use self::{ @@ -391,7 +395,7 @@ pub fn layout_of_ty_query( } } TyKind::Closure(c, subst) => { - let (def, _) = db.lookup_intern_closure((*c).into()); + let InternedClosure(def, _) = db.lookup_intern_closure((*c).into()); let infer = db.infer(def); let (captures, _) = infer.closure_info(c); let fields = captures diff --git a/crates/hir-ty/src/layout/tests.rs b/crates/hir-ty/src/layout/tests.rs index 1f2ea753c1b7..ba3dfe8100d1 100644 --- a/crates/hir-ty/src/layout/tests.rs +++ b/crates/hir-ty/src/layout/tests.rs @@ -1,8 +1,7 @@ -use std::collections::HashMap; - use chalk_ir::{AdtId, TyKind}; use either::Either; use hir_def::db::DefDatabase; +use rustc_hash::FxHashMap; use test_fixture::WithFixture; use triomphe::Arc; @@ -16,7 +15,7 @@ use crate::{ mod closure; fn current_machine_data_layout() -> String { - project_model::target_data_layout::get(None, None, &HashMap::default()).unwrap() + project_model::target_data_layout::get(None, None, &FxHashMap::default()).unwrap() } fn eval_goal(ra_fixture: &str, minicore: &str) -> Result, LayoutError> { diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs index 288c42405d6a..70138633341c 100644 --- a/crates/hir-ty/src/lib.rs +++ b/crates/hir-ty/src/lib.rs @@ -51,6 +51,7 @@ use std::{ hash::{BuildHasherDefault, Hash}, }; +use base_db::salsa::impl_intern_value_trivial; use chalk_ir::{ fold::{Shift, 
TypeFoldable}, interner::HasInterner, @@ -228,7 +229,7 @@ impl MemoryMap { &self, mut f: impl FnMut(&[u8], usize) -> Result, ) -> Result, MirEvalError> { - let mut transform = |(addr, val): (&usize, &Box<[u8]>)| { + let mut transform = |(addr, val): (&usize, &[u8])| { let addr = *addr; let align = if addr == 0 { 64 } else { (addr - (addr & (addr - 1))).min(64) }; f(val, align).map(|it| (addr, it)) @@ -240,7 +241,9 @@ impl MemoryMap { map.insert(addr, val); map }), - MemoryMap::Complex(cm) => cm.memory.iter().map(transform).collect(), + MemoryMap::Complex(cm) => { + cm.memory.iter().map(|(addr, val)| transform((addr, val))).collect() + } } } @@ -584,6 +587,7 @@ pub enum ImplTraitId { ReturnTypeImplTrait(hir_def::FunctionId, RpitId), AsyncBlockTypeImplTrait(hir_def::DefWithBodyId, ExprId), } +impl_intern_value_trivial!(ImplTraitId); #[derive(Clone, PartialEq, Eq, Debug, Hash)] pub struct ReturnTypeImplTraits { diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index 386a03d93f37..75ac3b0d66b8 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -10,7 +10,10 @@ use std::{ iter, }; -use base_db::{salsa::Cycle, CrateId}; +use base_db::{ + salsa::{impl_intern_value_trivial, Cycle}, + CrateId, +}; use chalk_ir::{ cast::Cast, fold::Shift, fold::TypeFoldable, interner::HasInterner, Mutability, Safety, }; @@ -1225,7 +1228,7 @@ impl<'a> TyLoweringContext<'a> { .collect(); if !ctx.unsized_types.borrow().contains(&self_ty) { - let krate = func.lookup(ctx.db.upcast()).module(ctx.db.upcast()).krate(); + let krate = func.krate(ctx.db.upcast()); let sized_trait = ctx .db .lang_item(krate, LangItem::Sized) @@ -1809,6 +1812,7 @@ pub enum CallableDefId { StructId(StructId), EnumVariantId(EnumVariantId), } +impl_intern_value_trivial!(CallableDefId); impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId); impl From for ModuleDefId { fn from(def: CallableDefId) -> ModuleDefId { @@ -1824,11 +1828,10 @@ impl CallableDefId { pub fn 
krate(self, db: &dyn HirDatabase) -> CrateId { let db = db.upcast(); match self { - CallableDefId::FunctionId(f) => f.lookup(db).module(db), - CallableDefId::StructId(s) => s.lookup(db).container, - CallableDefId::EnumVariantId(e) => e.module(db), + CallableDefId::FunctionId(f) => f.krate(db), + CallableDefId::StructId(s) => s.krate(db), + CallableDefId::EnumVariantId(e) => e.krate(db), } - .krate() } } diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs index 1c068bf684ae..a4baf572d9e3 100644 --- a/crates/hir-ty/src/method_resolution.rs +++ b/crates/hir-ty/src/method_resolution.rs @@ -931,6 +931,15 @@ pub fn iterate_method_candidates_dyn( mode: LookupMode, callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, ) -> ControlFlow<()> { + let _p = tracing::span!( + tracing::Level::INFO, + "iterate_method_candidates_dyn", + ?mode, + ?name, + traits_in_scope_len = traits_in_scope.len() + ) + .entered(); + match mode { LookupMode::MethodCall => { // For method calls, rust first does any number of autoderef, and @@ -984,6 +993,7 @@ pub fn iterate_method_candidates_dyn( } } +#[tracing::instrument(skip_all, fields(name = ?name))] fn iterate_method_candidates_with_autoref( receiver_ty: &Canonical, first_adjustment: ReceiverAdjustments, @@ -1041,6 +1051,7 @@ fn iterate_method_candidates_with_autoref( ) } +#[tracing::instrument(skip_all, fields(name = ?name))] fn iterate_method_candidates_by_receiver( receiver_ty: &Canonical, receiver_adjustments: ReceiverAdjustments, @@ -1088,6 +1099,7 @@ fn iterate_method_candidates_by_receiver( ControlFlow::Continue(()) } +#[tracing::instrument(skip_all, fields(name = ?name))] fn iterate_method_candidates_for_self_ty( self_ty: &Canonical, db: &dyn HirDatabase, @@ -1119,6 +1131,7 @@ fn iterate_method_candidates_for_self_ty( ) } +#[tracing::instrument(skip_all, fields(name = ?name, visible_from_module, receiver_ty))] fn iterate_trait_method_candidates( self_ty: &Ty, table: 
&mut InferenceTable<'_>, @@ -1175,6 +1188,7 @@ fn iterate_trait_method_candidates( ControlFlow::Continue(()) } +#[tracing::instrument(skip_all, fields(name = ?name, visible_from_module, receiver_ty))] fn iterate_inherent_methods( self_ty: &Ty, table: &mut InferenceTable<'_>, @@ -1267,6 +1281,7 @@ fn iterate_inherent_methods( } return ControlFlow::Continue(()); + #[tracing::instrument(skip_all, fields(name = ?name, visible_from_module, receiver_ty))] fn iterate_inherent_trait_methods( self_ty: &Ty, table: &mut InferenceTable<'_>, @@ -1293,6 +1308,7 @@ fn iterate_inherent_methods( ControlFlow::Continue(()) } + #[tracing::instrument(skip_all, fields(name = ?name, visible_from_module, receiver_ty))] fn impls_for_self_ty( impls: &InherentImpls, self_ty: &Ty, @@ -1356,6 +1372,7 @@ macro_rules! check_that { }; } +#[tracing::instrument(skip_all, fields(name))] fn is_valid_candidate( table: &mut InferenceTable<'_>, name: Option<&Name>, @@ -1403,6 +1420,7 @@ enum IsValidCandidate { NotVisible, } +#[tracing::instrument(skip_all, fields(name))] fn is_valid_fn_candidate( table: &mut InferenceTable<'_>, fn_id: FunctionId, @@ -1439,15 +1457,15 @@ fn is_valid_fn_candidate( _ => unreachable!(), }; - let fn_subst = TyBuilder::subst_for_def(db, fn_id, Some(impl_subst.clone())) - .fill_with_inference_vars(table) - .build(); - check_that!(table.unify(&expect_self_ty, self_ty)); if let Some(receiver_ty) = receiver_ty { check_that!(data.has_self_param()); + let fn_subst = TyBuilder::subst_for_def(db, fn_id, Some(impl_subst.clone())) + .fill_with_inference_vars(table) + .build(); + let sig = db.callable_item_signature(fn_id.into()); let expected_receiver = sig.map(|s| s.params()[0].clone()).substitute(Interner, &fn_subst); @@ -1540,6 +1558,7 @@ pub fn implements_trait_unique( /// This creates Substs for a trait with the given Self type and type variables /// for all other parameters, to query Chalk with it. 
+#[tracing::instrument(skip_all)] fn generic_implements_goal( db: &dyn HirDatabase, env: Arc, diff --git a/crates/hir-ty/src/mir/borrowck.rs b/crates/hir-ty/src/mir/borrowck.rs index ea4e60cad300..9089c11c5d9b 100644 --- a/crates/hir-ty/src/mir/borrowck.rs +++ b/crates/hir-ty/src/mir/borrowck.rs @@ -11,7 +11,10 @@ use stdx::never; use triomphe::Arc; use crate::{ - db::HirDatabase, mir::Operand, utils::ClosureSubst, ClosureId, Interner, Ty, TyExt, TypeFlags, + db::{HirDatabase, InternedClosure}, + mir::Operand, + utils::ClosureSubst, + ClosureId, Interner, Ty, TyExt, TypeFlags, }; use super::{ @@ -97,7 +100,7 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec ty, db, |c, subst, f| { - let (def, _) = db.lookup_intern_closure(c.into()); + let InternedClosure(def, _) = db.lookup_intern_closure(c.into()); let infer = db.infer(def); let (captures, _) = infer.closure_info(&c); let parent_subst = ClosureSubst(subst).parent_subst(); @@ -215,7 +218,7 @@ fn place_case(db: &dyn HirDatabase, body: &MirBody, lvalue: &Place) -> Projectio ty, db, |c, subst, f| { - let (def, _) = db.lookup_intern_closure(c.into()); + let InternedClosure(def, _) = db.lookup_intern_closure(c.into()); let infer = db.infer(def); let (captures, _) = infer.closure_info(&c); let parent_subst = ClosureSubst(subst).parent_subst(); diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs index 50c4d00660b7..2428678d72b5 100644 --- a/crates/hir-ty/src/mir/eval.rs +++ b/crates/hir-ty/src/mir/eval.rs @@ -25,7 +25,7 @@ use triomphe::Arc; use crate::{ consteval::{intern_const_scalar, try_const_usize, ConstEvalError}, - db::HirDatabase, + db::{HirDatabase, InternedClosure}, display::{ClosureStyle, HirDisplay}, infer::PointerCast, layout::{Layout, LayoutError, RustcEnumVariantIdx}, @@ -647,7 +647,7 @@ impl Evaluator<'_> { ty.clone(), self.db, |c, subst, f| { - let (def, _) = self.db.lookup_intern_closure(c.into()); + let InternedClosure(def, _) = 
self.db.lookup_intern_closure(c.into()); let infer = self.db.infer(def); let (captures, _) = infer.closure_info(&c); let parent_subst = ClosureSubst(subst).parent_subst(); @@ -1763,7 +1763,7 @@ impl Evaluator<'_> { } }; mem.get(pos..pos + size) - .ok_or_else(|| MirEvalError::UndefinedBehavior("out of bound memory read".to_string())) + .ok_or_else(|| MirEvalError::UndefinedBehavior("out of bound memory read".to_owned())) } fn write_memory_using_ref(&mut self, addr: Address, size: usize) -> Result<&mut [u8]> { @@ -1777,7 +1777,7 @@ impl Evaluator<'_> { } }; mem.get_mut(pos..pos + size) - .ok_or_else(|| MirEvalError::UndefinedBehavior("out of bound memory write".to_string())) + .ok_or_else(|| MirEvalError::UndefinedBehavior("out of bound memory write".to_owned())) } fn write_memory(&mut self, addr: Address, r: &[u8]) -> Result<()> { @@ -1800,7 +1800,7 @@ impl Evaluator<'_> { return Ok(()); } - let oob = || MirEvalError::UndefinedBehavior("out of bounds memory write".to_string()); + let oob = || MirEvalError::UndefinedBehavior("out of bounds memory write".to_owned()); match (addr, r.addr) { (Stack(dst), Stack(src)) => { @@ -2653,7 +2653,7 @@ pub fn render_const_using_debug_impl( ptr: ArenaMap::new(), body: db .mir_body(owner.into()) - .map_err(|_| MirEvalError::NotSupported("unreachable".to_string()))?, + .map_err(|_| MirEvalError::NotSupported("unreachable".to_owned()))?, drop_flags: DropFlags::default(), }; let data = evaluator.allocate_const_in_heap(locals, c)?; diff --git a/crates/hir-ty/src/mir/eval/shim.rs b/crates/hir-ty/src/mir/eval/shim.rs index b4fb99acae77..d68803fe2801 100644 --- a/crates/hir-ty/src/mir/eval/shim.rs +++ b/crates/hir-ty/src/mir/eval/shim.rs @@ -178,7 +178,7 @@ impl Evaluator<'_> { not_supported!("wrong arg count for clone"); }; let addr = Address::from_bytes(arg.get(self)?)?; - let (closure_owner, _) = self.db.lookup_intern_closure((*id).into()); + let InternedClosure(closure_owner, _) = self.db.lookup_intern_closure((*id).into()); let infer 
= self.db.infer(closure_owner); let (captures, _) = infer.closure_info(id); let layout = self.layout(&self_ty)?; @@ -304,7 +304,7 @@ impl Evaluator<'_> { use LangItem::*; let mut args = args.iter(); match it { - BeginPanic => Err(MirEvalError::Panic("".to_string())), + BeginPanic => Err(MirEvalError::Panic("".to_owned())), PanicFmt => { let message = (|| { let resolver = self diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index 28d26c6c8ae6..1572a6d497c5 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -25,7 +25,7 @@ use triomphe::Arc; use crate::{ consteval::ConstEvalError, - db::HirDatabase, + db::{HirDatabase, InternedClosure}, display::HirDisplay, infer::{CaptureKind, CapturedItem, TypeMismatch}, inhabitedness::is_ty_uninhabited_from, @@ -126,6 +126,10 @@ impl DropScopeToken { } } +impl Drop for DropScopeToken { + fn drop(&mut self) {} +} + // Uncomment this to make `DropScopeToken` a drop bomb. Unfortunately we can't do this in release, since // in cases that mir lowering fails, we don't handle (and don't need to handle) drop scopes so it will be // actually reached. `pop_drop_scope_assert_finished` will also detect this case, but doesn't show useful @@ -1630,7 +1634,7 @@ impl<'ctx> MirLowerCtx<'ctx> { self.set_goto(prev_block, begin, span); f(self, begin)?; let my = mem::replace(&mut self.current_loop_blocks, prev).ok_or( - MirLowerError::ImplementationError("current_loop_blocks is corrupt".to_string()), + MirLowerError::ImplementationError("current_loop_blocks is corrupt".to_owned()), )?; if let Some(prev) = prev_label { self.labeled_loop_blocks.insert(label.unwrap(), prev); @@ -1665,7 +1669,7 @@ impl<'ctx> MirLowerCtx<'ctx> { .current_loop_blocks .as_mut() .ok_or(MirLowerError::ImplementationError( - "Current loop access out of loop".to_string(), + "Current loop access out of loop".to_owned(), ))? 
.end { @@ -1675,7 +1679,7 @@ impl<'ctx> MirLowerCtx<'ctx> { self.current_loop_blocks .as_mut() .ok_or(MirLowerError::ImplementationError( - "Current loop access out of loop".to_string(), + "Current loop access out of loop".to_owned(), ))? .end = Some(s); s @@ -1973,7 +1977,7 @@ pub fn mir_body_for_closure_query( db: &dyn HirDatabase, closure: ClosureId, ) -> Result> { - let (owner, expr) = db.lookup_intern_closure(closure.into()); + let InternedClosure(owner, expr) = db.lookup_intern_closure(closure.into()); let body = db.body(owner); let infer = db.infer(owner); let Expr::Closure { args, body: root, .. } = &body[expr] else { diff --git a/crates/hir-ty/src/mir/lower/as_place.rs b/crates/hir-ty/src/mir/lower/as_place.rs index 8d157944020d..afe33607d468 100644 --- a/crates/hir-ty/src/mir/lower/as_place.rs +++ b/crates/hir-ty/src/mir/lower/as_place.rs @@ -225,7 +225,7 @@ impl MirLowerCtx<'_> { { let Some(index_fn) = self.infer.method_resolution(expr_id) else { return Err(MirLowerError::UnresolvedMethod( - "[overloaded index]".to_string(), + "[overloaded index]".to_owned(), )); }; let Some((base_place, current)) = diff --git a/crates/hir-ty/src/mir/monomorphization.rs b/crates/hir-ty/src/mir/monomorphization.rs index 46dec257e89a..d2e413f0a33a 100644 --- a/crates/hir-ty/src/mir/monomorphization.rs +++ b/crates/hir-ty/src/mir/monomorphization.rs @@ -19,7 +19,7 @@ use triomphe::Arc; use crate::{ consteval::{intern_const_scalar, unknown_const}, - db::HirDatabase, + db::{HirDatabase, InternedClosure}, from_placeholder_idx, infer::normalize, utils::{generics, Generics}, @@ -315,7 +315,7 @@ pub fn monomorphized_mir_body_for_closure_query( subst: Substitution, trait_env: Arc, ) -> Result, MirLowerError> { - let (owner, _) = db.lookup_intern_closure(closure.into()); + let InternedClosure(owner, _) = db.lookup_intern_closure(closure.into()); let generics = owner.as_generic_def_id().map(|g_def| generics(db.upcast(), g_def)); let filler = &mut Filler { db, subst: &subst, 
trait_env, generics, owner }; let body = db.mir_body_for_closure(closure)?; diff --git a/crates/hir-ty/src/tests.rs b/crates/hir-ty/src/tests.rs index 9804910c878d..5e159236f488 100644 --- a/crates/hir-ty/src/tests.rs +++ b/crates/hir-ty/src/tests.rs @@ -10,7 +10,7 @@ mod regression; mod simple; mod traits; -use std::{collections::HashMap, env}; +use std::env; use base_db::{FileRange, SourceDatabaseExt}; use expect_test::Expect; @@ -25,6 +25,7 @@ use hir_def::{ }; use hir_expand::{db::ExpandDatabase, InFile}; use once_cell::race::OnceBool; +use rustc_hash::FxHashMap; use stdx::format_to; use syntax::{ ast::{self, AstNode, HasName}, @@ -90,16 +91,16 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour let (db, files) = TestDB::with_many_files(ra_fixture); let mut had_annotations = false; - let mut mismatches = HashMap::new(); - let mut types = HashMap::new(); - let mut adjustments = HashMap::<_, Vec<_>>::new(); + let mut mismatches = FxHashMap::default(); + let mut types = FxHashMap::default(); + let mut adjustments = FxHashMap::<_, Vec<_>>::default(); for (file_id, annotations) in db.extract_annotations() { for (range, expected) in annotations { let file_range = FileRange { file_id, range }; if only_types { types.insert(file_range, expected); } else if expected.starts_with("type: ") { - types.insert(file_range, expected.trim_start_matches("type: ").to_string()); + types.insert(file_range, expected.trim_start_matches("type: ").to_owned()); } else if expected.starts_with("expected") { mismatches.insert(file_range, expected); } else if expected.starts_with("adjustments:") { @@ -109,7 +110,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour .trim_start_matches("adjustments:") .trim() .split(',') - .map(|it| it.trim().to_string()) + .map(|it| it.trim().to_owned()) .filter(|it| !it.is_empty()) .collect(), ); @@ -330,7 +331,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String { 
}); for (node, ty) in &types { let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.value.clone()) { - (self_param.name().unwrap().syntax().text_range(), "self".to_string()) + (self_param.name().unwrap().syntax().text_range(), "self".to_owned()) } else { (node.value.text_range(), node.value.text().to_string().replace('\n', " ")) }; diff --git a/crates/hir-ty/src/tests/macros.rs b/crates/hir-ty/src/tests/macros.rs index b0a9361f1c51..2f75338f9943 100644 --- a/crates/hir-ty/src/tests/macros.rs +++ b/crates/hir-ty/src/tests/macros.rs @@ -1373,3 +1373,34 @@ pub fn attr_macro() {} "#, ); } + +#[test] +fn clone_with_type_bound() { + check_types( + r#" +//- minicore: derive, clone, builtin_impls +#[derive(Clone)] +struct Float; + +trait TensorKind: Clone { + /// The primitive type of the tensor. + type Primitive: Clone; +} + +impl TensorKind for Float { + type Primitive = f64; +} + +#[derive(Clone)] +struct Tensor where K: TensorKind +{ + primitive: K::Primitive, +} + +fn foo(t: Tensor) { + let x = t.clone(); + //^ Tensor +} +"#, + ); +} diff --git a/crates/hir-ty/src/traits.rs b/crates/hir-ty/src/traits.rs index 3a1a4e63ea12..b2232b920aa0 100644 --- a/crates/hir-ty/src/traits.rs +++ b/crates/hir-ty/src/traits.rs @@ -104,8 +104,8 @@ pub(crate) fn trait_solve_query( GoalData::DomainGoal(DomainGoal::Holds(WhereClause::Implemented(it))) => { db.trait_data(it.hir_trait_id()).name.display(db.upcast()).to_string() } - GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_string(), - _ => "??".to_string(), + GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_owned(), + _ => "??".to_owned(), }; let _p = tracing::span!(tracing::Level::INFO, "trait_solve_query", ?detail).entered(); tracing::info!("trait_solve_query({:?})", goal.value.goal); @@ -187,7 +187,7 @@ struct LoggingRustIrDatabaseLoggingOnDrop<'a>(LoggingRustIrDatabase { fn drop(&mut self) { - eprintln!("chalk program:\n{}", self.0); + 
tracing::info!("chalk program:\n{}", self.0); } } diff --git a/crates/hir/src/attrs.rs b/crates/hir/src/attrs.rs index 5c369f42e6e7..7d637bac0966 100644 --- a/crates/hir/src/attrs.rs +++ b/crates/hir/src/attrs.rs @@ -30,7 +30,7 @@ macro_rules! impl_has_attrs { impl HasAttrs for $def { fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner { let def = AttrDefId::$def_id(self.into()); - AttrsWithOwner::attrs_with_owner(db.upcast(), def) + AttrsWithOwner::new(db.upcast(), def) } fn attr_id(self) -> AttrDefId { AttrDefId::$def_id(self.into()) diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index 2d8f1dbad51a..08843a6c9994 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -67,6 +67,8 @@ diagnostics![ NoSuchField, PrivateAssocItem, PrivateField, + RemoveTrailingReturn, + RemoveUnnecessaryElse, ReplaceFilterMapNextWithFindMap, TraitImplIncorrectSafety, TraitImplMissingAssocItems, @@ -342,6 +344,16 @@ pub struct TraitImplRedundantAssocItems { pub assoc_item: (Name, AssocItem), } +#[derive(Debug)] +pub struct RemoveTrailingReturn { + pub return_expr: InFile>, +} + +#[derive(Debug)] +pub struct RemoveUnnecessaryElse { + pub if_expr: InFile>, +} + impl AnyDiagnostic { pub(crate) fn body_validation_diagnostic( db: &dyn HirDatabase, @@ -444,6 +456,29 @@ impl AnyDiagnostic { Err(SyntheticSyntax) => (), } } + BodyValidationDiagnostic::RemoveTrailingReturn { return_expr } => { + if let Ok(source_ptr) = source_map.expr_syntax(return_expr) { + // Filters out desugared return expressions (e.g. desugared try operators). 
+ if let Some(ptr) = source_ptr.value.cast::() { + return Some( + RemoveTrailingReturn { + return_expr: InFile::new(source_ptr.file_id, ptr), + } + .into(), + ); + } + } + } + BodyValidationDiagnostic::RemoveUnnecessaryElse { if_expr } => { + if let Ok(source_ptr) = source_map.expr_syntax(if_expr) { + if let Some(ptr) = source_ptr.value.cast::() { + return Some( + RemoveUnnecessaryElse { if_expr: InFile::new(source_ptr.file_id, ptr) } + .into(), + ); + } + } + } } None } @@ -546,9 +581,7 @@ impl AnyDiagnostic { source_map.pat_syntax(pat).expect("unexpected synthetic"); // cast from Either -> Either<_, Pat> - let Some(ptr) = AstPtr::try_from_raw(value.syntax_node_ptr()) else { - return None; - }; + let ptr = AstPtr::try_from_raw(value.syntax_node_ptr())?; InFile { file_id, value: ptr } } }; diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs index 9b99b141fc5e..30f402a79f3d 100644 --- a/crates/hir/src/display.rs +++ b/crates/hir/src/display.rs @@ -158,7 +158,8 @@ impl HirDisplay for Adt { impl HirDisplay for Struct { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - write_visibility(self.module(f.db).id, self.visibility(f.db), f)?; + let module_id = self.module(f.db).id; + write_visibility(module_id, self.visibility(f.db), f)?; f.write_str("struct ")?; write!(f, "{}", self.name(f.db).display(f.db.upcast()))?; let def_id = GenericDefId::AdtId(AdtId::StructId(self.id)); @@ -171,6 +172,7 @@ impl HirDisplay for Struct { while let Some((id, _)) = it.next() { let field = Field { parent: (*self).into(), id }; + write_visibility(module_id, field.visibility(f.db), f)?; field.ty(f.db).hir_fmt(f)?; if it.peek().is_some() { f.write_str(", ")?; diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 1e21045e9818..32abbc80c6af 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -44,7 +44,7 @@ use hir_def::{ data::adt::VariantData, generics::{LifetimeParamData, TypeOrConstParamData, TypeParamProvenance}, 
hir::{BindingAnnotation, BindingId, ExprOrPatId, LabelId, Pat}, - item_tree::ItemTreeModItemNode, + item_tree::ItemTreeNode, lang_item::LangItemTarget, layout::{self, ReprOptions, TargetDataLayout}, nameres::{self, diagnostics::DefDiagnostic}, @@ -62,6 +62,7 @@ use hir_expand::{attrs::collect_attrs, name::name, proc_macro::ProcMacroKind, Ma use hir_ty::{ all_super_traits, autoderef, check_orphan_rules, consteval::{try_const_usize, unknown_const_as_generic, ConstExt}, + db::InternedClosure, diagnostics::BodyValidationDiagnostic, known_const_to_ast, layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding}, @@ -563,6 +564,11 @@ impl Module { for diag in db.trait_data_with_diagnostics(t.id).1.iter() { emit_def_diagnostic(db, acc, diag); } + + for item in t.items(db) { + item.diagnostics(db, acc); + } + acc.extend(def.diagnostics(db)) } ModuleDef::Adt(adt) => { @@ -730,13 +736,7 @@ impl Module { } for &item in &db.impl_data(impl_def.id).items { - let def: DefWithBody = match AssocItem::from(item) { - AssocItem::Function(it) => it.into(), - AssocItem::Const(it) => it.into(), - AssocItem::TypeAlias(_) => continue, - }; - - def.diagnostics(db, acc); + AssocItem::from(item).diagnostics(db, acc); } } } @@ -1769,7 +1769,7 @@ pub struct Function { impl Function { pub fn module(self, db: &dyn HirDatabase) -> Module { - self.id.lookup(db.upcast()).module(db.upcast()).into() + self.id.module(db.upcast()).into() } pub fn name(self, db: &dyn HirDatabase) -> Name { @@ -1910,8 +1910,7 @@ impl Function { { return None; } - let loc = self.id.lookup(db.upcast()); - let def_map = db.crate_def_map(loc.krate(db).into()); + let def_map = db.crate_def_map(HasModule::krate(&self.id, db.upcast())); def_map.fn_as_proc_macro(self.id).map(|id| Macro { id: id.into() }) } @@ -1934,7 +1933,7 @@ impl Function { }; let (result, output) = interpret_mir(db, body, false, None); let mut text = match result { - Ok(_) => "pass".to_string(), + Ok(_) => "pass".to_owned(), Err(e) => { let 
mut r = String::new(); _ = e.pretty_print(&mut r, db, &span_formatter); @@ -2120,7 +2119,7 @@ pub struct Const { impl Const { pub fn module(self, db: &dyn HirDatabase) -> Module { - Module { id: self.id.lookup(db.upcast()).module(db.upcast()) } + Module { id: self.id.module(db.upcast()) } } pub fn name(self, db: &dyn HirDatabase) -> Option { @@ -2175,7 +2174,7 @@ pub struct Static { impl Static { pub fn module(self, db: &dyn HirDatabase) -> Module { - Module { id: self.id.lookup(db.upcast()).module(db.upcast()) } + Module { id: self.id.module(db.upcast()) } } pub fn name(self, db: &dyn HirDatabase) -> Name { @@ -2294,7 +2293,7 @@ impl TypeAlias { } pub fn module(self, db: &dyn HirDatabase) -> Module { - Module { id: self.id.lookup(db.upcast()).module(db.upcast()) } + Module { id: self.id.module(db.upcast()) } } pub fn type_ref(self, db: &dyn HirDatabase) -> Option { @@ -2517,11 +2516,13 @@ pub enum AssocItem { Const(Const), TypeAlias(TypeAlias), } + #[derive(Debug, Clone)] pub enum AssocItemContainer { Trait(Trait), Impl(Impl), } + pub trait AsAssocItem { fn as_assoc_item(self, db: &dyn HirDatabase) -> Option; } @@ -2531,16 +2532,19 @@ impl AsAssocItem for Function { as_assoc_item(db, AssocItem::Function, self.id) } } + impl AsAssocItem for Const { fn as_assoc_item(self, db: &dyn HirDatabase) -> Option { as_assoc_item(db, AssocItem::Const, self.id) } } + impl AsAssocItem for TypeAlias { fn as_assoc_item(self, db: &dyn HirDatabase) -> Option { as_assoc_item(db, AssocItem::TypeAlias, self.id) } } + impl AsAssocItem for ModuleDef { fn as_assoc_item(self, db: &dyn HirDatabase) -> Option { match self { @@ -2551,6 +2555,7 @@ impl AsAssocItem for ModuleDef { } } } + impl AsAssocItem for DefWithBody { fn as_assoc_item(self, db: &dyn HirDatabase) -> Option { match self { @@ -2561,16 +2566,15 @@ impl AsAssocItem for DefWithBody { } } -fn as_assoc_item<'db, ID, DEF, CTOR, AST>( +fn as_assoc_item<'db, ID, DEF, LOC>( db: &(dyn HirDatabase + 'db), - ctor: CTOR, + ctor: impl 
FnOnce(DEF) -> AssocItem, id: ID, ) -> Option where - ID: Lookup = dyn DefDatabase + 'db, Data = AssocItemLoc>, + ID: Lookup = dyn DefDatabase + 'db, Data = AssocItemLoc>, DEF: From, - CTOR: FnOnce(DEF) -> AssocItem, - AST: ItemTreeModItemNode, + LOC: ItemTreeNode, { match id.lookup(db.upcast()).container { ItemContainerId::TraitId(_) | ItemContainerId::ImplId(_) => Some(ctor(DEF::from(id))), @@ -2610,27 +2614,34 @@ impl AssocItem { } } - pub fn containing_trait(self, db: &dyn HirDatabase) -> Option { + pub fn container_trait(self, db: &dyn HirDatabase) -> Option { match self.container(db) { AssocItemContainer::Trait(t) => Some(t), _ => None, } } - pub fn containing_trait_impl(self, db: &dyn HirDatabase) -> Option { + pub fn implemented_trait(self, db: &dyn HirDatabase) -> Option { match self.container(db) { AssocItemContainer::Impl(i) => i.trait_(db), _ => None, } } - pub fn containing_trait_or_trait_impl(self, db: &dyn HirDatabase) -> Option { + pub fn container_or_implemented_trait(self, db: &dyn HirDatabase) -> Option { match self.container(db) { AssocItemContainer::Trait(t) => Some(t), AssocItemContainer::Impl(i) => i.trait_(db), } } + pub fn implementing_ty(self, db: &dyn HirDatabase) -> Option { + match self.container(db) { + AssocItemContainer::Impl(i) => Some(i.self_ty(db)), + _ => None, + } + } + pub fn as_function(self) -> Option { match self { Self::Function(v) => Some(v), @@ -2651,6 +2662,22 @@ impl AssocItem { _ => None, } } + + pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec) { + match self { + AssocItem::Function(func) => { + DefWithBody::from(func).diagnostics(db, acc); + } + AssocItem::Const(const_) => { + DefWithBody::from(const_).diagnostics(db, acc); + } + AssocItem::TypeAlias(type_alias) => { + for diag in hir_ty::diagnostics::incorrect_case(db, type_alias.id.into()) { + acc.push(diag.into()); + } + } + } + } } impl HasVisibility for AssocItem { @@ -3306,7 +3333,7 @@ impl Impl { } pub fn items(self, db: &dyn HirDatabase) -> Vec { 
- db.impl_data(self.id).items.iter().map(|it| (*it).into()).collect() + db.impl_data(self.id).items.iter().map(|&it| it.into()).collect() } pub fn is_negative(self, db: &dyn HirDatabase) -> bool { @@ -3662,7 +3689,7 @@ impl Type { .and_then(|it| { let into_future_fn = it.as_function()?; let assoc_item = as_assoc_item(db, AssocItem::Function, into_future_fn)?; - let into_future_trait = assoc_item.containing_trait_or_trait_impl(db)?; + let into_future_trait = assoc_item.container_or_implemented_trait(db)?; Some(into_future_trait.id) }) .or_else(|| { @@ -4101,6 +4128,14 @@ impl Type { name: Option<&Name>, callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>, ) { + let _p = tracing::span!( + tracing::Level::INFO, + "iterate_method_candidates_dyn", + with_local_impls = traits_in_scope.len(), + traits_in_scope = traits_in_scope.len(), + ?name, + ) + .entered(); // There should be no inference vars in types passed here let canonical = hir_ty::replace_errors_with_variables(&self.ty); @@ -4122,6 +4157,7 @@ impl Type { ); } + #[tracing::instrument(skip_all, fields(name = ?name))] pub fn iterate_path_candidates( &self, db: &dyn HirDatabase, @@ -4150,6 +4186,7 @@ impl Type { slot } + #[tracing::instrument(skip_all, fields(name = ?name))] fn iterate_path_candidates_dyn( &self, db: &dyn HirDatabase, @@ -4463,7 +4500,7 @@ impl Callable { } fn closure_source(db: &dyn HirDatabase, closure: ClosureId) -> Option { - let (owner, expr_id) = db.lookup_intern_closure(closure.into()); + let InternedClosure(owner, expr_id) = db.lookup_intern_closure(closure.into()); let (_, source_map) = db.body_with_source_map(owner); let ast = source_map.expr_syntax(expr_id).ok()?; let root = ast.file_syntax(db.upcast()); diff --git a/crates/ide-assists/src/handlers/apply_demorgan.rs b/crates/ide-assists/src/handlers/apply_demorgan.rs index 2d41243c20eb..55e0d7f3b28c 100644 --- a/crates/ide-assists/src/handlers/apply_demorgan.rs +++ b/crates/ide-assists/src/handlers/apply_demorgan.rs @@ -96,7 +96,7 
@@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti let dm_lhs = demorganed.lhs()?; acc.add_group( - &GroupLabel("Apply De Morgan's law".to_string()), + &GroupLabel("Apply De Morgan's law".to_owned()), AssistId("apply_demorgan", AssistKind::RefactorRewrite), "Apply De Morgan's law", op_range, @@ -187,7 +187,7 @@ pub(crate) fn apply_demorgan_iterator(acc: &mut Assists, ctx: &AssistContext<'_> let op_range = method_call.syntax().text_range(); let label = format!("Apply De Morgan's law to `Iterator::{}`", name.text().as_str()); acc.add_group( - &GroupLabel("Apply De Morgan's law".to_string()), + &GroupLabel("Apply De Morgan's law".to_owned()), AssistId("apply_demorgan_iterator", AssistKind::RefactorRewrite), label, op_range, diff --git a/crates/ide-assists/src/handlers/convert_bool_then.rs b/crates/ide-assists/src/handlers/convert_bool_then.rs index d231708c55b3..61b7b4121776 100644 --- a/crates/ide-assists/src/handlers/convert_bool_then.rs +++ b/crates/ide-assists/src/handlers/convert_bool_then.rs @@ -163,9 +163,8 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_> return None; } let assoc = func.as_assoc_item(ctx.sema.db)?; - match assoc.container(ctx.sema.db) { - hir::AssocItemContainer::Impl(impl_) if impl_.self_ty(ctx.sema.db).is_bool() => {} - _ => return None, + if !assoc.implementing_ty(ctx.sema.db)?.is_bool() { + return None; } let target = mcall.syntax().text_range(); diff --git a/crates/ide-assists/src/handlers/convert_comment_block.rs b/crates/ide-assists/src/handlers/convert_comment_block.rs index 3f478ee7d39a..fbc0b9f6739f 100644 --- a/crates/ide-assists/src/handlers/convert_comment_block.rs +++ b/crates/ide-assists/src/handlers/convert_comment_block.rs @@ -57,7 +57,7 @@ fn block_to_line(acc: &mut Assists, comment: ast::Comment) -> Option<()> { // Don't introduce trailing whitespace if line.is_empty() { - line_prefix.to_string() + line_prefix.to_owned() } else { format!("{line_prefix} 
{line}") } diff --git a/crates/ide-assists/src/handlers/convert_to_guarded_return.rs b/crates/ide-assists/src/handlers/convert_to_guarded_return.rs index 6f30ffa622dc..e1966d476c5d 100644 --- a/crates/ide-assists/src/handlers/convert_to_guarded_return.rs +++ b/crates/ide-assists/src/handlers/convert_to_guarded_return.rs @@ -1,6 +1,9 @@ use std::iter::once; -use ide_db::syntax_helpers::node_ext::{is_pattern_cond, single_let}; +use ide_db::{ + syntax_helpers::node_ext::{is_pattern_cond, single_let}, + ty_filter::TryEnum, +}; use syntax::{ ast::{ self, @@ -41,13 +44,35 @@ use crate::{ // } // ``` pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { - let if_expr: ast::IfExpr = ctx.find_node_at_offset()?; + if let Some(let_stmt) = ctx.find_node_at_offset() { + let_stmt_to_guarded_return(let_stmt, acc, ctx) + } else if let Some(if_expr) = ctx.find_node_at_offset() { + if_expr_to_guarded_return(if_expr, acc, ctx) + } else { + None + } +} + +fn if_expr_to_guarded_return( + if_expr: ast::IfExpr, + acc: &mut Assists, + ctx: &AssistContext<'_>, +) -> Option<()> { if if_expr.else_branch().is_some() { return None; } let cond = if_expr.condition()?; + let if_token_range = if_expr.if_token()?.text_range(); + let if_cond_range = cond.syntax().text_range(); + + let cursor_in_range = + if_token_range.cover(if_cond_range).contains_range(ctx.selection_trimmed()); + if !cursor_in_range { + return None; + } + // Check if there is an IfLet that we can handle. 
let (if_let_pat, cond_expr) = if is_pattern_cond(cond.clone()) { let let_ = single_let(cond)?; @@ -148,6 +173,65 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext<' ) } +fn let_stmt_to_guarded_return( + let_stmt: ast::LetStmt, + acc: &mut Assists, + ctx: &AssistContext<'_>, +) -> Option<()> { + let pat = let_stmt.pat()?; + let expr = let_stmt.initializer()?; + + let let_token_range = let_stmt.let_token()?.text_range(); + let let_pattern_range = pat.syntax().text_range(); + let cursor_in_range = + let_token_range.cover(let_pattern_range).contains_range(ctx.selection_trimmed()); + + if !cursor_in_range { + return None; + } + + let try_enum = + ctx.sema.type_of_expr(&expr).and_then(|ty| TryEnum::from_ty(&ctx.sema, &ty.adjusted()))?; + + let happy_pattern = try_enum.happy_pattern(pat); + let target = let_stmt.syntax().text_range(); + + let early_expression: ast::Expr = { + let parent_block = + let_stmt.syntax().parent()?.ancestors().find_map(ast::BlockExpr::cast)?; + let parent_container = parent_block.syntax().parent()?; + + match parent_container.kind() { + WHILE_EXPR | LOOP_EXPR | FOR_EXPR => make::expr_continue(None), + FN => make::expr_return(None), + _ => return None, + } + }; + + acc.add( + AssistId("convert_to_guarded_return", AssistKind::RefactorRewrite), + "Convert to guarded return", + target, + |edit| { + let let_stmt = edit.make_mut(let_stmt); + let let_indent_level = IndentLevel::from_node(let_stmt.syntax()); + + let replacement = { + let let_else_stmt = make::let_else_stmt( + happy_pattern, + let_stmt.ty(), + expr, + ast::make::tail_only_block_expr(early_expression), + ); + let let_else_stmt = let_else_stmt.indent(let_indent_level); + let_else_stmt.syntax().clone_for_update() + }; + + ted::replace(let_stmt.syntax(), replacement) + }, + ) +} + #[cfg(test)] mod tests { use crate::tests::{check_assist, check_assist_not_applicable}; @@ -450,6 +534,62 @@ fn main() { ); } + #[test] + fn convert_let_stmt_inside_fn() { + 
check_assist( + convert_to_guarded_return, + r#" +//- minicore: option +fn foo() -> Option { + None +} + +fn main() { + let x$0 = foo(); +} +"#, + r#" +fn foo() -> Option { + None +} + +fn main() { + let Some(x) = foo() else { return }; +} +"#, + ); + } + + #[test] + fn convert_let_stmt_inside_loop() { + check_assist( + convert_to_guarded_return, + r#" +//- minicore: option +fn foo() -> Option { + None +} + +fn main() { + loop { + let x$0 = foo(); + } +} +"#, + r#" +fn foo() -> Option { + None +} + +fn main() { + loop { + let Some(x) = foo() else { continue }; + } +} +"#, + ); + } + #[test] fn convert_arbitrary_if_let_patterns() { check_assist( @@ -591,6 +731,37 @@ fn main() { } } } +"#, + ); + } + + #[test] + fn ignore_inside_if_stmt() { + check_assist_not_applicable( + convert_to_guarded_return, + r#" +fn main() { + if false { + foo()$0; + } +} +"#, + ); + } + + #[test] + fn ignore_inside_let_initializer() { + check_assist_not_applicable( + convert_to_guarded_return, + r#" +//- minicore: option +fn foo() -> Option { + None +} + +fn main() { + let x = foo()$0; +} "#, ); } diff --git a/crates/ide-assists/src/handlers/desugar_doc_comment.rs b/crates/ide-assists/src/handlers/desugar_doc_comment.rs index c859e98524e8..d26492804670 100644 --- a/crates/ide-assists/src/handlers/desugar_doc_comment.rs +++ b/crates/ide-assists/src/handlers/desugar_doc_comment.rs @@ -27,9 +27,7 @@ use crate::{ pub(crate) fn desugar_doc_comment(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let comment = ctx.find_token_at_offset::()?; // Only allow doc comments - let Some(placement) = comment.kind().doc else { - return None; - }; + let placement = comment.kind().doc?; // Only allow comments which are alone on their line if let Some(prev) = comment.syntax().prev_token() { diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs index 1eb28626f75d..d111005c2ec4 100644 --- 
a/crates/ide-assists/src/handlers/extract_function.rs +++ b/crates/ide-assists/src/handlers/extract_function.rs @@ -1,4 +1,4 @@ -use std::iter; +use std::{iter, ops::RangeInclusive}; use ast::make; use either::Either; @@ -12,27 +12,25 @@ use ide_db::{ helpers::mod_path_to_ast, imports::insert_use::{insert_use, ImportScope}, search::{FileReference, ReferenceCategory, SearchScope}, + source_change::SourceChangeBuilder, syntax_helpers::node_ext::{ for_each_tail_expr, preorder_expr, walk_expr, walk_pat, walk_patterns_in_expr, }, FxIndexSet, RootDatabase, }; -use itertools::Itertools; -use stdx::format_to; use syntax::{ ast::{ - self, - edit::{AstNodeEdit, IndentLevel}, - AstNode, HasGenericParams, + self, edit::IndentLevel, edit_in_place::Indent, AstNode, AstToken, HasGenericParams, + HasName, }, - match_ast, ted, AstToken, SyntaxElement, + match_ast, ted, SyntaxElement, SyntaxKind::{self, COMMENT}, SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, WalkEvent, T, }; use crate::{ assist_context::{AssistContext, Assists, TreeMutator}, - utils::generate_impl_text, + utils::generate_impl, AssistId, }; @@ -134,17 +132,65 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op let new_indent = IndentLevel::from_node(&insert_after); let old_indent = fun.body.indent_level(); - builder.replace(target_range, make_call(ctx, &fun, old_indent)); + let insert_after = builder.make_syntax_mut(insert_after); + + let call_expr = make_call(ctx, &fun, old_indent); + + // Map the element range to replace into the mutable version + let elements = match &fun.body { + FunctionBody::Expr(expr) => { + // expr itself becomes the replacement target + let expr = &builder.make_mut(expr.clone()); + let node = SyntaxElement::Node(expr.syntax().clone()); + + node.clone()..=node + } + FunctionBody::Span { parent, elements, .. 
} => { + // Map the element range into the mutable versions + let parent = builder.make_mut(parent.clone()); + + let start = parent + .syntax() + .children_with_tokens() + .nth(elements.start().index()) + .expect("should be able to find mutable start element"); + + let end = parent + .syntax() + .children_with_tokens() + .nth(elements.end().index()) + .expect("should be able to find mutable end element"); + + start..=end + } + }; let has_impl_wrapper = insert_after.ancestors().any(|a| a.kind() == SyntaxKind::IMPL && a != insert_after); + let fn_def = format_function(ctx, module, &fun, old_indent).clone_for_update(); + + if let Some(cap) = ctx.config.snippet_cap { + if let Some(name) = fn_def.name() { + builder.add_tabstop_before(cap, name); + } + } + let fn_def = match fun.self_param_adt(ctx) { Some(adt) if anchor == Anchor::Method && !has_impl_wrapper => { - let fn_def = format_function(ctx, module, &fun, old_indent, new_indent + 1); - generate_impl_text(&adt, &fn_def).replace("{\n\n", "{") + fn_def.indent(1.into()); + + let impl_ = generate_impl(&adt); + impl_.indent(new_indent); + impl_.get_or_create_assoc_item_list().add_item(fn_def.into()); + + impl_.syntax().clone() + } + _ => { + fn_def.indent(new_indent); + + fn_def.syntax().clone() } - _ => format_function(ctx, module, &fun, old_indent, new_indent), }; // There are external control flows @@ -177,12 +223,15 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op } } - let insert_offset = insert_after.text_range().end(); + // Replace the call site with the call to the new function + fixup_call_site(builder, &fun.body); + ted::replace_all(elements, vec![call_expr.into()]); - match ctx.config.snippet_cap { - Some(cap) => builder.insert_snippet(cap, insert_offset, fn_def), - None => builder.insert(insert_offset, fn_def), - }; + // Insert the newly extracted function (or impl) + ted::insert_all_raw( + ted::Position::after(insert_after), + 
vec![make::tokens::whitespace(&format!("\n\n{new_indent}")).into(), fn_def.into()], + ); }, ) } @@ -195,7 +244,7 @@ fn make_function_name(semantics_scope: &hir::SemanticsScope<'_>) -> ast::NameRef let default_name = "fun_name"; - let mut name = default_name.to_string(); + let mut name = default_name.to_owned(); let mut counter = 0; while names_in_scope.contains(&name) { counter += 1; @@ -225,10 +274,10 @@ fn extraction_target(node: &SyntaxNode, selection_range: TextRange) -> Option None, - ast::Stmt::ExprStmt(_) | ast::Stmt::LetStmt(_) => Some(FunctionBody::from_range( + ast::Stmt::ExprStmt(_) | ast::Stmt::LetStmt(_) => FunctionBody::from_range( node.parent().and_then(ast::StmtList::cast)?, node.text_range(), - )), + ), }; } @@ -241,7 +290,7 @@ fn extraction_target(node: &SyntaxNode, selection_range: TextRange) -> Option, text_range: TextRange }, } #[derive(Debug)] @@ -569,26 +618,38 @@ impl FunctionBody { } } - fn from_range(parent: ast::StmtList, selected: TextRange) -> FunctionBody { + fn from_range(parent: ast::StmtList, selected: TextRange) -> Option { let full_body = parent.syntax().children_with_tokens(); - let mut text_range = full_body + // Get all of the elements intersecting with the selection + let mut stmts_in_selection = full_body .filter(|it| ast::Stmt::can_cast(it.kind()) || it.kind() == COMMENT) - .map(|element| element.text_range()) - .filter(|&range| selected.intersect(range).filter(|it| !it.is_empty()).is_some()) - .reduce(|acc, stmt| acc.cover(stmt)); + .filter(|it| selected.intersect(it.text_range()).filter(|it| !it.is_empty()).is_some()); - if let Some(tail_range) = parent - .tail_expr() - .map(|it| it.syntax().text_range()) - .filter(|&it| selected.intersect(it).is_some()) + let first_element = stmts_in_selection.next(); + + // If the tail expr is part of the selection too, make that the last element + // Otherwise use the last stmt + let last_element = if let Some(tail_expr) = + parent.tail_expr().filter(|it| 
selected.intersect(it.syntax().text_range()).is_some()) { - text_range = Some(match text_range { - Some(text_range) => text_range.cover(tail_range), - None => tail_range, - }); - } - Self::Span { parent, text_range: text_range.unwrap_or(selected) } + Some(tail_expr.syntax().clone().into()) + } else { + stmts_in_selection.last() + }; + + let elements = match (first_element, last_element) { + (None, _) => { + cov_mark::hit!(extract_function_empty_selection_is_not_applicable); + return None; + } + (Some(first), None) => first.clone()..=first, + (Some(first), Some(last)) => first..=last, + }; + + let text_range = elements.start().text_range().cover(elements.end().text_range()); + + Some(Self::Span { parent, elements, text_range }) } fn indent_level(&self) -> IndentLevel { @@ -601,7 +662,7 @@ impl FunctionBody { fn tail_expr(&self) -> Option { match &self { FunctionBody::Expr(expr) => Some(expr.clone()), - FunctionBody::Span { parent, text_range } => { + FunctionBody::Span { parent, text_range, .. } => { let tail_expr = parent.tail_expr()?; text_range.contains_range(tail_expr.syntax().text_range()).then_some(tail_expr) } @@ -611,7 +672,7 @@ impl FunctionBody { fn walk_expr(&self, cb: &mut dyn FnMut(ast::Expr)) { match self { FunctionBody::Expr(expr) => walk_expr(expr, cb), - FunctionBody::Span { parent, text_range } => { + FunctionBody::Span { parent, text_range, .. } => { parent .statements() .filter(|stmt| text_range.contains_range(stmt.syntax().text_range())) @@ -634,7 +695,7 @@ impl FunctionBody { fn preorder_expr(&self, cb: &mut dyn FnMut(WalkEvent) -> bool) { match self { FunctionBody::Expr(expr) => preorder_expr(expr, cb), - FunctionBody::Span { parent, text_range } => { + FunctionBody::Span { parent, text_range, .. 
} => { parent .statements() .filter(|stmt| text_range.contains_range(stmt.syntax().text_range())) @@ -657,7 +718,7 @@ impl FunctionBody { fn walk_pat(&self, cb: &mut dyn FnMut(ast::Pat)) { match self { FunctionBody::Expr(expr) => walk_patterns_in_expr(expr, cb), - FunctionBody::Span { parent, text_range } => { + FunctionBody::Span { parent, text_range, .. } => { parent .statements() .filter(|stmt| text_range.contains_range(stmt.syntax().text_range())) @@ -1151,7 +1212,7 @@ impl HasTokenAtOffset for FunctionBody { fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset { match self { FunctionBody::Expr(expr) => expr.syntax().token_at_offset(offset), - FunctionBody::Span { parent, text_range } => { + FunctionBody::Span { parent, text_range, .. } => { match parent.syntax().token_at_offset(offset) { TokenAtOffset::None => TokenAtOffset::None, TokenAtOffset::Single(t) => { @@ -1316,7 +1377,19 @@ fn impl_type_name(impl_node: &ast::Impl) -> Option { Some(impl_node.self_ty()?.to_string()) } -fn make_call(ctx: &AssistContext<'_>, fun: &Function, indent: IndentLevel) -> String { +/// Fixes up the call site before the target expressions are replaced with the call expression +fn fixup_call_site(builder: &mut SourceChangeBuilder, body: &FunctionBody) { + let parent_match_arm = body.parent().and_then(ast::MatchArm::cast); + + if let Some(parent_match_arm) = parent_match_arm { + if parent_match_arm.comma_token().is_none() { + let parent_match_arm = builder.make_mut(parent_match_arm); + ted::append_child_raw(parent_match_arm.syntax(), make::token(T![,])); + } + } +} + +fn make_call(ctx: &AssistContext<'_>, fun: &Function, indent: IndentLevel) -> SyntaxNode { let ret_ty = fun.return_type(ctx); let args = make::arg_list(fun.params.iter().map(|param| param.to_arg(ctx))); @@ -1334,44 +1407,45 @@ fn make_call(ctx: &AssistContext<'_>, fun: &Function, indent: IndentLevel) -> St if fun.control_flow.is_async { call_expr = make::expr_await(call_expr); } - let expr = 
handler.make_call_expr(call_expr).indent(indent); - let mut_modifier = |var: &OutlivedLocal| if var.mut_usage_outside_body { "mut " } else { "" }; + let expr = handler.make_call_expr(call_expr).clone_for_update(); + expr.indent(indent); - let mut buf = String::new(); - match fun.outliving_locals.as_slice() { - [] => {} + let outliving_bindings = match fun.outliving_locals.as_slice() { + [] => None, [var] => { - let modifier = mut_modifier(var); let name = var.local.name(ctx.db()); - format_to!(buf, "let {modifier}{} = ", name.display(ctx.db())) + let name = make::name(&name.display(ctx.db()).to_string()); + Some(ast::Pat::IdentPat(make::ident_pat(false, var.mut_usage_outside_body, name))) } vars => { - buf.push_str("let ("); - let bindings = vars.iter().format_with(", ", |local, f| { - let modifier = mut_modifier(local); - let name = local.local.name(ctx.db()); - f(&format_args!("{modifier}{}", name.display(ctx.db())))?; - Ok(()) + let binding_pats = vars.iter().map(|var| { + let name = var.local.name(ctx.db()); + let name = make::name(&name.display(ctx.db()).to_string()); + make::ident_pat(false, var.mut_usage_outside_body, name).into() }); - format_to!(buf, "{bindings}"); - buf.push_str(") = "); + Some(ast::Pat::TuplePat(make::tuple_pat(binding_pats))) } - } + }; - format_to!(buf, "{expr}"); let parent_match_arm = fun.body.parent().and_then(ast::MatchArm::cast); - let insert_comma = parent_match_arm.as_ref().is_some_and(|it| it.comma_token().is_none()); - if insert_comma { - buf.push(','); - } else if parent_match_arm.is_none() + if let Some(bindings) = outliving_bindings { + // with bindings that outlive it + make::let_stmt(bindings, None, Some(expr)).syntax().clone_for_update() + } else if parent_match_arm.as_ref().is_some() { + // as a tail expr for a match arm + expr.syntax().clone() + } else if parent_match_arm.as_ref().is_none() && fun.ret_ty.is_unit() && (!fun.outliving_locals.is_empty() || !expr.is_block_like()) { - buf.push(';'); + // as an expr stmt + 
make::expr_stmt(expr).syntax().clone_for_update() + } else { + // as a tail expr, or a block + expr.syntax().clone() } - buf } enum FlowHandler { @@ -1500,42 +1574,25 @@ fn format_function( module: hir::Module, fun: &Function, old_indent: IndentLevel, - new_indent: IndentLevel, -) -> String { - let mut fn_def = String::new(); - - let fun_name = &fun.name; +) -> ast::Fn { + let fun_name = make::name(&fun.name.text()); let params = fun.make_param_list(ctx, module); let ret_ty = fun.make_ret_ty(ctx, module); - let body = make_body(ctx, old_indent, new_indent, fun); - let const_kw = if fun.mods.is_const { "const " } else { "" }; - let async_kw = if fun.control_flow.is_async { "async " } else { "" }; - let unsafe_kw = if fun.control_flow.is_unsafe { "unsafe " } else { "" }; + let body = make_body(ctx, old_indent, fun); let (generic_params, where_clause) = make_generic_params_and_where_clause(ctx, fun); - format_to!(fn_def, "\n\n{new_indent}{const_kw}{async_kw}{unsafe_kw}"); - match ctx.config.snippet_cap { - Some(_) => format_to!(fn_def, "fn $0{fun_name}"), - None => format_to!(fn_def, "fn {fun_name}"), - } - - if let Some(generic_params) = generic_params { - format_to!(fn_def, "{generic_params}"); - } - - format_to!(fn_def, "{params}"); - - if let Some(ret_ty) = ret_ty { - format_to!(fn_def, " {ret_ty}"); - } - - if let Some(where_clause) = where_clause { - format_to!(fn_def, " {where_clause}"); - } - - format_to!(fn_def, " {body}"); - - fn_def + make::fn_( + None, + fun_name, + generic_params, + where_clause, + params, + body, + ret_ty, + fun.control_flow.is_async, + fun.mods.is_const, + fun.control_flow.is_unsafe, + ) } fn make_generic_params_and_where_clause( @@ -1716,12 +1773,7 @@ impl FunType { } } -fn make_body( - ctx: &AssistContext<'_>, - old_indent: IndentLevel, - new_indent: IndentLevel, - fun: &Function, -) -> ast::BlockExpr { +fn make_body(ctx: &AssistContext<'_>, old_indent: IndentLevel, fun: &Function) -> ast::BlockExpr { let ret_ty = 
fun.return_type(ctx); let handler = FlowHandler::from_ret_ty(fun, &ret_ty); @@ -1732,7 +1784,7 @@ fn make_body( match expr { ast::Expr::BlockExpr(block) => { // If the extracted expression is itself a block, there is no need to wrap it inside another block. - let block = block.dedent(old_indent); + block.dedent(old_indent); let elements = block.stmt_list().map_or_else( || Either::Left(iter::empty()), |stmt_list| { @@ -1752,13 +1804,13 @@ fn make_body( make::hacky_block_expr(elements, block.tail_expr()) } _ => { - let expr = expr.dedent(old_indent).indent(IndentLevel(1)); + expr.reindent_to(1.into()); make::block_expr(Vec::new(), Some(expr)) } } } - FunctionBody::Span { parent, text_range } => { + FunctionBody::Span { parent, text_range, .. } => { let mut elements: Vec<_> = parent .syntax() .children_with_tokens() @@ -1801,8 +1853,8 @@ fn make_body( .map(|node_or_token| match &node_or_token { syntax::NodeOrToken::Node(node) => match ast::Stmt::cast(node.clone()) { Some(stmt) => { - let indented = stmt.dedent(old_indent).indent(body_indent); - let ast_node = indented.syntax().clone_subtree(); + stmt.reindent_to(body_indent); + let ast_node = stmt.syntax().clone_subtree(); syntax::NodeOrToken::Node(ast_node) } _ => node_or_token, @@ -1810,13 +1862,15 @@ fn make_body( _ => node_or_token, }) .collect::>(); - let tail_expr = tail_expr.map(|expr| expr.dedent(old_indent).indent(body_indent)); + if let Some(tail_expr) = &mut tail_expr { + tail_expr.reindent_to(body_indent); + } make::hacky_block_expr(elements, tail_expr) } }; - let block = match &handler { + match &handler { FlowHandler::None => block, FlowHandler::Try { kind } => { let block = with_default_tail_expr(block, make::expr_unit()); @@ -1851,9 +1905,7 @@ fn make_body( let args = make::arg_list(iter::once(tail_expr)); make::expr_call(ok, args) }), - }; - - block.indent(new_indent) + } } fn map_tail_expr(block: ast::BlockExpr, f: impl FnOnce(ast::Expr) -> ast::Expr) -> ast::BlockExpr { @@ -1897,7 +1949,7 @@ fn 
with_tail_expr(block: ast::BlockExpr, tail_expr: ast::Expr) -> ast::BlockExpr } fn format_type(ty: &hir::Type, ctx: &AssistContext<'_>, module: hir::Module) -> String { - ty.display_source_code(ctx.db(), module.into(), true).ok().unwrap_or_else(|| "_".to_string()) + ty.display_source_code(ctx.db(), module.into(), true).ok().unwrap_or_else(|| "_".to_owned()) } fn make_ty(ty: &hir::Type, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Type { @@ -2551,6 +2603,20 @@ fn $0fun_name(n: u32) -> u32 { check_assist_not_applicable(extract_function, r"fn main() { 1 + /* $0comment$0 */ 1; }"); } + #[test] + fn empty_selection_is_not_applicable() { + cov_mark::check!(extract_function_empty_selection_is_not_applicable); + check_assist_not_applicable( + extract_function, + r#" +fn main() { + $0 + + $0 +}"#, + ); + } + #[test] fn part_of_expr_stmt() { check_assist( diff --git a/crates/ide-assists/src/handlers/extract_module.rs b/crates/ide-assists/src/handlers/extract_module.rs index 30c3983dc411..af834c8a53db 100644 --- a/crates/ide-assists/src/handlers/extract_module.rs +++ b/crates/ide-assists/src/handlers/extract_module.rs @@ -1,7 +1,4 @@ -use std::{ - collections::{HashMap, HashSet}, - iter, -}; +use std::iter; use hir::{HasSource, HirFileIdExt, ModuleSource}; use ide_db::{ @@ -9,6 +6,7 @@ use ide_db::{ base_db::FileId, defs::{Definition, NameClass, NameRefClass}, search::{FileReference, SearchScope}, + FxHashMap, FxHashSet, }; use itertools::Itertools; use smallvec::SmallVec; @@ -235,9 +233,9 @@ impl Module { fn get_usages_and_record_fields( &self, ctx: &AssistContext<'_>, - ) -> (HashMap>, Vec) { + ) -> (FxHashMap>, Vec) { let mut adt_fields = Vec::new(); - let mut refs: HashMap> = HashMap::new(); + let mut refs: FxHashMap> = FxHashMap::default(); //Here impl is not included as each item inside impl will be tied to the parent of //implementing block(a struct, enum, etc), if the parent is in selected module, it will @@ -320,7 +318,7 @@ impl Module { &self, ctx: 
&AssistContext<'_>, node_def: Definition, - refs_in_files: &mut HashMap>, + refs_in_files: &mut FxHashMap>, ) { for (file_id, references) in node_def.usages(&ctx.sema).all() { let source_file = ctx.sema.parse(file_id); @@ -400,7 +398,7 @@ impl Module { ctx: &AssistContext<'_>, ) -> Vec { let mut import_paths_to_be_removed: Vec = vec![]; - let mut node_set: HashSet = HashSet::new(); + let mut node_set: FxHashSet = FxHashSet::default(); for item in self.body_items.clone() { for x in item.syntax().descendants() { diff --git a/crates/ide-assists/src/handlers/extract_variable.rs b/crates/ide-assists/src/handlers/extract_variable.rs index 22d16cf6b36e..36d312265120 100644 --- a/crates/ide-assists/src/handlers/extract_variable.rs +++ b/crates/ide-assists/src/handlers/extract_variable.rs @@ -115,7 +115,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op let trailing_ws = if prev_ws.is_some_and(|it| it.text().starts_with('\n')) { format!("\n{indent_to}") } else { - " ".to_string() + " ".to_owned() }; ted::insert_all_raw( diff --git a/crates/ide-assists/src/handlers/flip_trait_bound.rs b/crates/ide-assists/src/handlers/flip_trait_bound.rs index 430cd5b080bd..70b5efcb645a 100644 --- a/crates/ide-assists/src/handlers/flip_trait_bound.rs +++ b/crates/ide-assists/src/handlers/flip_trait_bound.rs @@ -58,6 +58,11 @@ mod tests { check_assist_not_applicable(flip_trait_bound, "struct S where T: $0A { }") } + #[test] + fn flip_trait_bound_works_for_dyn() { + check_assist(flip_trait_bound, "fn f<'a>(x: dyn Copy $0+ 'a)", "fn f<'a>(x: dyn 'a + Copy)") + } + #[test] fn flip_trait_bound_works_for_struct() { check_assist( diff --git a/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/crates/ide-assists/src/handlers/generate_delegate_methods.rs index d59bd71d312d..4f2df5633c3c 100644 --- a/crates/ide-assists/src/handlers/generate_delegate_methods.rs +++ b/crates/ide-assists/src/handlers/generate_delegate_methods.rs @@ -1,7 +1,5 @@ -use 
std::collections::HashSet; - use hir::{self, HasCrate, HasVisibility}; -use ide_db::path_transform::PathTransform; +use ide_db::{path_transform::PathTransform, FxHashSet}; use syntax::{ ast::{ self, edit_in_place::Indent, make, AstNode, HasGenericParams, HasName, HasVisibility as _, @@ -71,7 +69,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' let sema_field_ty = ctx.sema.resolve_type(&field_ty)?; let mut methods = vec![]; - let mut seen_names = HashSet::new(); + let mut seen_names = FxHashSet::default(); for ty in sema_field_ty.autoderef(ctx.db()) { let krate = ty.krate(ctx.db()); @@ -163,13 +161,13 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' Some(impl_def) => edit.make_mut(impl_def), None => { let name = &strukt_name.to_string(); - let params = strukt.generic_param_list(); - let ty_params = params; + let ty_params = strukt.generic_param_list(); + let ty_args = ty_params.as_ref().map(|it| it.to_generic_args()); let where_clause = strukt.where_clause(); let impl_def = make::impl_( ty_params, - None, + ty_args, make::ty_path(make::ext::ident_path(name)), where_clause, None, diff --git a/crates/ide-assists/src/handlers/generate_delegate_trait.rs b/crates/ide-assists/src/handlers/generate_delegate_trait.rs index 898bd01291a2..7a60287f923f 100644 --- a/crates/ide-assists/src/handlers/generate_delegate_trait.rs +++ b/crates/ide-assists/src/handlers/generate_delegate_trait.rs @@ -502,9 +502,7 @@ fn generate_args_for_impl( trait_params: &Option, old_trait_args: &FxHashSet, ) -> Option { - let Some(old_impl_args) = old_impl_gpl.map(|gpl| gpl.to_generic_args().generic_args()) else { - return None; - }; + let old_impl_args = old_impl_gpl.map(|gpl| gpl.to_generic_args().generic_args())?; // Create pairs of the args of `self_ty` and corresponding `field_ty` to // form the substitution list let mut arg_substs = FxHashMap::default(); @@ -958,7 +956,8 @@ where impl AnotherTrait for S where T: 
AnotherTrait, -{}"#, +{ +}"#, ); } @@ -1448,7 +1447,8 @@ where impl AnotherTrait for S where T: AnotherTrait, -{}"#, +{ +}"#, ); } diff --git a/crates/ide-assists/src/handlers/generate_documentation_template.rs b/crates/ide-assists/src/handlers/generate_documentation_template.rs index f298ce8916db..38b24fd19cad 100644 --- a/crates/ide-assists/src/handlers/generate_documentation_template.rs +++ b/crates/ide-assists/src/handlers/generate_documentation_template.rs @@ -364,7 +364,7 @@ fn is_in_trait_impl(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> bool { ctx.sema .to_def(ast_func) .and_then(|hir_func| hir_func.as_assoc_item(ctx.db())) - .and_then(|assoc_item| assoc_item.containing_trait_impl(ctx.db())) + .and_then(|assoc_item| assoc_item.implemented_trait(ctx.db())) .is_some() } @@ -373,7 +373,7 @@ fn is_in_trait_def(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> bool { ctx.sema .to_def(ast_func) .and_then(|hir_func| hir_func.as_assoc_item(ctx.db())) - .and_then(|assoc_item| assoc_item.containing_trait(ctx.db())) + .and_then(|assoc_item| assoc_item.container_trait(ctx.db())) .is_some() } @@ -416,9 +416,9 @@ fn arguments_from_params(param_list: &ast::ParamList) -> String { true => format!("&mut {name}"), false => name.to_string(), }, - None => "_".to_string(), + None => "_".to_owned(), }, - _ => "_".to_string(), + _ => "_".to_owned(), }); args_iter.format(", ").to_string() } diff --git a/crates/ide-assists/src/handlers/generate_enum_variant.rs b/crates/ide-assists/src/handlers/generate_enum_variant.rs index 681f8c1fcf52..7faf2d5b1320 100644 --- a/crates/ide-assists/src/handlers/generate_enum_variant.rs +++ b/crates/ide-assists/src/handlers/generate_enum_variant.rs @@ -162,7 +162,7 @@ fn make_record_field_list( fn name_from_field(field: &ast::RecordExprField) -> ast::Name { let text = match field.name_ref() { Some(it) => it.to_string(), - None => name_from_field_shorthand(field).unwrap_or("unknown".to_string()), + None => 
name_from_field_shorthand(field).unwrap_or("unknown".to_owned()), }; make::name(&text) } diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index 50528e1caaac..fe2f8ed6417d 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -202,7 +202,7 @@ fn get_adt_source( let file = ctx.sema.parse(range.file_id); let adt_source = ctx.sema.find_node_at_offset_with_macros(file.syntax(), range.range.start())?; - find_struct_impl(ctx, &adt_source, &[fn_name.to_string()]).map(|impl_| (impl_, range.file_id)) + find_struct_impl(ctx, &adt_source, &[fn_name.to_owned()]).map(|impl_| (impl_, range.file_id)) } struct FunctionTemplate { @@ -908,7 +908,7 @@ fn filter_unnecessary_bounds( } } - let starting_nodes = necessary_params.iter().map(|param| param_map[param]); + let starting_nodes = necessary_params.iter().flat_map(|param| param_map.get(param).copied()); let reachable = graph.compute_reachable_nodes(starting_nodes); // Not pretty, but effective. If only there were `Vec::retain_index()`... 
@@ -1007,7 +1007,7 @@ fn fn_arg_name(sema: &Semantics<'_, RootDatabase>, arg_expr: &ast::Expr) -> Stri name } Some(name) => name, - None => "arg".to_string(), + None => "arg".to_owned(), } } @@ -1033,7 +1033,7 @@ fn fn_arg_type( if ty.is_reference() || ty.is_mutable_reference() { let famous_defs = &FamousDefs(&ctx.sema, ctx.sema.scope(fn_arg.syntax())?.krate()); convert_reference_type(ty.strip_references(), ctx.db(), famous_defs) - .map(|conversion| conversion.convert_type(ctx.db())) + .map(|conversion| conversion.convert_type(ctx.db()).to_string()) .or_else(|| ty.display_source_code(ctx.db(), target_module.into(), true).ok()) } else { ty.display_source_code(ctx.db(), target_module.into(), true).ok() diff --git a/crates/ide-assists/src/handlers/generate_getter_or_setter.rs b/crates/ide-assists/src/handlers/generate_getter_or_setter.rs index 79307fcec5af..e90a032f1cb4 100644 --- a/crates/ide-assists/src/handlers/generate_getter_or_setter.rs +++ b/crates/ide-assists/src/handlers/generate_getter_or_setter.rs @@ -1,12 +1,12 @@ use ide_db::{famous_defs::FamousDefs, source_change::SourceChangeBuilder}; use stdx::{format_to, to_lower_snake_case}; use syntax::{ - ast::{self, AstNode, HasName, HasVisibility}, - TextRange, + ast::{self, edit_in_place::Indent, make, AstNode, HasName, HasVisibility}, + ted, TextRange, }; use crate::{ - utils::{convert_reference_type, find_impl_block_end, find_struct_impl, generate_impl_text}, + utils::{convert_reference_type, find_struct_impl, generate_impl}, AssistContext, AssistId, AssistKind, Assists, GroupLabel, }; @@ -75,7 +75,7 @@ pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt // Generate a getter method. 
// // ``` -// # //- minicore: as_ref +// # //- minicore: as_ref, deref // # pub struct String; // # impl AsRef for String { // # fn as_ref(&self) -> &str { @@ -83,6 +83,13 @@ pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt // # } // # } // # +// # impl core::ops::Deref for String { +// # type Target = str; +// # fn deref(&self) -> &Self::Target { +// # "" +// # } +// # } +// # // struct Person { // nam$0e: String, // } @@ -96,13 +103,20 @@ pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt // # } // # } // # +// # impl core::ops::Deref for String { +// # type Target = str; +// # fn deref(&self) -> &Self::Target { +// # "" +// # } +// # } +// # // struct Person { // name: String, // } // // impl Person { // fn $0name(&self) -> &str { -// self.name.as_ref() +// &self.name // } // } // ``` @@ -200,14 +214,14 @@ fn generate_getter_from_info( ctx: &AssistContext<'_>, info: &AssistInfo, record_field_info: &RecordFieldInfo, -) -> String { - let mut buf = String::with_capacity(512); - - let vis = info.strukt.visibility().map_or(String::new(), |v| format!("{v} ")); +) -> ast::Fn { let (ty, body) = if matches!(info.assist_type, AssistType::MutGet) { ( - format!("&mut {}", record_field_info.field_ty), - format!("&mut self.{}", record_field_info.field_name), + make::ty_ref(record_field_info.field_ty.clone(), true), + make::expr_ref( + make::expr_field(make::ext::expr_self(), &record_field_info.field_name.text()), + true, + ), ) } else { (|| { @@ -226,41 +240,52 @@ fn generate_getter_from_info( })() .unwrap_or_else(|| { ( - format!("&{}", record_field_info.field_ty), - format!("&self.{}", record_field_info.field_name), + make::ty_ref(record_field_info.field_ty.clone(), false), + make::expr_ref( + make::expr_field(make::ext::expr_self(), &record_field_info.field_name.text()), + false, + ), ) }) }; - format_to!( - buf, - " {}fn {}(&{}self) -> {} {{ - {} - }}", - vis, - record_field_info.fn_name, - 
matches!(info.assist_type, AssistType::MutGet).then_some("mut ").unwrap_or_default(), - ty, - body, - ); + let self_param = if matches!(info.assist_type, AssistType::MutGet) { + make::mut_self_param() + } else { + make::self_param() + }; - buf + let strukt = &info.strukt; + let fn_name = make::name(&record_field_info.fn_name); + let params = make::param_list(Some(self_param), []); + let ret_type = Some(make::ret_type(ty)); + let body = make::block_expr([], Some(body)); + + make::fn_(strukt.visibility(), fn_name, None, None, params, body, ret_type, false, false, false) } -fn generate_setter_from_info(info: &AssistInfo, record_field_info: &RecordFieldInfo) -> String { - let mut buf = String::with_capacity(512); +fn generate_setter_from_info(info: &AssistInfo, record_field_info: &RecordFieldInfo) -> ast::Fn { let strukt = &info.strukt; - let fn_name = &record_field_info.fn_name; + let field_name = &record_field_info.fn_name; + let fn_name = make::name(&format!("set_{field_name}")); let field_ty = &record_field_info.field_ty; - let vis = strukt.visibility().map_or(String::new(), |v| format!("{v} ")); - format_to!( - buf, - " {vis}fn set_{fn_name}(&mut self, {fn_name}: {field_ty}) {{ - self.{fn_name} = {fn_name}; - }}" - ); - buf + // Make the param list + // `(&mut self, $field_name: $field_ty)` + let field_param = + make::param(make::ident_pat(false, false, make::name(field_name)).into(), field_ty.clone()); + let params = make::param_list(Some(make::mut_self_param()), [field_param]); + + // Make the assignment body + // `self.$field_name = $field_name` + let self_expr = make::ext::expr_self(); + let lhs = make::expr_field(self_expr, field_name); + let rhs = make::expr_path(make::ext::ident_path(field_name)); + let assign_stmt = make::expr_stmt(make::expr_assignment(lhs, rhs)); + let body = make::block_expr([assign_stmt.into()], None); + + // Make the setter fn + make::fn_(strukt.visibility(), fn_name, None, None, params, body, None, false, false, false) } fn 
extract_and_parse( @@ -353,74 +378,45 @@ fn build_source_change( ) { let record_fields_count = info_of_record_fields.len(); - let mut buf = String::with_capacity(512); + let impl_def = if let Some(impl_def) = &assist_info.impl_def { + // We have an existing impl to add to + builder.make_mut(impl_def.clone()) + } else { + // Generate a new impl to add the methods to + let impl_def = generate_impl(&ast::Adt::Struct(assist_info.strukt.clone())); - // Check if an impl exists - if let Some(impl_def) = &assist_info.impl_def { - // Check if impl is empty - if let Some(assoc_item_list) = impl_def.assoc_item_list() { - if assoc_item_list.assoc_items().next().is_some() { - // If not empty then only insert a new line - buf.push('\n'); - } - } - } + // Insert it after the adt + let strukt = builder.make_mut(assist_info.strukt.clone()); + + ted::insert_all_raw( + ted::Position::after(strukt.syntax()), + vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()], + ); + + impl_def + }; + + let assoc_item_list = impl_def.get_or_create_assoc_item_list(); for (i, record_field_info) in info_of_record_fields.iter().enumerate() { - // this buf inserts a newline at the end of a getter - // automatically, if one wants to add one more newline - // for separating it from other assoc items, that needs - // to be handled separately - let mut getter_buf = match assist_info.assist_type { + // Make the new getter or setter fn + let new_fn = match assist_info.assist_type { AssistType::Set => generate_setter_from_info(&assist_info, record_field_info), _ => generate_getter_from_info(ctx, &assist_info, record_field_info), - }; + } + .clone_for_update(); + new_fn.indent(1.into()); - // Insert `$0` only for last getter we generate - if i == record_fields_count - 1 && ctx.config.snippet_cap.is_some() { - getter_buf = getter_buf.replacen("fn ", "fn $0", 1); + // Insert a tabstop only for last method we generate + if i == record_fields_count - 1 { + if let Some(cap) = 
ctx.config.snippet_cap { + if let Some(name) = new_fn.name() { + builder.add_tabstop_before(cap, name); + } + } } - // For first element we do not merge with '\n', as - // that can be inserted by impl_def check defined - // above, for other cases which are: - // - // - impl exists but it empty, here we would ideally - // not want to keep newline between impl { - // and fn () { line - // - // - next if impl itself does not exist, in this - // case we ourselves generate a new impl and that - // again ends up with the same reasoning as above - // for not keeping newline - if i == 0 { - buf = buf + &getter_buf; - } else { - buf = buf + "\n" + &getter_buf; - } - - // We don't insert a new line at the end of - // last getter as it will end up in the end - // of an impl where we would not like to keep - // getter and end of impl ( i.e. `}` ) with an - // extra line for no reason - if i < record_fields_count - 1 { - buf += "\n"; - } - } - - let start_offset = assist_info - .impl_def - .as_ref() - .and_then(|impl_def| find_impl_block_end(impl_def.to_owned(), &mut buf)) - .unwrap_or_else(|| { - buf = generate_impl_text(&ast::Adt::Struct(assist_info.strukt.clone()), &buf); - assist_info.strukt.syntax().text_range().end() - }); - - match ctx.config.snippet_cap { - Some(cap) => builder.insert_snippet(cap, start_offset, buf), - None => builder.insert(start_offset, buf), + assoc_item_list.add_item(new_fn.clone().into()); } } diff --git a/crates/ide-assists/src/handlers/generate_impl.rs b/crates/ide-assists/src/handlers/generate_impl.rs index d52d778d344a..821783c28313 100644 --- a/crates/ide-assists/src/handlers/generate_impl.rs +++ b/crates/ide-assists/src/handlers/generate_impl.rs @@ -1,10 +1,10 @@ -use syntax::ast::{self, AstNode, HasName}; - -use crate::{ - utils::{generate_impl_text, generate_trait_impl_text_intransitive}, - AssistContext, AssistId, AssistKind, Assists, +use syntax::{ + ast::{self, make, AstNode, HasName}, + ted, }; +use crate::{utils, AssistContext, 
AssistId, AssistKind, Assists}; + // Assist: generate_impl // // Adds a new inherent impl for a type. @@ -20,9 +20,7 @@ use crate::{ // data: T, // } // -// impl Ctx { -// $0 -// } +// impl Ctx {$0} // ``` pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let nominal = ctx.find_node_at_offset::()?; @@ -38,17 +36,22 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio format!("Generate impl for `{name}`"), target, |edit| { - let start_offset = nominal.syntax().text_range().end(); - match ctx.config.snippet_cap { - Some(cap) => { - let snippet = generate_impl_text(&nominal, " $0"); - edit.insert_snippet(cap, start_offset, snippet); - } - None => { - let snippet = generate_impl_text(&nominal, ""); - edit.insert(start_offset, snippet); + // Generate the impl + let impl_ = utils::generate_impl(&nominal); + + // Add a tabstop after the left curly brace + if let Some(cap) = ctx.config.snippet_cap { + if let Some(l_curly) = impl_.assoc_item_list().and_then(|it| it.l_curly_token()) { + edit.add_tabstop_after_token(cap, l_curly); } } + + // Add the impl after the adt + let nominal = edit.make_mut(nominal); + ted::insert_all_raw( + ted::Position::after(nominal.syntax()), + vec![make::tokens::blank_line().into(), impl_.syntax().clone().into()], + ); }, ) } @@ -68,9 +71,7 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio // data: T, // } // -// impl $0 for Ctx { -// -// } +// impl ${0:_} for Ctx {} // ``` pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let nominal = ctx.find_node_at_offset::()?; @@ -86,17 +87,22 @@ pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> format!("Generate trait impl for `{name}`"), target, |edit| { - let start_offset = nominal.syntax().text_range().end(); - match ctx.config.snippet_cap { - Some(cap) => { - let snippet = generate_trait_impl_text_intransitive(&nominal, "$0", 
""); - edit.insert_snippet(cap, start_offset, snippet); - } - None => { - let text = generate_trait_impl_text_intransitive(&nominal, "", ""); - edit.insert(start_offset, text); + // Generate the impl + let impl_ = utils::generate_trait_impl_intransitive(&nominal, make::ty_placeholder()); + + // Make the trait type a placeholder snippet + if let Some(cap) = ctx.config.snippet_cap { + if let Some(trait_) = impl_.trait_() { + edit.add_placeholder_snippet(cap, trait_); } } + + // Add the impl after the adt + let nominal = edit.make_mut(nominal); + ted::insert_all_raw( + ted::Position::after(nominal.syntax()), + vec![make::tokens::blank_line().into(), impl_.syntax().clone().into()], + ); }, ) } @@ -117,9 +123,7 @@ mod tests { r#" struct Foo {} - impl Foo { - $0 - } + impl Foo {$0} "#, ); } @@ -134,9 +138,7 @@ mod tests { r#" struct Foo {} - impl Foo { - $0 - } + impl Foo {$0} "#, ); } @@ -151,9 +153,7 @@ mod tests { r#" struct Foo<'a, T: Foo<'a>> {} - impl<'a, T: Foo<'a>> Foo<'a, T> { - $0 - } + impl<'a, T: Foo<'a>> Foo<'a, T> {$0} "#, ); } @@ -171,9 +171,7 @@ mod tests { struct Foo<'a, T: Foo<'a>> {} #[cfg(feature = "foo")] - impl<'a, T: Foo<'a>> Foo<'a, T> { - $0 - } + impl<'a, T: Foo<'a>> Foo<'a, T> {$0} "#, ); } @@ -188,9 +186,7 @@ mod tests { r#" struct Defaulted {} - impl Defaulted { - $0 - } + impl Defaulted {$0} "#, ); } @@ -205,9 +201,7 @@ mod tests { r#" struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {} - impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b, const S: usize> Defaulted<'a, 'b, T, S> { - $0 - } + impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b, const S: usize> Defaulted<'a, 'b, T, S> {$0} "#, ); } @@ -222,9 +216,7 @@ mod tests { r#" struct Defaulted {} - impl Defaulted { - $0 - } + impl Defaulted {$0} "#, ); } @@ -254,8 +246,7 @@ mod tests { impl Struct where T: Trait, - { - $0 + {$0 } "#, ); @@ -285,9 +276,7 @@ mod tests { r#" struct Foo {} - impl $0 for Foo { - - } + impl ${0:_} for Foo {} "#, ); } @@ -302,9 +291,7 @@ mod 
tests { r#" struct Foo {} - impl $0 for Foo { - - } + impl ${0:_} for Foo {} "#, ); } @@ -319,9 +306,7 @@ mod tests { r#" struct Foo<'a, T: Foo<'a>> {} - impl<'a, T: Foo<'a>> $0 for Foo<'a, T> { - - } + impl<'a, T: Foo<'a>> ${0:_} for Foo<'a, T> {} "#, ); } @@ -339,9 +324,7 @@ mod tests { struct Foo<'a, T: Foo<'a>> {} #[cfg(feature = "foo")] - impl<'a, T: Foo<'a>> $0 for Foo<'a, T> { - - } + impl<'a, T: Foo<'a>> ${0:_} for Foo<'a, T> {} "#, ); } @@ -356,9 +339,7 @@ mod tests { r#" struct Defaulted {} - impl $0 for Defaulted { - - } + impl ${0:_} for Defaulted {} "#, ); } @@ -373,9 +354,7 @@ mod tests { r#" struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {} - impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b, const S: usize> $0 for Defaulted<'a, 'b, T, S> { - - } + impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b, const S: usize> ${0:_} for Defaulted<'a, 'b, T, S> {} "#, ); } @@ -390,9 +369,7 @@ mod tests { r#" struct Defaulted {} - impl $0 for Defaulted { - - } + impl ${0:_} for Defaulted {} "#, ); } @@ -419,11 +396,10 @@ mod tests { inner: T, } - impl $0 for Struct + impl ${0:_} for Struct where T: Trait, { - } "#, ); diff --git a/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs b/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs index 6bfc69b0ada6..4d369e705e88 100644 --- a/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs +++ b/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs @@ -79,7 +79,7 @@ pub(crate) fn generate_is_empty_from_len(acc: &mut Assists, ctx: &AssistContext< pub fn is_empty(&self) -> bool { self.len() == 0 }"# - .to_string(); + .to_owned(); builder.insert(range.end(), code) }, ) diff --git a/crates/ide-assists/src/handlers/generate_new.rs b/crates/ide-assists/src/handlers/generate_new.rs index 7bfd599660db..22c75cd5eefc 100644 --- a/crates/ide-assists/src/handlers/generate_new.rs +++ b/crates/ide-assists/src/handlers/generate_new.rs @@ -1,12 +1,13 @@ use ide_db::{ 
imports::import_assets::item_for_path_search, use_trivial_constructor::use_trivial_constructor, }; -use itertools::Itertools; -use stdx::format_to; -use syntax::ast::{self, AstNode, HasName, HasVisibility, StructKind}; +use syntax::{ + ast::{self, edit_in_place::Indent, make, AstNode, HasName, HasVisibility, StructKind}, + ted, +}; use crate::{ - utils::{find_impl_block_start, find_struct_impl, generate_impl_text}, + utils::{find_struct_impl, generate_impl}, AssistContext, AssistId, AssistKind, Assists, }; @@ -26,7 +27,9 @@ use crate::{ // } // // impl Ctx { -// fn $0new(data: T) -> Self { Self { data } } +// fn $0new(data: T) -> Self { +// Self { data } +// } // } // ``` pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { @@ -46,14 +49,6 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option let target = strukt.syntax().text_range(); acc.add(AssistId("generate_new", AssistKind::Generate), "Generate `new`", target, |builder| { - let mut buf = String::with_capacity(512); - - if impl_def.is_some() { - buf.push('\n'); - } - - let vis = strukt.visibility().map_or(String::new(), |v| format!("{v} ")); - let trivial_constructors = field_list .fields() .map(|f| { @@ -76,54 +71,79 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option &ty, )?; - Some(format!("{name}: {expr}")) + Some(make::record_expr_field(make::name_ref(&name.text()), Some(expr))) }) .collect::>(); - let params = field_list - .fields() - .enumerate() - .filter_map(|(i, f)| { - if trivial_constructors[i].is_none() { - let name = f.name()?; - let ty = f.ty()?; + let params = field_list.fields().enumerate().filter_map(|(i, f)| { + if trivial_constructors[i].is_none() { + let name = f.name()?; + let ty = f.ty()?; - Some(format!("{name}: {ty}")) - } else { - None - } - }) - .format(", "); + Some(make::param(make::ident_pat(false, false, name).into(), ty)) + } else { + None + } + }); + let params = 
make::param_list(None, params); - let fields = field_list - .fields() - .enumerate() - .filter_map(|(i, f)| { - let constructor = trivial_constructors[i].clone(); - if constructor.is_some() { - constructor - } else { - Some(f.name()?.to_string()) - } - }) - .format(", "); + let fields = field_list.fields().enumerate().filter_map(|(i, f)| { + let constructor = trivial_constructors[i].clone(); + if constructor.is_some() { + constructor + } else { + Some(make::record_expr_field(make::name_ref(&f.name()?.text()), None)) + } + }); + let fields = make::record_expr_field_list(fields); - format_to!(buf, " {vis}fn new({params}) -> Self {{ Self {{ {fields} }} }}"); + let record_expr = make::record_expr(make::ext::ident_path("Self"), fields); + let body = make::block_expr(None, Some(record_expr.into())); - let start_offset = impl_def - .and_then(|impl_def| find_impl_block_start(impl_def, &mut buf)) - .unwrap_or_else(|| { - buf = generate_impl_text(&ast::Adt::Struct(strukt.clone()), &buf); - strukt.syntax().text_range().end() - }); + let ret_type = make::ret_type(make::ty_path(make::ext::ident_path("Self"))); - match ctx.config.snippet_cap { - None => builder.insert(start_offset, buf), - Some(cap) => { - buf = buf.replace("fn new", "fn $0new"); - builder.insert_snippet(cap, start_offset, buf); + let fn_ = make::fn_( + strukt.visibility(), + make::name("new"), + None, + None, + params, + body, + Some(ret_type), + false, + false, + false, + ) + .clone_for_update(); + fn_.indent(1.into()); + + // Add a tabstop before the name + if let Some(cap) = ctx.config.snippet_cap { + if let Some(name) = fn_.name() { + builder.add_tabstop_before(cap, name); } } + + // Get the mutable version of the impl to modify + let impl_def = if let Some(impl_def) = impl_def { + builder.make_mut(impl_def) + } else { + // Generate a new impl to add the method to + let impl_def = generate_impl(&ast::Adt::Struct(strukt.clone())); + + // Insert it after the adt + let strukt = 
builder.make_mut(strukt.clone()); + + ted::insert_all_raw( + ted::Position::after(strukt.syntax()), + vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()], + ); + + impl_def + }; + + // Add the `new` method at the start of the impl + impl_def.get_or_create_assoc_item_list().add_item_at_start(fn_.into()); }) } @@ -148,7 +168,9 @@ struct Empty; struct Foo { empty: Empty } impl Foo { - fn $0new() -> Self { Self { empty: Empty } } + fn $0new() -> Self { + Self { empty: Empty } + } } "#, ); @@ -165,7 +187,9 @@ struct Empty; struct Foo { baz: String, empty: Empty } impl Foo { - fn $0new(baz: String) -> Self { Self { baz, empty: Empty } } + fn $0new(baz: String) -> Self { + Self { baz, empty: Empty } + } } "#, ); @@ -182,7 +206,9 @@ enum Empty { Bar } struct Foo { empty: Empty } impl Foo { - fn $0new() -> Self { Self { empty: Empty::Bar } } + fn $0new() -> Self { + Self { empty: Empty::Bar } + } } "#, ); @@ -201,7 +227,9 @@ struct Empty {} struct Foo { empty: Empty } impl Foo { - fn $0new(empty: Empty) -> Self { Self { empty } } + fn $0new(empty: Empty) -> Self { + Self { empty } + } } "#, ); @@ -218,7 +246,9 @@ enum Empty { Bar {} } struct Foo { empty: Empty } impl Foo { - fn $0new(empty: Empty) -> Self { Self { empty } } + fn $0new(empty: Empty) -> Self { + Self { empty } + } } "#, ); @@ -235,7 +265,9 @@ struct Foo {$0} struct Foo {} impl Foo { - fn $0new() -> Self { Self { } } + fn $0new() -> Self { + Self { } + } } "#, ); @@ -248,7 +280,9 @@ struct Foo {$0} struct Foo {} impl Foo { - fn $0new() -> Self { Self { } } + fn $0new() -> Self { + Self { } + } } "#, ); @@ -261,7 +295,9 @@ struct Foo<'a, T: Foo<'a>> {$0} struct Foo<'a, T: Foo<'a>> {} impl<'a, T: Foo<'a>> Foo<'a, T> { - fn $0new() -> Self { Self { } } + fn $0new() -> Self { + Self { } + } } "#, ); @@ -274,7 +310,9 @@ struct Foo { baz: String $0} struct Foo { baz: String } impl Foo { - fn $0new(baz: String) -> Self { Self { baz } } + fn $0new(baz: String) -> Self { + Self { baz } + } } "#, 
); @@ -287,7 +325,9 @@ struct Foo { baz: String, qux: Vec $0} struct Foo { baz: String, qux: Vec } impl Foo { - fn $0new(baz: String, qux: Vec) -> Self { Self { baz, qux } } + fn $0new(baz: String, qux: Vec) -> Self { + Self { baz, qux } + } } "#, ); @@ -304,7 +344,9 @@ struct Foo { pub baz: String, pub qux: Vec $0} struct Foo { pub baz: String, pub qux: Vec } impl Foo { - fn $0new(baz: String, qux: Vec) -> Self { Self { baz, qux } } + fn $0new(baz: String, qux: Vec) -> Self { + Self { baz, qux } + } } "#, ); @@ -323,7 +365,9 @@ impl Foo {} struct Foo {} impl Foo { - fn $0new() -> Self { Self { } } + fn $0new() -> Self { + Self { } + } } "#, ); @@ -340,7 +384,9 @@ impl Foo { struct Foo {} impl Foo { - fn $0new() -> Self { Self { } } + fn $0new() -> Self { + Self { } + } fn qux(&self) {} } @@ -363,7 +409,9 @@ impl Foo { struct Foo {} impl Foo { - fn $0new() -> Self { Self { } } + fn $0new() -> Self { + Self { } + } fn qux(&self) {} fn baz() -> i32 { @@ -385,7 +433,9 @@ pub struct Foo {$0} pub struct Foo {} impl Foo { - pub fn $0new() -> Self { Self { } } + pub fn $0new() -> Self { + Self { } + } } "#, ); @@ -398,7 +448,9 @@ pub(crate) struct Foo {$0} pub(crate) struct Foo {} impl Foo { - pub(crate) fn $0new() -> Self { Self { } } + pub(crate) fn $0new() -> Self { + Self { } + } } "#, ); @@ -493,7 +545,9 @@ pub struct Source { } impl Source { - pub fn $0new(file_id: HirFileId, ast: T) -> Self { Self { file_id, ast } } + pub fn $0new(file_id: HirFileId, ast: T) -> Self { + Self { file_id, ast } + } pub fn map U, U>(self, f: F) -> Source { Source { file_id: self.file_id, ast: f(self.ast) } diff --git a/crates/ide-assists/src/handlers/generate_trait_from_impl.rs b/crates/ide-assists/src/handlers/generate_trait_from_impl.rs index 8881aa69f29d..24094de22c8d 100644 --- a/crates/ide-assists/src/handlers/generate_trait_from_impl.rs +++ b/crates/ide-assists/src/handlers/generate_trait_from_impl.rs @@ -118,7 +118,7 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, 
ctx: &AssistContext<'_ let arg_list = if let Some(genpars) = impl_ast.generic_param_list() { genpars.to_generic_args().to_string() } else { - "".to_string() + "".to_owned() }; if let Some(snippet_cap) = ctx.config.snippet_cap { diff --git a/crates/ide-assists/src/handlers/inline_call.rs b/crates/ide-assists/src/handlers/inline_call.rs index 4ba33ada48c7..11b22b65205b 100644 --- a/crates/ide-assists/src/handlers/inline_call.rs +++ b/crates/ide-assists/src/handlers/inline_call.rs @@ -415,7 +415,24 @@ fn inline( let expr: &ast::Expr = expr; let mut insert_let_stmt = || { - let ty = sema.type_of_expr(expr).filter(TypeInfo::has_adjustment).and(param_ty.clone()); + let param_ty = match param_ty { + None => None, + Some(param_ty) => { + if sema.hir_file_for(param_ty.syntax()).is_macro() { + if let Some(param_ty) = + ast::Type::cast(insert_ws_into(param_ty.syntax().clone())) + { + Some(param_ty) + } else { + Some(param_ty.clone_for_update()) + } + } else { + Some(param_ty.clone_for_update()) + } + } + }; + let ty: Option = + sema.type_of_expr(expr).filter(TypeInfo::has_adjustment).and(param_ty); let is_self = param .name(sema.db) @@ -1732,6 +1749,49 @@ pub fn main() { this.0 += 1; }; } +"#, + ) + } + + #[test] + fn inline_call_with_reference_in_macros() { + check_assist( + inline_call, + r#" +fn _write_u64(s: &mut u64, x: u64) { + *s += x; +} +macro_rules! impl_write { + ($(($ty:ident, $meth:ident),)*) => {$( + fn _hash(inner_self_: &u64, state: &mut u64) { + $meth(state, *inner_self_) + } + )*} +} +impl_write! { (u64, _write_u64), } +fn _hash2(self_: &u64, state: &mut u64) { + $0_hash(&self_, state); +} +"#, + r#" +fn _write_u64(s: &mut u64, x: u64) { + *s += x; +} +macro_rules! impl_write { + ($(($ty:ident, $meth:ident),)*) => {$( + fn _hash(inner_self_: &u64, state: &mut u64) { + $meth(state, *inner_self_) + } + )*} +} +impl_write! 
{ (u64, _write_u64), } +fn _hash2(self_: &u64, state: &mut u64) { + { + let inner_self_: &u64 = &self_; + let state: &mut u64 = state; + _write_u64(state, *inner_self_) + }; +} "#, ) } diff --git a/crates/ide-assists/src/handlers/inline_const_as_literal.rs b/crates/ide-assists/src/handlers/inline_const_as_literal.rs index 18437453761c..111ea50fdc94 100644 --- a/crates/ide-assists/src/handlers/inline_const_as_literal.rs +++ b/crates/ide-assists/src/handlers/inline_const_as_literal.rs @@ -60,7 +60,7 @@ pub(crate) fn inline_const_as_literal(acc: &mut Assists, ctx: &AssistContext<'_> let id = AssistId("inline_const_as_literal", AssistKind::RefactorInline); - let label = "Inline const as literal".to_string(); + let label = "Inline const as literal".to_owned(); let target = variable.syntax().text_range(); return acc.add(id, label, target, |edit| { diff --git a/crates/ide-assists/src/handlers/inline_macro.rs b/crates/ide-assists/src/handlers/inline_macro.rs index c1beb46c8096..0c9e971dd23c 100644 --- a/crates/ide-assists/src/handlers/inline_macro.rs +++ b/crates/ide-assists/src/handlers/inline_macro.rs @@ -41,7 +41,7 @@ pub(crate) fn inline_macro(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option acc.add( AssistId("inline_macro", AssistKind::RefactorInline), - "Inline macro".to_string(), + "Inline macro".to_owned(), text_range, |builder| builder.replace(text_range, expanded.to_string()), ) diff --git a/crates/ide-assists/src/handlers/inline_type_alias.rs b/crates/ide-assists/src/handlers/inline_type_alias.rs index 5982e9d61dbf..e2f3d9edcd10 100644 --- a/crates/ide-assists/src/handlers/inline_type_alias.rs +++ b/crates/ide-assists/src/handlers/inline_type_alias.rs @@ -3,12 +3,12 @@ // - Remove unused aliases if there are no longer any users, see inline_call.rs. 
use hir::{HasSource, PathResolution}; +use ide_db::FxHashMap; use ide_db::{ defs::Definition, imports::insert_use::ast_to_remove_for_path_in_use_stmt, search::FileReference, }; use itertools::Itertools; -use std::collections::HashMap; use syntax::{ ast::{self, make, HasGenericParams, HasName}, ted, AstNode, NodeOrToken, SyntaxNode, @@ -189,14 +189,14 @@ fn inline(alias_def: &ast::TypeAlias, alias_instance: &ast::PathType) -> Option< Some(repl) } -struct LifetimeMap(HashMap); +struct LifetimeMap(FxHashMap); impl LifetimeMap { fn new( instance_args: &Option, alias_generics: &ast::GenericParamList, ) -> Option { - let mut inner = HashMap::new(); + let mut inner = FxHashMap::default(); let wildcard_lifetime = make::lifetime("'_"); let lifetimes = alias_generics @@ -231,14 +231,14 @@ impl LifetimeMap { } } -struct ConstAndTypeMap(HashMap); +struct ConstAndTypeMap(FxHashMap); impl ConstAndTypeMap { fn new( instance_args: &Option, alias_generics: &ast::GenericParamList, ) -> Option { - let mut inner = HashMap::new(); + let mut inner = FxHashMap::default(); let instance_generics = generic_args_to_const_and_type_generics(instance_args); let alias_generics = generic_param_list_to_const_and_type_generics(alias_generics); diff --git a/crates/ide-assists/src/handlers/into_to_qualified_from.rs b/crates/ide-assists/src/handlers/into_to_qualified_from.rs index f7da88b2c183..dee74afcbe24 100644 --- a/crates/ide-assists/src/handlers/into_to_qualified_from.rs +++ b/crates/ide-assists/src/handlers/into_to_qualified_from.rs @@ -48,7 +48,7 @@ pub(crate) fn into_to_qualified_from(acc: &mut Assists, ctx: &AssistContext<'_>) let fnc = sema.resolve_method_call(&method_call)?; let scope = sema.scope(method_call.syntax())?; // Check if the method call refers to Into trait. - if fnc.as_assoc_item(db)?.containing_trait_impl(db)? + if fnc.as_assoc_item(db)?.implemented_trait(db)? == FamousDefs(sema, scope.krate()).core_convert_Into()? 
{ let type_call = sema.type_of_expr(&method_call.clone().into())?; diff --git a/crates/ide-assists/src/handlers/introduce_named_lifetime.rs b/crates/ide-assists/src/handlers/introduce_named_lifetime.rs index c5aa9755bc0b..62909c586e3d 100644 --- a/crates/ide-assists/src/handlers/introduce_named_lifetime.rs +++ b/crates/ide-assists/src/handlers/introduce_named_lifetime.rs @@ -129,7 +129,7 @@ fn generate_unique_lifetime_param_name( type_params.lifetime_params().map(|p| p.syntax().text().to_string()).collect(); ('a'..='z').map(|it| format!("'{it}")).find(|it| !used_lifetime_params.contains(it)) } - None => Some("'a".to_string()), + None => Some("'a".to_owned()), } .map(|it| make::lifetime(&it)) } diff --git a/crates/ide-assists/src/handlers/merge_match_arms.rs b/crates/ide-assists/src/handlers/merge_match_arms.rs index aae9f20d4ea5..4608e9494bca 100644 --- a/crates/ide-assists/src/handlers/merge_match_arms.rs +++ b/crates/ide-assists/src/handlers/merge_match_arms.rs @@ -1,5 +1,6 @@ use hir::Type; -use std::{collections::HashMap, iter::successors}; +use ide_db::FxHashMap; +use std::iter::successors; use syntax::{ algo::neighbor, ast::{self, AstNode, HasName}, @@ -95,7 +96,7 @@ fn contains_placeholder(a: &ast::MatchArm) -> bool { } fn are_same_types( - current_arm_types: &HashMap>, + current_arm_types: &FxHashMap>, arm: &ast::MatchArm, ctx: &AssistContext<'_>, ) -> bool { @@ -114,11 +115,11 @@ fn are_same_types( fn get_arm_types( context: &AssistContext<'_>, arm: &ast::MatchArm, -) -> HashMap> { - let mut mapping: HashMap> = HashMap::new(); +) -> FxHashMap> { + let mut mapping: FxHashMap> = FxHashMap::default(); fn recurse( - map: &mut HashMap>, + map: &mut FxHashMap>, ctx: &AssistContext<'_>, pat: &Option, ) { diff --git a/crates/ide-assists/src/handlers/move_module_to_file.rs b/crates/ide-assists/src/handlers/move_module_to_file.rs index 166b25c69e18..048906d9d9f5 100644 --- a/crates/ide-assists/src/handlers/move_module_to_file.rs +++ 
b/crates/ide-assists/src/handlers/move_module_to_file.rs @@ -75,7 +75,7 @@ pub(crate) fn move_module_to_file(acc: &mut Assists, ctx: &AssistContext<'_>) -> let contents = { let items = module_items.dedent(IndentLevel(1)).to_string(); let mut items = - items.trim_start_matches('{').trim_end_matches('}').trim().to_string(); + items.trim_start_matches('{').trim_end_matches('}').trim().to_owned(); if !items.is_empty() { items.push('\n'); } diff --git a/crates/ide-assists/src/handlers/number_representation.rs b/crates/ide-assists/src/handlers/number_representation.rs index 7e3fef516bfd..a13799f9b131 100644 --- a/crates/ide-assists/src/handlers/number_representation.rs +++ b/crates/ide-assists/src/handlers/number_representation.rs @@ -33,7 +33,7 @@ pub(crate) fn reformat_number_literal(acc: &mut Assists, ctx: &AssistContext<'_> } let radix = literal.radix(); - let mut converted = prefix.to_string(); + let mut converted = prefix.to_owned(); converted.push_str(&add_group_separators(value, group_size(radix))); converted.push_str(suffix); diff --git a/crates/ide-assists/src/handlers/qualify_path.rs b/crates/ide-assists/src/handlers/qualify_path.rs index 086487184909..63a09ce2e976 100644 --- a/crates/ide-assists/src/handlers/qualify_path.rs +++ b/crates/ide-assists/src/handlers/qualify_path.rs @@ -203,7 +203,7 @@ fn item_as_trait(db: &RootDatabase, item: hir::ItemInNs) -> Option { match item_module_def { hir::ModuleDef::Trait(trait_) => Some(trait_), - _ => item_module_def.as_assoc_item(db)?.containing_trait(db), + _ => item_module_def.as_assoc_item(db)?.container_trait(db), } } diff --git a/crates/ide-assists/src/handlers/remove_unused_imports.rs b/crates/ide-assists/src/handlers/remove_unused_imports.rs index 35bf84c43498..d67b259d2f5f 100644 --- a/crates/ide-assists/src/handlers/remove_unused_imports.rs +++ b/crates/ide-assists/src/handlers/remove_unused_imports.rs @@ -1,11 +1,11 @@ -use std::collections::{hash_map::Entry, HashMap}; +use std::collections::hash_map::Entry; 
use hir::{HirFileIdExt, InFile, InRealFile, Module, ModuleSource}; use ide_db::{ base_db::FileRange, defs::Definition, search::{FileReference, ReferenceCategory, SearchScope}, - RootDatabase, + FxHashMap, RootDatabase, }; use syntax::{ast, AstNode}; use text_edit::TextRange; @@ -44,7 +44,7 @@ pub(crate) fn remove_unused_imports(acc: &mut Assists, ctx: &AssistContext<'_>) let uses = uses_up.chain(uses_down).collect::>(); // Maps use nodes to the scope that we should search through to find - let mut search_scopes = HashMap::>::new(); + let mut search_scopes = FxHashMap::>::default(); // iterator over all unused use trees let mut unused = uses diff --git a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs index 788cc846c2a5..3420d906dea5 100644 --- a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs +++ b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs @@ -2,15 +2,17 @@ use hir::{InFile, MacroFileIdExt, ModuleDef}; use ide_db::{helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator}; use itertools::Itertools; use syntax::{ - ast::{self, AstNode, HasName}, + ast::{self, make, AstNode, HasName}, + ted, SyntaxKind::WHITESPACE, + T, }; use crate::{ assist_context::{AssistContext, Assists, SourceChangeBuilder}, utils::{ - add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body, - generate_trait_impl_text, render_snippet, Cursor, DefaultMethods, IgnoreAssocItems, + add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body, generate_trait_impl, + DefaultMethods, IgnoreAssocItems, }, AssistId, AssistKind, }; @@ -132,35 +134,59 @@ fn add_assist( label, target, |builder| { - let insert_pos = adt.syntax().text_range().end(); + let insert_after = ted::Position::after(builder.make_mut(adt.clone()).syntax()); + let impl_def_with_items = impl_def_from_trait(&ctx.sema, adt, &annotated_name, trait_, 
replace_trait_path); update_attribute(builder, old_derives, old_tree, old_trait_path, attr); - let trait_path = replace_trait_path.to_string(); + + let trait_path = make::ty_path(replace_trait_path.clone()); + match (ctx.config.snippet_cap, impl_def_with_items) { (None, _) => { - builder.insert(insert_pos, generate_trait_impl_text(adt, &trait_path, "")) + let impl_def = generate_trait_impl(adt, trait_path); + + ted::insert_all( + insert_after, + vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()], + ); + } + (Some(cap), None) => { + let impl_def = generate_trait_impl(adt, trait_path); + + if let Some(l_curly) = + impl_def.assoc_item_list().and_then(|it| it.l_curly_token()) + { + builder.add_tabstop_after_token(cap, l_curly); + } + + ted::insert_all( + insert_after, + vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()], + ); } - (Some(cap), None) => builder.insert_snippet( - cap, - insert_pos, - generate_trait_impl_text(adt, &trait_path, " $0"), - ), (Some(cap), Some((impl_def, first_assoc_item))) => { - let mut cursor = Cursor::Before(first_assoc_item.syntax()); - let placeholder; + let mut added_snippet = false; if let ast::AssocItem::Fn(ref func) = first_assoc_item { if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast) { if m.syntax().text() == "todo!()" { - placeholder = m; - cursor = Cursor::Replace(placeholder.syntax()); + // Make the `todo!()` a placeholder + builder.add_placeholder_snippet(cap, m); + added_snippet = true; } } } - let rendered = render_snippet(cap, impl_def.syntax(), cursor); - builder.insert_snippet(cap, insert_pos, format!("\n\n{rendered}")) + if !added_snippet { + // If we haven't already added a snippet, add a tabstop before the generated function + builder.add_tabstop_before(cap, first_assoc_item); + } + + ted::insert_all( + insert_after, + vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()], + ); } }; }, @@ -190,28 +216,7 @@ fn impl_def_from_trait( 
if trait_items.is_empty() { return None; } - let impl_def = { - use syntax::ast::Impl; - let text = generate_trait_impl_text(adt, trait_path.to_string().as_str(), ""); - // FIXME: `generate_trait_impl_text` currently generates two newlines - // at the front, but these leading newlines should really instead be - // inserted at the same time the impl is inserted - assert_eq!(&text[..2], "\n\n", "`generate_trait_impl_text` output changed"); - let parse = syntax::SourceFile::parse(&text[2..]); - let node = match parse.tree().syntax().descendants().find_map(Impl::cast) { - Some(it) => it, - None => { - panic!( - "Failed to make ast node `{}` from text {}", - std::any::type_name::(), - text - ) - } - }; - let node = node.clone_for_update(); - assert_eq!(node.syntax().text_range().start(), 0.into()); - node - }; + let impl_def = generate_trait_impl(adt, make::ty_path(trait_path.clone())); let first_assoc_item = add_trait_assoc_items_to_impl(sema, &trait_items, trait_, &impl_def, target_scope); @@ -238,20 +243,34 @@ fn update_attribute( let has_more_derives = !new_derives.is_empty(); if has_more_derives { - let new_derives = format!("({})", new_derives.iter().format(", ")); - builder.replace(old_tree.syntax().text_range(), new_derives); - } else { - let attr_range = attr.syntax().text_range(); - builder.delete(attr_range); + let old_tree = builder.make_mut(old_tree.clone()); - if let Some(line_break_range) = attr - .syntax() - .next_sibling_or_token() - .filter(|t| t.kind() == WHITESPACE) - .map(|t| t.text_range()) + // Make the paths into flat lists of tokens in a vec + let tt = new_derives.iter().map(|path| path.syntax().clone()).map(|node| { + node.descendants_with_tokens() + .filter_map(|element| element.into_token()) + .collect::>() + }); + // ...which are interspersed with ", " + let tt = Itertools::intersperse(tt, vec![make::token(T![,]), make::tokens::single_space()]); + // ...wrap them into the appropriate `NodeOrToken` variant + let tt = 
tt.flatten().map(syntax::NodeOrToken::Token); + // ...and make them into a flat list of tokens + let tt = tt.collect::>(); + + let new_tree = make::token_tree(T!['('], tt).clone_for_update(); + ted::replace(old_tree.syntax(), new_tree.syntax()); + } else { + // Remove the attr and any trailing whitespace + let attr = builder.make_mut(attr.clone()); + + if let Some(line_break) = + attr.syntax().next_sibling_or_token().filter(|t| t.kind() == WHITESPACE) { - builder.delete(line_break_range); + ted::remove(line_break) } + + ted::remove(attr.syntax()) } } @@ -1168,9 +1187,7 @@ struct Foo { bar: String, } -impl Debug for Foo { - $0 -} +impl Debug for Foo {$0} "#, ) } @@ -1191,9 +1208,7 @@ pub struct Foo { bar: String, } -impl Debug for Foo { - $0 -} +impl Debug for Foo {$0} "#, ) } @@ -1211,9 +1226,7 @@ struct Foo {} #[derive(Display, Serialize)] struct Foo {} -impl Debug for Foo { - $0 -} +impl Debug for Foo {$0} "#, ) } diff --git a/crates/ide-assists/src/handlers/unwrap_result_return_type.rs b/crates/ide-assists/src/handlers/unwrap_result_return_type.rs index 03e6dfebebfb..8a9e669630bf 100644 --- a/crates/ide-assists/src/handlers/unwrap_result_return_type.rs +++ b/crates/ide-assists/src/handlers/unwrap_result_return_type.rs @@ -47,9 +47,7 @@ pub(crate) fn unwrap_result_return_type(acc: &mut Assists, ctx: &AssistContext<' return None; } - let Some(ok_type) = unwrap_result_type(type_ref) else { - return None; - }; + let ok_type = unwrap_result_type(type_ref)?; acc.add( AssistId("unwrap_result_return_type", AssistKind::RefactorRewrite), diff --git a/crates/ide-assists/src/tests.rs b/crates/ide-assists/src/tests.rs index 573d69b5c6d3..9b6f7d018ee3 100644 --- a/crates/ide-assists/src/tests.rs +++ b/crates/ide-assists/src/tests.rs @@ -474,7 +474,7 @@ pub fn test_some_range(a: int) -> bool { &db, &cfg, AssistResolveStrategy::Single(SingleResolve { - assist_id: "SOMETHING_MISMATCHING".to_string(), + assist_id: "SOMETHING_MISMATCHING".to_owned(), assist_kind: 
AssistKind::RefactorExtract, }), frange, @@ -520,7 +520,7 @@ pub fn test_some_range(a: int) -> bool { &db, &cfg, AssistResolveStrategy::Single(SingleResolve { - assist_id: "extract_variable".to_string(), + assist_id: "extract_variable".to_owned(), assist_kind: AssistKind::RefactorExtract, }), frange, @@ -687,12 +687,21 @@ pub fn test_some_range(a: int) -> bool { delete: 59..60, }, Indel { - insert: "\n\nfn $0fun_name() -> i32 {\n 5\n}", + insert: "\n\nfn fun_name() -> i32 {\n 5\n}", delete: 110..110, }, ], }, - None, + Some( + SnippetEdit( + [ + ( + 0, + 124..124, + ), + ], + ), + ), ), }, file_system_edits: [], diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs index 8d7c49d52c23..8ad735d0ae80 100644 --- a/crates/ide-assists/src/tests/generated.rs +++ b/crates/ide-assists/src/tests/generated.rs @@ -1429,7 +1429,7 @@ fn doctest_generate_getter() { check_doc_test( "generate_getter", r#####" -//- minicore: as_ref +//- minicore: as_ref, deref pub struct String; impl AsRef for String { fn as_ref(&self) -> &str { @@ -1437,6 +1437,13 @@ impl AsRef for String { } } +impl core::ops::Deref for String { + type Target = str; + fn deref(&self) -> &Self::Target { + "" + } +} + struct Person { nam$0e: String, } @@ -1449,13 +1456,20 @@ impl AsRef for String { } } +impl core::ops::Deref for String { + type Target = str; + fn deref(&self) -> &Self::Target { + "" + } +} + struct Person { name: String, } impl Person { fn $0name(&self) -> &str { - self.name.as_ref() + &self.name } } "#####, @@ -1499,9 +1513,7 @@ struct Ctx { data: T, } -impl Ctx { - $0 -} +impl Ctx {$0} "#####, ) } @@ -1589,7 +1601,9 @@ struct Ctx { } impl Ctx { - fn $0new(data: T) -> Self { Self { data } } + fn $0new(data: T) -> Self { + Self { data } + } } "#####, ) @@ -1688,9 +1702,7 @@ struct Ctx { data: T, } -impl $0 for Ctx { - -} +impl ${0:_} for Ctx {} "#####, ) } diff --git a/crates/ide-assists/src/tests/sourcegen.rs b/crates/ide-assists/src/tests/sourcegen.rs 
index 088d93f9a6ba..847cb1af51e0 100644 --- a/crates/ide-assists/src/tests/sourcegen.rs +++ b/crates/ide-assists/src/tests/sourcegen.rs @@ -15,7 +15,7 @@ fn sourcegen_assists_docs() { let mut buf = " use super::check_doc_test; " - .to_string(); + .to_owned(); for assist in assists.iter() { for (idx, section) in assist.sections.iter().enumerate() { let test_id = @@ -101,7 +101,7 @@ impl Assist { let mut assist = Assist { id, location, sections: Vec::new() }; while lines.peek().is_some() { - let doc = take_until(lines.by_ref(), "```").trim().to_string(); + let doc = take_until(lines.by_ref(), "```").trim().to_owned(); assert!( (doc.chars().next().unwrap().is_ascii_uppercase() && doc.ends_with('.')) || !assist.sections.is_empty(), diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs index eeb3d80d07bd..a4f14326751b 100644 --- a/crates/ide-assists/src/utils.rs +++ b/crates/ide-assists/src/utils.rs @@ -1,12 +1,10 @@ //! Assorted functions shared by several assists. -use std::ops; - pub(crate) use gen_trait_fn_body::gen_trait_fn_body; use hir::{db::HirDatabase, HasAttrs as HirHasAttrs, HirDisplay, InFile, Semantics}; use ide_db::{ famous_defs::FamousDefs, path_transform::PathTransform, - syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase, SnippetCap, + syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase, }; use stdx::format_to; use syntax::{ @@ -217,43 +215,6 @@ pub fn add_trait_assoc_items_to_impl( first_item.unwrap() } -#[derive(Clone, Copy, Debug)] -pub(crate) enum Cursor<'a> { - Replace(&'a SyntaxNode), - Before(&'a SyntaxNode), -} - -impl<'a> Cursor<'a> { - fn node(self) -> &'a SyntaxNode { - match self { - Cursor::Replace(node) | Cursor::Before(node) => node, - } - } -} - -pub(crate) fn render_snippet(_cap: SnippetCap, node: &SyntaxNode, cursor: Cursor<'_>) -> String { - assert!(cursor.node().ancestors().any(|it| it == *node)); - let range = cursor.node().text_range() - 
node.text_range().start(); - let range: ops::Range = range.into(); - - let mut placeholder = cursor.node().to_string(); - escape(&mut placeholder); - let tab_stop = match cursor { - Cursor::Replace(placeholder) => format!("${{0:{placeholder}}}"), - Cursor::Before(placeholder) => format!("$0{placeholder}"), - }; - - let mut buf = node.to_string(); - buf.replace_range(range, &tab_stop); - return buf; - - fn escape(buf: &mut String) { - stdx::replace(buf, '{', r"\{"); - stdx::replace(buf, '}', r"\}"); - stdx::replace(buf, '$', r"\$"); - } -} - pub(crate) fn vis_offset(node: &SyntaxNode) -> TextSize { node.children_with_tokens() .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR)) @@ -445,15 +406,6 @@ fn has_any_fn(imp: &ast::Impl, names: &[String]) -> bool { false } -/// Find the start of the `impl` block for the given `ast::Impl`. -// -// FIXME: this partially overlaps with `find_struct_impl` -pub(crate) fn find_impl_block_start(impl_def: ast::Impl, buf: &mut String) -> Option { - buf.push('\n'); - let start = impl_def.assoc_item_list().and_then(|it| it.l_curly_token())?.text_range().end(); - Some(start) -} - /// Find the end of the `impl` block for the given `ast::Impl`. // // FIXME: this partially overlaps with `find_struct_impl` @@ -470,6 +422,7 @@ pub(crate) fn find_impl_block_end(impl_def: ast::Impl, buf: &mut String) -> Opti /// Generates the surrounding `impl Type { }` including type and lifetime /// parameters. +// FIXME: migrate remaining uses to `generate_impl` pub(crate) fn generate_impl_text(adt: &ast::Adt, code: &str) -> String { generate_impl_text_inner(adt, None, true, code) } @@ -478,6 +431,7 @@ pub(crate) fn generate_impl_text(adt: &ast::Adt, code: &str) -> String { /// and lifetime parameters, with `` appended to `impl`'s generic parameters' bounds. /// /// This is useful for traits like `PartialEq`, since `impl PartialEq for U` often requires `T: PartialEq`. 
+// FIXME: migrate remaining uses to `generate_trait_impl` pub(crate) fn generate_trait_impl_text(adt: &ast::Adt, trait_text: &str, code: &str) -> String { generate_impl_text_inner(adt, Some(trait_text), true, code) } @@ -486,6 +440,7 @@ pub(crate) fn generate_trait_impl_text(adt: &ast::Adt, trait_text: &str, code: & /// and lifetime parameters, with `impl`'s generic parameters' bounds kept as-is. /// /// This is useful for traits like `From`, since `impl From for U` doesn't require `T: From`. +// FIXME: migrate remaining uses to `generate_trait_impl_intransitive` pub(crate) fn generate_trait_impl_text_intransitive( adt: &ast::Adt, trait_text: &str, @@ -516,7 +471,7 @@ fn generate_impl_text_inner( // Add the current trait to `bounds` if the trait is transitive, // meaning `impl Trait for U` requires `T: Trait`. if trait_is_transitive { - bounds.push(make::type_bound(trait_)); + bounds.push(make::type_bound_text(trait_)); } }; // `{ty_param}: {bounds}` @@ -574,6 +529,101 @@ fn generate_impl_text_inner( buf } +/// Generates the corresponding `impl Type {}` including type and lifetime +/// parameters. +pub(crate) fn generate_impl(adt: &ast::Adt) -> ast::Impl { + generate_impl_inner(adt, None, true) +} + +/// Generates the corresponding `impl for Type {}` including type +/// and lifetime parameters, with `` appended to `impl`'s generic parameters' bounds. +/// +/// This is useful for traits like `PartialEq`, since `impl PartialEq for U` often requires `T: PartialEq`. +pub(crate) fn generate_trait_impl(adt: &ast::Adt, trait_: ast::Type) -> ast::Impl { + generate_impl_inner(adt, Some(trait_), true) +} + +/// Generates the corresponding `impl for Type {}` including type +/// and lifetime parameters, with `impl`'s generic parameters' bounds kept as-is. +/// +/// This is useful for traits like `From`, since `impl From for U` doesn't require `T: From`. 
+pub(crate) fn generate_trait_impl_intransitive(adt: &ast::Adt, trait_: ast::Type) -> ast::Impl { + generate_impl_inner(adt, Some(trait_), false) +} + +fn generate_impl_inner( + adt: &ast::Adt, + trait_: Option, + trait_is_transitive: bool, +) -> ast::Impl { + // Ensure lifetime params are before type & const params + let generic_params = adt.generic_param_list().map(|generic_params| { + let lifetime_params = + generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam); + let ty_or_const_params = generic_params.type_or_const_params().map(|param| { + match param { + ast::TypeOrConstParam::Type(param) => { + let param = param.clone_for_update(); + // remove defaults since they can't be specified in impls + param.remove_default(); + let mut bounds = + param.type_bound_list().map_or_else(Vec::new, |it| it.bounds().collect()); + if let Some(trait_) = &trait_ { + // Add the current trait to `bounds` if the trait is transitive, + // meaning `impl Trait for U` requires `T: Trait`. + if trait_is_transitive { + bounds.push(make::type_bound(trait_.clone())); + } + }; + // `{ty_param}: {bounds}` + let param = + make::type_param(param.name().unwrap(), make::type_bound_list(bounds)); + ast::GenericParam::TypeParam(param) + } + ast::TypeOrConstParam::Const(param) => { + let param = param.clone_for_update(); + // remove defaults since they can't be specified in impls + param.remove_default(); + ast::GenericParam::ConstParam(param) + } + } + }); + + make::generic_param_list(itertools::chain(lifetime_params, ty_or_const_params)) + }); + let generic_args = + generic_params.as_ref().map(|params| params.to_generic_args().clone_for_update()); + let ty = make::ty_path(make::ext::ident_path(&adt.name().unwrap().text())); + + let impl_ = match trait_ { + Some(trait_) => make::impl_trait( + false, + None, + None, + generic_params, + generic_args, + false, + trait_, + ty, + None, + adt.where_clause(), + None, + ), + None => make::impl_(generic_params, generic_args, ty, 
adt.where_clause(), None), + } + .clone_for_update(); + + // Copy any cfg attrs from the original adt + let cfg_attrs = adt + .attrs() + .filter(|attr| attr.as_simple_call().map(|(name, _arg)| name == "cfg").unwrap_or(false)); + for attr in cfg_attrs { + impl_.add_attr(attr.clone_for_update()); + } + + impl_ +} + pub(crate) fn add_method_to_adt( builder: &mut SourceChangeBuilder, adt: &ast::Adt, @@ -600,6 +650,7 @@ pub(crate) fn add_method_to_adt( pub(crate) struct ReferenceConversion { conversion: ReferenceConversionType, ty: hir::Type, + impls_deref: bool, } #[derive(Debug)] @@ -619,10 +670,10 @@ enum ReferenceConversionType { } impl ReferenceConversion { - pub(crate) fn convert_type(&self, db: &dyn HirDatabase) -> String { - match self.conversion { + pub(crate) fn convert_type(&self, db: &dyn HirDatabase) -> ast::Type { + let ty = match self.conversion { ReferenceConversionType::Copy => self.ty.display(db).to_string(), - ReferenceConversionType::AsRefStr => "&str".to_string(), + ReferenceConversionType::AsRefStr => "&str".to_owned(), ReferenceConversionType::AsRefSlice => { let type_argument_name = self.ty.type_arguments().next().unwrap().display(db).to_string(); @@ -646,17 +697,27 @@ impl ReferenceConversion { type_arguments.next().unwrap().display(db).to_string(); format!("Result<&{first_type_argument_name}, &{second_type_argument_name}>") } - } + }; + + make::ty(&ty) } - pub(crate) fn getter(&self, field_name: String) -> String { + pub(crate) fn getter(&self, field_name: String) -> ast::Expr { + let expr = make::expr_field(make::ext::expr_self(), &field_name); + match self.conversion { - ReferenceConversionType::Copy => format!("self.{field_name}"), + ReferenceConversionType::Copy => expr, ReferenceConversionType::AsRefStr | ReferenceConversionType::AsRefSlice | ReferenceConversionType::Dereferenced | ReferenceConversionType::Option - | ReferenceConversionType::Result => format!("self.{field_name}.as_ref()"), + | ReferenceConversionType::Result => { + if 
self.impls_deref { + make::expr_ref(expr, false) + } else { + make::expr_method_call(expr, make::name_ref("as_ref"), make::arg_list([])) + } + } } } } @@ -675,54 +736,64 @@ pub(crate) fn convert_reference_type( .or_else(|| handle_dereferenced(&ty, db, famous_defs)) .or_else(|| handle_option_as_ref(&ty, db, famous_defs)) .or_else(|| handle_result_as_ref(&ty, db, famous_defs)) - .map(|conversion| ReferenceConversion { ty, conversion }) + .map(|(conversion, impls_deref)| ReferenceConversion { ty, conversion, impls_deref }) } -fn handle_copy(ty: &hir::Type, db: &dyn HirDatabase) -> Option { - ty.is_copy(db).then_some(ReferenceConversionType::Copy) +fn could_deref_to_target(ty: &hir::Type, target: &hir::Type, db: &dyn HirDatabase) -> bool { + let ty_ref = hir::Type::reference(ty, hir::Mutability::Shared); + let target_ref = hir::Type::reference(target, hir::Mutability::Shared); + ty_ref.could_coerce_to(db, &target_ref) +} + +fn handle_copy(ty: &hir::Type, db: &dyn HirDatabase) -> Option<(ReferenceConversionType, bool)> { + ty.is_copy(db).then_some((ReferenceConversionType::Copy, true)) } fn handle_as_ref_str( ty: &hir::Type, db: &dyn HirDatabase, famous_defs: &FamousDefs<'_, '_>, -) -> Option { +) -> Option<(ReferenceConversionType, bool)> { let str_type = hir::BuiltinType::str().ty(db); - ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[str_type]) - .then_some(ReferenceConversionType::AsRefStr) + ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[str_type.clone()]) + .then_some((ReferenceConversionType::AsRefStr, could_deref_to_target(ty, &str_type, db))) } fn handle_as_ref_slice( ty: &hir::Type, db: &dyn HirDatabase, famous_defs: &FamousDefs<'_, '_>, -) -> Option { +) -> Option<(ReferenceConversionType, bool)> { let type_argument = ty.type_arguments().next()?; let slice_type = hir::Type::new_slice(type_argument); - ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[slice_type]) - .then_some(ReferenceConversionType::AsRefSlice) + ty.impls_trait(db, 
famous_defs.core_convert_AsRef()?, &[slice_type.clone()]).then_some(( + ReferenceConversionType::AsRefSlice, + could_deref_to_target(ty, &slice_type, db), + )) } fn handle_dereferenced( ty: &hir::Type, db: &dyn HirDatabase, famous_defs: &FamousDefs<'_, '_>, -) -> Option { +) -> Option<(ReferenceConversionType, bool)> { let type_argument = ty.type_arguments().next()?; - ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[type_argument]) - .then_some(ReferenceConversionType::Dereferenced) + ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[type_argument.clone()]).then_some(( + ReferenceConversionType::Dereferenced, + could_deref_to_target(ty, &type_argument, db), + )) } fn handle_option_as_ref( ty: &hir::Type, db: &dyn HirDatabase, famous_defs: &FamousDefs<'_, '_>, -) -> Option { +) -> Option<(ReferenceConversionType, bool)> { if ty.as_adt() == famous_defs.core_option_Option()?.ty(db).as_adt() { - Some(ReferenceConversionType::Option) + Some((ReferenceConversionType::Option, false)) } else { None } @@ -732,9 +803,9 @@ fn handle_result_as_ref( ty: &hir::Type, db: &dyn HirDatabase, famous_defs: &FamousDefs<'_, '_>, -) -> Option { +) -> Option<(ReferenceConversionType, bool)> { if ty.as_adt() == famous_defs.core_result_Result()?.ty(db).as_adt() { - Some(ReferenceConversionType::Result) + Some((ReferenceConversionType::Result, false)) } else { None } diff --git a/crates/ide-assists/src/utils/suggest_name.rs b/crates/ide-assists/src/utils/suggest_name.rs index 78dee24a6d3a..1859825b3d65 100644 --- a/crates/ide-assists/src/utils/suggest_name.rs +++ b/crates/ide-assists/src/utils/suggest_name.rs @@ -1,9 +1,7 @@ //! 
This module contains functions to suggest names for expressions, functions and other items -use std::collections::HashSet; - use hir::Semantics; -use ide_db::RootDatabase; +use ide_db::{FxHashSet, RootDatabase}; use itertools::Itertools; use stdx::to_lower_snake_case; use syntax::{ @@ -78,8 +76,8 @@ pub(crate) fn for_unique_generic_name( ast::GenericParam::TypeParam(t) => t.name().unwrap().to_string(), p => p.to_string(), }) - .collect::>(); - let mut name = name.to_string(); + .collect::>(); + let mut name = name.to_owned(); let base_len = name.len(); let mut count = 0; while param_names.contains(&name) { @@ -167,7 +165,7 @@ pub(crate) fn for_variable(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) } } - "var_name".to_string() + "var_name".to_owned() } fn normalize(name: &str) -> Option { diff --git a/crates/ide-completion/src/completions/dot.rs b/crates/ide-completion/src/completions/dot.rs index 00135a6d202c..24a1f9492e22 100644 --- a/crates/ide-completion/src/completions/dot.rs +++ b/crates/ide-completion/src/completions/dot.rs @@ -4,7 +4,10 @@ use ide_db::FxHashSet; use syntax::SmolStr; use crate::{ - context::{CompletionContext, DotAccess, DotAccessKind, ExprCtx, PathCompletionCtx, Qualified}, + context::{ + CompletionContext, DotAccess, DotAccessExprCtx, DotAccessKind, PathCompletionCtx, + PathExprCtx, Qualified, + }, CompletionItem, CompletionItemKind, Completions, }; @@ -51,7 +54,7 @@ pub(crate) fn complete_undotted_self( acc: &mut Completions, ctx: &CompletionContext<'_>, path_ctx: &PathCompletionCtx, - expr_ctx: &ExprCtx, + expr_ctx: &PathExprCtx, ) { if !ctx.config.enable_self_on_the_fly { return; @@ -66,7 +69,7 @@ pub(crate) fn complete_undotted_self( return; } let self_param = match expr_ctx { - ExprCtx { self_param: Some(self_param), .. } => self_param, + PathExprCtx { self_param: Some(self_param), .. 
} => self_param, _ => return, }; @@ -82,6 +85,10 @@ pub(crate) fn complete_undotted_self( receiver: None, receiver_ty: None, kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal: false }, + ctx: DotAccessExprCtx { + in_block_expr: expr_ctx.in_block_expr, + in_breakable: expr_ctx.in_breakable, + }, }, Some(hir::known::SELF_PARAM), field, @@ -99,6 +106,10 @@ pub(crate) fn complete_undotted_self( receiver: None, receiver_ty: None, kind: DotAccessKind::Method { has_parens: false }, + ctx: DotAccessExprCtx { + in_block_expr: expr_ctx.in_block_expr, + in_breakable: expr_ctx.in_breakable, + }, }, func, Some(hir::known::SELF_PARAM), diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs index 1433216d6111..77fd5dd98b8d 100644 --- a/crates/ide-completion/src/completions/expr.rs +++ b/crates/ide-completion/src/completions/expr.rs @@ -5,7 +5,7 @@ use syntax::ast; use crate::{ completions::record::add_default_update, - context::{ExprCtx, PathCompletionCtx, Qualified}, + context::{BreakableKind, PathCompletionCtx, PathExprCtx, Qualified}, CompletionContext, Completions, }; @@ -13,16 +13,16 @@ pub(crate) fn complete_expr_path( acc: &mut Completions, ctx: &CompletionContext<'_>, path_ctx @ PathCompletionCtx { qualified, .. 
}: &PathCompletionCtx, - expr_ctx: &ExprCtx, + expr_ctx: &PathExprCtx, ) { let _p = tracing::span!(tracing::Level::INFO, "complete_expr_path").entered(); if !ctx.qualifier_ctx.none() { return; } - let &ExprCtx { + let &PathExprCtx { in_block_expr, - in_loop_body, + in_breakable, after_if_expr, in_condition, incomplete_let, @@ -290,7 +290,7 @@ pub(crate) fn complete_expr_path( add_keyword("mut", "mut "); } - if in_loop_body { + if in_breakable != BreakableKind::None { if in_block_expr { add_keyword("continue", "continue;"); add_keyword("break", "break;"); diff --git a/crates/ide-completion/src/completions/extern_crate.rs b/crates/ide-completion/src/completions/extern_crate.rs index f9cde44667b7..b67d82c20d84 100644 --- a/crates/ide-completion/src/completions/extern_crate.rs +++ b/crates/ide-completion/src/completions/extern_crate.rs @@ -46,7 +46,7 @@ mod other_mod {} let completion_list = completion_list_no_kw(case); - assert_eq!("md other_crate_a\n".to_string(), completion_list); + assert_eq!("md other_crate_a\n".to_owned(), completion_list); } #[test] @@ -66,6 +66,6 @@ mod other_mod {} let completion_list = completion_list_no_kw(case); - assert_eq!("md other_crate_a\n".to_string(), completion_list); + assert_eq!("md other_crate_a\n".to_owned(), completion_list); } } diff --git a/crates/ide-completion/src/completions/flyimport.rs b/crates/ide-completion/src/completions/flyimport.rs index 0e04ad35d33d..b9f91d34b2c2 100644 --- a/crates/ide-completion/src/completions/flyimport.rs +++ b/crates/ide-completion/src/completions/flyimport.rs @@ -395,6 +395,14 @@ fn import_assets_for_path( potential_import_name: &str, qualifier: Option, ) -> Option { + let _p = tracing::span!( + tracing::Level::INFO, + "import_assets_for_path", + ?potential_import_name, + ?qualifier + ) + .entered(); + let fuzzy_name_length = potential_import_name.len(); let mut assets_for_path = ImportAssets::for_fuzzy_path( ctx.module, diff --git a/crates/ide-completion/src/completions/item_list.rs 
b/crates/ide-completion/src/completions/item_list.rs index addd9dac1a76..0a6a8633a2c1 100644 --- a/crates/ide-completion/src/completions/item_list.rs +++ b/crates/ide-completion/src/completions/item_list.rs @@ -1,7 +1,7 @@ //! Completion of paths and keywords at item list position. use crate::{ - context::{ExprCtx, ItemListKind, PathCompletionCtx, Qualified}, + context::{ItemListKind, PathCompletionCtx, PathExprCtx, Qualified}, CompletionContext, Completions, }; @@ -11,7 +11,7 @@ pub(crate) fn complete_item_list_in_expr( acc: &mut Completions, ctx: &CompletionContext<'_>, path_ctx: &PathCompletionCtx, - expr_ctx: &ExprCtx, + expr_ctx: &PathExprCtx, ) { if !expr_ctx.in_block_expr { return; diff --git a/crates/ide-completion/src/completions/keyword.rs b/crates/ide-completion/src/completions/keyword.rs index b9ab2afca2b5..ed32a5db23e7 100644 --- a/crates/ide-completion/src/completions/keyword.rs +++ b/crates/ide-completion/src/completions/keyword.rs @@ -81,11 +81,13 @@ fn foo(a: A) { a.$0 } sn call function(expr) sn dbg dbg!(expr) sn dbgr dbg!(&expr) + sn deref *expr sn let let sn letm let mut sn match match expr {} sn ref &expr sn refm &mut expr + sn return return expr sn unsafe unsafe {} "#]], ); @@ -106,11 +108,13 @@ fn foo() { sn call function(expr) sn dbg dbg!(expr) sn dbgr dbg!(&expr) + sn deref *expr sn let let sn letm let mut sn match match expr {} sn ref &expr sn refm &mut expr + sn return return expr sn unsafe unsafe {} "#]], ); @@ -133,11 +137,13 @@ fn foo(a: A) { a.$0 } sn call function(expr) sn dbg dbg!(expr) sn dbgr dbg!(&expr) + sn deref *expr sn let let sn letm let mut sn match match expr {} sn ref &expr sn refm &mut expr + sn return return expr sn unsafe unsafe {} "#]], ); diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs index af83d4104f76..72c0885e92fa 100644 --- a/crates/ide-completion/src/completions/postfix.rs +++ b/crates/ide-completion/src/completions/postfix.rs @@ -2,6 +2,7 @@ mod 
format_like; +use hir::ItemInNs; use ide_db::{ documentation::{Documentation, HasDocs}, imports::insert_use::ImportScope, @@ -17,7 +18,7 @@ use text_edit::TextEdit; use crate::{ completions::postfix::format_like::add_format_like_completions, - context::{CompletionContext, DotAccess, DotAccessKind}, + context::{BreakableKind, CompletionContext, DotAccess, DotAccessKind}, item::{Builder, CompletionRelevancePostfixMatch}, CompletionItem, CompletionItemKind, CompletionRelevance, Completions, SnippetScope, }; @@ -44,6 +45,7 @@ pub(crate) fn complete_postfix( ), _ => return, }; + let expr_ctx = &dot_access.ctx; let receiver_text = get_receiver_text(dot_receiver, receiver_is_ambiguous_float_literal); @@ -59,16 +61,22 @@ pub(crate) fn complete_postfix( if let Some(drop_trait) = ctx.famous_defs().core_ops_Drop() { if receiver_ty.impls_trait(ctx.db, drop_trait, &[]) { - if let &[hir::AssocItem::Function(drop_fn)] = &*drop_trait.items(ctx.db) { - cov_mark::hit!(postfix_drop_completion); - // FIXME: check that `drop` is in scope, use fully qualified path if it isn't/if shadowed - let mut item = postfix_snippet( - "drop", - "fn drop(&mut self)", - &format!("drop($0{receiver_text})"), - ); - item.set_documentation(drop_fn.docs(ctx.db)); - item.add_to(acc, ctx.db); + if let Some(drop_fn) = ctx.famous_defs().core_mem_drop() { + if let Some(path) = ctx.module.find_use_path( + ctx.db, + ItemInNs::Values(drop_fn.into()), + ctx.config.prefer_no_std, + ctx.config.prefer_prelude, + ) { + cov_mark::hit!(postfix_drop_completion); + let mut item = postfix_snippet( + "drop", + "fn drop(&mut self)", + &format!("{path}($0{receiver_text})", path = path.display(ctx.db)), + ); + item.set_documentation(drop_fn.docs(ctx.db)); + item.add_to(acc, ctx.db); + } } } } @@ -140,6 +148,7 @@ pub(crate) fn complete_postfix( postfix_snippet("ref", "&expr", &format!("&{receiver_text}")).add_to(acc, ctx.db); postfix_snippet("refm", "&mut expr", &format!("&mut {receiver_text}")).add_to(acc, ctx.db); + 
postfix_snippet("deref", "*expr", &format!("*{receiver_text}")).add_to(acc, ctx.db); let mut unsafe_should_be_wrapped = true; if dot_receiver.syntax().kind() == BLOCK_EXPR { @@ -224,6 +233,28 @@ pub(crate) fn complete_postfix( add_format_like_completions(acc, ctx, &dot_receiver, cap, &literal_text); } } + + postfix_snippet( + "return", + "return expr", + &format!( + "return {receiver_text}{semi}", + semi = if expr_ctx.in_block_expr { ";" } else { "" } + ), + ) + .add_to(acc, ctx.db); + + if let BreakableKind::Block | BreakableKind::Loop = expr_ctx.in_breakable { + postfix_snippet( + "break", + "break expr", + &format!( + "break {receiver_text}{semi}", + semi = if expr_ctx.in_block_expr { ";" } else { "" } + ), + ) + .add_to(acc, ctx.db); + } } fn get_receiver_text(receiver: &ast::Expr, receiver_is_ambiguous_float_literal: bool) -> String { @@ -295,7 +326,7 @@ fn build_postfix_snippet_builder<'ctx>( delete_range: TextRange, ) -> impl Fn(&str, &str, &str) -> Builder + 'ctx { move |label, detail, snippet| { - let edit = TextEdit::replace(delete_range, snippet.to_string()); + let edit = TextEdit::replace(delete_range, snippet.to_owned()); let mut item = CompletionItem::new(CompletionItemKind::Snippet, ctx.source_range(), label); item.detail(detail).snippet_edit(cap, edit); @@ -368,6 +399,7 @@ fn main() { sn call function(expr) sn dbg dbg!(expr) sn dbgr dbg!(&expr) + sn deref *expr sn if if expr {} sn let let sn letm let mut @@ -375,6 +407,7 @@ fn main() { sn not !expr sn ref &expr sn refm &mut expr + sn return return expr sn unsafe unsafe {} sn while while expr {} "#]], @@ -399,11 +432,13 @@ fn main() { sn call function(expr) sn dbg dbg!(expr) sn dbgr dbg!(&expr) + sn deref *expr sn if if expr {} sn match match expr {} sn not !expr sn ref &expr sn refm &mut expr + sn return return expr sn unsafe unsafe {} sn while while expr {} "#]], @@ -424,11 +459,13 @@ fn main() { sn call function(expr) sn dbg dbg!(expr) sn dbgr dbg!(&expr) + sn deref *expr sn let let sn letm let 
mut sn match match expr {} sn ref &expr sn refm &mut expr + sn return return expr sn unsafe unsafe {} "#]], ) @@ -448,6 +485,7 @@ fn main() { sn call function(expr) sn dbg dbg!(expr) sn dbgr dbg!(&expr) + sn deref *expr sn if if expr {} sn let let sn letm let mut @@ -455,6 +493,7 @@ fn main() { sn not !expr sn ref &expr sn refm &mut expr + sn return return expr sn unsafe unsafe {} sn while while expr {} "#]], diff --git a/crates/ide-completion/src/completions/record.rs b/crates/ide-completion/src/completions/record.rs index e53d1cc6322e..1dcf41f8dd23 100644 --- a/crates/ide-completion/src/completions/record.rs +++ b/crates/ide-completion/src/completions/record.rs @@ -6,7 +6,7 @@ use syntax::{ }; use crate::{ - context::{DotAccess, DotAccessKind, PatternContext}, + context::{DotAccess, DotAccessExprCtx, DotAccessKind, PatternContext}, CompletionContext, CompletionItem, CompletionItemKind, CompletionRelevance, CompletionRelevancePostfixMatch, Completions, }; @@ -118,12 +118,17 @@ fn complete_fields( missing_fields: Vec<(hir::Field, hir::Type)>, ) { for (field, ty) in missing_fields { + // This should call something else, we shouldn't be synthesizing a DotAccess here acc.add_field( ctx, &DotAccess { receiver: None, receiver_ty: None, kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal: false }, + ctx: DotAccessExprCtx { + in_block_expr: false, + in_breakable: crate::context::BreakableKind::None, + }, }, None, field, diff --git a/crates/ide-completion/src/completions/snippet.rs b/crates/ide-completion/src/completions/snippet.rs index a01919220503..e831113350ac 100644 --- a/crates/ide-completion/src/completions/snippet.rs +++ b/crates/ide-completion/src/completions/snippet.rs @@ -3,7 +3,7 @@ use ide_db::{documentation::Documentation, imports::insert_use::ImportScope, SnippetCap}; use crate::{ - context::{ExprCtx, ItemListKind, PathCompletionCtx, Qualified}, + context::{ItemListKind, PathCompletionCtx, PathExprCtx, Qualified}, item::Builder, 
CompletionContext, CompletionItem, CompletionItemKind, Completions, SnippetScope, }; @@ -12,7 +12,7 @@ pub(crate) fn complete_expr_snippet( acc: &mut Completions, ctx: &CompletionContext<'_>, path_ctx: &PathCompletionCtx, - &ExprCtx { in_block_expr, .. }: &ExprCtx, + &PathExprCtx { in_block_expr, .. }: &PathExprCtx, ) { if !matches!(path_ctx.qualified, Qualified::No) { return; diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs index 2c0370c58f70..2a0004f60b82 100644 --- a/crates/ide-completion/src/context.rs +++ b/crates/ide-completion/src/context.rs @@ -45,13 +45,13 @@ pub(crate) enum Visible { /// Existing qualifiers for the thing we are currently completing. #[derive(Debug, Default)] -pub(super) struct QualifierCtx { - pub(super) unsafe_tok: Option, - pub(super) vis_node: Option, +pub(crate) struct QualifierCtx { + pub(crate) unsafe_tok: Option, + pub(crate) vis_node: Option, } impl QualifierCtx { - pub(super) fn none(&self) -> bool { + pub(crate) fn none(&self) -> bool { self.unsafe_tok.is_none() && self.vis_node.is_none() } } @@ -60,27 +60,27 @@ impl QualifierCtx { #[derive(Debug)] pub(crate) struct PathCompletionCtx { /// If this is a call with () already there (or {} in case of record patterns) - pub(super) has_call_parens: bool, + pub(crate) has_call_parens: bool, /// If this has a macro call bang ! - pub(super) has_macro_bang: bool, + pub(crate) has_macro_bang: bool, /// The qualifier of the current path. - pub(super) qualified: Qualified, + pub(crate) qualified: Qualified, /// The parent of the path we are completing. - pub(super) parent: Option, + pub(crate) parent: Option, #[allow(dead_code)] /// The path of which we are completing the segment - pub(super) path: ast::Path, + pub(crate) path: ast::Path, /// The path of which we are completing the segment in the original file pub(crate) original_path: Option, - pub(super) kind: PathKind, + pub(crate) kind: PathKind, /// Whether the path segment has type args or not. 
- pub(super) has_type_args: bool, + pub(crate) has_type_args: bool, /// Whether the qualifier comes from a use tree parent or not pub(crate) use_tree_parent: bool, } impl PathCompletionCtx { - pub(super) fn is_trivial_path(&self) -> bool { + pub(crate) fn is_trivial_path(&self) -> bool { matches!( self, PathCompletionCtx { @@ -97,9 +97,9 @@ impl PathCompletionCtx { /// The kind of path we are completing right now. #[derive(Debug, PartialEq, Eq)] -pub(super) enum PathKind { +pub(crate) enum PathKind { Expr { - expr_ctx: ExprCtx, + expr_ctx: PathExprCtx, }, Type { location: TypeLocation, @@ -132,9 +132,9 @@ pub(crate) struct AttrCtx { } #[derive(Debug, PartialEq, Eq)] -pub(crate) struct ExprCtx { +pub(crate) struct PathExprCtx { pub(crate) in_block_expr: bool, - pub(crate) in_loop_body: bool, + pub(crate) in_breakable: BreakableKind, pub(crate) after_if_expr: bool, /// Whether this expression is the direct condition of an if or while expression pub(crate) in_condition: bool, @@ -221,7 +221,7 @@ pub(crate) enum TypeAscriptionTarget { /// The kind of item list a [`PathKind::Item`] belongs to. #[derive(Debug, PartialEq, Eq)] -pub(super) enum ItemListKind { +pub(crate) enum ItemListKind { SourceFile, Module, Impl, @@ -231,7 +231,7 @@ pub(super) enum ItemListKind { } #[derive(Debug)] -pub(super) enum Qualified { +pub(crate) enum Qualified { No, With { path: ast::Path, @@ -259,37 +259,37 @@ pub(super) enum Qualified { /// The state of the pattern we are completing. 
#[derive(Debug, Clone, PartialEq, Eq)] -pub(super) struct PatternContext { - pub(super) refutability: PatternRefutability, - pub(super) param_ctx: Option, - pub(super) has_type_ascription: bool, - pub(super) parent_pat: Option, - pub(super) ref_token: Option, - pub(super) mut_token: Option, +pub(crate) struct PatternContext { + pub(crate) refutability: PatternRefutability, + pub(crate) param_ctx: Option, + pub(crate) has_type_ascription: bool, + pub(crate) parent_pat: Option, + pub(crate) ref_token: Option, + pub(crate) mut_token: Option, /// The record pattern this name or ref is a field of - pub(super) record_pat: Option, - pub(super) impl_: Option, + pub(crate) record_pat: Option, + pub(crate) impl_: Option, /// List of missing variants in a match expr - pub(super) missing_variants: Vec, + pub(crate) missing_variants: Vec, } #[derive(Debug, Clone, PartialEq, Eq)] -pub(super) struct ParamContext { - pub(super) param_list: ast::ParamList, - pub(super) param: ast::Param, - pub(super) kind: ParamKind, +pub(crate) struct ParamContext { + pub(crate) param_list: ast::ParamList, + pub(crate) param: ast::Param, + pub(crate) kind: ParamKind, } /// The state of the lifetime we are completing. #[derive(Debug)] -pub(super) struct LifetimeContext { - pub(super) lifetime: Option, - pub(super) kind: LifetimeKind, +pub(crate) struct LifetimeContext { + pub(crate) lifetime: Option, + pub(crate) kind: LifetimeKind, } /// The kind of lifetime we are completing. #[derive(Debug)] -pub(super) enum LifetimeKind { +pub(crate) enum LifetimeKind { LifetimeParam { is_decl: bool, param: ast::LifetimeParam }, Lifetime, LabelRef, @@ -298,16 +298,16 @@ pub(super) enum LifetimeKind { /// The state of the name we are completing. #[derive(Debug)] -pub(super) struct NameContext { +pub(crate) struct NameContext { #[allow(dead_code)] - pub(super) name: Option, - pub(super) kind: NameKind, + pub(crate) name: Option, + pub(crate) kind: NameKind, } /// The kind of the name we are completing. 
#[derive(Debug)] #[allow(dead_code)] -pub(super) enum NameKind { +pub(crate) enum NameKind { Const, ConstParam, Enum, @@ -331,15 +331,15 @@ pub(super) enum NameKind { /// The state of the NameRef we are completing. #[derive(Debug)] -pub(super) struct NameRefContext { +pub(crate) struct NameRefContext { /// NameRef syntax in the original file - pub(super) nameref: Option, - pub(super) kind: NameRefKind, + pub(crate) nameref: Option, + pub(crate) kind: NameRefKind, } /// The kind of the NameRef we are completing. #[derive(Debug)] -pub(super) enum NameRefKind { +pub(crate) enum NameRefKind { Path(PathCompletionCtx), DotAccess(DotAccess), /// Position where we are only interested in keyword completions @@ -355,7 +355,7 @@ pub(super) enum NameRefKind { /// The identifier we are currently completing. #[derive(Debug)] -pub(super) enum CompletionAnalysis { +pub(crate) enum CompletionAnalysis { Name(NameContext), NameRef(NameRefContext), Lifetime(LifetimeContext), @@ -376,14 +376,15 @@ pub(super) enum CompletionAnalysis { /// Information about the field or method access we are completing. 
#[derive(Debug)] -pub(super) struct DotAccess { - pub(super) receiver: Option, - pub(super) receiver_ty: Option, - pub(super) kind: DotAccessKind, +pub(crate) struct DotAccess { + pub(crate) receiver: Option, + pub(crate) receiver_ty: Option, + pub(crate) kind: DotAccessKind, + pub(crate) ctx: DotAccessExprCtx, } #[derive(Debug)] -pub(super) enum DotAccessKind { +pub(crate) enum DotAccessKind { Field { /// True if the receiver is an integer and there is no ident in the original file after it yet /// like `0.$0` @@ -394,6 +395,21 @@ pub(super) enum DotAccessKind { }, } +#[derive(Debug, PartialEq, Eq)] +pub(crate) struct DotAccessExprCtx { + pub(crate) in_block_expr: bool, + pub(crate) in_breakable: BreakableKind, +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub(crate) enum BreakableKind { + None, + Loop, + For, + While, + Block, +} + #[derive(Clone, Debug, PartialEq, Eq)] pub(crate) enum ParamKind { Function(ast::Fn), @@ -404,39 +420,39 @@ pub(crate) enum ParamKind { /// exactly is the cursor, syntax-wise. #[derive(Debug)] pub(crate) struct CompletionContext<'a> { - pub(super) sema: Semantics<'a, RootDatabase>, - pub(super) scope: SemanticsScope<'a>, - pub(super) db: &'a RootDatabase, - pub(super) config: &'a CompletionConfig, - pub(super) position: FilePosition, + pub(crate) sema: Semantics<'a, RootDatabase>, + pub(crate) scope: SemanticsScope<'a>, + pub(crate) db: &'a RootDatabase, + pub(crate) config: &'a CompletionConfig, + pub(crate) position: FilePosition, /// The token before the cursor, in the original file. - pub(super) original_token: SyntaxToken, + pub(crate) original_token: SyntaxToken, /// The token before the cursor, in the macro-expanded file. - pub(super) token: SyntaxToken, + pub(crate) token: SyntaxToken, /// The crate of the current file. - pub(super) krate: hir::Crate, + pub(crate) krate: hir::Crate, /// The module of the `scope`. - pub(super) module: hir::Module, + pub(crate) module: hir::Module, /// Whether nightly toolchain is used. 
Cached since this is looked up a lot. is_nightly: bool, /// The expected name of what we are completing. /// This is usually the parameter name of the function argument we are completing. - pub(super) expected_name: Option, + pub(crate) expected_name: Option, /// The expected type of what we are completing. - pub(super) expected_type: Option, + pub(crate) expected_type: Option, - pub(super) qualifier_ctx: QualifierCtx, + pub(crate) qualifier_ctx: QualifierCtx, - pub(super) locals: FxHashMap, + pub(crate) locals: FxHashMap, /// The module depth of the current module of the cursor position. /// - crate-root /// - mod foo /// - mod bar /// Here depth will be 2 - pub(super) depth_from_crate_root: usize, + pub(crate) depth_from_crate_root: usize, } impl CompletionContext<'_> { @@ -634,7 +650,7 @@ impl CompletionContext<'_> { // CompletionContext construction impl<'a> CompletionContext<'a> { - pub(super) fn new( + pub(crate) fn new( db: &'a RootDatabase, position @ FilePosition { file_id, offset }: FilePosition, config: &'a CompletionConfig, @@ -649,7 +665,7 @@ impl<'a> CompletionContext<'a> { // actual completion. 
let file_with_fake_ident = { let parse = db.parse(file_id); - let edit = Indel::insert(offset, COMPLETION_MARKER.to_string()); + let edit = Indel::insert(offset, COMPLETION_MARKER.to_owned()); parse.reparse(&edit).tree() }; diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs index c06b64df1c52..92af68897783 100644 --- a/crates/ide-completion/src/context/analysis.rs +++ b/crates/ide-completion/src/context/analysis.rs @@ -11,10 +11,11 @@ use syntax::{ }; use crate::context::{ - AttrCtx, CompletionAnalysis, DotAccess, DotAccessKind, ExprCtx, ItemListKind, LifetimeContext, - LifetimeKind, NameContext, NameKind, NameRefContext, NameRefKind, ParamContext, ParamKind, - PathCompletionCtx, PathKind, PatternContext, PatternRefutability, Qualified, QualifierCtx, - TypeAscriptionTarget, TypeLocation, COMPLETION_MARKER, + AttrCtx, BreakableKind, CompletionAnalysis, DotAccess, DotAccessExprCtx, DotAccessKind, + ItemListKind, LifetimeContext, LifetimeKind, NameContext, NameKind, NameRefContext, + NameRefKind, ParamContext, ParamKind, PathCompletionCtx, PathExprCtx, PathKind, PatternContext, + PatternRefutability, Qualified, QualifierCtx, TypeAscriptionTarget, TypeLocation, + COMPLETION_MARKER, }; struct ExpansionResult { @@ -623,7 +624,8 @@ fn classify_name_ref( let kind = NameRefKind::DotAccess(DotAccess { receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)), kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal }, - receiver + receiver, + ctx: DotAccessExprCtx { in_block_expr: is_in_block(field.syntax()), in_breakable: is_in_breakable(field.syntax()) } }); return Some(make_res(kind)); }, @@ -636,7 +638,8 @@ fn classify_name_ref( let kind = NameRefKind::DotAccess(DotAccess { receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)), kind: DotAccessKind::Method { has_parens: method.arg_list().map_or(false, |it| it.l_paren_token().is_some()) }, - receiver + receiver, + ctx: DotAccessExprCtx 
{ in_block_expr: is_in_block(method.syntax()), in_breakable: is_in_breakable(method.syntax()) } }); return Some(make_res(kind)); }, @@ -659,13 +662,6 @@ fn classify_name_ref( use_tree_parent: false, }; - let is_in_block = |it: &SyntaxNode| { - it.parent() - .map(|node| { - ast::ExprStmt::can_cast(node.kind()) || ast::StmtList::can_cast(node.kind()) - }) - .unwrap_or(false) - }; let func_update_record = |syn: &SyntaxNode| { if let Some(record_expr) = syn.ancestors().nth(2).and_then(ast::RecordExpr::cast) { find_node_in_file_compensated(sema, original_file, &record_expr) @@ -932,7 +928,7 @@ fn classify_name_ref( let make_path_kind_expr = |expr: ast::Expr| { let it = expr.syntax(); let in_block_expr = is_in_block(it); - let in_loop_body = is_in_loop_body(it); + let in_loop_body = is_in_breakable(it); let after_if_expr = after_if_expr(it.clone()); let ref_expr_parent = path.as_single_name_ref().and_then(|_| it.parent()).and_then(ast::RefExpr::cast); @@ -998,9 +994,9 @@ fn classify_name_ref( }; PathKind::Expr { - expr_ctx: ExprCtx { + expr_ctx: PathExprCtx { in_block_expr, - in_loop_body, + in_breakable: in_loop_body, after_if_expr, in_condition, ref_expr_parent, @@ -1202,7 +1198,7 @@ fn classify_name_ref( if path_ctx.is_trivial_path() { // fetch the full expression that may have qualifiers attached to it let top_node = match path_ctx.kind { - PathKind::Expr { expr_ctx: ExprCtx { in_block_expr: true, .. } } => { + PathKind::Expr { expr_ctx: PathExprCtx { in_block_expr: true, .. 
} } => { parent.ancestors().find(|it| ast::PathExpr::can_cast(it.kind())).and_then(|p| { let parent = p.parent()?; if ast::StmtList::can_cast(parent.kind()) { @@ -1467,21 +1463,30 @@ fn is_in_token_of_for_loop(path: &ast::Path) -> bool { .unwrap_or(false) } -fn is_in_loop_body(node: &SyntaxNode) -> bool { +fn is_in_breakable(node: &SyntaxNode) -> BreakableKind { node.ancestors() .take_while(|it| it.kind() != SyntaxKind::FN && it.kind() != SyntaxKind::CLOSURE_EXPR) .find_map(|it| { - let loop_body = match_ast! { + let (breakable, loop_body) = match_ast! { match it { - ast::ForExpr(it) => it.loop_body(), - ast::WhileExpr(it) => it.loop_body(), - ast::LoopExpr(it) => it.loop_body(), - _ => None, + ast::ForExpr(it) => (BreakableKind::For, it.loop_body()), + ast::WhileExpr(it) => (BreakableKind::While, it.loop_body()), + ast::LoopExpr(it) => (BreakableKind::Loop, it.loop_body()), + ast::BlockExpr(it) => return it.label().map(|_| BreakableKind::Block), + _ => return None, } }; - loop_body.filter(|it| it.syntax().text_range().contains_range(node.text_range())) + loop_body + .filter(|it| it.syntax().text_range().contains_range(node.text_range())) + .map(|_| breakable) }) - .is_some() + .unwrap_or(BreakableKind::None) +} + +fn is_in_block(node: &SyntaxNode) -> bool { + node.parent() + .map(|node| ast::ExprStmt::can_cast(node.kind()) || ast::StmtList::can_cast(node.kind())) + .unwrap_or(false) } fn previous_non_trivia_token(e: impl Into) -> Option { diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs index bcf169f46530..8552a20392ab 100644 --- a/crates/ide-completion/src/item.rs +++ b/crates/ide-completion/src/item.rs @@ -553,7 +553,7 @@ impl Builder { self.detail = detail.map(Into::into); if let Some(detail) = &self.detail { if never!(detail.contains('\n'), "multiline detail:\n{}", detail) { - self.detail = Some(detail.split('\n').next().unwrap().to_string()); + self.detail = Some(detail.split('\n').next().unwrap().to_owned()); } } self diff 
--git a/crates/ide-completion/src/lib.rs b/crates/ide-completion/src/lib.rs index 733523d36942..912f2fba2b3d 100644 --- a/crates/ide-completion/src/lib.rs +++ b/crates/ide-completion/src/lib.rs @@ -64,6 +64,7 @@ pub use crate::{ // - `expr.ref` -> `&expr` // - `expr.refm` -> `&mut expr` // - `expr.let` -> `let $0 = expr;` +// - `expr.lete` -> `let $1 = expr else { $0 };` // - `expr.letm` -> `let mut $0 = expr;` // - `expr.not` -> `!expr` // - `expr.dbg` -> `dbg!(expr)` diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index 4d49d2f4987f..2ed080a83479 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -112,7 +112,7 @@ impl<'a> RenderContext<'a> { }; is_assoc_deprecated || assoc - .containing_trait_or_trait_impl(db) + .container_or_implemented_trait(db) .map(|trait_| self.is_deprecated(trait_)) .unwrap_or(false) } @@ -167,14 +167,14 @@ pub(crate) fn render_field( if !expected_fn_type { if let Some(receiver) = &dot_access.receiver { if let Some(receiver) = ctx.completion.sema.original_ast_node(receiver.clone()) { - builder.insert(receiver.syntax().text_range().start(), "(".to_string()); - builder.insert(ctx.source_range().end(), ")".to_string()); + builder.insert(receiver.syntax().text_range().start(), "(".to_owned()); + builder.insert(ctx.source_range().end(), ")".to_owned()); let is_parens_needed = !matches!(dot_access.kind, DotAccessKind::Method { has_parens: true }); if is_parens_needed { - builder.insert(ctx.source_range().end(), "()".to_string()); + builder.insert(ctx.source_range().end(), "()".to_owned()); } } } @@ -2199,12 +2199,14 @@ fn main() { sn while [] sn ref [] sn refm [] + sn deref [] sn unsafe [] sn match [] sn box [] sn dbg [] sn dbgr [] sn call [] + sn return [] "#]], ); } @@ -2227,6 +2229,7 @@ fn main() { me f() [] sn ref [] sn refm [] + sn deref [] sn unsafe [] sn match [] sn box [] @@ -2235,6 +2238,7 @@ fn main() { sn call [] sn let [] sn letm [] + sn return [] "#]], 
); } diff --git a/crates/ide-completion/src/render/const_.rs b/crates/ide-completion/src/render/const_.rs index a2bfac994ff0..0d24882156d3 100644 --- a/crates/ide-completion/src/render/const_.rs +++ b/crates/ide-completion/src/render/const_.rs @@ -23,7 +23,7 @@ fn render(ctx: RenderContext<'_>, const_: hir::Const) -> Option .set_relevance(ctx.completion_relevance()); if let Some(actm) = const_.as_assoc_item(db) { - if let Some(trt) = actm.containing_trait_or_trait_impl(db) { + if let Some(trt) = actm.container_or_implemented_trait(db) { item.trait_name(trt.name(db).to_smol_str()); } } diff --git a/crates/ide-completion/src/render/function.rs b/crates/ide-completion/src/render/function.rs index 4ae7ea861c75..27186a2b7ffb 100644 --- a/crates/ide-completion/src/render/function.rs +++ b/crates/ide-completion/src/render/function.rs @@ -75,7 +75,7 @@ fn render( let ret_type = func.ret_type(db); let assoc_item = func.as_assoc_item(db); - let trait_ = assoc_item.and_then(|trait_| trait_.containing_trait_or_trait_impl(db)); + let trait_ = assoc_item.and_then(|trait_| trait_.container_or_implemented_trait(db)); let is_op_method = trait_.map_or(false, |trait_| completion.is_ops_trait(trait_)); let is_item_from_notable_trait = @@ -145,7 +145,7 @@ fn render( } None => { if let Some(actm) = assoc_item { - if let Some(trt) = actm.containing_trait_or_trait_impl(db) { + if let Some(trt) = actm.container_or_implemented_trait(db) { item.trait_name(trt.name(db).to_smol_str()); } } @@ -184,12 +184,12 @@ pub(super) fn add_call_parens<'b>( } None => { let name = match param.ty().as_adt() { - None => "_".to_string(), + None => "_".to_owned(), Some(adt) => adt .name(ctx.db) .as_text() .map(|s| to_lower_snake_case(s.as_str())) - .unwrap_or_else(|| "_".to_string()), + .unwrap_or_else(|| "_".to_owned()), }; f(&format_args!("${{{}:{name}}}", index + offset)) } diff --git a/crates/ide-completion/src/render/pattern.rs b/crates/ide-completion/src/render/pattern.rs index a5f851566cb0..c07966f7a7a8 
100644 --- a/crates/ide-completion/src/render/pattern.rs +++ b/crates/ide-completion/src/render/pattern.rs @@ -140,7 +140,7 @@ fn render_pat( StructKind::Record => { render_record_as_pat(ctx.db(), ctx.snippet_cap(), fields, name, fields_omitted) } - StructKind::Unit => name.to_string(), + StructKind::Unit => name.to_owned(), }; let needs_ascription = matches!( diff --git a/crates/ide-completion/src/render/type_alias.rs b/crates/ide-completion/src/render/type_alias.rs index b192309e93f3..8f80793dd72d 100644 --- a/crates/ide-completion/src/render/type_alias.rs +++ b/crates/ide-completion/src/render/type_alias.rs @@ -47,7 +47,7 @@ fn render( .set_relevance(ctx.completion_relevance()); if let Some(actm) = type_alias.as_assoc_item(db) { - if let Some(trt) = actm.containing_trait_or_trait_impl(db) { + if let Some(trt) = actm.container_or_implemented_trait(db) { item.trait_name(trt.name(db).to_smol_str()); } } diff --git a/crates/ide-completion/src/render/variant.rs b/crates/ide-completion/src/render/variant.rs index a9a01a3a30f5..28238de45594 100644 --- a/crates/ide-completion/src/render/variant.rs +++ b/crates/ide-completion/src/render/variant.rs @@ -23,7 +23,7 @@ pub(crate) fn render_record_lit( path: &str, ) -> RenderedLiteral { if snippet_cap.is_none() { - return RenderedLiteral { literal: path.to_string(), detail: path.to_string() }; + return RenderedLiteral { literal: path.to_owned(), detail: path.to_owned() }; } let completions = fields.iter().enumerate().format_with(", ", |(idx, field), f| { if snippet_cap.is_some() { @@ -52,7 +52,7 @@ pub(crate) fn render_tuple_lit( path: &str, ) -> RenderedLiteral { if snippet_cap.is_none() { - return RenderedLiteral { literal: path.to_string(), detail: path.to_string() }; + return RenderedLiteral { literal: path.to_owned(), detail: path.to_owned() }; } let completions = fields.iter().enumerate().format_with(", ", |(idx, _), f| { if snippet_cap.is_some() { diff --git a/crates/ide-completion/src/tests/expression.rs 
b/crates/ide-completion/src/tests/expression.rs index 758c254a8828..78907a2896c4 100644 --- a/crates/ide-completion/src/tests/expression.rs +++ b/crates/ide-completion/src/tests/expression.rs @@ -362,6 +362,27 @@ fn completes_in_loop_ctx() { sn ppd "#]], ); + check_empty( + r"fn my() { loop { foo.$0 } }", + expect![[r#" + sn box Box::new(expr) + sn break break expr + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn deref *expr + sn if if expr {} + sn let let + sn letm let mut + sn match match expr {} + sn not !expr + sn ref &expr + sn refm &mut expr + sn return return expr + sn unsafe unsafe {} + sn while while expr {} + "#]], + ); } #[test] @@ -1115,9 +1136,11 @@ fn main() { sn call function(expr) sn dbg dbg!(expr) sn dbgr dbg!(&expr) + sn deref *expr sn match match expr {} sn ref &expr sn refm &mut expr + sn return return expr sn unsafe unsafe {} "#]], ); @@ -1139,9 +1162,11 @@ fn main() { sn call function(expr) sn dbg dbg!(expr) sn dbgr dbg!(&expr) + sn deref *expr sn match match expr {} sn ref &expr sn refm &mut expr + sn return return expr sn unsafe unsafe {} "#]], ); @@ -1167,9 +1192,11 @@ fn main() { sn call function(expr) sn dbg dbg!(expr) sn dbgr dbg!(&expr) + sn deref *expr sn match match expr {} sn ref &expr sn refm &mut expr + sn return return expr sn unsafe unsafe {} "#]], ); @@ -1191,9 +1218,11 @@ fn main() { sn call function(expr) sn dbg dbg!(expr) sn dbgr dbg!(&expr) + sn deref *expr sn match match expr {} sn ref &expr sn refm &mut expr + sn return return expr sn unsafe unsafe {} "#]], ); @@ -1215,9 +1244,11 @@ fn main() { sn call function(expr) sn dbg dbg!(expr) sn dbgr dbg!(&expr) + sn deref *expr sn match match expr {} sn ref &expr sn refm &mut expr + sn return return expr sn unsafe unsafe {} "#]], ); @@ -1238,11 +1269,13 @@ fn main() { sn call function(expr) sn dbg dbg!(expr) sn dbgr dbg!(&expr) + sn deref *expr sn if if expr {} sn match match expr {} sn not !expr sn ref &expr sn refm &mut expr + sn return return expr sn 
unsafe unsafe {} sn while while expr {} "#]], diff --git a/crates/ide-completion/src/tests/proc_macros.rs b/crates/ide-completion/src/tests/proc_macros.rs index 2d6234e310c6..613f33309f5d 100644 --- a/crates/ide-completion/src/tests/proc_macros.rs +++ b/crates/ide-completion/src/tests/proc_macros.rs @@ -29,11 +29,13 @@ fn main() { sn call function(expr) sn dbg dbg!(expr) sn dbgr dbg!(&expr) + sn deref *expr sn let let sn letm let mut sn match match expr {} sn ref &expr sn refm &mut expr + sn return return expr sn unsafe unsafe {} "#]], ) @@ -60,11 +62,13 @@ fn main() { sn call function(expr) sn dbg dbg!(expr) sn dbgr dbg!(&expr) + sn deref *expr sn let let sn letm let mut sn match match expr {} sn ref &expr sn refm &mut expr + sn return return expr sn unsafe unsafe {} "#]], ) @@ -93,11 +97,13 @@ fn main() {} sn call function(expr) sn dbg dbg!(expr) sn dbgr dbg!(&expr) + sn deref *expr sn let let sn letm let mut sn match match expr {} sn ref &expr sn refm &mut expr + sn return return expr sn unsafe unsafe {} "#]], ) @@ -126,11 +132,13 @@ fn main() {} sn call function(expr) sn dbg dbg!(expr) sn dbgr dbg!(&expr) + sn deref *expr sn let let sn letm let mut sn match match expr {} sn ref &expr sn refm &mut expr + sn return return expr sn unsafe unsafe {} "#]], ) diff --git a/crates/ide-completion/src/tests/special.rs b/crates/ide-completion/src/tests/special.rs index f96fb71f2893..a87d16c789fa 100644 --- a/crates/ide-completion/src/tests/special.rs +++ b/crates/ide-completion/src/tests/special.rs @@ -1157,11 +1157,13 @@ fn here_we_go() { sn call function(expr) sn dbg dbg!(expr) sn dbgr dbg!(&expr) + sn deref *expr sn let let sn letm let mut sn match match expr {} sn ref &expr sn refm &mut expr + sn return return expr sn unsafe unsafe {} "#]], ); diff --git a/crates/ide-db/src/defs.rs b/crates/ide-db/src/defs.rs index 81f2f87d9623..d95d94ec72e2 100644 --- a/crates/ide-db/src/defs.rs +++ b/crates/ide-db/src/defs.rs @@ -206,7 +206,7 @@ impl Definition { // docs are missing, 
for assoc items of trait impls try to fall back to the docs of the // original item of the trait let assoc = self.as_assoc_item(db)?; - let trait_ = assoc.containing_trait_impl(db)?; + let trait_ = assoc.implemented_trait(db)?; let name = Some(assoc.name(db)?); let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?; item.docs(db) diff --git a/crates/ide-db/src/famous_defs.rs b/crates/ide-db/src/famous_defs.rs index 722517a7677b..4edfa37b3290 100644 --- a/crates/ide-db/src/famous_defs.rs +++ b/crates/ide-db/src/famous_defs.rs @@ -1,7 +1,7 @@ //! See [`FamousDefs`]. use base_db::{CrateOrigin, LangCrateOrigin, SourceDatabase}; -use hir::{Crate, Enum, Macro, Module, ScopeDef, Semantics, Trait}; +use hir::{Crate, Enum, Function, Macro, Module, ScopeDef, Semantics, Trait}; use crate::RootDatabase; @@ -110,6 +110,10 @@ impl FamousDefs<'_, '_> { self.find_macro("core:macros:builtin:derive") } + pub fn core_mem_drop(&self) -> Option { + self.find_function("core:mem:drop") + } + pub fn builtin_crates(&self) -> impl Iterator { IntoIterator::into_iter([ self.std(), @@ -149,6 +153,13 @@ impl FamousDefs<'_, '_> { } } + fn find_function(&self, path: &str) -> Option { + match self.find_def(path)? { + hir::ScopeDef::ModuleDef(hir::ModuleDef::Function(it)) => Some(it), + _ => None, + } + } + fn find_lang_crate(&self, origin: LangCrateOrigin) -> Option { let krate = self.1; let db = self.0.db; diff --git a/crates/ide-db/src/generated/lints.rs b/crates/ide-db/src/generated/lints.rs index 677c8fd54c05..2fc079332003 100644 --- a/crates/ide-db/src/generated/lints.rs +++ b/crates/ide-db/src/generated/lints.rs @@ -5044,17 +5044,6 @@ against are compatible with those of the `#[ffi_pure]`. 
[ARM C/C++ compiler]: http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0491c/Cacigdac.html [GCC]: https://gcc.gnu.org/onlinedocs/gcc/Common-Function-Attributes.html#index-pure-function-attribute [IBM ILE C/C++]: https://www.ibm.com/support/knowledgecenter/fr/ssw_ibm_i_71/rzarg/fn_attrib_pure.htm -"##, - }, - Lint { - label: "ffi_returns_twice", - description: r##"# `ffi_returns_twice` - -The tracking issue for this feature is: [#58314] - -[#58314]: https://github.com/rust-lang/rust/issues/58314 - ------------------------- "##, }, Lint { diff --git a/crates/ide-db/src/imports/import_assets.rs b/crates/ide-db/src/imports/import_assets.rs index cb3f01f34582..a71d8e9002d4 100644 --- a/crates/ide-db/src/imports/import_assets.rs +++ b/crates/ide-db/src/imports/import_assets.rs @@ -529,7 +529,7 @@ fn trait_applicable_items( return None; } - let assoc_item_trait = assoc.containing_trait(db)?; + let assoc_item_trait = assoc.container_trait(db)?; if related_traits.contains(&assoc_item_trait) { return None; } @@ -550,8 +550,7 @@ fn trait_applicable_items( None, |assoc| { if required_assoc_items.contains(&assoc) { - let located_trait = - assoc.containing_trait(db).filter(|&it| scope_filter(it))?; + let located_trait = assoc.container_trait(db).filter(|&it| scope_filter(it))?; let trait_item = ItemInNs::from(ModuleDef::from(located_trait)); let import_path = trait_import_paths .entry(trait_item) @@ -576,8 +575,7 @@ fn trait_applicable_items( |function| { let assoc = function.as_assoc_item(db)?; if required_assoc_items.contains(&assoc) { - let located_trait = - assoc.containing_trait(db).filter(|&it| scope_filter(it))?; + let located_trait = assoc.container_trait(db).filter(|&it| scope_filter(it))?; let trait_item = ItemInNs::from(ModuleDef::from(located_trait)); let import_path = trait_import_paths .entry(trait_item) @@ -605,6 +603,7 @@ fn assoc_to_item(assoc: AssocItem) -> ItemInNs { } } +#[tracing::instrument(skip_all)] fn get_mod_path( db: &RootDatabase, 
item_to_search: ItemInNs, diff --git a/crates/ide-db/src/imports/insert_use.rs b/crates/ide-db/src/imports/insert_use.rs index f29f91eea84f..bd5c464c5574 100644 --- a/crates/ide-db/src/imports/insert_use.rs +++ b/crates/ide-db/src/imports/insert_use.rs @@ -214,7 +214,7 @@ fn insert_use_with_alias_option( }; } - let mut use_tree = make::use_tree(path.clone(), None, alias, false); + let mut use_tree = make::use_tree(path, None, alias, false); if mb == Some(MergeBehavior::One) && use_tree.path().is_some() { use_tree = use_tree.clone_for_update(); use_tree.wrap_in_tree_list(); diff --git a/crates/ide-db/src/syntax_helpers/format_string_exprs.rs b/crates/ide-db/src/syntax_helpers/format_string_exprs.rs index fc230818193d..49594aee9f35 100644 --- a/crates/ide-db/src/syntax_helpers/format_string_exprs.rs +++ b/crates/ide-db/src/syntax_helpers/format_string_exprs.rs @@ -203,7 +203,7 @@ mod tests { use expect_test::{expect, Expect}; fn check(input: &str, expect: &Expect) { - let (output, exprs) = parse_format_exprs(input).unwrap_or(("-".to_string(), vec![])); + let (output, exprs) = parse_format_exprs(input).unwrap_or(("-".to_owned(), vec![])); let outcome_repr = if !exprs.is_empty() { format!("{output}; {}", with_placeholders(exprs).join(", ")) } else { diff --git a/crates/ide-db/src/tests/sourcegen_lints.rs b/crates/ide-db/src/tests/sourcegen_lints.rs index c8cf87d3c205..86ed01c8e748 100644 --- a/crates/ide-db/src/tests/sourcegen_lints.rs +++ b/crates/ide-db/src/tests/sourcegen_lints.rs @@ -52,7 +52,7 @@ pub struct LintGroup { generate_lint_descriptor(sh, &mut contents); contents.push('\n'); - let cargo = std::env::var("CARGO").unwrap_or_else(|_| "cargo".to_string()); + let cargo = std::env::var("CARGO").unwrap_or_else(|_| "cargo".to_owned()); let unstable_book = project_root().join("./target/unstable-book-gen"); cmd!( sh, @@ -241,6 +241,7 @@ fn unescape(s: &str) -> String { s.replace(r#"\""#, "").replace(r#"\n"#, "\n").replace(r#"\r"#, "") } 
+#[allow(clippy::print_stderr)] fn generate_descriptor_clippy(buf: &mut String, path: &Path) { let file_content = std::fs::read_to_string(path).unwrap(); let mut clippy_lints: Vec = Vec::new(); @@ -282,7 +283,7 @@ fn generate_descriptor_clippy(buf: &mut String, path: &Path) { let line = &line[..up_to]; let clippy_lint = clippy_lints.last_mut().expect("clippy lint must already exist"); - clippy_lint.help = unescape(line).trim().to_string(); + clippy_lint.help = unescape(line).trim().to_owned(); } } clippy_lints.sort_by(|lint, lint2| lint.id.cmp(&lint2.id)); diff --git a/crates/ide-db/src/traits.rs b/crates/ide-db/src/traits.rs index bbdfd81d653f..ee7c448bb897 100644 --- a/crates/ide-db/src/traits.rs +++ b/crates/ide-db/src/traits.rs @@ -75,7 +75,7 @@ pub fn get_missing_assoc_items( pub(crate) fn convert_to_def_in_trait(db: &dyn HirDatabase, def: Definition) -> Definition { (|| { let assoc = def.as_assoc_item(db)?; - let trait_ = assoc.containing_trait_impl(db)?; + let trait_ = assoc.implemented_trait(db)?; assoc_item_of_trait(db, assoc, trait_) })() .unwrap_or(def) diff --git a/crates/ide-diagnostics/src/handlers/inactive_code.rs b/crates/ide-diagnostics/src/handlers/inactive_code.rs index 9f754f9c6fca..7db5ea04fbd0 100644 --- a/crates/ide-diagnostics/src/handlers/inactive_code.rs +++ b/crates/ide-diagnostics/src/handlers/inactive_code.rs @@ -16,7 +16,7 @@ pub(crate) fn inactive_code( } let inactive = DnfExpr::new(d.cfg.clone()).why_inactive(&d.opts); - let mut message = "code is inactive due to #[cfg] directives".to_string(); + let mut message = "code is inactive due to #[cfg] directives".to_owned(); if let Some(inactive) = inactive { let inactive_reasons = inactive.to_string(); diff --git a/crates/ide-diagnostics/src/handlers/incoherent_impl.rs b/crates/ide-diagnostics/src/handlers/incoherent_impl.rs index 3b4d400912fc..9f56e1041454 100644 --- a/crates/ide-diagnostics/src/handlers/incoherent_impl.rs +++ b/crates/ide-diagnostics/src/handlers/incoherent_impl.rs @@ 
-9,7 +9,7 @@ pub(crate) fn incoherent_impl(ctx: &DiagnosticsContext<'_>, d: &hir::IncoherentI Diagnostic::new_with_syntax_node_ptr( ctx, DiagnosticCode::RustcHardError("E0210"), - "cannot define inherent `impl` for foreign type".to_string(), + "cannot define inherent `impl` for foreign type".to_owned(), InFile::new(d.file_id, d.impl_.into()), ) } diff --git a/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/crates/ide-diagnostics/src/handlers/incorrect_case.rs index f5a6aa11979d..dd64b93e4548 100644 --- a/crates/ide-diagnostics/src/handlers/incorrect_case.rs +++ b/crates/ide-diagnostics/src/handlers/incorrect_case.rs @@ -52,7 +52,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::IncorrectCase) -> Option bool { false } @@ -403,7 +402,9 @@ impl T for U { trait __BitFlagsBad { const HiImAlsoBad: u8 = 2; + // ^^^^^^^^^^^ 💡 warn: Constant `HiImAlsoBad` should have UPPER_SNAKE_CASE name, e.g. `HI_IM_ALSO_BAD` fn Dirty(&self) -> bool { false } + // ^^^^^💡 warn: Function `Dirty` should have snake_case name, e.g. `dirty` } } } @@ -462,19 +463,59 @@ extern { } #[test] - fn bug_traits_arent_checked() { - // FIXME: Traits and functions in traits aren't currently checked by - // r-a, even though rustc will complain about them. + fn incorrect_trait_and_assoc_item_names() { check_diagnostics( r#" trait BAD_TRAIT { + // ^^^^^^^^^ 💡 warn: Trait `BAD_TRAIT` should have CamelCase name, e.g. `BadTrait` + const bad_const: u8; + // ^^^^^^^^^ 💡 warn: Constant `bad_const` should have UPPER_SNAKE_CASE name, e.g. `BAD_CONST` + type BAD_TYPE; + // ^^^^^^^^ 💡 warn: Type alias `BAD_TYPE` should have CamelCase name, e.g. `BadType` fn BAD_FUNCTION(); + // ^^^^^^^^^^^^ 💡 warn: Function `BAD_FUNCTION` should have snake_case name, e.g. `bad_function` fn BadFunction(); + // ^^^^^^^^^^^ 💡 warn: Function `BadFunction` should have snake_case name, e.g. 
`bad_function` } "#, ); } + #[test] + fn no_diagnostics_for_trait_impl_assoc_items_except_pats_in_body() { + cov_mark::check!(trait_impl_assoc_const_incorrect_case_ignored); + cov_mark::check!(trait_impl_assoc_type_incorrect_case_ignored); + cov_mark::check_count!(trait_impl_assoc_func_name_incorrect_case_ignored, 2); + check_diagnostics_with_disabled( + r#" +trait BAD_TRAIT { + // ^^^^^^^^^ 💡 warn: Trait `BAD_TRAIT` should have CamelCase name, e.g. `BadTrait` + const bad_const: u8; + // ^^^^^^^^^ 💡 warn: Constant `bad_const` should have UPPER_SNAKE_CASE name, e.g. `BAD_CONST` + type BAD_TYPE; + // ^^^^^^^^ 💡 warn: Type alias `BAD_TYPE` should have CamelCase name, e.g. `BadType` + fn BAD_FUNCTION(BAD_PARAM: u8); + // ^^^^^^^^^^^^ 💡 warn: Function `BAD_FUNCTION` should have snake_case name, e.g. `bad_function` + // ^^^^^^^^^ 💡 warn: Parameter `BAD_PARAM` should have snake_case name, e.g. `bad_param` + fn BadFunction(); + // ^^^^^^^^^^^ 💡 warn: Function `BadFunction` should have snake_case name, e.g. `bad_function` +} + +impl BAD_TRAIT for () { + const bad_const: u8 = 0; + type BAD_TYPE = (); + fn BAD_FUNCTION(BAD_PARAM: u8) { + // ^^^^^^^^^ 💡 warn: Parameter `BAD_PARAM` should have snake_case name, e.g. `bad_param` + let BAD_VAR = 0; + // ^^^^^^^ 💡 warn: Variable `BAD_VAR` should have snake_case name, e.g. 
`bad_var` + } + fn BadFunction() {} +} + "#, + std::iter::once("unused_variables".to_owned()), + ); + } + #[test] fn allow_attributes() { check_diagnostics( @@ -519,6 +560,14 @@ pub const some_const: u8 = 10; #[allow(non_upper_case_globals)] pub static SomeStatic: u8 = 10; + +#[allow(non_snake_case, non_camel_case_types, non_upper_case_globals)] +trait BAD_TRAIT { + const bad_const: u8; + type BAD_TYPE; + fn BAD_FUNCTION(BAD_PARAM: u8); + fn BadFunction(); +} "#, ); } @@ -578,6 +627,20 @@ pub const some_const: u8 = 10; #[deny(non_upper_case_globals)] pub static SomeStatic: u8 = 10; //^^^^^^^^^^ 💡 error: Static variable `SomeStatic` should have UPPER_SNAKE_CASE name, e.g. `SOME_STATIC` + +#[deny(non_snake_case, non_camel_case_types, non_upper_case_globals)] +trait BAD_TRAIT { + // ^^^^^^^^^ 💡 error: Trait `BAD_TRAIT` should have CamelCase name, e.g. `BadTrait` + const bad_const: u8; + // ^^^^^^^^^ 💡 error: Constant `bad_const` should have UPPER_SNAKE_CASE name, e.g. `BAD_CONST` + type BAD_TYPE; + // ^^^^^^^^ 💡 error: Type alias `BAD_TYPE` should have CamelCase name, e.g. `BadType` + fn BAD_FUNCTION(BAD_PARAM: u8); + // ^^^^^^^^^^^^ 💡 error: Function `BAD_FUNCTION` should have snake_case name, e.g. `bad_function` + // ^^^^^^^^^ 💡 error: Parameter `BAD_PARAM` should have snake_case name, e.g. `bad_param` + fn BadFunction(); + // ^^^^^^^^^^^ 💡 error: Function `BadFunction` should have snake_case name, e.g. 
`bad_function` +} "#, ); } diff --git a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs index d330973aaaa3..241fddbb9063 100644 --- a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs +++ b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs @@ -42,12 +42,12 @@ impl State { v.push("Deserialize"); } match v.as_slice() { - [] => "".to_string(), + [] => "".to_owned(), [x] => format!("#[derive({x})]\n"), [x, y] => format!("#[derive({x}, {y})]\n"), _ => { never!(); - "".to_string() + "".to_owned() } } } @@ -176,7 +176,7 @@ mod tests { #[test] fn diagnostic_for_simple_case() { let mut config = DiagnosticsConfig::test_sample(); - config.disabled.insert("syntax-error".to_string()); + config.disabled.insert("syntax-error".to_owned()); check_diagnostics_with_config( config, r#" diff --git a/crates/ide-diagnostics/src/handlers/macro_error.rs b/crates/ide-diagnostics/src/handlers/macro_error.rs index fc5c715981f5..e4cb53f3a2f7 100644 --- a/crates/ide-diagnostics/src/handlers/macro_error.rs +++ b/crates/ide-diagnostics/src/handlers/macro_error.rs @@ -99,7 +99,7 @@ pub macro panic { // FIXME: This is a false-positive, the file is actually linked in via // `include!` macro - config.disabled.insert("unlinked-file".to_string()); + config.disabled.insert("unlinked-file".to_owned()); check_diagnostics_with_config( config, @@ -268,8 +268,8 @@ fn f() { #[test] fn include_does_not_break_diagnostics() { let mut config = DiagnosticsConfig::test_sample(); - config.disabled.insert("inactive-code".to_string()); - config.disabled.insert("unlinked-file".to_string()); + config.disabled.insert("inactive-code".to_owned()); + config.disabled.insert("unlinked-file".to_owned()); check_diagnostics_with_config( config, r#" diff --git a/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs b/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs index 66ebf593505d..41c762c85b2c 100644 --- 
a/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs +++ b/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs @@ -199,7 +199,7 @@ fn f() { // future, but we shouldn't emit an argument count diagnostic here check_diagnostics( r#" -trait Foo { fn method(&self, arg: usize) {} } +trait Foo { fn method(&self, _arg: usize) {} } fn f() { let x; diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs index 37ac912f0642..c70f39eb286f 100644 --- a/crates/ide-diagnostics/src/handlers/missing_fields.rs +++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs @@ -59,9 +59,15 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option, d: &hir::MissingFields) -> Option, d: &hir::MissingFields) -> Option, d: &hir::MissingFields) -> Option { let missing_fields = ctx.sema.record_pattern_missing_fields(field_list_parent); @@ -160,11 +161,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option ast::Ty let ty_str = match ty.as_adt() { Some(adt) => adt.name(db).display(db.upcast()).to_string(), None => { - ty.display_source_code(db, module.into(), false).ok().unwrap_or_else(|| "_".to_string()) + ty.display_source_code(db, module.into(), false).ok().unwrap_or_else(|| "_".to_owned()) } }; diff --git a/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/crates/ide-diagnostics/src/handlers/missing_match_arms.rs index cb6d568442d3..17dc679e055b 100644 --- a/crates/ide-diagnostics/src/handlers/missing_match_arms.rs +++ b/crates/ide-diagnostics/src/handlers/missing_match_arms.rs @@ -31,7 +31,7 @@ mod tests { #[test] fn empty_body() { let mut config = DiagnosticsConfig::test_sample(); - config.disabled.insert("syntax-error".to_string()); + config.disabled.insert("syntax-error".to_owned()); check_diagnostics_with_config( config, r#" diff --git a/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/crates/ide-diagnostics/src/handlers/mutability_errors.rs 
index 773a075f8f59..bdb55a9d98a2 100644 --- a/crates/ide-diagnostics/src/handlers/mutability_errors.rs +++ b/crates/ide-diagnostics/src/handlers/mutability_errors.rs @@ -19,7 +19,7 @@ pub(crate) fn need_mut(ctx: &DiagnosticsContext<'_>, d: &hir::NeedMut) -> Diagno for source in d.local.sources(ctx.sema.db) { let Some(ast) = source.name() else { continue }; // FIXME: macros - edit_builder.insert(ast.value.syntax().text_range().start(), "mut ".to_string()); + edit_builder.insert(ast.value.syntax().text_range().start(), "mut ".to_owned()); } let edit = edit_builder.finish(); Some(vec![fix( @@ -86,7 +86,7 @@ pub(super) fn token(parent: &SyntaxNode, kind: SyntaxKind) -> Option X { @@ -448,8 +448,9 @@ fn main(b: bool) { &mut x; } "#, + std::iter::once("remove-unnecessary-else".to_owned()), ); - check_diagnostics( + check_diagnostics_with_disabled( r#" fn main(b: bool) { if b { @@ -462,6 +463,7 @@ fn main(b: bool) { &mut x; } "#, + std::iter::once("remove-unnecessary-else".to_owned()), ); } diff --git a/crates/ide-diagnostics/src/handlers/private_field.rs b/crates/ide-diagnostics/src/handlers/private_field.rs index 3179a632e26b..e91e64c81b0b 100644 --- a/crates/ide-diagnostics/src/handlers/private_field.rs +++ b/crates/ide-diagnostics/src/handlers/private_field.rs @@ -83,6 +83,32 @@ fn main() { }; strukt.field; } +"#, + ); + } + + #[test] + fn block_module_madness2() { + check_diagnostics( + r#" +fn main() { + use crate as ForceParentBlockDefMap; + let strukt = { + use crate as ForceParentBlockDefMap; + { + pub struct Struct { + field: (), + } + { + use crate as ForceParentBlockDefMap; + { + Struct { field: () } + } + } + } + }; + strukt.field; +} "#, ); } diff --git a/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs b/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs new file mode 100644 index 000000000000..a0d5d742d362 --- /dev/null +++ b/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs @@ -0,0 +1,375 @@ +use 
hir::{db::ExpandDatabase, diagnostics::RemoveTrailingReturn}; +use ide_db::{assists::Assist, base_db::FileRange, source_change::SourceChange}; +use syntax::{ast, AstNode}; +use text_edit::TextEdit; + +use crate::{adjusted_display_range, fix, Diagnostic, DiagnosticCode, DiagnosticsContext}; + +// Diagnostic: remove-trailing-return +// +// This diagnostic is triggered when there is a redundant `return` at the end of a function +// or closure. +pub(crate) fn remove_trailing_return( + ctx: &DiagnosticsContext<'_>, + d: &RemoveTrailingReturn, +) -> Diagnostic { + let display_range = adjusted_display_range(ctx, d.return_expr, &|return_expr| { + return_expr + .syntax() + .parent() + .and_then(ast::ExprStmt::cast) + .map(|stmt| stmt.syntax().text_range()) + }); + Diagnostic::new( + DiagnosticCode::Clippy("needless_return"), + "replace return ; with ", + display_range, + ) + .with_fixes(fixes(ctx, d)) +} + +fn fixes(ctx: &DiagnosticsContext<'_>, d: &RemoveTrailingReturn) -> Option> { + let root = ctx.sema.db.parse_or_expand(d.return_expr.file_id); + let return_expr = d.return_expr.value.to_node(&root); + let stmt = return_expr.syntax().parent().and_then(ast::ExprStmt::cast); + + let FileRange { range, file_id } = + ctx.sema.original_range_opt(stmt.as_ref().map_or(return_expr.syntax(), AstNode::syntax))?; + if Some(file_id) != d.return_expr.file_id.file_id() { + return None; + } + + let replacement = + return_expr.expr().map_or_else(String::new, |expr| format!("{}", expr.syntax().text())); + let edit = TextEdit::replace(range, replacement); + let source_change = SourceChange::from_text_edit(file_id, edit); + + Some(vec![fix( + "remove_trailing_return", + "Replace return ; with ", + source_change, + range, + )]) +} + +#[cfg(test)] +mod tests { + use crate::tests::{ + check_diagnostics, check_diagnostics_with_disabled, check_fix, check_fix_with_disabled, + }; + + #[test] + fn remove_trailing_return() { + check_diagnostics( + r#" +fn foo() -> u8 { + return 2; +} //^^^^^^^^^ 💡 
weak: replace return ; with +"#, + ); + } + + #[test] + fn remove_trailing_return_inner_function() { + check_diagnostics( + r#" +fn foo() -> u8 { + fn bar() -> u8 { + return 2; + } //^^^^^^^^^ 💡 weak: replace return ; with + bar() +} +"#, + ); + } + + #[test] + fn remove_trailing_return_closure() { + check_diagnostics( + r#" +fn foo() -> u8 { + let bar = || return 2; + bar() //^^^^^^^^ 💡 weak: replace return ; with +} +"#, + ); + check_diagnostics( + r#" +fn foo() -> u8 { + let bar = || { + return 2; + };//^^^^^^^^^ 💡 weak: replace return ; with + bar() +} +"#, + ); + } + + #[test] + fn remove_trailing_return_unit() { + check_diagnostics( + r#" +fn foo() { + return +} //^^^^^^ 💡 weak: replace return ; with +"#, + ); + } + + #[test] + fn remove_trailing_return_no_semi() { + check_diagnostics( + r#" +fn foo() -> u8 { + return 2 +} //^^^^^^^^ 💡 weak: replace return ; with +"#, + ); + } + + #[test] + fn remove_trailing_return_in_if() { + check_diagnostics_with_disabled( + r#" +fn foo(x: usize) -> u8 { + if x > 0 { + return 1; + //^^^^^^^^^ 💡 weak: replace return ; with + } else { + return 0; + } //^^^^^^^^^ 💡 weak: replace return ; with +} +"#, + std::iter::once("remove-unnecessary-else".to_owned()), + ); + } + + #[test] + fn remove_trailing_return_in_match() { + check_diagnostics( + r#" +fn foo(x: Result) -> u8 { + match x { + Ok(_) => return 1, + //^^^^^^^^ 💡 weak: replace return ; with + Err(_) => return 0, + } //^^^^^^^^ 💡 weak: replace return ; with +} +"#, + ); + } + + #[test] + fn no_diagnostic_if_no_return_keyword() { + check_diagnostics( + r#" +fn foo() -> u8 { + 3 +} +"#, + ); + } + + #[test] + fn no_diagnostic_if_not_last_statement() { + check_diagnostics( + r#" +fn foo() -> u8 { + if true { return 2; } + 3 +} +"#, + ); + } + + #[test] + fn replace_with_expr() { + check_fix( + r#" +fn foo() -> u8 { + return$0 2; +} +"#, + r#" +fn foo() -> u8 { + 2 +} +"#, + ); + } + + #[test] + fn replace_with_unit() { + check_fix( + r#" +fn foo() { + return$0/*ensure tidy 
is happy*/ +} +"#, + r#" +fn foo() { + /*ensure tidy is happy*/ +} +"#, + ); + } + + #[test] + fn replace_with_expr_no_semi() { + check_fix( + r#" +fn foo() -> u8 { + return$0 2 +} +"#, + r#" +fn foo() -> u8 { + 2 +} +"#, + ); + } + + #[test] + fn replace_in_inner_function() { + check_fix( + r#" +fn foo() -> u8 { + fn bar() -> u8 { + return$0 2; + } + bar() +} +"#, + r#" +fn foo() -> u8 { + fn bar() -> u8 { + 2 + } + bar() +} +"#, + ); + } + + #[test] + fn replace_in_closure() { + check_fix( + r#" +fn foo() -> u8 { + let bar = || return$0 2; + bar() +} +"#, + r#" +fn foo() -> u8 { + let bar = || 2; + bar() +} +"#, + ); + check_fix( + r#" +fn foo() -> u8 { + let bar = || { + return$0 2; + }; + bar() +} +"#, + r#" +fn foo() -> u8 { + let bar = || { + 2 + }; + bar() +} +"#, + ); + } + + #[test] + fn replace_in_if() { + check_fix_with_disabled( + r#" +fn foo(x: usize) -> u8 { + if x > 0 { + return$0 1; + } else { + 0 + } +} +"#, + r#" +fn foo(x: usize) -> u8 { + if x > 0 { + 1 + } else { + 0 + } +} +"#, + std::iter::once("remove-unnecessary-else".to_owned()), + ); + check_fix( + r#" +fn foo(x: usize) -> u8 { + if x > 0 { + 1 + } else { + return$0 0; + } +} +"#, + r#" +fn foo(x: usize) -> u8 { + if x > 0 { + 1 + } else { + 0 + } +} +"#, + ); + } + + #[test] + fn replace_in_match() { + check_fix( + r#" +fn foo(x: Result) -> u8 { + match x { + Ok(_) => return$0 1, + Err(_) => 0, + } +} +"#, + r#" +fn foo(x: Result) -> u8 { + match x { + Ok(_) => 1, + Err(_) => 0, + } +} +"#, + ); + check_fix( + r#" +fn foo(x: Result) -> u8 { + match x { + Ok(_) => 1, + Err(_) => return$0 0, + } +} +"#, + r#" +fn foo(x: Result) -> u8 { + match x { + Ok(_) => 1, + Err(_) => 0, + } +} +"#, + ); + } +} diff --git a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs new file mode 100644 index 000000000000..ae8241ec2c69 --- /dev/null +++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs @@ -0,0 +1,390 @@ 
+use hir::{db::ExpandDatabase, diagnostics::RemoveUnnecessaryElse, HirFileIdExt}; +use ide_db::{assists::Assist, source_change::SourceChange}; +use itertools::Itertools; +use syntax::{ + ast::{self, edit::IndentLevel}, + AstNode, SyntaxToken, TextRange, +}; +use text_edit::TextEdit; + +use crate::{ + adjusted_display_range, fix, Diagnostic, DiagnosticCode, DiagnosticsContext, Severity, +}; + +// Diagnostic: remove-unnecessary-else +// +// This diagnostic is triggered when there is an `else` block for an `if` expression whose +// then branch diverges (e.g. ends with a `return`, `continue`, `break` e.t.c). +pub(crate) fn remove_unnecessary_else( + ctx: &DiagnosticsContext<'_>, + d: &RemoveUnnecessaryElse, +) -> Diagnostic { + let display_range = adjusted_display_range(ctx, d.if_expr, &|if_expr| { + if_expr.else_token().as_ref().map(SyntaxToken::text_range) + }); + Diagnostic::new( + DiagnosticCode::Ra("remove-unnecessary-else", Severity::WeakWarning), + "remove unnecessary else block", + display_range, + ) + .with_fixes(fixes(ctx, d)) +} + +fn fixes(ctx: &DiagnosticsContext<'_>, d: &RemoveUnnecessaryElse) -> Option> { + let root = ctx.sema.db.parse_or_expand(d.if_expr.file_id); + let if_expr = d.if_expr.value.to_node(&root); + let if_expr = ctx.sema.original_ast_node(if_expr.clone())?; + + let mut indent = IndentLevel::from_node(if_expr.syntax()); + let has_parent_if_expr = if_expr.syntax().parent().and_then(ast::IfExpr::cast).is_some(); + if has_parent_if_expr { + indent = indent + 1; + } + let else_replacement = match if_expr.else_branch()? 
{ + ast::ElseBranch::Block(ref block) => { + block.statements().map(|stmt| format!("\n{indent}{stmt}")).join("") + } + ast::ElseBranch::IfExpr(ref nested_if_expr) => { + format!("\n{indent}{nested_if_expr}") + } + }; + let (replacement, range) = if has_parent_if_expr { + let base_indent = IndentLevel::from_node(if_expr.syntax()); + let then_indent = base_indent + 1; + let then_child_indent = then_indent + 1; + + let condition = if_expr.condition()?; + let then_stmts = if_expr + .then_branch()? + .statements() + .map(|stmt| format!("\n{then_child_indent}{stmt}")) + .join(""); + let then_replacement = + format!("\n{then_indent}if {condition} {{{then_stmts}\n{then_indent}}}",); + let replacement = format!("{{{then_replacement}{else_replacement}\n{base_indent}}}"); + (replacement, if_expr.syntax().text_range()) + } else { + ( + else_replacement, + TextRange::new( + if_expr.then_branch()?.syntax().text_range().end(), + if_expr.syntax().text_range().end(), + ), + ) + }; + + let edit = TextEdit::replace(range, replacement); + let source_change = + SourceChange::from_text_edit(d.if_expr.file_id.original_file(ctx.sema.db), edit); + + Some(vec![fix( + "remove_unnecessary_else", + "Remove unnecessary else block", + source_change, + range, + )]) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_diagnostics, check_diagnostics_with_disabled, check_fix}; + + fn check_diagnostics_with_needless_return_disabled(ra_fixture: &str) { + check_diagnostics_with_disabled(ra_fixture, std::iter::once("needless_return".to_owned())); + } + + #[test] + fn remove_unnecessary_else_for_return() { + check_diagnostics_with_needless_return_disabled( + r#" +fn test() { + if foo { + return bar; + } else { + //^^^^ 💡 weak: remove unnecessary else block + do_something_else(); + } +} +"#, + ); + check_fix( + r#" +fn test() { + if foo { + return bar; + } else$0 { + do_something_else(); + } +} +"#, + r#" +fn test() { + if foo { + return bar; + } + do_something_else(); +} +"#, + ); + } + + #[test] + 
fn remove_unnecessary_else_for_return2() { + check_diagnostics_with_needless_return_disabled( + r#" +fn test() { + if foo { + return bar; + } else if qux { + //^^^^ 💡 weak: remove unnecessary else block + do_something_else(); + } else { + do_something_else2(); + } +} +"#, + ); + check_fix( + r#" +fn test() { + if foo { + return bar; + } else$0 if qux { + do_something_else(); + } else { + do_something_else2(); + } +} +"#, + r#" +fn test() { + if foo { + return bar; + } + if qux { + do_something_else(); + } else { + do_something_else2(); + } +} +"#, + ); + } + + #[test] + fn remove_unnecessary_else_for_return_in_child_if_expr() { + check_diagnostics_with_needless_return_disabled( + r#" +fn test() { + if foo { + do_something(); + } else if qux { + return bar; + } else { + //^^^^ 💡 weak: remove unnecessary else block + do_something_else(); + } +} +"#, + ); + check_fix( + r#" +fn test() { + if foo { + do_something(); + } else if qux { + return bar; + } else$0 { + do_something_else(); + } +} +"#, + r#" +fn test() { + if foo { + do_something(); + } else { + if qux { + return bar; + } + do_something_else(); + } +} +"#, + ); + } + + #[test] + fn remove_unnecessary_else_for_break() { + check_diagnostics( + r#" +fn test() { + loop { + if foo { + break; + } else { + //^^^^ 💡 weak: remove unnecessary else block + do_something_else(); + } + } +} +"#, + ); + check_fix( + r#" +fn test() { + loop { + if foo { + break; + } else$0 { + do_something_else(); + } + } +} +"#, + r#" +fn test() { + loop { + if foo { + break; + } + do_something_else(); + } +} +"#, + ); + } + + #[test] + fn remove_unnecessary_else_for_continue() { + check_diagnostics( + r#" +fn test() { + loop { + if foo { + continue; + } else { + //^^^^ 💡 weak: remove unnecessary else block + do_something_else(); + } + } +} +"#, + ); + check_fix( + r#" +fn test() { + loop { + if foo { + continue; + } else$0 { + do_something_else(); + } + } +} +"#, + r#" +fn test() { + loop { + if foo { + continue; + } + do_something_else(); 
+ } +} +"#, + ); + } + + #[test] + fn remove_unnecessary_else_for_never() { + check_diagnostics( + r#" +fn test() { + if foo { + never(); + } else { + //^^^^ 💡 weak: remove unnecessary else block + do_something_else(); + } +} + +fn never() -> ! { + loop {} +} +"#, + ); + check_fix( + r#" +fn test() { + if foo { + never(); + } else$0 { + do_something_else(); + } +} + +fn never() -> ! { + loop {} +} +"#, + r#" +fn test() { + if foo { + never(); + } + do_something_else(); +} + +fn never() -> ! { + loop {} +} +"#, + ); + } + + #[test] + fn no_diagnostic_if_no_else_branch() { + check_diagnostics( + r#" +fn test() { + if foo { + return bar; + } + + do_something_else(); +} +"#, + ); + } + + #[test] + fn no_diagnostic_if_no_divergence() { + check_diagnostics( + r#" +fn test() { + if foo { + do_something(); + } else { + do_something_else(); + } +} +"#, + ); + } + + #[test] + fn no_diagnostic_if_no_divergence_in_else_branch() { + check_diagnostics_with_needless_return_disabled( + r#" +fn test() { + if foo { + do_something(); + } else { + return bar; + } +} +"#, + ); + } +} diff --git a/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs b/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs index 72896b891bdc..6d3dcf31ab4d 100644 --- a/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs +++ b/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs @@ -63,8 +63,8 @@ mod tests { #[track_caller] pub(crate) fn check_diagnostics(ra_fixture: &str) { let mut config = DiagnosticsConfig::test_sample(); - config.disabled.insert("inactive-code".to_string()); - config.disabled.insert("E0599".to_string()); + config.disabled.insert("inactive-code".to_owned()); + config.disabled.insert("E0599".to_owned()); check_diagnostics_with_config(config, ra_fixture) } diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs b/crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs index 
d36813381e43..78a04e15424f 100644 --- a/crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs +++ b/crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs @@ -13,7 +13,7 @@ pub(crate) fn trait_impl_orphan( ctx, DiagnosticCode::RustcHardError("E0117"), "only traits defined in the current crate can be implemented for arbitrary types" - .to_string(), + .to_owned(), InFile::new(d.file_id, d.impl_.into()), ) // Not yet checked for false positives diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs index f58fcd1f7e2b..00710ef50760 100644 --- a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs +++ b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs @@ -103,7 +103,7 @@ fn quickfix_for_redundant_assoc_item( Some(vec![Assist { id: AssistId("add assoc item def into trait def", AssistKind::QuickFix), - label: Label::new("Add assoc item def into trait def".to_string()), + label: Label::new("Add assoc item def into trait def".to_owned()), group: None, target: range, source_change: Some(source_change_builder.finish()), diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs index 750189beecb1..e93eea8ce29e 100644 --- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs +++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs @@ -120,7 +120,7 @@ fn add_missing_ok_or_some( let mut builder = TextEdit::builder(); builder.insert(expr.syntax().text_range().start(), format!("{variant_name}(")); - builder.insert(expr.syntax().text_range().end(), ")".to_string()); + builder.insert(expr.syntax().text_range().end(), ")".to_owned()); let source_change = SourceChange::from_text_edit(expr_ptr.file_id.original_file(ctx.sema.db), builder.finish()); let name = format!("Wrap in {variant_name}"); @@ -174,7 +174,7 @@ fn str_ref_to_owned( let expr = expr_ptr.value.to_node(&root); let 
expr_range = expr.syntax().text_range(); - let to_owned = ".to_owned()".to_string(); + let to_owned = ".to_owned()".to_owned(); let edit = TextEdit::insert(expr.syntax().text_range().end(), to_owned); let source_change = @@ -186,7 +186,9 @@ fn str_ref_to_owned( #[cfg(test)] mod tests { - use crate::tests::{check_diagnostics, check_fix, check_no_fix}; + use crate::tests::{ + check_diagnostics, check_diagnostics_with_disabled, check_fix, check_no_fix, + }; #[test] fn missing_reference() { @@ -718,7 +720,7 @@ struct Bar { #[test] fn return_no_value() { - check_diagnostics( + check_diagnostics_with_disabled( r#" fn f() -> i32 { return; @@ -727,6 +729,7 @@ fn f() -> i32 { } fn g() { return; } "#, + std::iter::once("needless_return".to_owned()), ); } diff --git a/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs b/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs index 996b6eda59cf..06f176f86f4e 100644 --- a/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs +++ b/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs @@ -10,7 +10,7 @@ pub(crate) fn unimplemented_builtin_macro( Diagnostic::new_with_syntax_node_ptr( ctx, DiagnosticCode::Ra("unimplemented-builtin-macro", Severity::WeakWarning), - "unimplemented built-in macro".to_string(), + "unimplemented built-in macro".to_owned(), d.node, ) } diff --git a/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/crates/ide-diagnostics/src/handlers/unresolved_field.rs index 0e7a5720d4d2..65abfd8a294b 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_field.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_field.rs @@ -65,7 +65,7 @@ fn method_fix( let FileRange { range, file_id } = ctx.sema.original_range_opt(expr.syntax())?; Some(vec![Assist { id: AssistId("expected-field-found-method-call-fix", AssistKind::QuickFix), - label: Label::new("Use parentheses to call the method".to_string()), + label: Label::new("Use parentheses to call the 
method".to_owned()), group: None, target: range, source_change: Some(SourceChange::from_text_edit( diff --git a/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/crates/ide-diagnostics/src/handlers/unresolved_method.rs index 9f8fee67f31c..648d081898ce 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_method.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_method.rs @@ -101,7 +101,7 @@ fn field_fix( }; Some(Assist { id: AssistId("expected-method-found-field-fix", AssistKind::QuickFix), - label: Label::new("Use parentheses to call the value of the field".to_string()), + label: Label::new("Use parentheses to call the value of the field".to_owned()), group: None, target: range, source_change: Some(SourceChange::from_iter([ diff --git a/crates/ide-diagnostics/src/handlers/unresolved_module.rs b/crates/ide-diagnostics/src/handlers/unresolved_module.rs index 1604decf9073..115568832496 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_module.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_module.rs @@ -16,7 +16,7 @@ pub(crate) fn unresolved_module( ctx, DiagnosticCode::RustcHardError("E0583"), match &*d.candidates { - [] => "unresolved module".to_string(), + [] => "unresolved module".to_owned(), [candidate] => format!("unresolved module, can't find module file: {candidate}"), [candidates @ .., last] => { format!( @@ -46,7 +46,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedModule) -> Option format!("proc macro `{name}` not expanded"), - None => "proc macro not expanded".to_string(), + None => "proc macro not expanded".to_owned(), }; let severity = if config_enabled { Severity::Error } else { Severity::WeakWarning }; let def_map = ctx.sema.db.crate_def_map(d.krate); let message = if config_enabled { - def_map.proc_macro_loading_error().unwrap_or("proc macro not found in the built dylib") + def_map.proc_macro_loading_error().unwrap_or("internal error") } else { match d.kind { hir::MacroKind::Attr if 
proc_macros_enabled => "attribute macro expansion is disabled", diff --git a/crates/ide-diagnostics/src/handlers/useless_braces.rs b/crates/ide-diagnostics/src/handlers/useless_braces.rs index 8dce2af23e32..863a7ab783ec 100644 --- a/crates/ide-diagnostics/src/handlers/useless_braces.rs +++ b/crates/ide-diagnostics/src/handlers/useless_braces.rs @@ -40,7 +40,7 @@ pub(crate) fn useless_braces( acc.push( Diagnostic::new( DiagnosticCode::RustcLint("unused_braces"), - "Unnecessary braces in use statement".to_string(), + "Unnecessary braces in use statement".to_owned(), FileRange { file_id, range: use_range }, ) .with_main_node(InFile::new(file_id.into(), node.clone())) @@ -112,7 +112,7 @@ mod a { ); let mut config = DiagnosticsConfig::test_sample(); - config.disabled.insert("syntax-error".to_string()); + config.disabled.insert("syntax-error".to_owned()); check_diagnostics_with_config( config, r#" diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index 5ad7069e317a..9d21bb4cd9fb 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -43,6 +43,8 @@ mod handlers { pub(crate) mod no_such_field; pub(crate) mod private_assoc_item; pub(crate) mod private_field; + pub(crate) mod remove_trailing_return; + pub(crate) mod remove_unnecessary_else; pub(crate) mod replace_filter_map_next_with_find_map; pub(crate) mod trait_impl_incorrect_safety; pub(crate) mod trait_impl_missing_assoc_item; @@ -73,8 +75,6 @@ mod handlers { #[cfg(test)] mod tests; -use std::collections::HashMap; - use hir::{diagnostics::AnyDiagnostic, InFile, Semantics}; use ide_db::{ assists::{Assist, AssistId, AssistKind, AssistResolveStrategy}, @@ -384,6 +384,8 @@ pub fn diagnostics( AnyDiagnostic::UnusedVariable(d) => handlers::unused_variables::unused_variables(&ctx, &d), AnyDiagnostic::BreakOutsideOfLoop(d) => handlers::break_outside_of_loop::break_outside_of_loop(&ctx, &d), AnyDiagnostic::MismatchedTupleStructPatArgCount(d) => 
handlers::mismatched_arg_count::mismatched_tuple_struct_pat_arg_count(&ctx, &d), + AnyDiagnostic::RemoveTrailingReturn(d) => handlers::remove_trailing_return::remove_trailing_return(&ctx, &d), + AnyDiagnostic::RemoveUnnecessaryElse(d) => handlers::remove_unnecessary_else::remove_unnecessary_else(&ctx, &d), }; res.push(d) } @@ -413,18 +415,18 @@ pub fn diagnostics( // `__RA_EVERY_LINT` is a fake lint group to allow every lint in proc macros -static RUSTC_LINT_GROUPS_DICT: Lazy>> = +static RUSTC_LINT_GROUPS_DICT: Lazy>> = Lazy::new(|| build_group_dict(DEFAULT_LINT_GROUPS, &["warnings", "__RA_EVERY_LINT"], "")); -static CLIPPY_LINT_GROUPS_DICT: Lazy>> = +static CLIPPY_LINT_GROUPS_DICT: Lazy>> = Lazy::new(|| build_group_dict(CLIPPY_LINT_GROUPS, &["__RA_EVERY_LINT"], "clippy::")); fn build_group_dict( lint_group: &'static [LintGroup], all_groups: &'static [&'static str], prefix: &'static str, -) -> HashMap<&'static str, Vec<&'static str>> { - let mut r: HashMap<&str, Vec<&str>> = HashMap::new(); +) -> FxHashMap<&'static str, Vec<&'static str>> { + let mut r: FxHashMap<&str, Vec<&str>> = FxHashMap::default(); for g in lint_group { for child in g.children { r.entry(child.strip_prefix(prefix).unwrap()) @@ -561,7 +563,7 @@ fn unresolved_fix(id: &'static str, label: &str, target: TextRange) -> Assist { assert!(!id.contains(' ')); Assist { id: AssistId(id, AssistKind::QuickFix), - label: Label::new(label.to_string()), + label: Label::new(label.to_owned()), group: None, target, source_change: None, diff --git a/crates/ide-diagnostics/src/tests.rs b/crates/ide-diagnostics/src/tests.rs index f394a491b512..b62bb5affdd8 100644 --- a/crates/ide-diagnostics/src/tests.rs +++ b/crates/ide-diagnostics/src/tests.rs @@ -1,3 +1,4 @@ +#![allow(clippy::print_stderr)] #[cfg(not(feature = "in-rust-tree"))] mod sourcegen; @@ -33,13 +34,35 @@ pub(crate) fn check_fixes(ra_fixture_before: &str, ra_fixtures_after: Vec<&str>) #[track_caller] fn check_nth_fix(nth: usize, ra_fixture_before: &str, 
ra_fixture_after: &str) { + let mut config = DiagnosticsConfig::test_sample(); + config.expr_fill_default = ExprFillDefaultMode::Default; + check_nth_fix_with_config(config, nth, ra_fixture_before, ra_fixture_after) +} + +#[track_caller] +pub(crate) fn check_fix_with_disabled( + ra_fixture_before: &str, + ra_fixture_after: &str, + disabled: impl Iterator, +) { + let mut config = DiagnosticsConfig::test_sample(); + config.expr_fill_default = ExprFillDefaultMode::Default; + config.disabled.extend(disabled); + check_nth_fix_with_config(config, 0, ra_fixture_before, ra_fixture_after) +} + +#[track_caller] +fn check_nth_fix_with_config( + config: DiagnosticsConfig, + nth: usize, + ra_fixture_before: &str, + ra_fixture_after: &str, +) { let after = trim_indent(ra_fixture_after); let (db, file_position) = RootDatabase::with_position(ra_fixture_before); - let mut conf = DiagnosticsConfig::test_sample(); - conf.expr_fill_default = ExprFillDefaultMode::Default; let diagnostic = - super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id) + super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_position.file_id) .pop() .expect("no diagnostics"); let fix = &diagnostic @@ -85,7 +108,17 @@ pub(crate) fn check_no_fix(ra_fixture: &str) { #[track_caller] pub(crate) fn check_diagnostics(ra_fixture: &str) { let mut config = DiagnosticsConfig::test_sample(); - config.disabled.insert("inactive-code".to_string()); + config.disabled.insert("inactive-code".to_owned()); + check_diagnostics_with_config(config, ra_fixture) +} + +#[track_caller] +pub(crate) fn check_diagnostics_with_disabled( + ra_fixture: &str, + disabled: impl Iterator, +) { + let mut config = DiagnosticsConfig::test_sample(); + config.disabled.extend(disabled); check_diagnostics_with_config(config, ra_fixture) } @@ -174,7 +207,8 @@ fn minicore_smoke_test() { let source = minicore.source_code(); let mut config = DiagnosticsConfig::test_sample(); // This should be ignored since we 
conditionally remove code which creates single item use with braces - config.disabled.insert("unused_braces".to_string()); + config.disabled.insert("unused_braces".to_owned()); + config.disabled.insert("unused_variables".to_owned()); check_diagnostics_with_config(config, &source); } diff --git a/crates/ide-ssr/src/matching.rs b/crates/ide-ssr/src/matching.rs index 060897a68529..fb98e9568474 100644 --- a/crates/ide-ssr/src/matching.rs +++ b/crates/ide-ssr/src/matching.rs @@ -456,7 +456,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> { SyntaxElement::Token(t) => Some(t.clone()), SyntaxElement::Node(n) => n.first_token(), }) - .map(|p| p.text().to_string()); + .map(|p| p.text().to_owned()); let first_matched_token = child.clone(); let mut last_matched_token = child; // Read code tokens util we reach one equal to the next token from our pattern @@ -706,7 +706,7 @@ where // we are trying to match that bit of code. This saves us having to pass a boolean into all the bits // of code that can make the decision to not match. thread_local! { - pub static RECORDING_MATCH_FAIL_REASONS: Cell = Cell::new(false); + pub static RECORDING_MATCH_FAIL_REASONS: Cell = const { Cell::new(false) }; } fn recording_match_fail_reasons() -> bool { @@ -795,7 +795,7 @@ mod tests { let edits = match_finder.edits(); assert_eq!(edits.len(), 1); let edit = &edits[&position.file_id]; - let mut after = input.to_string(); + let mut after = input.to_owned(); edit.apply(&mut after); assert_eq!(after, "fn foo() {} fn bar() {} fn main() { bar(1+2); }"); } diff --git a/crates/ide-ssr/src/parsing.rs b/crates/ide-ssr/src/parsing.rs index d78d009681a6..2f91271c4653 100644 --- a/crates/ide-ssr/src/parsing.rs +++ b/crates/ide-ssr/src/parsing.rs @@ -152,7 +152,7 @@ impl FromStr for SsrRule { .next() .ok_or_else(|| SsrError("Cannot find delimiter `==>>`".into()))? 
.trim() - .to_string(); + .to_owned(); if it.next().is_some() { return Err(SsrError("More than one delimiter found".into())); } diff --git a/crates/ide-ssr/src/tests.rs b/crates/ide-ssr/src/tests.rs index 7c7d146cb4a8..e608b0a7c426 100644 --- a/crates/ide-ssr/src/tests.rs +++ b/crates/ide-ssr/src/tests.rs @@ -113,6 +113,7 @@ fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) { expected.assert_eq(&actual); } +#[allow(clippy::print_stdout)] fn print_match_debug_info(match_finder: &MatchFinder<'_>, file_id: FileId, snippet: &str) { let debug_info = match_finder.debug_where_text_equal(file_id, snippet); println!( diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs index f22198571912..dbe6a5507cc3 100644 --- a/crates/ide/src/doc_links.rs +++ b/crates/ide/src/doc_links.rs @@ -58,7 +58,7 @@ pub(crate) fn rewrite_links(db: &RootDatabase, markdown: &str, definition: Defin // and valid URLs so we choose to be too eager to try to resolve what might be // a URL. 
if target.contains("://") { - (Some(LinkType::Inline), target.to_string(), title.to_string()) + (Some(LinkType::Inline), target.to_owned(), title.to_owned()) } else { // Two possibilities: // * path-based links: `../../module/struct.MyStruct.html` @@ -66,9 +66,9 @@ pub(crate) fn rewrite_links(db: &RootDatabase, markdown: &str, definition: Defin if let Some((target, title)) = rewrite_intra_doc_link(db, definition, target, title) { (None, target, title) } else if let Some(target) = rewrite_url_link(db, definition, target) { - (Some(LinkType::Inline), target, title.to_string()) + (Some(LinkType::Inline), target, title.to_owned()) } else { - (None, target.to_string(), title.to_string()) + (None, target.to_owned(), title.to_owned()) } } }); @@ -186,7 +186,7 @@ pub(crate) fn extract_definitions_from_docs( let (link, ns) = parse_intra_doc_link(&target); Some(( TextRange::new(range.start.try_into().ok()?, range.end.try_into().ok()?), - link.to_string(), + link.to_owned(), ns, )) } @@ -388,7 +388,7 @@ fn rewrite_intra_doc_link( url = url.join(&file).ok()?; url.set_fragment(anchor); - Some((url.into(), strip_prefixes_suffixes(title).to_string())) + Some((url.into(), strip_prefixes_suffixes(title).to_owned())) } /// Try to resolve path to local documentation via path-based links (i.e. `../gateway/struct.Shard.html`). @@ -668,7 +668,7 @@ fn get_assoc_item_fragment(db: &dyn HirDatabase, assoc_item: hir::AssocItem) -> Some(match assoc_item { AssocItem::Function(function) => { let is_trait_method = - function.as_assoc_item(db).and_then(|assoc| assoc.containing_trait(db)).is_some(); + function.as_assoc_item(db).and_then(|assoc| assoc.container_trait(db)).is_some(); // This distinction may get more complicated when specialization is available. // Rustdoc makes this decision based on whether a method 'has defaultness'. // Currently this is only the case for provided trait methods. 
diff --git a/crates/ide/src/doc_links/tests.rs b/crates/ide/src/doc_links/tests.rs index 3bb0fc606411..60e8d29a7163 100644 --- a/crates/ide/src/doc_links/tests.rs +++ b/crates/ide/src/doc_links/tests.rs @@ -29,9 +29,6 @@ fn check_external_docs( let web_url = links.web_url; let local_url = links.local_url; - println!("web_url: {:?}", web_url); - println!("local_url: {:?}", local_url); - match (expect_web_url, web_url) { (Some(expect), Some(url)) => expect.assert_eq(&url), (None, None) => (), diff --git a/crates/ide/src/file_structure.rs b/crates/ide/src/file_structure.rs index b278924721c0..0e790e14205f 100644 --- a/crates/ide/src/file_structure.rs +++ b/crates/ide/src/file_structure.rs @@ -193,7 +193,7 @@ fn structure_token(token: SyntaxToken) -> Option { if let Some(region_name) = text.strip_prefix("// region:").map(str::trim) { return Some(StructureNode { parent: None, - label: region_name.to_string(), + label: region_name.to_owned(), navigation_range: comment.syntax().text_range(), node_range: comment.syntax().text_range(), kind: StructureNodeKind::Region, diff --git a/crates/ide/src/goto_declaration.rs b/crates/ide/src/goto_declaration.rs index fae100743543..fab62e95d19a 100644 --- a/crates/ide/src/goto_declaration.rs +++ b/crates/ide/src/goto_declaration.rs @@ -61,7 +61,7 @@ pub(crate) fn goto_declaration( _ => None, }?; - let trait_ = assoc.containing_trait_impl(db)?; + let trait_ = assoc.implemented_trait(db)?; let name = Some(assoc.name(db)?); let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?; item.try_to_nav(db) diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index 4fed1f9158ce..88255d222ed8 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs @@ -182,11 +182,7 @@ fn try_filter_trait_item_definition( match assoc { AssocItem::Function(..) => None, AssocItem::Const(..) | AssocItem::TypeAlias(..) 
=> { - let imp = match assoc.container(db) { - hir::AssocItemContainer::Impl(imp) => imp, - _ => return None, - }; - let trait_ = imp.trait_(db)?; + let trait_ = assoc.implemented_trait(db)?; let name = def.name(db)?; let discri_value = discriminant(&assoc); trait_ diff --git a/crates/ide/src/goto_implementation.rs b/crates/ide/src/goto_implementation.rs index c1a4a7b1fc79..8a12cbacccc7 100644 --- a/crates/ide/src/goto_implementation.rs +++ b/crates/ide/src/goto_implementation.rs @@ -60,13 +60,13 @@ pub(crate) fn goto_implementation( Definition::Function(f) => { let assoc = f.as_assoc_item(sema.db)?; let name = assoc.name(sema.db)?; - let trait_ = assoc.containing_trait_or_trait_impl(sema.db)?; + let trait_ = assoc.container_or_implemented_trait(sema.db)?; impls_for_trait_item(&sema, trait_, name) } Definition::Const(c) => { let assoc = c.as_assoc_item(sema.db)?; let name = assoc.name(sema.db)?; - let trait_ = assoc.containing_trait_or_trait_impl(sema.db)?; + let trait_ = assoc.container_or_implemented_trait(sema.db)?; impls_for_trait_item(&sema, trait_, name) } _ => return None, diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs index 979ca4575d06..dd285e9b327c 100644 --- a/crates/ide/src/highlight_related.rs +++ b/crates/ide/src/highlight_related.rs @@ -521,7 +521,7 @@ mod tests { ReferenceCategory::Import => "import", ReferenceCategory::Test => "test", } - .to_string() + .to_owned() }), ) }) diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs index 45386df2b266..eff055c95992 100644 --- a/crates/ide/src/hover/render.rs +++ b/crates/ide/src/hover/render.rs @@ -621,7 +621,7 @@ fn closure_ty( }) .join("\n"); if captures_rendered.trim().is_empty() { - captures_rendered = "This closure captures nothing".to_string(); + captures_rendered = "This closure captures nothing".to_owned(); } let mut targets: Vec = Vec::new(); let mut push_new_def = |item: hir::ModuleDef| { @@ -823,7 +823,7 @@ fn keyword_hints( } } _ 
=> KeywordHint { - description: token.text().to_string(), + description: token.text().to_owned(), keyword_mod, actions: Vec::new(), }, @@ -835,9 +835,9 @@ fn keyword_hints( Some(_) => format!("prim_{}", token.text()), None => format!("{}_keyword", token.text()), }; - KeywordHint::new(token.text().to_string(), module) + KeywordHint::new(token.text().to_owned(), module) } - T![Self] => KeywordHint::new(token.text().to_string(), "self_upper_keyword".into()), - _ => KeywordHint::new(token.text().to_string(), format!("{}_keyword", token.text())), + T![Self] => KeywordHint::new(token.text().to_owned(), "self_upper_keyword".into()), + _ => KeywordHint::new(token.text().to_owned(), format!("{}_keyword", token.text())), } } diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index 9f4427090e9e..30bfe6ee9dc3 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -406,8 +406,8 @@ fn main() { file_id: FileId( 1, ), - full_range: 631..866, - focus_range: 692..698, + full_range: 632..867, + focus_range: 693..699, name: "FnOnce", kind: Trait, container_name: "function", @@ -702,7 +702,7 @@ fn hover_shows_struct_field_info() { // Hovering over the field when instantiating check( r#" -struct Foo { field_a: u32 } +struct Foo { pub field_a: u32 } fn main() { let foo = Foo { field_a$0: 0, }; @@ -717,7 +717,7 @@ fn main() { ```rust // size = 4, align = 4, offset = 0 - field_a: u32 + pub field_a: u32 ``` "#]], ); @@ -725,7 +725,7 @@ fn main() { // Hovering over the field in the definition check( r#" -struct Foo { field_a$0: u32 } +struct Foo { pub field_a$0: u32 } fn main() { let foo = Foo { field_a: 0 }; @@ -740,7 +740,74 @@ fn main() { ```rust // size = 4, align = 4, offset = 0 - field_a: u32 + pub field_a: u32 + ``` + "#]], + ); +} + +#[test] +fn hover_shows_tuple_struct_field_info() { + check( + r#" +struct Foo(pub u32) + +fn main() { + let foo = Foo { 0$0: 0, }; +} +"#, + expect![[r#" + *0* + + ```rust + test::Foo + ``` + + ```rust 
+ // size = 4, align = 4, offset = 0 + pub 0: u32 + ``` + "#]], + ); + check( + r#" +struct Foo(pub u32) + +fn foo(foo: Foo) { + foo.0$0; +} +"#, + expect![[r#" + *0* + + ```rust + test::Foo + ``` + + ```rust + // size = 4, align = 4, offset = 0 + pub 0: u32 + ``` + "#]], + ); +} + +#[test] +fn hover_tuple_struct() { + check( + r#" +struct Foo$0(pub u32) +"#, + expect![[r#" + *Foo* + + ```rust + test + ``` + + ```rust + // size = 4, align = 4 + struct Foo(pub u32); ``` "#]], ); @@ -7196,8 +7263,8 @@ impl Iterator for S { file_id: FileId( 1, ), - full_range: 6156..6364, - focus_range: 6221..6227, + full_range: 6157..6365, + focus_range: 6222..6228, name: "Future", kind: Trait, container_name: "future", @@ -7210,8 +7277,8 @@ impl Iterator for S { file_id: FileId( 1, ), - full_range: 6994..7460, - focus_range: 7038..7046, + full_range: 6995..7461, + focus_range: 7039..7047, name: "Iterator", kind: Trait, container_name: "iterator", diff --git a/crates/ide/src/inlay_hints/implicit_drop.rs b/crates/ide/src/inlay_hints/implicit_drop.rs index 3104b85768f4..8d9ad5bda143 100644 --- a/crates/ide/src/inlay_hints/implicit_drop.rs +++ b/crates/ide/src/inlay_hints/implicit_drop.rs @@ -1,10 +1,8 @@ //! Implementation of "implicit drop" inlay hints: -//! ```no_run -//! fn main() { -//! let x = vec![2]; -//! if some_condition() { -//! /* drop(x) */return; -//! } +//! ```ignore +//! let x = vec![2]; +//! if some_condition() { +//! /* drop(x) */return; //! } //! ``` use hir::{ diff --git a/crates/ide/src/inlay_hints/range_exclusive.rs b/crates/ide/src/inlay_hints/range_exclusive.rs index 50ab15c504f6..c4b0c199fc23 100644 --- a/crates/ide/src/inlay_hints/range_exclusive.rs +++ b/crates/ide/src/inlay_hints/range_exclusive.rs @@ -1,5 +1,5 @@ //! Implementation of "range exclusive" inlay hints: -//! ```no_run +//! ```ignore //! for i in 0../* < */10 {} //! if let ../* < */100 = 50 {} //! 
``` diff --git a/crates/ide/src/interpret_function.rs b/crates/ide/src/interpret_function.rs index adbd19188844..df444a3f4d06 100644 --- a/crates/ide/src/interpret_function.rs +++ b/crates/ide/src/interpret_function.rs @@ -15,8 +15,8 @@ use syntax::{algo::ancestors_at_offset, ast, AstNode, TextRange}; // |=== pub(crate) fn interpret_function(db: &RootDatabase, position: FilePosition) -> String { let start_time = Instant::now(); - let mut result = find_and_interpret(db, position) - .unwrap_or_else(|| "Not inside a function body".to_string()); + let mut result = + find_and_interpret(db, position).unwrap_or_else(|| "Not inside a function body".to_owned()); let duration = Instant::now() - start_time; writeln!(result).unwrap(); writeln!(result, "----------------------").unwrap(); diff --git a/crates/ide/src/join_lines.rs b/crates/ide/src/join_lines.rs index 1cfde2362455..fef0ec35ba09 100644 --- a/crates/ide/src/join_lines.rs +++ b/crates/ide/src/join_lines.rs @@ -115,7 +115,7 @@ fn remove_newline( let range = TextRange::at(offset, ((n_spaces_after_line_break + 1) as u32).into()); let replace_with = if no_space { "" } else { " " }; - edit.replace(range, replace_with.to_string()); + edit.replace(range, replace_with.to_owned()); return; } @@ -140,7 +140,7 @@ fn remove_newline( }; edit.replace( TextRange::new(prev.text_range().start(), token.text_range().end()), - space.to_string(), + space.to_owned(), ); return; } @@ -154,7 +154,7 @@ fn remove_newline( Some(_) => cov_mark::hit!(join_two_ifs_with_existing_else), None => { cov_mark::hit!(join_two_ifs); - edit.replace(token.text_range(), " else ".to_string()); + edit.replace(token.text_range(), " else ".to_owned()); return; } } @@ -203,7 +203,7 @@ fn remove_newline( } // Remove newline but add a computed amount of whitespace characters - edit.replace(token.text_range(), compute_ws(prev.kind(), next.kind()).to_string()); + edit.replace(token.text_range(), compute_ws(prev.kind(), next.kind()).to_owned()); } fn 
join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> { diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index e9f42d478554..effdbf2c1f04 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -238,7 +238,7 @@ impl Analysis { let mut host = AnalysisHost::default(); let file_id = FileId::from_raw(0); let mut file_set = FileSet::default(); - file_set.insert(file_id, VfsPath::new_virtual_path("/main.rs".to_string())); + file_set.insert(file_id, VfsPath::new_virtual_path("/main.rs".to_owned())); let source_root = SourceRoot::new_local(file_set); let mut change = Change::new(); diff --git a/crates/ide/src/moniker.rs b/crates/ide/src/moniker.rs index c49d75b2f811..80d265ae3739 100644 --- a/crates/ide/src/moniker.rs +++ b/crates/ide/src/moniker.rs @@ -383,18 +383,18 @@ pub(crate) fn def_to_moniker( let (name, repo, version) = match krate.origin(db) { CrateOrigin::Library { repo, name } => (name, repo, krate.version(db)), CrateOrigin::Local { repo, name } => ( - name.unwrap_or(krate.display_name(db)?.canonical_name().to_string()), + name.unwrap_or(krate.display_name(db)?.canonical_name().to_owned()), repo, krate.version(db), ), CrateOrigin::Rustc { name } => ( name.clone(), - Some("https://github.com/rust-lang/rust/".to_string()), + Some("https://github.com/rust-lang/rust/".to_owned()), Some(format!("https://github.com/rust-lang/rust/compiler/{name}",)), ), CrateOrigin::Lang(lang) => ( - krate.display_name(db)?.canonical_name().to_string(), - Some("https://github.com/rust-lang/rust/".to_string()), + krate.display_name(db)?.canonical_name().to_owned(), + Some("https://github.com/rust-lang/rust/".to_owned()), Some(match lang { LangCrateOrigin::Other => { "https://github.com/rust-lang/rust/library/".into() diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs index bfd91feeb390..674ce6d52bf7 100644 --- a/crates/ide/src/navigation_target.rs +++ b/crates/ide/src/navigation_target.rs @@ -860,7 
+860,7 @@ fn foo() { enum FooInner { } } "#, ); - let navs = analysis.symbol_search(Query::new("FooInner".to_string()), !0).unwrap(); + let navs = analysis.symbol_search(Query::new("FooInner".to_owned()), !0).unwrap(); expect![[r#" [ NavigationTarget { @@ -898,7 +898,7 @@ struct Foo; "#, ); - let navs = analysis.symbol_search(Query::new("foo".to_string()), !0).unwrap(); + let navs = analysis.symbol_search(Query::new("foo".to_owned()), !0).unwrap(); assert_eq!(navs.len(), 2) } } diff --git a/crates/ide/src/prime_caches.rs b/crates/ide/src/prime_caches.rs index a95d1771ce0b..5c14f496a0bc 100644 --- a/crates/ide/src/prime_caches.rs +++ b/crates/ide/src/prime_caches.rs @@ -105,7 +105,7 @@ pub(crate) fn parallel_prime_caches( work_sender .send(( crate_id, - graph[crate_id].display_name.as_deref().unwrap_or_default().to_string(), + graph[crate_id].display_name.as_deref().unwrap_or_default().to_owned(), )) .ok(); } diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs index bdda25a111f8..dcdc6118a348 100644 --- a/crates/ide/src/references.rs +++ b/crates/ide/src/references.rs @@ -9,8 +9,6 @@ //! at the index that the match starts at and its tree parent is //! resolved to the search element definition, we get a reference. -use std::collections::HashMap; - use hir::{DescendPreference, PathResolution, Semantics}; use ide_db::{ base_db::FileId, @@ -79,7 +77,7 @@ pub(crate) fn find_all_refs( .collect(), ) }) - .collect::, _>>(); + .collect::>>(); let declaration = match def { Definition::Module(module) => { Some(NavigationTarget::from_module_to_decl(sema.db, module)) diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs index 3008722cdbb6..ae107a96040d 100644 --- a/crates/ide/src/runnables.rs +++ b/crates/ide/src/runnables.rs @@ -72,7 +72,7 @@ impl Runnable { RunnableKind::Bench { test_id } => format!("bench {test_id}"), RunnableKind::DocTest { test_id, .. 
} => format!("doctest {test_id}"), RunnableKind::Bin => { - target.map_or_else(|| "run binary".to_string(), |t| format!("run {t}")) + target.map_or_else(|| "run binary".to_owned(), |t| format!("run {t}")) } } } @@ -442,8 +442,7 @@ fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option { .for_each(|name| format_to!(path, "{}::", name.display(db))); // This probably belongs to canonical_path? if let Some(assoc_item) = def.as_assoc_item(db) { - if let hir::AssocItemContainer::Impl(imp) = assoc_item.container(db) { - let ty = imp.self_ty(db); + if let Some(ty) = assoc_item.implementing_ty(db) { if let Some(adt) = ty.as_adt() { let name = adt.name(db); let mut ty_args = ty.generic_parameters(db).peekable(); diff --git a/crates/ide/src/ssr.rs b/crates/ide/src/ssr.rs index f0d18fdefa71..b49fe391bf25 100644 --- a/crates/ide/src/ssr.rs +++ b/crates/ide/src/ssr.rs @@ -41,7 +41,7 @@ pub(crate) fn ssr_assists( for (label, source_change) in assists.into_iter() { let assist = Assist { id, - label: Label::new(label.to_string()), + label: Label::new(label.to_owned()), group: Some(GroupLabel("Apply SSR".into())), target: comment_range, source_change, diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index 5b7094e6bcc0..dee5afbf8d9e 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -1,14 +1,12 @@ //! This module provides `StaticIndex` which is used for powering //! 
read-only code browsers and emitting LSIF -use std::collections::HashMap; - use hir::{db::HirDatabase, Crate, HirFileIdExt, Module}; -use ide_db::helpers::get_definition; use ide_db::{ base_db::{FileId, FileRange, SourceDatabaseExt}, defs::Definition, - FxHashSet, RootDatabase, + helpers::get_definition, + FxHashMap, FxHashSet, RootDatabase, }; use syntax::{AstNode, SyntaxKind::*, TextRange, T}; @@ -31,7 +29,7 @@ pub struct StaticIndex<'a> { pub tokens: TokenStore, analysis: &'a Analysis, db: &'a RootDatabase, - def_map: HashMap, + def_map: FxHashMap, } #[derive(Debug)] @@ -232,14 +230,13 @@ impl StaticIndex<'_> { #[cfg(test)] mod tests { use crate::{fixture, StaticIndex}; - use ide_db::base_db::FileRange; - use std::collections::HashSet; + use ide_db::{base_db::FileRange, FxHashSet}; use syntax::TextSize; fn check_all_ranges(ra_fixture: &str) { let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture); let s = StaticIndex::compute(&analysis); - let mut range_set: HashSet<_> = ranges.iter().map(|it| it.0).collect(); + let mut range_set: FxHashSet<_> = ranges.iter().map(|it| it.0).collect(); for f in s.files { for (range, _) in f.tokens { let it = FileRange { file_id: f.file_id, range }; @@ -258,7 +255,7 @@ mod tests { fn check_definitions(ra_fixture: &str) { let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture); let s = StaticIndex::compute(&analysis); - let mut range_set: HashSet<_> = ranges.iter().map(|it| it.0).collect(); + let mut range_set: FxHashSet<_> = ranges.iter().map(|it| it.0).collect(); for (_, t) in s.tokens.iter() { if let Some(t) = t.definition { if t.range.start() == TextSize::from(0) { diff --git a/crates/ide/src/status.rs b/crates/ide/src/status.rs index b2b305c1d380..3321a0513b6f 100644 --- a/crates/ide/src/status.rs +++ b/crates/ide/src/status.rs @@ -105,7 +105,7 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option) -> String { } } - buf.trim().to_string() + buf.trim().to_owned() } fn collect_query<'q, 
Q>(table: QueryTable<'q, Q>) -> ::Collector diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs index d686652bb3ec..e7c1b4497e2d 100644 --- a/crates/ide/src/syntax_highlighting/highlight.rs +++ b/crates/ide/src/syntax_highlighting/highlight.rs @@ -559,7 +559,7 @@ fn highlight_method_call( } if func .as_assoc_item(sema.db) - .and_then(|it| it.containing_trait_or_trait_impl(sema.db)) + .and_then(|it| it.container_or_implemented_trait(sema.db)) .is_some() { h |= HlMod::Trait; diff --git a/crates/ide/src/syntax_tree.rs b/crates/ide/src/syntax_tree.rs index 2108b53861c1..1065d5899ab7 100644 --- a/crates/ide/src/syntax_tree.rs +++ b/crates/ide/src/syntax_tree.rs @@ -55,7 +55,7 @@ fn syntax_tree_for_string(token: &SyntaxToken, text_range: TextRange) -> Option< fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option { // Range of the full node let node_range = node.text_range(); - let text = node.text().to_string(); + let text = node.text().to_owned(); // We start at some point inside the node // Either we have selected the whole string diff --git a/crates/ide/src/typing.rs b/crates/ide/src/typing.rs index b8856882ed7f..e87fc89fea2a 100644 --- a/crates/ide/src/typing.rs +++ b/crates/ide/src/typing.rs @@ -149,10 +149,7 @@ fn on_opening_bracket_typed( let tree: ast::UseTree = find_node_at_offset(file.syntax(), offset)?; - Some(TextEdit::insert( - tree.syntax().text_range().end() + TextSize::of("{"), - "}".to_string(), - )) + Some(TextEdit::insert(tree.syntax().text_range().end() + TextSize::of("{"), "}".to_owned())) } fn bracket_expr( @@ -235,7 +232,7 @@ fn on_eq_typed(file: &SourceFile, offset: TextSize) -> Option { return None; } let offset = expr.syntax().text_range().end(); - Some(TextEdit::insert(offset, ";".to_string())) + Some(TextEdit::insert(offset, ";".to_owned())) } /// `a =$0 b;` removes the semicolon if an expression is valid in this context. 
@@ -275,7 +272,7 @@ fn on_eq_typed(file: &SourceFile, offset: TextSize) -> Option { return None; } let offset = let_stmt.syntax().text_range().end(); - Some(TextEdit::insert(offset, ";".to_string())) + Some(TextEdit::insert(offset, ";".to_owned())) } } @@ -353,7 +350,7 @@ fn on_left_angle_typed(file: &SourceFile, offset: TextSize) -> Option".to_string()), + edit: TextEdit::replace(range, "<$0>".to_owned()), is_snippet: true, }); } @@ -363,7 +360,7 @@ fn on_left_angle_typed(file: &SourceFile, offset: TextSize) -> Option".to_string()), + edit: TextEdit::replace(range, "<$0>".to_owned()), is_snippet: true, }) } else { @@ -383,7 +380,7 @@ fn on_right_angle_typed(file: &SourceFile, offset: TextSize) -> Option } find_node_at_offset::(file.syntax(), offset)?; - Some(TextEdit::insert(after_arrow, " ".to_string())) + Some(TextEdit::insert(after_arrow, " ".to_owned())) } #[cfg(test)] diff --git a/crates/ide/src/view_hir.rs b/crates/ide/src/view_hir.rs index 9abe54cd3903..51cf45bd22b1 100644 --- a/crates/ide/src/view_hir.rs +++ b/crates/ide/src/view_hir.rs @@ -12,7 +12,7 @@ use syntax::{algo::ancestors_at_offset, ast, AstNode}; // |=== // image::https://user-images.githubusercontent.com/48062697/113065588-068bdb80-91b1-11eb-9a78-0b4ef1e972fb.gif[] pub(crate) fn view_hir(db: &RootDatabase, position: FilePosition) -> String { - body_hir(db, position).unwrap_or_else(|| "Not inside a function body".to_string()) + body_hir(db, position).unwrap_or_else(|| "Not inside a function body".to_owned()) } fn body_hir(db: &RootDatabase, position: FilePosition) -> Option { diff --git a/crates/ide/src/view_mir.rs b/crates/ide/src/view_mir.rs index 08d810c13462..5fb47039890b 100644 --- a/crates/ide/src/view_mir.rs +++ b/crates/ide/src/view_mir.rs @@ -11,7 +11,7 @@ use syntax::{algo::ancestors_at_offset, ast, AstNode}; // | VS Code | **rust-analyzer: View Mir** // |=== pub(crate) fn view_mir(db: &RootDatabase, position: FilePosition) -> String { - body_mir(db, position).unwrap_or_else(|| "Not 
inside a function body".to_string()) + body_mir(db, position).unwrap_or_else(|| "Not inside a function body".to_owned()) } fn body_mir(db: &RootDatabase, position: FilePosition) -> Option { diff --git a/crates/limit/src/lib.rs b/crates/limit/src/lib.rs index 7f4b00df0bac..27471db6a341 100644 --- a/crates/limit/src/lib.rs +++ b/crates/limit/src/lib.rs @@ -55,13 +55,12 @@ impl Limit { if other <= old_max || old_max == 0 { break; } - if self - .max - .compare_exchange_weak(old_max, other, Ordering::Relaxed, Ordering::Relaxed) - .is_ok() - { - eprintln!("new max: {other}"); - } + _ = self.max.compare_exchange_weak( + old_max, + other, + Ordering::Relaxed, + Ordering::Relaxed, + ); } Ok(()) diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs index 3878e20a2a63..c6dc071c394e 100644 --- a/crates/load-cargo/src/lib.rs +++ b/crates/load-cargo/src/lib.rs @@ -279,7 +279,7 @@ pub fn load_proc_macro( let dylib = MacroDylib::new(path.to_path_buf()); let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?; if vec.is_empty() { - return Err("proc macro library returned no proc macros".to_string()); + return Err("proc macro library returned no proc macros".to_owned()); } Ok(vec .into_iter() @@ -382,7 +382,7 @@ impl ProcMacroExpander for Expander { call_site: Span, mixed_site: Span, ) -> Result, ProcMacroExpansionError> { - let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect(); + let env = env.iter().map(|(k, v)| (k.to_owned(), v.to_owned())).collect(); match self.0.expand(subtree, attrs, env, def_site, call_site, mixed_site) { Ok(Ok(subtree)) => Ok(subtree), Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)), diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs index 6c3917b37f1f..d946ecc1caf5 100644 --- a/crates/mbe/src/benchmark.rs +++ b/crates/mbe/src/benchmark.rs @@ -100,17 +100,19 @@ fn invocation_fixtures( // So we just skip any error cases and try again let mut try_cnt = 0; loop { - let mut subtree = 
tt::Subtree { + let mut token_trees = Vec::new(); + for op in rule.lhs.iter() { + collect_from_op(op, &mut token_trees, &mut seed); + } + + let subtree = tt::Subtree { delimiter: tt::Delimiter { open: DUMMY, close: DUMMY, kind: tt::DelimiterKind::Invisible, }, - token_trees: vec![], + token_trees: token_trees.into_boxed_slice(), }; - for op in rule.lhs.iter() { - collect_from_op(op, &mut subtree, &mut seed); - } if it.expand(&subtree, |_| (), true, DUMMY).err.is_none() { res.push((name.clone(), subtree)); break; @@ -127,45 +129,45 @@ fn invocation_fixtures( fn collect_from_op( op: &Op, - parent: &mut tt::Subtree, + token_trees: &mut Vec>, seed: &mut usize, ) { return match op { Op::Var { kind, .. } => match kind.as_ref() { - Some(MetaVarKind::Ident) => parent.token_trees.push(make_ident("foo")), - Some(MetaVarKind::Ty) => parent.token_trees.push(make_ident("Foo")), - Some(MetaVarKind::Tt) => parent.token_trees.push(make_ident("foo")), - Some(MetaVarKind::Vis) => parent.token_trees.push(make_ident("pub")), - Some(MetaVarKind::Pat) => parent.token_trees.push(make_ident("foo")), - Some(MetaVarKind::Path) => parent.token_trees.push(make_ident("foo")), - Some(MetaVarKind::Literal) => parent.token_trees.push(make_literal("1")), - Some(MetaVarKind::Expr) => parent.token_trees.push(make_ident("foo")), + Some(MetaVarKind::Ident) => token_trees.push(make_ident("foo")), + Some(MetaVarKind::Ty) => token_trees.push(make_ident("Foo")), + Some(MetaVarKind::Tt) => token_trees.push(make_ident("foo")), + Some(MetaVarKind::Vis) => token_trees.push(make_ident("pub")), + Some(MetaVarKind::Pat) => token_trees.push(make_ident("foo")), + Some(MetaVarKind::Path) => token_trees.push(make_ident("foo")), + Some(MetaVarKind::Literal) => token_trees.push(make_literal("1")), + Some(MetaVarKind::Expr) => token_trees.push(make_ident("foo")), Some(MetaVarKind::Lifetime) => { - parent.token_trees.push(make_punct('\'')); - parent.token_trees.push(make_ident("a")); + 
token_trees.push(make_punct('\'')); + token_trees.push(make_ident("a")); } Some(MetaVarKind::Block) => { - parent.token_trees.push(make_subtree(tt::DelimiterKind::Brace, None)) + token_trees.push(make_subtree(tt::DelimiterKind::Brace, None)) } Some(MetaVarKind::Item) => { - parent.token_trees.push(make_ident("fn")); - parent.token_trees.push(make_ident("foo")); - parent.token_trees.push(make_subtree(tt::DelimiterKind::Parenthesis, None)); - parent.token_trees.push(make_subtree(tt::DelimiterKind::Brace, None)); + token_trees.push(make_ident("fn")); + token_trees.push(make_ident("foo")); + token_trees.push(make_subtree(tt::DelimiterKind::Parenthesis, None)); + token_trees.push(make_subtree(tt::DelimiterKind::Brace, None)); } Some(MetaVarKind::Meta) => { - parent.token_trees.push(make_ident("foo")); - parent.token_trees.push(make_subtree(tt::DelimiterKind::Parenthesis, None)); + token_trees.push(make_ident("foo")); + token_trees.push(make_subtree(tt::DelimiterKind::Parenthesis, None)); } None => (), Some(kind) => panic!("Unhandled kind {kind:?}"), }, - Op::Literal(it) => parent.token_trees.push(tt::Leaf::from(it.clone()).into()), - Op::Ident(it) => parent.token_trees.push(tt::Leaf::from(it.clone()).into()), + Op::Literal(it) => token_trees.push(tt::Leaf::from(it.clone()).into()), + Op::Ident(it) => token_trees.push(tt::Leaf::from(it.clone()).into()), Op::Punct(puncts) => { for punct in puncts { - parent.token_trees.push(tt::Leaf::from(*punct).into()); + token_trees.push(tt::Leaf::from(*punct).into()); } } Op::Repeat { tokens, kind, separator } => { @@ -177,20 +179,20 @@ fn invocation_fixtures( }; for i in 0..cnt { for it in tokens.iter() { - collect_from_op(it, parent, seed); + collect_from_op(it, token_trees, seed); } if i + 1 != cnt { if let Some(sep) = separator { match sep { Separator::Literal(it) => { - parent.token_trees.push(tt::Leaf::Literal(it.clone()).into()) + token_trees.push(tt::Leaf::Literal(it.clone()).into()) } Separator::Ident(it) => { - 
parent.token_trees.push(tt::Leaf::Ident(it.clone()).into()) + token_trees.push(tt::Leaf::Ident(it.clone()).into()) } Separator::Puncts(puncts) => { for it in puncts { - parent.token_trees.push(tt::Leaf::Punct(*it).into()) + token_trees.push(tt::Leaf::Punct(*it).into()) } } }; @@ -199,11 +201,15 @@ fn invocation_fixtures( } } Op::Subtree { tokens, delimiter } => { - let mut subtree = tt::Subtree { delimiter: *delimiter, token_trees: Vec::new() }; + let mut subtree = Vec::new(); tokens.iter().for_each(|it| { collect_from_op(it, &mut subtree, seed); }); - parent.token_trees.push(subtree.into()); + + let subtree = + tt::Subtree { delimiter: *delimiter, token_trees: subtree.into_boxed_slice() }; + + token_trees.push(subtree.into()); } Op::Ignore { .. } | Op::Index { .. } | Op::Count { .. } | Op::Length { .. } => {} }; @@ -230,7 +236,7 @@ fn invocation_fixtures( ) -> tt::TokenTree { tt::Subtree { delimiter: tt::Delimiter { open: DUMMY, close: DUMMY, kind }, - token_trees: token_trees.unwrap_or_default(), + token_trees: token_trees.map(Vec::into_boxed_slice).unwrap_or_default(), } .into() } diff --git a/crates/mbe/src/expander.rs b/crates/mbe/src/expander.rs index 60483809dc18..9366048fd955 100644 --- a/crates/mbe/src/expander.rs +++ b/crates/mbe/src/expander.rs @@ -58,7 +58,7 @@ pub(crate) fn expand_rules( ExpandResult::new( tt::Subtree { delimiter: tt::Delimiter::invisible_spanned(call_site), - token_trees: vec![], + token_trees: Box::new([]), }, ExpandError::NoMatchingRule, ) diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs index 40b4c7cdd656..eea92cfba4c2 100644 --- a/crates/mbe/src/expander/matcher.rs +++ b/crates/mbe/src/expander/matcher.rs @@ -798,7 +798,7 @@ fn match_meta_var( tt.map(|tt| match tt { tt::TokenTree::Leaf(leaf) => tt::Subtree { delimiter: tt::Delimiter::invisible_spanned(*leaf.span()), - token_trees: vec![leaf.into()], + token_trees: Box::new([leaf.into()]), }, tt::TokenTree::Subtree(mut s) => { if s.delimiter.kind 
== tt::DelimiterKind::Invisible { @@ -832,7 +832,7 @@ fn match_meta_var( None => lit.into(), Some(neg) => tt::TokenTree::Subtree(tt::Subtree { delimiter: tt::Delimiter::invisible_spanned(*literal.span()), - token_trees: vec![neg, lit.into()], + token_trees: Box::new([neg, lit.into()]), }), } }) @@ -989,10 +989,10 @@ impl TtIter<'_, S> { close: ident.span, kind: tt::DelimiterKind::Invisible, }, - token_trees: vec![ + token_trees: Box::new([ tt::Leaf::Punct(*punct).into(), tt::Leaf::Ident(ident.clone()).into(), - ], + ]), } .into()) } diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index 800bc994adab..9291f799cca7 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs @@ -83,7 +83,7 @@ impl Bindings { close: span, kind: tt::DelimiterKind::Brace, }, - token_trees: vec![], + token_trees: Box::new([]), })), // FIXME: Meta and Item should get proper defaults MetaVarKind::Meta | MetaVarKind::Item | MetaVarKind::Tt | MetaVarKind::Vis => { @@ -349,11 +349,11 @@ fn expand_var( // We just treat it a normal tokens let tt = tt::Subtree { delimiter: tt::Delimiter::invisible_spanned(id), - token_trees: vec![ + token_trees: Box::new([ tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id }) .into(), tt::Leaf::from(tt::Ident { text: v.clone(), span: id }).into(), - ], + ]), } .into(); ExpandResult::ok(Fragment::Tokens(tt)) @@ -406,7 +406,7 @@ fn expand_repeat( value: Fragment::Tokens( tt::Subtree { delimiter: tt::Delimiter::invisible_spanned(ctx.call_site), - token_trees: vec![], + token_trees: Box::new([]), } .into(), ), @@ -455,7 +455,7 @@ fn expand_repeat( // e.g {Delimiter:None> ['>'] /Delimiter:None>} let tt = tt::Subtree { delimiter: tt::Delimiter::invisible_spanned(ctx.call_site), - token_trees: buf, + token_trees: buf.into_boxed_slice(), } .into(); @@ -486,7 +486,7 @@ fn push_fragment( fn push_subtree(buf: &mut Vec>, tt: tt::Subtree) { match tt.delimiter.kind { - 
tt::DelimiterKind::Invisible => buf.extend(tt.token_trees), + tt::DelimiterKind::Invisible => buf.extend(Vec::from(tt.token_trees)), _ => buf.push(tt.into()), } } @@ -504,7 +504,7 @@ fn fix_up_and_push_path_tt( // Note that we only need to fix up the top-level `TokenTree`s because the // context of the paths in the descendant `Subtree`s won't be changed by the // mbe transcription. - for tt in subtree.token_trees { + for tt in Vec::from(subtree.token_trees) { if prev_was_ident { // Pedantically, `(T) -> U` in `FnOnce(T) -> U` is treated as a generic // argument list and thus needs `::` between it and `FnOnce`. However in diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index d6c3bd189269..bfc5d197f683 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -130,7 +130,7 @@ where tt::Subtree { delimiter: tt::Delimiter { kind: tt::DelimiterKind::Invisible, .. }, token_trees, - } => TokenBuffer::from_tokens(token_trees.as_slice()), + } => TokenBuffer::from_tokens(token_trees), _ => TokenBuffer::from_subtree(tt), }; let parser_input = to_parser_input(&buffer); @@ -146,7 +146,7 @@ where } parser::Step::Enter { kind } => tree_sink.start_node(kind), parser::Step::Exit => tree_sink.finish_node(), - parser::Step::Error { msg } => tree_sink.error(msg.to_string()), + parser::Step::Error { msg } => tree_sink.error(msg.to_owned()), } } tree_sink.finish() @@ -227,14 +227,14 @@ where C: TokenConverter, S: Span, { - let entry = tt::Subtree { + let entry = tt::SubtreeBuilder { delimiter: tt::Delimiter::invisible_spanned(conv.call_site()), token_trees: vec![], }; let mut stack = NonEmptyVec::new(entry); while let Some((token, abs_range)) = conv.bump() { - let tt::Subtree { delimiter, token_trees } = stack.last_mut(); + let tt::SubtreeBuilder { delimiter, token_trees } = stack.last_mut(); let tt = match token.as_leaf() { Some(leaf) => tt::TokenTree::Leaf(leaf.clone()), @@ -260,7 +260,7 @@ where if matches!(expected, 
Some(expected) if expected == kind) { if let Some(mut subtree) = stack.pop() { subtree.delimiter.close = conv.span_for(abs_range); - stack.last_mut().token_trees.push(subtree.into()); + stack.last_mut().token_trees.push(subtree.build().into()); } continue; } @@ -275,7 +275,7 @@ where // Start a new subtree if let Some(kind) = delim { let open = conv.span_for(abs_range); - stack.push(tt::Subtree { + stack.push(tt::SubtreeBuilder { delimiter: tt::Delimiter { open, // will be overwritten on subtree close above @@ -361,7 +361,7 @@ where parent.token_trees.extend(entry.token_trees); } - let subtree = stack.into_last(); + let subtree = stack.into_last().build(); if let [tt::TokenTree::Subtree(first)] = &*subtree.token_trees { first.clone() } else { @@ -454,7 +454,7 @@ fn convert_doc_comment( }; // Make `doc="\" Comments\"" - let meta_tkns = vec![mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)]; + let meta_tkns = Box::new([mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)]); // Make `#![]` let mut token_trees = Vec::with_capacity(3); diff --git a/crates/mbe/src/syntax_bridge/tests.rs b/crates/mbe/src/syntax_bridge/tests.rs index e5569138dbf2..11d1a7287996 100644 --- a/crates/mbe/src/syntax_bridge/tests.rs +++ b/crates/mbe/src/syntax_bridge/tests.rs @@ -1,5 +1,4 @@ -use std::collections::HashMap; - +use rustc_hash::FxHashMap; use syntax::{ast, AstNode}; use test_utils::extract_annotations; use tt::{ @@ -12,7 +11,7 @@ use crate::{syntax_node_to_token_tree, DummyTestSpanData, DummyTestSpanMap, DUMM fn check_punct_spacing(fixture: &str) { let source_file = ast::SourceFile::parse(fixture).ok().unwrap(); let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap, DUMMY); - let mut annotations: HashMap<_, _> = extract_annotations(fixture) + let mut annotations: FxHashMap<_, _> = extract_annotations(fixture) .into_iter() .map(|(range, annotation)| { let spacing = match annotation.as_str() { diff --git a/crates/mbe/src/tt_iter.rs 
b/crates/mbe/src/tt_iter.rs index 71513ef43917..f9913cb6f9b8 100644 --- a/crates/mbe/src/tt_iter.rs +++ b/crates/mbe/src/tt_iter.rs @@ -180,7 +180,7 @@ impl<'a, S: Span> TtIter<'a, S> { [] | [_] => res.pop(), [first, ..] => Some(tt::TokenTree::Subtree(tt::Subtree { delimiter: tt::Delimiter::invisible_spanned(first.first_span()), - token_trees: res, + token_trees: res.into_boxed_slice(), })), }; ExpandResult { value: res, err } diff --git a/crates/parser/src/grammar.rs b/crates/parser/src/grammar.rs index 53fda3ae4fd2..34715628f180 100644 --- a/crates/parser/src/grammar.rs +++ b/crates/parser/src/grammar.rs @@ -393,11 +393,26 @@ fn delimited( bra: SyntaxKind, ket: SyntaxKind, delim: SyntaxKind, + unexpected_delim_message: impl Fn() -> String, first_set: TokenSet, mut parser: impl FnMut(&mut Parser<'_>) -> bool, ) { p.bump(bra); while !p.at(ket) && !p.at(EOF) { + if p.at(delim) { + // Recover if an argument is missing and only got a delimiter, + // e.g. `(a, , b)`. + + // Wrap the erroneous delimiter in an error node so that fixup logic gets rid of it. + // FIXME: Ideally this should be handled in fixup in a structured way, but our list + // nodes currently have no concept of a missing node between two delimiters. + // So doing it this way is easier. 
+ let m = p.start(); + p.error(unexpected_delim_message()); + p.bump(delim); + m.complete(p, ERROR); + continue; + } if !parser(p) { break; } diff --git a/crates/parser/src/grammar/expressions.rs b/crates/parser/src/grammar/expressions.rs index c8626111145d..f40c515fa079 100644 --- a/crates/parser/src/grammar/expressions.rs +++ b/crates/parser/src/grammar/expressions.rs @@ -530,6 +530,15 @@ fn method_call_expr( generic_args::opt_generic_arg_list(p, true); if p.at(T!['(']) { arg_list(p); + } else { + // emit an error when argument list is missing + + // test_err method_call_missing_argument_list + // fn func() { + // foo.bar::<> + // foo.bar::; + // } + p.error("expected argument list"); } m.complete(p, METHOD_CALL_EXPR) } @@ -602,6 +611,7 @@ fn cast_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker { // foo(bar::); // foo(bar:); // foo(bar+); +// foo(a, , b); // } fn arg_list(p: &mut Parser<'_>) { assert!(p.at(T!['('])); @@ -615,8 +625,9 @@ fn arg_list(p: &mut Parser<'_>) { T!['('], T![')'], T![,], + || "expected expression".into(), EXPR_FIRST.union(ATTRIBUTE_FIRST), - |p: &mut Parser<'_>| expr(p).is_some(), + |p| expr(p).is_some(), ); m.complete(p, ARG_LIST); } diff --git a/crates/parser/src/grammar/generic_args.rs b/crates/parser/src/grammar/generic_args.rs index 211af98e6ef0..249be2a33355 100644 --- a/crates/parser/src/grammar/generic_args.rs +++ b/crates/parser/src/grammar/generic_args.rs @@ -1,5 +1,7 @@ use super::*; +// test_err generic_arg_list_recover +// type T = T<0, ,T>; pub(super) fn opt_generic_arg_list(p: &mut Parser<'_>, colon_colon_required: bool) { let m; if p.at(T![::]) && p.nth(2) == T![<] { @@ -11,7 +13,15 @@ pub(super) fn opt_generic_arg_list(p: &mut Parser<'_>, colon_colon_required: boo return; } - delimited(p, T![<], T![>], T![,], GENERIC_ARG_FIRST, generic_arg); + delimited( + p, + T![<], + T![>], + T![,], + || "expected generic argument".into(), + GENERIC_ARG_FIRST, + generic_arg, + ); m.complete(p, GENERIC_ARG_LIST); } diff 
--git a/crates/parser/src/grammar/generic_params.rs b/crates/parser/src/grammar/generic_params.rs index 29d9b05d3f33..3c577aa3cb49 100644 --- a/crates/parser/src/grammar/generic_params.rs +++ b/crates/parser/src/grammar/generic_params.rs @@ -10,16 +10,27 @@ pub(super) fn opt_generic_param_list(p: &mut Parser<'_>) { // test generic_param_list // fn f() {} + +// test_err generic_param_list_recover +// fn f() {} fn generic_param_list(p: &mut Parser<'_>) { assert!(p.at(T![<])); let m = p.start(); - delimited(p, T![<], T![>], T![,], GENERIC_PARAM_FIRST.union(ATTRIBUTE_FIRST), |p| { - // test generic_param_attribute - // fn foo<#[lt_attr] 'a, #[t_attr] T>() {} - let m = p.start(); - attributes::outer_attrs(p); - generic_param(p, m) - }); + delimited( + p, + T![<], + T![>], + T![,], + || "expected generic parameter".into(), + GENERIC_PARAM_FIRST.union(ATTRIBUTE_FIRST), + |p| { + // test generic_param_attribute + // fn foo<#[lt_attr] 'a, #[t_attr] T>() {} + let m = p.start(); + attributes::outer_attrs(p); + generic_param(p, m) + }, + ); m.complete(p, GENERIC_PARAM_LIST); } diff --git a/crates/parser/src/grammar/items/adt.rs b/crates/parser/src/grammar/items/adt.rs index 17f41b8e13a4..21078175c0ec 100644 --- a/crates/parser/src/grammar/items/adt.rs +++ b/crates/parser/src/grammar/items/adt.rs @@ -146,28 +146,39 @@ pub(crate) fn record_field_list(p: &mut Parser<'_>) { const TUPLE_FIELD_FIRST: TokenSet = types::TYPE_FIRST.union(ATTRIBUTE_FIRST).union(VISIBILITY_FIRST); +// test_err tuple_field_list_recovery +// struct S(struct S; +// struct S(A,,B); fn tuple_field_list(p: &mut Parser<'_>) { assert!(p.at(T!['('])); let m = p.start(); - delimited(p, T!['('], T![')'], T![,], TUPLE_FIELD_FIRST, |p| { - let m = p.start(); - // test tuple_field_attrs - // struct S (#[attr] f32); - attributes::outer_attrs(p); - let has_vis = opt_visibility(p, true); - if !p.at_ts(types::TYPE_FIRST) { - p.error("expected a type"); - if has_vis { - m.complete(p, ERROR); - } else { - m.abandon(p); + 
delimited( + p, + T!['('], + T![')'], + T![,], + || "expected tuple field".into(), + TUPLE_FIELD_FIRST, + |p| { + let m = p.start(); + // test tuple_field_attrs + // struct S (#[attr] f32); + attributes::outer_attrs(p); + let has_vis = opt_visibility(p, true); + if !p.at_ts(types::TYPE_FIRST) { + p.error("expected a type"); + if has_vis { + m.complete(p, ERROR); + } else { + m.abandon(p); + } + return false; } - return false; - } - types::type_(p); - m.complete(p, TUPLE_FIELD); - true - }); + types::type_(p); + m.complete(p, TUPLE_FIELD); + true + }, + ); m.complete(p, TUPLE_FIELD_LIST); } diff --git a/crates/parser/src/grammar/items/use_item.rs b/crates/parser/src/grammar/items/use_item.rs index f689c06b31c2..675a1fd4650f 100644 --- a/crates/parser/src/grammar/items/use_item.rs +++ b/crates/parser/src/grammar/items/use_item.rs @@ -93,9 +93,16 @@ pub(crate) fn use_tree_list(p: &mut Parser<'_>) { // use b; // struct T; // fn test() {} - delimited(p, T!['{'], T!['}'], T![,], USE_TREE_LIST_FIRST_SET, |p: &mut Parser<'_>| { - use_tree(p, false) || p.at_ts(USE_TREE_LIST_RECOVERY_SET) - }); + // use {a ,, b}; + delimited( + p, + T!['{'], + T!['}'], + T![,], + || "expected use tree".into(), + USE_TREE_LIST_FIRST_SET, + |p: &mut Parser<'_>| use_tree(p, false) || p.at_ts(USE_TREE_LIST_RECOVERY_SET), + ); m.complete(p, USE_TREE_LIST); } diff --git a/crates/parser/src/lexed_str.rs b/crates/parser/src/lexed_str.rs index bf1feb9a7eb0..2da9184693d9 100644 --- a/crates/parser/src/lexed_str.rs +++ b/crates/parser/src/lexed_str.rs @@ -149,7 +149,7 @@ impl<'a> Converter<'a> { if let Some(err) = err { let token = self.res.len() as u32; - let msg = err.to_string(); + let msg = err.to_owned(); self.res.error.push(LexError { msg, token }); } } diff --git a/crates/parser/src/tests/sourcegen_inline_tests.rs b/crates/parser/src/tests/sourcegen_inline_tests.rs index bd9e188e4d84..5a71bfd82b1a 100644 --- a/crates/parser/src/tests/sourcegen_inline_tests.rs +++ 
b/crates/parser/src/tests/sourcegen_inline_tests.rs @@ -1,5 +1,6 @@ //! This module greps parser's code for specially formatted comments and turns //! them into tests. +#![allow(clippy::disallowed_types, clippy::print_stdout)] use std::{ collections::HashMap, @@ -59,9 +60,9 @@ fn collect_tests(s: &str) -> Vec { for comment_block in sourcegen::CommentBlock::extract_untagged(s) { let first_line = &comment_block.contents[0]; let (name, ok) = if let Some(name) = first_line.strip_prefix("test ") { - (name.to_string(), true) + (name.to_owned(), true) } else if let Some(name) = first_line.strip_prefix("test_err ") { - (name.to_string(), false) + (name.to_owned(), false) } else { continue; }; diff --git a/crates/parser/test_data/parser/inline/err/0015_arg_list_recovery.rast b/crates/parser/test_data/parser/inline/err/0015_arg_list_recovery.rast index 5d0fe859c296..cd5aa680c656 100644 --- a/crates/parser/test_data/parser/inline/err/0015_arg_list_recovery.rast +++ b/crates/parser/test_data/parser/inline/err/0015_arg_list_recovery.rast @@ -68,6 +68,33 @@ SOURCE_FILE PLUS "+" R_PAREN ")" SEMICOLON ";" + WHITESPACE "\n " + EXPR_STMT + CALL_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "foo" + ARG_LIST + L_PAREN "(" + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "a" + COMMA "," + WHITESPACE " " + ERROR + COMMA "," + WHITESPACE " " + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "b" + R_PAREN ")" + SEMICOLON ";" WHITESPACE "\n" R_CURLY "}" WHITESPACE "\n" @@ -75,3 +102,4 @@ error 25: expected identifier error 39: expected COMMA error 39: expected expression error 55: expected expression +error 69: expected expression diff --git a/crates/parser/test_data/parser/inline/err/0015_arg_list_recovery.rs b/crates/parser/test_data/parser/inline/err/0015_arg_list_recovery.rs index 0e7ac9cc3075..175a31f8b588 100644 --- a/crates/parser/test_data/parser/inline/err/0015_arg_list_recovery.rs +++ b/crates/parser/test_data/parser/inline/err/0015_arg_list_recovery.rs @@ -2,4 
+2,5 @@ fn main() { foo(bar::); foo(bar:); foo(bar+); + foo(a, , b); } diff --git a/crates/parser/test_data/parser/inline/err/0026_use_tree_list_err_recovery.rast b/crates/parser/test_data/parser/inline/err/0026_use_tree_list_err_recovery.rast index cb90b093ba0d..b576d872e132 100644 --- a/crates/parser/test_data/parser/inline/err/0026_use_tree_list_err_recovery.rast +++ b/crates/parser/test_data/parser/inline/err/0026_use_tree_list_err_recovery.rast @@ -43,4 +43,29 @@ SOURCE_FILE L_CURLY "{" R_CURLY "}" WHITESPACE "\n" + USE + USE_KW "use" + WHITESPACE " " + USE_TREE + USE_TREE_LIST + L_CURLY "{" + USE_TREE + PATH + PATH_SEGMENT + NAME_REF + IDENT "a" + WHITESPACE " " + COMMA "," + ERROR + COMMA "," + WHITESPACE " " + USE_TREE + PATH + PATH_SEGMENT + NAME_REF + IDENT "b" + R_CURLY "}" + SEMICOLON ";" + WHITESPACE "\n" error 6: expected R_CURLY +error 46: expected use tree diff --git a/crates/parser/test_data/parser/inline/err/0026_use_tree_list_err_recovery.rs b/crates/parser/test_data/parser/inline/err/0026_use_tree_list_err_recovery.rs index f16959c25f27..9885e6ab273c 100644 --- a/crates/parser/test_data/parser/inline/err/0026_use_tree_list_err_recovery.rs +++ b/crates/parser/test_data/parser/inline/err/0026_use_tree_list_err_recovery.rs @@ -2,3 +2,4 @@ use {a; use b; struct T; fn test() {} +use {a ,, b}; diff --git a/crates/parser/test_data/parser/inline/err/0028_method_call_missing_argument_list.rast b/crates/parser/test_data/parser/inline/err/0028_method_call_missing_argument_list.rast new file mode 100644 index 000000000000..02544b5e5326 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0028_method_call_missing_argument_list.rast @@ -0,0 +1,56 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "func" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + EXPR_STMT + METHOD_CALL_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "foo" + DOT "." 
+ NAME_REF + IDENT "bar" + GENERIC_ARG_LIST + COLON2 "::" + L_ANGLE "<" + R_ANGLE ">" + WHITESPACE "\n " + EXPR_STMT + METHOD_CALL_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "foo" + DOT "." + NAME_REF + IDENT "bar" + GENERIC_ARG_LIST + COLON2 "::" + L_ANGLE "<" + TYPE_ARG + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "i32" + R_ANGLE ">" + SEMICOLON ";" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" +error 27: expected argument list +error 27: expected SEMICOLON +error 46: expected argument list diff --git a/crates/parser/test_data/parser/inline/err/0028_method_call_missing_argument_list.rs b/crates/parser/test_data/parser/inline/err/0028_method_call_missing_argument_list.rs new file mode 100644 index 000000000000..7c8baecaa9c7 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0028_method_call_missing_argument_list.rs @@ -0,0 +1,4 @@ +fn func() { + foo.bar::<> + foo.bar::; +} diff --git a/crates/parser/test_data/parser/inline/err/0029_tuple_field_list_recovery.rast b/crates/parser/test_data/parser/inline/err/0029_tuple_field_list_recovery.rast new file mode 100644 index 000000000000..6b0bfa007e36 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0029_tuple_field_list_recovery.rast @@ -0,0 +1,44 @@ +SOURCE_FILE + STRUCT + STRUCT_KW "struct" + WHITESPACE " " + NAME + IDENT "S" + TUPLE_FIELD_LIST + L_PAREN "(" + STRUCT + STRUCT_KW "struct" + WHITESPACE " " + NAME + IDENT "S" + SEMICOLON ";" + WHITESPACE "\n" + STRUCT + STRUCT_KW "struct" + WHITESPACE " " + NAME + IDENT "S" + TUPLE_FIELD_LIST + L_PAREN "(" + TUPLE_FIELD + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "A" + COMMA "," + ERROR + COMMA "," + TUPLE_FIELD + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "B" + R_PAREN ")" + SEMICOLON ";" + WHITESPACE "\n" +error 9: expected a type +error 9: expected R_PAREN +error 9: expected SEMICOLON +error 30: expected tuple field diff --git 
a/crates/parser/test_data/parser/inline/err/0029_tuple_field_list_recovery.rs b/crates/parser/test_data/parser/inline/err/0029_tuple_field_list_recovery.rs new file mode 100644 index 000000000000..ecb4d8bda14d --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0029_tuple_field_list_recovery.rs @@ -0,0 +1,2 @@ +struct S(struct S; +struct S(A,,B); diff --git a/crates/parser/test_data/parser/inline/err/0030_generic_arg_list_recover.rast b/crates/parser/test_data/parser/inline/err/0030_generic_arg_list_recover.rast new file mode 100644 index 000000000000..4cf5a3386b91 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0030_generic_arg_list_recover.rast @@ -0,0 +1,33 @@ +SOURCE_FILE + TYPE_ALIAS + TYPE_KW "type" + WHITESPACE " " + NAME + IDENT "T" + WHITESPACE " " + EQ "=" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "T" + GENERIC_ARG_LIST + L_ANGLE "<" + CONST_ARG + LITERAL + INT_NUMBER "0" + COMMA "," + WHITESPACE " " + ERROR + COMMA "," + TYPE_ARG + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "T" + R_ANGLE ">" + SEMICOLON ";" + WHITESPACE "\n" +error 14: expected generic argument diff --git a/crates/parser/test_data/parser/inline/err/0030_generic_arg_list_recover.rs b/crates/parser/test_data/parser/inline/err/0030_generic_arg_list_recover.rs new file mode 100644 index 000000000000..7d849aa1bee9 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0030_generic_arg_list_recover.rs @@ -0,0 +1 @@ +type T = T<0, ,T>; diff --git a/crates/parser/test_data/parser/inline/err/0031_generic_param_list_recover.rast b/crates/parser/test_data/parser/inline/err/0031_generic_param_list_recover.rast new file mode 100644 index 000000000000..0a1ed01fbe63 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0031_generic_param_list_recover.rast @@ -0,0 +1,45 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "f" + GENERIC_PARAM_LIST + L_ANGLE "<" + TYPE_PARAM + NAME + IDENT "T" + COLON ":" + WHITESPACE 
" " + TYPE_BOUND_LIST + TYPE_BOUND + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Clone" + COMMA "," + ERROR + COMMA "," + WHITESPACE " " + TYPE_PARAM + NAME + IDENT "U" + COLON ":" + TYPE_BOUND_LIST + COMMA "," + WHITESPACE " " + TYPE_PARAM + NAME + IDENT "V" + R_ANGLE ">" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n" +error 14: expected generic parameter diff --git a/crates/parser/test_data/parser/inline/err/0031_generic_param_list_recover.rs b/crates/parser/test_data/parser/inline/err/0031_generic_param_list_recover.rs new file mode 100644 index 000000000000..2b5149bb0dc3 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0031_generic_param_list_recover.rs @@ -0,0 +1 @@ +fn f() {} diff --git a/crates/proc-macro-api/Cargo.toml b/crates/proc-macro-api/Cargo.toml index 49a0979f4f5c..cf01b94c0a2c 100644 --- a/crates/proc-macro-api/Cargo.toml +++ b/crates/proc-macro-api/Cargo.toml @@ -23,6 +23,7 @@ serde.workspace = true serde_json = { workspace = true, features = ["unbounded_depth"] } tracing.workspace = true triomphe.workspace = true +rustc-hash.workspace = true memmap2 = "0.5.4" snap = "1.1.0" indexmap = "2.1.0" @@ -40,4 +41,4 @@ base-db.workspace = true la-arena.workspace = true [lints] -workspace = true \ No newline at end of file +workspace = true diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index 379d184dd684..1dadfc40ac43 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -197,7 +197,7 @@ impl ProcMacro { &deserialize_span_data_index_map(&resp.span_data_table), ) })), - _ => Err(ServerError { message: "unexpected response".to_string(), io: None }), + _ => Err(ServerError { message: "unexpected response".to_owned(), io: None }), } } } diff --git a/crates/proc-macro-api/src/msg.rs b/crates/proc-macro-api/src/msg.rs index e28fe387b84b..aa5aff455fd8 100644 --- 
a/crates/proc-macro-api/src/msg.rs +++ b/crates/proc-macro-api/src/msg.rs @@ -187,7 +187,67 @@ mod tests { file_id: FileId::from_raw(0), ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)), }; - let mut subtree = Subtree { + + let token_trees = Box::new([ + TokenTree::Leaf( + Ident { + text: "struct".into(), + span: Span { + range: TextRange::at(TextSize::new(0), TextSize::of("struct")), + anchor, + ctx: SyntaxContextId::ROOT, + }, + } + .into(), + ), + TokenTree::Leaf( + Ident { + text: "Foo".into(), + span: Span { + range: TextRange::at(TextSize::new(5), TextSize::of("Foo")), + anchor, + ctx: SyntaxContextId::ROOT, + }, + } + .into(), + ), + TokenTree::Leaf(Leaf::Literal(Literal { + text: "Foo".into(), + + span: Span { + range: TextRange::at(TextSize::new(8), TextSize::of("Foo")), + anchor, + ctx: SyntaxContextId::ROOT, + }, + })), + TokenTree::Leaf(Leaf::Punct(Punct { + char: '@', + span: Span { + range: TextRange::at(TextSize::new(11), TextSize::of('@')), + anchor, + ctx: SyntaxContextId::ROOT, + }, + spacing: Spacing::Joint, + })), + TokenTree::Subtree(Subtree { + delimiter: Delimiter { + open: Span { + range: TextRange::at(TextSize::new(12), TextSize::of('{')), + anchor, + ctx: SyntaxContextId::ROOT, + }, + close: Span { + range: TextRange::at(TextSize::new(13), TextSize::of('}')), + anchor, + ctx: SyntaxContextId::ROOT, + }, + kind: DelimiterKind::Brace, + }, + token_trees: Box::new([]), + }), + ]); + + Subtree { delimiter: Delimiter { open: Span { range: TextRange::empty(TextSize::new(0)), @@ -201,65 +261,8 @@ mod tests { }, kind: DelimiterKind::Invisible, }, - token_trees: Vec::new(), - }; - subtree.token_trees.push(TokenTree::Leaf( - Ident { - text: "struct".into(), - span: Span { - range: TextRange::at(TextSize::new(0), TextSize::of("struct")), - anchor, - ctx: SyntaxContextId::ROOT, - }, - } - .into(), - )); - subtree.token_trees.push(TokenTree::Leaf( - Ident { - text: "Foo".into(), - span: Span { - range: TextRange::at(TextSize::new(5), 
TextSize::of("Foo")), - anchor, - ctx: SyntaxContextId::ROOT, - }, - } - .into(), - )); - subtree.token_trees.push(TokenTree::Leaf(Leaf::Literal(Literal { - text: "Foo".into(), - - span: Span { - range: TextRange::at(TextSize::new(8), TextSize::of("Foo")), - anchor, - ctx: SyntaxContextId::ROOT, - }, - }))); - subtree.token_trees.push(TokenTree::Leaf(Leaf::Punct(Punct { - char: '@', - span: Span { - range: TextRange::at(TextSize::new(11), TextSize::of('@')), - anchor, - ctx: SyntaxContextId::ROOT, - }, - spacing: Spacing::Joint, - }))); - subtree.token_trees.push(TokenTree::Subtree(Subtree { - delimiter: Delimiter { - open: Span { - range: TextRange::at(TextSize::new(12), TextSize::of('{')), - anchor, - ctx: SyntaxContextId::ROOT, - }, - close: Span { - range: TextRange::at(TextSize::new(13), TextSize::of('}')), - anchor, - ctx: SyntaxContextId::ROOT, - }, - kind: DelimiterKind::Brace, - }, - token_trees: vec![], - })); - subtree + token_trees, + } } #[test] diff --git a/crates/proc-macro-api/src/msg/flat.rs b/crates/proc-macro-api/src/msg/flat.rs index 8dfaba52625d..caf9e237fdd7 100644 --- a/crates/proc-macro-api/src/msg/flat.rs +++ b/crates/proc-macro-api/src/msg/flat.rs @@ -35,10 +35,11 @@ //! as we don't have bincode in Cargo.toml yet, lets stick with serde_json for //! the time being. 
-use std::collections::{HashMap, VecDeque}; +use std::collections::VecDeque; use indexmap::IndexSet; use la_arena::RawIdx; +use rustc_hash::FxHashMap; use serde::{Deserialize, Serialize}; use span::{ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContextId}; use text_size::TextRange; @@ -129,7 +130,7 @@ impl FlatTree { span_data_table: &mut SpanDataIndexMap, ) -> FlatTree { let mut w = Writer { - string_table: HashMap::new(), + string_table: FxHashMap::default(), work: VecDeque::new(), span_data_table, @@ -158,7 +159,7 @@ impl FlatTree { pub fn new_raw(subtree: &tt::Subtree, version: u32) -> FlatTree { let mut w = Writer { - string_table: HashMap::new(), + string_table: FxHashMap::default(), work: VecDeque::new(), span_data_table: &mut (), @@ -340,7 +341,7 @@ impl InternableSpan for Span { struct Writer<'a, 'span, S: InternableSpan> { work: VecDeque<(usize, &'a tt::Subtree)>, - string_table: HashMap<&'a str, u32>, + string_table: FxHashMap<&'a str, u32>, span_data_table: &'span mut S::Table, subtree: Vec, @@ -370,7 +371,7 @@ impl<'a, 'span, S: InternableSpan> Writer<'a, 'span, S> { self.subtree[idx].tt = [first_tt as u32, (first_tt + n_tt) as u32]; - for child in &subtree.token_trees { + for child in subtree.token_trees.iter() { let idx_tag = match child { tt::TokenTree::Subtree(it) => { let idx = self.enqueue(it); @@ -418,7 +419,7 @@ impl<'a, 'span, S: InternableSpan> Writer<'a, 'span, S> { let table = &mut self.text; *self.string_table.entry(text).or_insert_with(|| { let idx = table.len(); - table.push(text.to_string()); + table.push(text.to_owned()); idx as u32 }) } diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs index 5ce601bce692..96f97bf5e205 100644 --- a/crates/proc-macro-api/src/process.rs +++ b/crates/proc-macro-api/src/process.rs @@ -78,7 +78,7 @@ impl ProcMacroProcessSrv { match response { Response::ApiVersionCheck(version) => Ok(version), - _ => Err(ServerError { message: "unexpected response".to_string(), io: 
None }), + _ => Err(ServerError { message: "unexpected response".to_owned(), io: None }), } } @@ -90,7 +90,7 @@ impl ProcMacroProcessSrv { match response { Response::SetConfig(crate::msg::ServerConfig { span_mode }) => Ok(span_mode), - _ => Err(ServerError { message: "unexpected response".to_string(), io: None }), + _ => Err(ServerError { message: "unexpected response".to_owned(), io: None }), } } @@ -104,7 +104,7 @@ impl ProcMacroProcessSrv { match response { Response::ListMacros(it) => Ok(it), - _ => Err(ServerError { message: "unexpected response".to_string(), io: None }), + _ => Err(ServerError { message: "unexpected response".to_owned(), io: None }), } } diff --git a/crates/proc-macro-api/src/version.rs b/crates/proc-macro-api/src/version.rs index 5f81c0a96d96..f768de3e31d4 100644 --- a/crates/proc-macro-api/src/version.rs +++ b/crates/proc-macro-api/src/version.rs @@ -38,7 +38,7 @@ pub fn read_dylib_info(dylib_path: &AbsPath) -> io::Result { let version_part = items.next().ok_or_else(|| err!("no version string"))?; let mut version_parts = version_part.split('-'); let version = version_parts.next().ok_or_else(|| err!("no version"))?; - let channel = version_parts.next().unwrap_or_default().to_string(); + let channel = version_parts.next().unwrap_or_default().to_owned(); let commit = match items.next() { Some(commit) => { diff --git a/crates/proc-macro-srv-cli/src/main.rs b/crates/proc-macro-srv-cli/src/main.rs index a36200cdb4c3..df0ae3171f54 100644 --- a/crates/proc-macro-srv-cli/src/main.rs +++ b/crates/proc-macro-srv-cli/src/main.rs @@ -1,6 +1,8 @@ //! A standalone binary for `proc-macro-srv`. //! 
Driver for proc macro server #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] +#![allow(clippy::print_stderr)] + #[cfg(feature = "in-rust-tree")] extern crate rustc_driver as _; diff --git a/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs b/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs index d9018b1b87d3..5f8530d08c43 100644 --- a/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs +++ b/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs @@ -2,6 +2,7 @@ #![warn(rust_2018_idioms, unused_lifetimes)] #![feature(proc_macro_span, proc_macro_def_site)] +#![allow(clippy::all)] use proc_macro::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree}; diff --git a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs index b864a5e4fd63..c7c7bea99410 100644 --- a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs +++ b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs @@ -104,7 +104,7 @@ impl server::TokenStream for RaSpanServer { delimiter: delim_to_internal(group.delimiter, group.span), token_trees: match group.stream { Some(stream) => stream.into_iter().collect(), - None => Vec::new(), + None => Box::new([]), }, }; let tree = tt::TokenTree::from(group); @@ -221,7 +221,7 @@ impl server::TokenStream for RaSpanServer { stream: if subtree.token_trees.is_empty() { None } else { - Some(subtree.token_trees.into_iter().collect()) + Some(subtree.token_trees.into_vec().into_iter().collect()) }, span: bridge::DelimSpan::from_single(subtree.delimiter.open), }), diff --git a/crates/proc-macro-srv/src/server/token_id.rs b/crates/proc-macro-srv/src/server/token_id.rs index c83e09af0d6e..edbdc67b482f 100644 --- a/crates/proc-macro-srv/src/server/token_id.rs +++ b/crates/proc-macro-srv/src/server/token_id.rs @@ -94,7 +94,7 @@ impl server::TokenStream for TokenIdServer { delimiter: delim_to_internal(group.delimiter, group.span), token_trees: match group.stream { Some(stream) => 
stream.into_iter().collect(), - None => Vec::new(), + None => Box::new([]), }, }; let tree = TokenTree::from(group); @@ -206,7 +206,7 @@ impl server::TokenStream for TokenIdServer { stream: if subtree.token_trees.is_empty() { None } else { - Some(TokenStream { token_trees: subtree.token_trees }) + Some(TokenStream { token_trees: subtree.token_trees.into_vec() }) }, span: bridge::DelimSpan::from_single(subtree.delimiter.open), }), @@ -338,7 +338,7 @@ mod tests { close: tt::TokenId(0), kind: tt::DelimiterKind::Brace, }, - token_trees: vec![], + token_trees: Box::new([]), }), ], }; @@ -354,10 +354,10 @@ mod tests { close: tt::TokenId(0), kind: tt::DelimiterKind::Parenthesis, }, - token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: "a".into(), span: tt::TokenId(0), - }))], + }))]), }); let t1 = TokenStream::from_str("(a)", tt::TokenId(0)).unwrap(); diff --git a/crates/proc-macro-srv/src/server/token_stream.rs b/crates/proc-macro-srv/src/server/token_stream.rs index 8f669a30494b..5edaa720fc7e 100644 --- a/crates/proc-macro-srv/src/server/token_stream.rs +++ b/crates/proc-macro-srv/src/server/token_stream.rs @@ -15,14 +15,14 @@ impl Default for TokenStream { impl TokenStream { pub(crate) fn new() -> Self { - TokenStream { token_trees: vec![] } + TokenStream::default() } pub(crate) fn with_subtree(subtree: tt::Subtree) -> Self { if subtree.delimiter.kind != tt::DelimiterKind::Invisible { TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] } } else { - TokenStream { token_trees: subtree.token_trees } + TokenStream { token_trees: subtree.token_trees.into_vec() } } } @@ -36,7 +36,7 @@ impl TokenStream { close: call_site, kind: tt::DelimiterKind::Invisible, }, - token_trees: self.token_trees, + token_trees: self.token_trees.into_boxed_slice(), } } @@ -83,7 +83,7 @@ impl Extend> for TokenStream { tt::TokenTree::Subtree(subtree) if subtree.delimiter.kind == 
tt::DelimiterKind::Invisible => { - self.token_trees.extend(subtree.token_trees); + self.token_trees.extend(subtree.token_trees.into_vec().into_iter()); } _ => { self.token_trees.push(tkn); diff --git a/crates/profile/src/lib.rs b/crates/profile/src/lib.rs index 38c5b3fc9c72..363998156063 100644 --- a/crates/profile/src/lib.rs +++ b/crates/profile/src/lib.rs @@ -21,7 +21,7 @@ pub use countme; /// almost zero. pub use countme::Count; -thread_local!(static IN_SCOPE: RefCell = RefCell::new(false)); +thread_local!(static IN_SCOPE: RefCell = const { RefCell::new(false) }); /// Allows to check if the current code is within some dynamic scope, can be /// useful during debugging to figure out why a function is called. @@ -88,6 +88,7 @@ pub fn cpu_span() -> CpuSpan { } #[cfg(not(feature = "cpu_profiler"))] + #[allow(clippy::print_stderr)] { eprintln!( r#"cpu profiling is disabled, uncomment `default = [ "cpu_profiler" ]` in Cargo.toml to enable."# diff --git a/crates/profile/src/stop_watch.rs b/crates/profile/src/stop_watch.rs index 814a02574028..990b59cad42c 100644 --- a/crates/profile/src/stop_watch.rs +++ b/crates/profile/src/stop_watch.rs @@ -1,4 +1,7 @@ //! Like `std::time::Instant`, but also measures memory & CPU cycles. 
+ +#![allow(clippy::print_stderr)] + use std::{ fmt, time::{Duration, Instant}, diff --git a/crates/project-model/src/build_scripts.rs b/crates/project-model/src/build_scripts.rs index c1670c200490..a2c9856a3f73 100644 --- a/crates/project-model/src/build_scripts.rs +++ b/crates/project-model/src/build_scripts.rs @@ -322,7 +322,7 @@ impl WorkspaceBuildScripts { let mut deserializer = serde_json::Deserializer::from_str(line); deserializer.disable_recursion_limit(); let message = Message::deserialize(&mut deserializer) - .unwrap_or_else(|_| Message::TextLine(line.to_string())); + .unwrap_or_else(|_| Message::TextLine(line.to_owned())); match message { Message::BuildScriptExecuted(mut message) => { @@ -356,7 +356,7 @@ impl WorkspaceBuildScripts { if let Some(out_dir) = out_dir.as_os_str().to_str().map(|s| s.to_owned()) { - data.envs.push(("OUT_DIR".to_string(), out_dir)); + data.envs.push(("OUT_DIR".to_owned(), out_dir)); } data.out_dir = Some(out_dir); data.cfgs = cfgs; @@ -396,7 +396,7 @@ impl WorkspaceBuildScripts { let errors = if !output.status.success() { let errors = errors.into_inner(); - Some(if errors.is_empty() { "cargo check failed".to_string() } else { errors }) + Some(if errors.is_empty() { "cargo check failed".to_owned() } else { errors }) } else { None }; @@ -490,7 +490,7 @@ impl WorkspaceBuildScripts { // FIXME: Find a better way to know if it is a dylib. 
fn is_dylib(path: &Utf8Path) -> bool { - match path.extension().map(|e| e.to_string().to_lowercase()) { + match path.extension().map(|e| e.to_owned().to_lowercase()) { None => false, Some(ext) => matches!(ext.as_str(), "dll" | "dylib" | "so"), } diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs index 361f8721a4ec..a99ee6e664c5 100644 --- a/crates/project-model/src/cargo_workspace.rs +++ b/crates/project-model/src/cargo_workspace.rs @@ -285,7 +285,7 @@ impl CargoWorkspace { // FIXME: Fetching metadata is a slow process, as it might require // calling crates.io. We should be reporting progress here, but it's // unclear whether cargo itself supports it. - progress("metadata".to_string()); + progress("metadata".to_owned()); (|| -> Result { let mut command = meta.cargo_command(); @@ -399,7 +399,7 @@ impl CargoWorkspace { CargoWorkspace { packages, targets, workspace_root, target_directory } } - pub fn packages(&self) -> impl Iterator + ExactSizeIterator + '_ { + pub fn packages(&self) -> impl ExactSizeIterator + '_ { self.packages.iter().map(|(id, _pkg)| id) } @@ -502,7 +502,7 @@ fn rustc_discover_host_triple( let field = "host: "; let target = stdout.lines().find_map(|l| l.strip_prefix(field)); if let Some(target) = target { - Some(target.to_string()) + Some(target.to_owned()) } else { // If we fail to resolve the host platform, it's not the end of the world. 
tracing::info!("rustc -vV did not report host platform, got:\n{}", stdout); @@ -536,7 +536,7 @@ fn parse_output_cargo_config_build_target(stdout: String) -> Vec { let trimmed = stdout.trim_start_matches("build.target = ").trim_matches('"'); if !trimmed.starts_with('[') { - return [trimmed.to_string()].to_vec(); + return [trimmed.to_owned()].to_vec(); } let res = serde_json::from_str(trimmed); diff --git a/crates/project-model/src/cfg_flag.rs b/crates/project-model/src/cfg_flag.rs index e366d441c1bd..af682904b194 100644 --- a/crates/project-model/src/cfg_flag.rs +++ b/crates/project-model/src/cfg_flag.rs @@ -19,7 +19,7 @@ impl FromStr for CfgFlag { if !(value.starts_with('"') && value.ends_with('"')) { return Err(format!("Invalid cfg ({s:?}), value should be in quotes")); } - let key = key.to_string(); + let key = key.to_owned(); let value = value[1..value.len() - 1].to_string(); CfgFlag::KeyValue { key, value } } diff --git a/crates/project-model/src/lib.rs b/crates/project-model/src/lib.rs index 5114c9c016de..5b91f5d80589 100644 --- a/crates/project-model/src/lib.rs +++ b/crates/project-model/src/lib.rs @@ -167,7 +167,7 @@ fn utf8_stdout(mut cmd: Command) -> anyhow::Result { } } let stdout = String::from_utf8(output.stdout)?; - Ok(stdout.trim().to_string()) + Ok(stdout.trim().to_owned()) } #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] diff --git a/crates/project-model/src/rustc_cfg.rs b/crates/project-model/src/rustc_cfg.rs index cf12d5b71df5..0aee002fbb3f 100644 --- a/crates/project-model/src/rustc_cfg.rs +++ b/crates/project-model/src/rustc_cfg.rs @@ -33,7 +33,7 @@ pub(crate) fn get( res.push(CfgFlag::Atom("target_thread_local".into())); for ty in ["8", "16", "32", "64", "cas", "ptr"] { for key in ["target_has_atomic", "target_has_atomic_load_store"] { - res.push(CfgFlag::KeyValue { key: key.to_string(), value: ty.into() }); + res.push(CfgFlag::KeyValue { key: key.to_owned(), value: ty.into() }); } } diff --git a/crates/project-model/src/sysroot.rs 
b/crates/project-model/src/sysroot.rs index c24c0196dd9b..9e19a5258388 100644 --- a/crates/project-model/src/sysroot.rs +++ b/crates/project-model/src/sysroot.rs @@ -57,7 +57,7 @@ impl Stitched { self.by_name("proc_macro") } - pub(crate) fn crates(&self) -> impl Iterator + ExactSizeIterator + '_ { + pub(crate) fn crates(&self) -> impl ExactSizeIterator + '_ { self.crates.iter().map(|(id, _data)| id) } diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs index 7c078f72f529..74042e925ede 100644 --- a/crates/project-model/src/tests.rs +++ b/crates/project-model/src/tests.rs @@ -129,7 +129,7 @@ fn get_fake_sysroot() -> Sysroot { } fn rooted_project_json(data: ProjectJsonData) -> ProjectJson { - let mut root = "$ROOT$".to_string(); + let mut root = "$ROOT$".to_owned(); replace_root(&mut root, true); let path = Path::new(&root); let base = AbsPath::assert(path); diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index 8c5ea0619ac7..cda5ad2f1109 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -241,7 +241,7 @@ impl ProjectWorkspace { .map_err(|p| Some(format!("rustc source path is not absolute: {p}"))), Some(RustLibSource::Discover) => { sysroot.as_ref().ok().and_then(Sysroot::discover_rustc_src).ok_or_else( - || Some("Failed to discover rustc source for sysroot.".to_string()), + || Some("Failed to discover rustc source for sysroot.".to_owned()), ) } None => Err(None), @@ -840,7 +840,7 @@ fn project_json_to_crate_graph( if let Some(name) = display_name.clone() { CrateOrigin::Local { repo: repository.clone(), - name: Some(name.canonical_name().to_string()), + name: Some(name.canonical_name().to_owned()), } } else { CrateOrigin::Local { repo: None, name: None } @@ -1117,7 +1117,7 @@ fn detached_files_to_crate_graph( let display_name = detached_file .file_stem() .and_then(|os_str| os_str.to_str()) - .map(|file_stem| 
CrateDisplayName::from_canonical_name(file_stem.to_string())); + .map(|file_stem| CrateDisplayName::from_canonical_name(file_stem.to_owned())); let detached_file_crate = crate_graph.add_crate_root( file_id, Edition::CURRENT, @@ -1129,7 +1129,7 @@ fn detached_files_to_crate_graph( false, CrateOrigin::Local { repo: None, - name: display_name.map(|n| n.canonical_name().to_string()), + name: display_name.map(|n| n.canonical_name().to_owned()), }, target_layout.clone(), None, @@ -1323,7 +1323,7 @@ fn add_target_crate_root( } } - let display_name = CrateDisplayName::from_canonical_name(cargo_name.to_string()); + let display_name = CrateDisplayName::from_canonical_name(cargo_name.to_owned()); let crate_id = crate_graph.add_crate_root( file_id, edition, @@ -1455,7 +1455,7 @@ fn sysroot_to_crate_graph( (SysrootPublicDeps { deps: pub_deps }, libproc_macro) } SysrootMode::Stitched(stitched) => { - let cfg_options = create_cfg_options(rustc_cfg.clone()); + let cfg_options = create_cfg_options(rustc_cfg); let sysroot_crates: FxHashMap = stitched .crates() .filter_map(|krate| { diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs index 66b680571a97..269dd3cfffe9 100644 --- a/crates/rust-analyzer/src/bin/main.rs +++ b/crates/rust-analyzer/src/bin/main.rs @@ -3,7 +3,9 @@ //! Based on cli flags, either spawns an LSP server, or runs a batch analysis #![warn(rust_2018_idioms, unused_lifetimes)] +#![allow(clippy::print_stdout, clippy::print_stderr)] #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] + #[cfg(feature = "in-rust-tree")] extern crate rustc_driver as _; @@ -132,7 +134,7 @@ fn setup_logging(log_file_flag: Option) -> anyhow::Result<()> { writer, // Deliberately enable all `error` logs if the user has not set RA_LOG, as there is usually // useful information in there for debugging. 
- filter: env::var("RA_LOG").ok().unwrap_or_else(|| "error".to_string()), + filter: env::var("RA_LOG").ok().unwrap_or_else(|| "error".to_owned()), chalk_filter: env::var("CHALK_DEBUG").ok(), profile_filter: env::var("RA_PROFILE").ok(), } @@ -222,7 +224,7 @@ fn run_server() -> anyhow::Result<()> { MessageType, ShowMessageParams, }; let not = lsp_server::Notification::new( - ShowMessage::METHOD.to_string(), + ShowMessage::METHOD.to_owned(), ShowMessageParams { typ: MessageType::WARNING, message: e.to_string() }, ); connection.sender.send(lsp_server::Message::Notification(not)).unwrap(); diff --git a/crates/rust-analyzer/src/caps.rs b/crates/rust-analyzer/src/caps.rs index 94eab97e8fcd..a1469c22abf0 100644 --- a/crates/rust-analyzer/src/caps.rs +++ b/crates/rust-analyzer/src/caps.rs @@ -44,17 +44,17 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities { completion_provider: Some(CompletionOptions { resolve_provider: completions_resolve_provider(config.caps()), trigger_characters: Some(vec![ - ":".to_string(), - ".".to_string(), - "'".to_string(), - "(".to_string(), + ":".to_owned(), + ".".to_owned(), + "'".to_owned(), + "(".to_owned(), ]), all_commit_characters: None, completion_item: completion_item(config), work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None }, }), signature_help_provider: Some(SignatureHelpOptions { - trigger_characters: Some(vec!["(".to_string(), ",".to_string(), "<".to_string()]), + trigger_characters: Some(vec!["(".to_owned(), ",".to_owned(), "<".to_owned()]), retrigger_characters: None, work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None }, }), @@ -74,7 +74,7 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities { _ => Some(OneOf::Left(false)), }, document_on_type_formatting_provider: Some(DocumentOnTypeFormattingOptions { - first_trigger_character: "=".to_string(), + first_trigger_character: "=".to_owned(), more_trigger_character: 
Some(more_trigger_character(config)), }), selection_range_provider: Some(SelectionRangeProviderCapability::Simple(true)), @@ -222,9 +222,9 @@ fn code_action_capabilities(client_caps: &ClientCapabilities) -> CodeActionProvi } fn more_trigger_character(config: &Config) -> Vec { - let mut res = vec![".".to_string(), ">".to_string(), "{".to_string(), "(".to_string()]; + let mut res = vec![".".to_owned(), ">".to_owned(), "{".to_owned(), "(".to_owned()]; if config.snippet_cap() { - res.push("<".to_string()); + res.push("<".to_owned()); } res } diff --git a/crates/rust-analyzer/src/cli.rs b/crates/rust-analyzer/src/cli.rs index 00670f2cb4c8..0bd6677b6623 100644 --- a/crates/rust-analyzer/src/cli.rs +++ b/crates/rust-analyzer/src/cli.rs @@ -1,5 +1,7 @@ //! Various batch processing tasks, intended primarily for debugging. +#![allow(clippy::print_stdout, clippy::print_stderr)] + mod analysis_stats; mod diagnostics; pub mod flags; diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index 31bdd2a0e82b..2741b4522256 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -397,7 +397,7 @@ impl flags::AnalysisStats { module .krate() .display_name(db) - .map(|it| it.canonical_name().to_string()) + .map(|it| it.canonical_name().to_owned()) .into_iter() .chain( module @@ -688,7 +688,7 @@ impl flags::AnalysisStats { module .krate() .display_name(db) - .map(|it| it.canonical_name().to_string()) + .map(|it| it.canonical_name().to_owned()) .into_iter() .chain( module @@ -833,7 +833,7 @@ impl flags::AnalysisStats { fn location_csv_expr(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, expr_id: ExprId) -> String { let src = match sm.expr_syntax(expr_id) { Ok(s) => s, - Err(SyntheticSyntax) => return "synthetic,,".to_string(), + Err(SyntheticSyntax) => return "synthetic,,".to_owned(), }; let root = db.parse_or_expand(src.file_id); let node = src.map(|e| 
e.to_node(&root).syntax().clone()); @@ -849,7 +849,7 @@ fn location_csv_expr(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, expr_id: fn location_csv_pat(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, pat_id: PatId) -> String { let src = match sm.pat_syntax(pat_id) { Ok(s) => s, - Err(SyntheticSyntax) => return "synthetic,,".to_string(), + Err(SyntheticSyntax) => return "synthetic,,".to_owned(), }; let root = db.parse_or_expand(src.file_id); let node = src.map(|e| e.to_node(&root).syntax().clone()); diff --git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs index 6d2e97be20e5..605670f6a82e 100644 --- a/crates/rust-analyzer/src/cli/diagnostics.rs +++ b/crates/rust-analyzer/src/cli/diagnostics.rs @@ -45,7 +45,7 @@ impl flags::Diagnostics { let file_id = module.definition_source_file_id(db).original_file(db); if !visited_files.contains(&file_id) { let crate_name = - module.krate().display_name(db).as_deref().unwrap_or("unknown").to_string(); + module.krate().display_name(db).as_deref().unwrap_or("unknown").to_owned(); println!("processing crate: {crate_name}, module: {}", _vfs.file_path(file_id)); for diagnostic in analysis .diagnostics( diff --git a/crates/rust-analyzer/src/cli/lsif.rs b/crates/rust-analyzer/src/cli/lsif.rs index 64f965e22ac7..1424a775777f 100644 --- a/crates/rust-analyzer/src/cli/lsif.rs +++ b/crates/rust-analyzer/src/cli/lsif.rs @@ -1,6 +1,5 @@ //! 
LSIF (language server index format) generator -use std::collections::HashMap; use std::env; use std::time::Instant; @@ -16,6 +15,7 @@ use ide_db::{ use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice}; use lsp_types::{self, lsif}; use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource}; +use rustc_hash::FxHashMap; use vfs::{AbsPathBuf, Vfs}; use crate::{ @@ -35,10 +35,10 @@ impl Clone for Snap> { struct LsifManager<'a> { count: i32, - token_map: HashMap, - range_map: HashMap, - file_map: HashMap, - package_map: HashMap, + token_map: FxHashMap, + range_map: FxHashMap, + file_map: FxHashMap, + package_map: FxHashMap, analysis: &'a Analysis, db: &'a RootDatabase, vfs: &'a Vfs, @@ -57,10 +57,10 @@ impl LsifManager<'_> { fn new<'a>(analysis: &'a Analysis, db: &'a RootDatabase, vfs: &'a Vfs) -> LsifManager<'a> { LsifManager { count: 0, - token_map: HashMap::default(), - range_map: HashMap::default(), - file_map: HashMap::default(), - package_map: HashMap::default(), + token_map: FxHashMap::default(), + range_map: FxHashMap::default(), + file_map: FxHashMap::default(), + package_map: FxHashMap::default(), analysis, db, vfs, @@ -104,12 +104,12 @@ impl LsifManager<'_> { let result_set_id = self.add_vertex(lsif::Vertex::PackageInformation(lsif::PackageInformation { name: pi.name, - manager: "cargo".to_string(), + manager: "cargo".to_owned(), uri: None, content: None, repository: pi.repo.map(|url| lsif::Repository { url, - r#type: "git".to_string(), + r#type: "git".to_owned(), commit_id: None, }), version: pi.version, @@ -148,7 +148,7 @@ impl LsifManager<'_> { let path = self.vfs.file_path(id); let path = path.as_path().unwrap(); let doc_id = self.add_vertex(lsif::Vertex::Document(lsif::Document { - language_id: "rust".to_string(), + language_id: "rust".to_owned(), uri: lsp_types::Url::from_file_path(path).unwrap(), })); self.file_map.insert(id, doc_id); @@ -175,7 +175,7 @@ impl LsifManager<'_> { if let Some(moniker) = 
token.moniker { let package_id = self.get_package_id(moniker.package_information); let moniker_id = self.add_vertex(lsif::Vertex::Moniker(lsp_types::Moniker { - scheme: "rust-analyzer".to_string(), + scheme: "rust-analyzer".to_owned(), identifier: moniker.identifier.to_string(), unique: lsp_types::UniquenessLevel::Scheme, kind: Some(match moniker.kind { @@ -215,7 +215,7 @@ impl LsifManager<'_> { out_v: result_set_id.into(), })); let mut edges = token.references.iter().fold( - HashMap::<_, Vec>::new(), + FxHashMap::<_, Vec>::default(), |mut edges, it| { let entry = edges.entry((it.range.file_id, it.is_definition)).or_default(); entry.push((*self.range_map.get(&it.range).unwrap()).into()); @@ -313,7 +313,7 @@ impl flags::Lsif { project_root: lsp_types::Url::from_file_path(path).unwrap(), position_encoding: lsif::Encoding::Utf16, tool_info: Some(lsp_types::lsif::ToolInfo { - name: "rust-analyzer".to_string(), + name: "rust-analyzer".to_owned(), args: vec![], version: Some(version().to_string()), }), diff --git a/crates/rust-analyzer/src/cli/progress_report.rs b/crates/rust-analyzer/src/cli/progress_report.rs index 8166aa23b447..b23373009973 100644 --- a/crates/rust-analyzer/src/cli/progress_report.rs +++ b/crates/rust-analyzer/src/cli/progress_report.rs @@ -92,7 +92,7 @@ impl<'a> ProgressReport<'a> { let _ = io::stdout().write(output.as_bytes()); let _ = io::stdout().flush(); - self.text = text.to_string(); + self.text = text.to_owned(); } fn set_value(&mut self, value: f32) { diff --git a/crates/rust-analyzer/src/cli/run_tests.rs b/crates/rust-analyzer/src/cli/run_tests.rs index d07dcdec2510..6b43e095429a 100644 --- a/crates/rust-analyzer/src/cli/run_tests.rs +++ b/crates/rust-analyzer/src/cli/run_tests.rs @@ -34,7 +34,7 @@ impl flags::RunTests { .filter(|x| x.is_test(db)); let span_formatter = |file_id, text_range: TextRange| { let line_col = match db.line_index(file_id).try_line_col(text_range.start()) { - None => " (unknown line col)".to_string(), + None => " 
(unknown line col)".to_owned(), Some(x) => format!("#{}:{}", x.line + 1, x.col), }; let path = &db diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs index be7e434acac2..64ea246a4587 100644 --- a/crates/rust-analyzer/src/cli/rustc_tests.rs +++ b/crates/rust-analyzer/src/cli/rustc_tests.rs @@ -1,8 +1,6 @@ //! Run all tests in a project, similar to `cargo test`, but using the mir interpreter. -use std::{ - cell::RefCell, collections::HashMap, fs::read_to_string, panic::AssertUnwindSafe, path::PathBuf, -}; +use std::{cell::RefCell, fs::read_to_string, panic::AssertUnwindSafe, path::PathBuf}; use hir::{Change, Crate}; use ide::{AnalysisHost, DiagnosticCode, DiagnosticsConfig}; @@ -10,6 +8,7 @@ use profile::StopWatch; use project_model::{CargoConfig, ProjectWorkspace, RustLibSource, Sysroot}; use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice}; +use rustc_hash::FxHashMap; use triomphe::Arc; use vfs::{AbsPathBuf, FileId}; use walkdir::WalkDir; @@ -27,7 +26,7 @@ struct Tester { fn string_to_diagnostic_code_leaky(code: &str) -> DiagnosticCode { thread_local! 
{ - static LEAK_STORE: RefCell> = RefCell::new(HashMap::new()); + static LEAK_STORE: RefCell> = RefCell::new(FxHashMap::default()); } LEAK_STORE.with_borrow_mut(|s| match s.get(code) { Some(c) => *c, @@ -39,9 +38,9 @@ fn string_to_diagnostic_code_leaky(code: &str) -> DiagnosticCode { }) } -fn detect_errors_from_rustc_stderr_file(p: PathBuf) -> HashMap { +fn detect_errors_from_rustc_stderr_file(p: PathBuf) -> FxHashMap { let text = read_to_string(p).unwrap(); - let mut result = HashMap::new(); + let mut result = FxHashMap::default(); { let mut text = &*text; while let Some(p) = text.find("error[E") { @@ -106,7 +105,7 @@ impl Tester { let expected = if stderr_path.exists() { detect_errors_from_rustc_stderr_file(stderr_path) } else { - HashMap::new() + FxHashMap::default() }; let text = read_to_string(&p).unwrap(); let mut change = Change::new(); @@ -125,7 +124,7 @@ impl Tester { self.host.apply_change(change); let diagnostic_config = DiagnosticsConfig::test_sample(); - let mut actual = HashMap::new(); + let mut actual = FxHashMap::default(); let panicked = match std::panic::catch_unwind(|| { self.host .analysis() diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs index 81622a4617ab..f4aec288348f 100644 --- a/crates/rust-analyzer/src/cli/scip.rs +++ b/crates/rust-analyzer/src/cli/scip.rs @@ -1,10 +1,6 @@ //! 
SCIP generator -use std::{ - collections::{HashMap, HashSet}, - path::PathBuf, - time::Instant, -}; +use std::{path::PathBuf, time::Instant}; use ide::{ LineCol, MonikerDescriptorKind, MonikerResult, StaticIndex, StaticIndexedFile, @@ -12,6 +8,7 @@ use ide::{ }; use ide_db::LineIndexDatabase; use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}; +use rustc_hash::{FxHashMap, FxHashSet}; use scip::types as scip_types; use crate::{ @@ -76,9 +73,10 @@ impl flags::Scip { }; let mut documents = Vec::new(); - let mut symbols_emitted: HashSet = HashSet::default(); - let mut tokens_to_symbol: HashMap = HashMap::new(); - let mut tokens_to_enclosing_symbol: HashMap> = HashMap::new(); + let mut symbols_emitted: FxHashSet = FxHashSet::default(); + let mut tokens_to_symbol: FxHashMap = FxHashMap::default(); + let mut tokens_to_enclosing_symbol: FxHashMap> = + FxHashMap::default(); for StaticIndexedFile { file_id, tokens, .. } in si.files { let mut local_count = 0; @@ -148,7 +146,7 @@ impl flags::Scip { let signature_documentation = token.signature.clone().map(|text| scip_types::Document { relative_path: relative_path.clone(), - language: "rust".to_string(), + language: "rust".to_owned(), text, position_encoding, ..Default::default() @@ -188,7 +186,7 @@ impl flags::Scip { scip_types::PositionEncoding::UTF8CodeUnitOffsetFromLineStart.into(); documents.push(scip_types::Document { relative_path, - language: "rust".to_string(), + language: "rust".to_owned(), occurrences, symbols, text: String::new(), @@ -218,7 +216,7 @@ fn get_relative_filepath( rootpath: &vfs::AbsPathBuf, file_id: ide::FileId, ) -> Option { - Some(vfs.file_path(file_id).as_path()?.strip_prefix(rootpath)?.as_ref().to_str()?.to_string()) + Some(vfs.file_path(file_id).as_path()?.strip_prefix(rootpath)?.as_ref().to_str()?.to_owned()) } // SCIP Ranges have a (very large) optimization that ranges if they are on the same line @@ -241,8 +239,8 @@ fn new_descriptor_str( suffix: 
scip_types::descriptor::Suffix, ) -> scip_types::Descriptor { scip_types::Descriptor { - name: name.to_string(), - disambiguator: "".to_string(), + name: name.to_owned(), + disambiguator: "".to_owned(), suffix: suffix.into(), special_fields: Default::default(), } @@ -313,9 +311,9 @@ fn moniker_to_symbol(moniker: &MonikerResult) -> scip_types::Symbol { scip_types::Symbol { scheme: "rust-analyzer".into(), package: Some(scip_types::Package { - manager: "cargo".to_string(), + manager: "cargo".to_owned(), name: package_name, - version: version.unwrap_or_else(|| ".".to_string()), + version: version.unwrap_or_else(|| ".".to_owned()), special_fields: Default::default(), }) .into(), diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 815f6ea12e86..7bdd9ec866a5 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -223,6 +223,9 @@ config_data! { /// /// Aliased as `"checkOnSave.targets"`. check_targets | checkOnSave_targets | checkOnSave_target: Option = "null", + /// Whether `--workspace` should be passed to `cargo check`. + /// If false, `-p ` will be passed instead. + check_workspace: bool = "true", /// Toggles the additional completions that automatically add imports when completed. /// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled. @@ -478,6 +481,9 @@ config_data! { /// Whether to show `can't find Cargo.toml` error message. notifications_cargoTomlNotFound: bool = "true", + /// Whether to send an UnindexedProject notification to the client. + notifications_unindexedProject: bool = "false", + /// How many worker threads in the main loop. The default `null` means to pick automatically. 
numThreads: Option = "null", @@ -745,6 +751,7 @@ pub enum FilesWatcher { #[derive(Debug, Clone)] pub struct NotificationsConfig { pub cargo_toml_not_found: bool, + pub unindexed_project: bool, } #[derive(Debug, Clone)] @@ -897,7 +904,7 @@ impl Config { use serde::de::Error; if self.data.check_command.is_empty() { error_sink.push(( - "/check/command".to_string(), + "/check/command".to_owned(), serde_json::Error::custom("expected a non-empty string"), )); } @@ -1220,7 +1227,10 @@ impl Config { } pub fn notifications(&self) -> NotificationsConfig { - NotificationsConfig { cargo_toml_not_found: self.data.notifications_cargoTomlNotFound } + NotificationsConfig { + cargo_toml_not_found: self.data.notifications_cargoTomlNotFound, + unindexed_project: self.data.notifications_unindexedProject, + } } pub fn cargo_autoreload(&self) -> bool { @@ -1323,6 +1333,10 @@ impl Config { } } + pub fn flycheck_workspace(&self) -> bool { + self.data.check_workspace + } + pub fn flycheck(&self) -> FlycheckConfig { match &self.data.check_overrideCommand { Some(args) if !args.is_empty() => { @@ -2612,7 +2626,7 @@ mod tests { .replace('\n', "\n ") .trim_start_matches('\n') .trim_end() - .to_string(); + .to_owned(); schema.push_str(",\n"); // Transform the asciidoc form link to markdown style. 
diff --git a/crates/rust-analyzer/src/config/patch_old_style.rs b/crates/rust-analyzer/src/config/patch_old_style.rs index 73d2ed329845..92c0c0d048ab 100644 --- a/crates/rust-analyzer/src/config/patch_old_style.rs +++ b/crates/rust-analyzer/src/config/patch_old_style.rs @@ -19,7 +19,7 @@ pub(super) fn patch_json_for_outdated_configs(json: &mut Value) { Some(it) => { let mut last = it; for segment in [$(stringify!($dst)),+].into_iter().rev() { - last = Value::Object(serde_json::Map::from_iter(std::iter::once((segment.to_string(), last)))); + last = Value::Object(serde_json::Map::from_iter(std::iter::once((segment.to_owned(), last)))); } merge(json, last); diff --git a/crates/rust-analyzer/src/diagnostics.rs b/crates/rust-analyzer/src/diagnostics.rs index c91b22999ded..a0a53f545c9e 100644 --- a/crates/rust-analyzer/src/diagnostics.rs +++ b/crates/rust-analyzer/src/diagnostics.rs @@ -135,11 +135,11 @@ pub(crate) fn fetch_native_diagnostics( |line_index: &crate::line_index::LineIndex, d: ide::Diagnostic| lsp_types::Diagnostic { range: lsp::to_proto::range(line_index, d.range.range), severity: Some(lsp::to_proto::diagnostic_severity(d.severity)), - code: Some(lsp_types::NumberOrString::String(d.code.as_str().to_string())), + code: Some(lsp_types::NumberOrString::String(d.code.as_str().to_owned())), code_description: Some(lsp_types::CodeDescription { href: lsp_types::Url::parse(&d.code.url()).unwrap(), }), - source: Some("rust-analyzer".to_string()), + source: Some("rust-analyzer".to_owned()), message: d.message, related_information: None, tags: d.unused.then(|| vec![lsp_types::DiagnosticTag::UNNECESSARY]), diff --git a/crates/rust-analyzer/src/diagnostics/to_proto.rs b/crates/rust-analyzer/src/diagnostics/to_proto.rs index f8bc66ff8e74..e900f2601d83 100644 --- a/crates/rust-analyzer/src/diagnostics/to_proto.rs +++ b/crates/rust-analyzer/src/diagnostics/to_proto.rs @@ -1,9 +1,9 @@ //! This module provides the functionality needed to convert diagnostics from //! 
`cargo check` json format to the LSP diagnostic format. -use std::collections::HashMap; use flycheck::{Applicability, DiagnosticLevel, DiagnosticSpan}; use itertools::Itertools; +use rustc_hash::FxHashMap; use stdx::format_to; use vfs::{AbsPath, AbsPathBuf}; @@ -186,7 +186,7 @@ fn map_rust_child_diagnostic( return MappedRustChildDiagnostic::MessageLine(rd.message.clone()); } - let mut edit_map: HashMap> = HashMap::new(); + let mut edit_map: FxHashMap> = FxHashMap::default(); let mut suggested_replacements = Vec::new(); let mut is_preferred = true; for &span in &spans { @@ -403,16 +403,16 @@ pub(crate) fn map_rust_diagnostic_to_lsp( related_info_macro_calls.push(lsp_types::DiagnosticRelatedInformation { location: secondary_location.clone(), message: if is_in_macro_call { - "Error originated from macro call here".to_string() + "Error originated from macro call here".to_owned() } else { - "Actual error occurred here".to_string() + "Actual error occurred here".to_owned() }, }); // For the additional in-macro diagnostic we add the inverse message pointing to the error location in code. let information_for_additional_diagnostic = vec![lsp_types::DiagnosticRelatedInformation { location: primary_location.clone(), - message: "Exact error occurred here".to_string(), + message: "Exact error occurred here".to_owned(), }]; let diagnostic = lsp_types::Diagnostic { @@ -467,7 +467,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp( // `related_information`, which just produces hard-to-read links, at least in VS Code. 
let back_ref = lsp_types::DiagnosticRelatedInformation { location: primary_location, - message: "original diagnostic".to_string(), + message: "original diagnostic".to_owned(), }; for sub in &subdiagnostics { diagnostics.push(MappedRustDiagnostic { @@ -685,7 +685,7 @@ mod tests { fn rustc_unused_variable_as_info() { check_with_config( DiagnosticsMapConfig { - warnings_as_info: vec!["unused_variables".to_string()], + warnings_as_info: vec!["unused_variables".to_owned()], ..DiagnosticsMapConfig::default() }, r##"{ @@ -769,7 +769,7 @@ mod tests { fn rustc_unused_variable_as_hint() { check_with_config( DiagnosticsMapConfig { - warnings_as_hint: vec!["unused_variables".to_string()], + warnings_as_hint: vec!["unused_variables".to_owned()], ..DiagnosticsMapConfig::default() }, r##"{ diff --git a/crates/rust-analyzer/src/dispatch.rs b/crates/rust-analyzer/src/dispatch.rs index fa856a796a8e..7adaef4ff6ec 100644 --- a/crates/rust-analyzer/src/dispatch.rs +++ b/crates/rust-analyzer/src/dispatch.rs @@ -123,7 +123,7 @@ impl RequestDispatcher<'_> { Err(_) => Task::Response(lsp_server::Response::new_err( req.id, lsp_server::ErrorCode::ContentModified as i32, - "content modified".to_string(), + "content modified".to_owned(), )), } } @@ -179,7 +179,7 @@ impl RequestDispatcher<'_> { let response = lsp_server::Response::new_err( req.id, lsp_server::ErrorCode::MethodNotFound as i32, - "unknown request".to_string(), + "unknown request".to_owned(), ); self.global_state.respond(response); } @@ -269,7 +269,7 @@ where .map(String::as_str) .or_else(|| panic.downcast_ref::<&str>().copied()); - let mut message = "request handler panicked".to_string(); + let mut message = "request handler panicked".to_owned(); if let Some(panic_message) = panic_message { message.push_str(": "); message.push_str(panic_message) diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index 2f226d01155b..da4422a60a8a 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ 
b/crates/rust-analyzer/src/global_state.rs @@ -33,7 +33,7 @@ use crate::{ mem_docs::MemDocs, op_queue::OpQueue, reload, - task_pool::TaskPool, + task_pool::{TaskPool, TaskQueue}, }; // Enforces drop order @@ -126,6 +126,17 @@ pub(crate) struct GlobalState { OpQueue<(), (Arc>, Vec>)>, pub(crate) fetch_proc_macros_queue: OpQueue, bool>, pub(crate) prime_caches_queue: OpQueue, + + /// A deferred task queue. + /// + /// This queue is used for doing database-dependent work inside of sync + /// handlers, as accessing the database may block latency-sensitive + /// interactions and should be moved away from the main thread. + /// + /// For certain features, such as [`lsp_ext::UnindexedProjectParams`], + /// this queue should run only *after* [`GlobalState::process_changes`] has + /// been called. + pub(crate) deferred_task_queue: TaskQueue, } /// An immutable snapshot of the world's state at a point in time. @@ -165,6 +176,11 @@ impl GlobalState { Handle { handle, receiver } }; + let task_queue = { + let (sender, receiver) = unbounded(); + TaskQueue { sender, receiver } + }; + let mut analysis_host = AnalysisHost::new(config.lru_parse_query_capacity()); if let Some(capacities) = config.lru_query_capacities() { analysis_host.update_lru_capacities(capacities); @@ -208,6 +224,8 @@ impl GlobalState { fetch_proc_macros_queue: OpQueue::default(), prime_caches_queue: OpQueue::default(), + + deferred_task_queue: task_queue, }; // Apply any required database inputs from the config. 
this.update_configuration(config); @@ -370,7 +388,7 @@ impl GlobalState { params: R::Params, handler: ReqHandler, ) { - let request = self.req_queue.outgoing.register(R::METHOD.to_string(), params, handler); + let request = self.req_queue.outgoing.register(R::METHOD.to_owned(), params, handler); self.send(request.into()); } @@ -387,7 +405,7 @@ impl GlobalState { &self, params: N::Params, ) { - let not = lsp_server::Notification::new(N::METHOD.to_string(), params); + let not = lsp_server::Notification::new(N::METHOD.to_owned(), params); self.send(not.into()); } diff --git a/crates/rust-analyzer/src/handlers/notification.rs b/crates/rust-analyzer/src/handlers/notification.rs index 1f24e9501057..d3c2073f09d2 100644 --- a/crates/rust-analyzer/src/handlers/notification.rs +++ b/crates/rust-analyzer/src/handlers/notification.rs @@ -70,7 +70,15 @@ pub(crate) fn handle_did_open_text_document( if already_exists { tracing::error!("duplicate DidOpenTextDocument: {}", path); } + state.vfs.write().0.set_file_contents(path, Some(params.text_document.text.into_bytes())); + if state.config.notifications().unindexed_project { + tracing::debug!("queuing task"); + let _ = state + .deferred_task_queue + .sender + .send(crate::main_loop::QueuedTask::CheckIfIndexed(params.text_document.uri)); + } } Ok(()) } @@ -160,7 +168,7 @@ pub(crate) fn handle_did_save_text_document( } else if state.config.check_on_save() { // No specific flycheck was triggered, so let's trigger all of them. 
for flycheck in state.flycheck.iter() { - flycheck.restart(); + flycheck.restart_workspace(); } } Ok(()) @@ -176,7 +184,7 @@ pub(crate) fn handle_did_change_configuration( lsp_types::ConfigurationParams { items: vec![lsp_types::ConfigurationItem { scope_uri: None, - section: Some("rust-analyzer".to_string()), + section: Some("rust-analyzer".to_owned()), }], }, |this, resp| { @@ -228,7 +236,7 @@ pub(crate) fn handle_did_change_workspace_folders( if !config.has_linked_projects() && config.detached_files().is_empty() { config.rediscover_workspaces(); - state.fetch_workspaces_queue.request_op("client workspaces changed".to_string(), false) + state.fetch_workspaces_queue.request_op("client workspaces changed".to_owned(), false) } Ok(()) @@ -281,27 +289,40 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { let crate_root_paths: Vec<_> = crate_root_paths.iter().map(Deref::deref).collect(); // Find all workspaces that have at least one target containing the saved file - let workspace_ids = world.workspaces.iter().enumerate().filter(|(_, ws)| match ws { - project_model::ProjectWorkspace::Cargo { cargo, .. } => { - cargo.packages().any(|pkg| { - cargo[pkg] - .targets - .iter() - .any(|&it| crate_root_paths.contains(&cargo[it].root.as_path())) - }) - } - project_model::ProjectWorkspace::Json { project, .. } => { - project.crates().any(|(c, _)| crate_ids.iter().any(|&crate_id| crate_id == c)) - } - project_model::ProjectWorkspace::DetachedFiles { .. } => false, + let workspace_ids = world.workspaces.iter().enumerate().filter_map(|(idx, ws)| { + let package = match ws { + project_model::ProjectWorkspace::Cargo { cargo, .. } => { + cargo.packages().find_map(|pkg| { + let has_target_with_root = cargo[pkg] + .targets + .iter() + .any(|&it| crate_root_paths.contains(&cargo[it].root.as_path())); + has_target_with_root.then(|| cargo[pkg].name.clone()) + }) + } + project_model::ProjectWorkspace::Json { project, .. 
} => { + if !project + .crates() + .any(|(c, _)| crate_ids.iter().any(|&crate_id| crate_id == c)) + { + return None; + } + None + } + project_model::ProjectWorkspace::DetachedFiles { .. } => return None, + }; + Some((idx, package)) }); // Find and trigger corresponding flychecks for flycheck in world.flycheck.iter() { - for (id, _) in workspace_ids.clone() { + for (id, package) in workspace_ids.clone() { if id == flycheck.id() { updated = true; - flycheck.restart(); + match package.filter(|_| !world.config.flycheck_workspace()) { + Some(package) => flycheck.restart_for_package(package), + None => flycheck.restart_workspace(), + } continue; } } @@ -309,7 +330,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { // No specific flycheck was triggered, so let's trigger all of them. if !updated { for flycheck in world.flycheck.iter() { - flycheck.restart(); + flycheck.restart_workspace(); } } Ok(()) @@ -351,7 +372,7 @@ pub(crate) fn handle_run_flycheck( } // No specific flycheck was triggered, so let's trigger all of them. 
for flycheck in state.flycheck.iter() { - flycheck.restart(); + flycheck.restart_workspace(); } Ok(()) } diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs index 2be2ba5c4465..2a3633a48e9f 100644 --- a/crates/rust-analyzer/src/handlers/request.rs +++ b/crates/rust-analyzer/src/handlers/request.rs @@ -54,7 +54,7 @@ pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow: state.proc_macro_clients = Arc::from_iter([]); state.proc_macro_changed = false; - state.fetch_workspaces_queue.request_op("reload workspace request".to_string(), false); + state.fetch_workspaces_queue.request_op("reload workspace request".to_owned(), false); Ok(()) } @@ -62,7 +62,7 @@ pub(crate) fn handle_proc_macros_rebuild(state: &mut GlobalState, _: ()) -> anyh state.proc_macro_clients = Arc::from_iter([]); state.proc_macro_changed = false; - state.fetch_build_data_queue.request_op("rebuild proc macros request".to_string(), ()); + state.fetch_build_data_queue.request_op("rebuild proc macros request".to_owned(), ()); Ok(()) } @@ -562,7 +562,7 @@ pub(crate) fn handle_will_rename_files( (Some(p1), Some(p2)) if p1 == p2 => { if from_path.is_dir() { // add '/' to end of url -- from `file://path/to/folder` to `file://path/to/folder/` - let mut old_folder_name = from_path.file_stem()?.to_str()?.to_string(); + let mut old_folder_name = from_path.file_stem()?.to_str()?.to_owned(); old_folder_name.push('/'); let from_with_trailing_slash = from.join(&old_folder_name).ok()?; @@ -570,7 +570,7 @@ pub(crate) fn handle_will_rename_files( let new_file_name = to_path.file_name()?.to_str()?; Some(( snap.url_to_file_id(&imitate_from_url).ok()?, - new_file_name.to_string(), + new_file_name.to_owned(), )) } else { let old_name = from_path.file_stem()?.to_str()?; @@ -578,7 +578,7 @@ pub(crate) fn handle_will_rename_files( match (old_name, new_name) { ("mod", _) => None, (_, "mod") => None, - _ => Some((snap.url_to_file_id(&from).ok()?, 
new_name.to_string())), + _ => Some((snap.url_to_file_id(&from).ok()?, new_name.to_owned())), } } } @@ -799,13 +799,13 @@ pub(crate) fn handle_runnables( None => { if !snap.config.linked_or_discovered_projects().is_empty() { res.push(lsp_ext::Runnable { - label: "cargo check --workspace".to_string(), + label: "cargo check --workspace".to_owned(), location: None, kind: lsp_ext::RunnableKind::Cargo, args: lsp_ext::CargoRunnable { workspace_root: None, override_cargo: config.override_cargo, - cargo_args: vec!["check".to_string(), "--workspace".to_string()], + cargo_args: vec!["check".to_owned(), "--workspace".to_owned()], cargo_extra_args: config.cargo_extra_args, executable_args: Vec::new(), expect_test: None, @@ -879,7 +879,7 @@ pub(crate) fn handle_completion_resolve( if !all_edits_are_disjoint(&original_completion, &[]) { return Err(invalid_params_error( - "Received a completion with overlapping edits, this is not LSP-compliant".to_string(), + "Received a completion with overlapping edits, this is not LSP-compliant".to_owned(), ) .into()); } @@ -1191,7 +1191,7 @@ pub(crate) fn handle_code_action_resolve( let _p = tracing::span!(tracing::Level::INFO, "handle_code_action_resolve").entered(); let params = match code_action.data.take() { Some(it) => it, - None => return Err(invalid_params_error("code action without data".to_string()).into()), + None => return Err(invalid_params_error("code action without data".to_owned()).into()), }; let file_id = from_proto::file_id(&snap, ¶ms.code_action_params.text_document.uri)?; @@ -1270,7 +1270,7 @@ fn parse_action_id(action_id: &str) -> anyhow::Result<(usize, SingleResolve), St }; Ok((index, SingleResolve { assist_id: assist_id_string.to_string(), assist_kind })) } - _ => Err("Action id contains incorrect number of segments".to_string()), + _ => Err("Action id contains incorrect number of segments".to_owned()), } } diff --git a/crates/rust-analyzer/src/lsp/ext.rs b/crates/rust-analyzer/src/lsp/ext.rs index 
35c8fad37415..aa40728ce6cb 100644 --- a/crates/rust-analyzer/src/lsp/ext.rs +++ b/crates/rust-analyzer/src/lsp/ext.rs @@ -1,6 +1,8 @@ //! rust-analyzer extensions to the LSP. -use std::{collections::HashMap, path::PathBuf}; +#![allow(clippy::disallowed_types)] + +use std::path::PathBuf; use ide_db::line_index::WideEncoding; use lsp_types::request::Request; @@ -9,6 +11,7 @@ use lsp_types::{ PartialResultParams, Position, Range, TextDocumentIdentifier, WorkDoneProgressParams, }; use lsp_types::{PositionEncodingKind, Url}; +use rustc_hash::FxHashMap; use serde::{Deserialize, Serialize}; use crate::line_index::PositionEncoding; @@ -448,12 +451,16 @@ pub struct CodeActionData { #[serde(rename_all = "camelCase")] pub struct SnippetWorkspaceEdit { #[serde(skip_serializing_if = "Option::is_none")] - pub changes: Option>>, + pub changes: Option>>, #[serde(skip_serializing_if = "Option::is_none")] pub document_changes: Option>, #[serde(skip_serializing_if = "Option::is_none")] - pub change_annotations: - Option>, + pub change_annotations: Option< + std::collections::HashMap< + lsp_types::ChangeAnnotationIdentifier, + lsp_types::ChangeAnnotation, + >, + >, } #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)] @@ -696,3 +703,16 @@ pub struct CompletionImport { pub struct ClientCommandOptions { pub commands: Vec, } + +pub enum UnindexedProject {} + +impl Notification for UnindexedProject { + type Params = UnindexedProjectParams; + const METHOD: &'static str = "rust-analyzer/unindexedProject"; +} + +#[derive(Deserialize, Serialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct UnindexedProjectParams { + pub text_documents: Vec, +} diff --git a/crates/rust-analyzer/src/lsp/from_proto.rs b/crates/rust-analyzer/src/lsp/from_proto.rs index 9923be382b77..f42985a91610 100644 --- a/crates/rust-analyzer/src/lsp/from_proto.rs +++ b/crates/rust-analyzer/src/lsp/from_proto.rs @@ -108,7 +108,7 @@ pub(crate) fn annotation( code_lens: lsp_types::CodeLens, ) -> 
anyhow::Result> { let data = - code_lens.data.ok_or_else(|| invalid_params_error("code lens without data".to_string()))?; + code_lens.data.ok_or_else(|| invalid_params_error("code lens without data".to_owned()))?; let resolve = from_json::("CodeLensResolveData", &data)?; match resolve.kind { diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs index d363ac69fdc3..64f19f0b32d7 100644 --- a/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/crates/rust-analyzer/src/lsp/to_proto.rs @@ -245,7 +245,7 @@ fn completion_item( ) { let insert_replace_support = config.insert_replace_support().then_some(tdpp.position); let ref_match = item.ref_match(); - let lookup = item.lookup().to_string(); + let lookup = item.lookup().to_owned(); let mut additional_text_edits = Vec::new(); @@ -367,7 +367,7 @@ pub(crate) fn signature_help( let params = call_info .parameter_labels() .map(|label| lsp_types::ParameterInformation { - label: lsp_types::ParameterLabel::Simple(label.to_string()), + label: lsp_types::ParameterLabel::Simple(label.to_owned()), documentation: None, }) .collect::>(); @@ -1498,7 +1498,7 @@ pub(crate) mod command { pub(crate) fn run_single(runnable: &lsp_ext::Runnable, title: &str) -> lsp_types::Command { lsp_types::Command { - title: title.to_string(), + title: title.to_owned(), command: "rust-analyzer.runSingle".into(), arguments: Some(vec![to_value(runnable).unwrap()]), } @@ -1608,7 +1608,7 @@ fn main() { } }"#; - let (analysis, file_id) = Analysis::from_single_file(text.to_string()); + let (analysis, file_id) = Analysis::from_single_file(text.to_owned()); let folds = analysis.folding_ranges(file_id).unwrap(); assert_eq!(folds.len(), 4); diff --git a/crates/rust-analyzer/src/lsp/utils.rs b/crates/rust-analyzer/src/lsp/utils.rs index fa5ea5b57db0..10335cb14533 100644 --- a/crates/rust-analyzer/src/lsp/utils.rs +++ b/crates/rust-analyzer/src/lsp/utils.rs @@ -333,21 +333,20 @@ mod tests { #[test] fn 
empty_completion_disjoint_tests() { - let empty_completion = - CompletionItem::new_simple("label".to_string(), "detail".to_string()); + let empty_completion = CompletionItem::new_simple("label".to_owned(), "detail".to_owned()); let disjoint_edit_1 = lsp_types::TextEdit::new( Range::new(Position::new(2, 2), Position::new(3, 3)), - "new_text".to_string(), + "new_text".to_owned(), ); let disjoint_edit_2 = lsp_types::TextEdit::new( Range::new(Position::new(3, 3), Position::new(4, 4)), - "new_text".to_string(), + "new_text".to_owned(), ); let joint_edit = lsp_types::TextEdit::new( Range::new(Position::new(1, 1), Position::new(5, 5)), - "new_text".to_string(), + "new_text".to_owned(), ); assert!( @@ -375,19 +374,19 @@ mod tests { fn completion_with_joint_edits_disjoint_tests() { let disjoint_edit = lsp_types::TextEdit::new( Range::new(Position::new(1, 1), Position::new(2, 2)), - "new_text".to_string(), + "new_text".to_owned(), ); let disjoint_edit_2 = lsp_types::TextEdit::new( Range::new(Position::new(2, 2), Position::new(3, 3)), - "new_text".to_string(), + "new_text".to_owned(), ); let joint_edit = lsp_types::TextEdit::new( Range::new(Position::new(1, 1), Position::new(5, 5)), - "new_text".to_string(), + "new_text".to_owned(), ); let mut completion_with_joint_edits = - CompletionItem::new_simple("label".to_string(), "detail".to_string()); + CompletionItem::new_simple("label".to_owned(), "detail".to_owned()); completion_with_joint_edits.additional_text_edits = Some(vec![disjoint_edit.clone(), joint_edit.clone()]); assert!( @@ -405,7 +404,7 @@ mod tests { completion_with_joint_edits.text_edit = Some(CompletionTextEdit::InsertAndReplace(InsertReplaceEdit { - new_text: "new_text".to_string(), + new_text: "new_text".to_owned(), insert: disjoint_edit.range, replace: disjoint_edit_2.range, })); @@ -420,19 +419,19 @@ mod tests { fn completion_with_disjoint_edits_disjoint_tests() { let disjoint_edit = lsp_types::TextEdit::new( Range::new(Position::new(1, 1), Position::new(2, 
2)), - "new_text".to_string(), + "new_text".to_owned(), ); let disjoint_edit_2 = lsp_types::TextEdit::new( Range::new(Position::new(2, 2), Position::new(3, 3)), - "new_text".to_string(), + "new_text".to_owned(), ); let joint_edit = lsp_types::TextEdit::new( Range::new(Position::new(1, 1), Position::new(5, 5)), - "new_text".to_string(), + "new_text".to_owned(), ); let mut completion_with_disjoint_edits = - CompletionItem::new_simple("label".to_string(), "detail".to_string()); + CompletionItem::new_simple("label".to_owned(), "detail".to_owned()); completion_with_disjoint_edits.text_edit = Some(CompletionTextEdit::Edit(disjoint_edit)); let completion_with_disjoint_edits = completion_with_disjoint_edits; diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index f3ead6d04f7d..88660db7e93b 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -1,5 +1,6 @@ //! The main loop of `rust-analyzer` responsible for dispatching LSP //! requests/replies and notifications back to the client. 
+use crate::lsp::ext; use std::{ fmt, time::{Duration, Instant}, @@ -56,6 +57,7 @@ pub fn main_loop(config: Config, connection: Connection) -> anyhow::Result<()> { enum Event { Lsp(lsp_server::Message), Task(Task), + QueuedTask(QueuedTask), Vfs(vfs::loader::Message), Flycheck(flycheck::Message), } @@ -67,13 +69,20 @@ impl fmt::Display for Event { Event::Task(_) => write!(f, "Event::Task"), Event::Vfs(_) => write!(f, "Event::Vfs"), Event::Flycheck(_) => write!(f, "Event::Flycheck"), + Event::QueuedTask(_) => write!(f, "Event::QueuedTask"), } } } +#[derive(Debug)] +pub(crate) enum QueuedTask { + CheckIfIndexed(lsp_types::Url), +} + #[derive(Debug)] pub(crate) enum Task { Response(lsp_server::Response), + ClientNotification(ext::UnindexedProjectParams), Retry(lsp_server::Request), Diagnostics(Vec<(FileId, Vec)>), PrimeCaches(PrimeCachesProgress), @@ -115,6 +124,7 @@ impl fmt::Debug for Event { match self { Event::Lsp(it) => fmt::Debug::fmt(it, f), Event::Task(it) => fmt::Debug::fmt(it, f), + Event::QueuedTask(it) => fmt::Debug::fmt(it, f), Event::Vfs(it) => fmt::Debug::fmt(it, f), Event::Flycheck(it) => fmt::Debug::fmt(it, f), } @@ -129,7 +139,7 @@ impl GlobalState { self.register_did_save_capability(); } - self.fetch_workspaces_queue.request_op("startup".to_string(), false); + self.fetch_workspaces_queue.request_op("startup".to_owned(), false); if let Some((cause, force_crate_graph_reload)) = self.fetch_workspaces_queue.should_start_op() { @@ -175,8 +185,8 @@ impl GlobalState { }; let registration = lsp_types::Registration { - id: "textDocument/didSave".to_string(), - method: "textDocument/didSave".to_string(), + id: "textDocument/didSave".to_owned(), + method: "textDocument/didSave".to_owned(), register_options: Some(serde_json::to_value(save_registration_options).unwrap()), }; self.send_request::( @@ -193,6 +203,9 @@ impl GlobalState { recv(self.task_pool.receiver) -> task => Some(Event::Task(task.unwrap())), + recv(self.deferred_task_queue.receiver) -> task => + 
Some(Event::QueuedTask(task.unwrap())), + recv(self.fmt_pool.receiver) -> task => Some(Event::Task(task.unwrap())), @@ -211,7 +224,7 @@ impl GlobalState { .entered(); let event_dbg_msg = format!("{event:?}"); - tracing::debug!("{:?} handle_event({})", loop_start, event_dbg_msg); + tracing::debug!(?loop_start, ?event, "handle_event"); if tracing::enabled!(tracing::Level::INFO) { let task_queue_len = self.task_pool.handle.len(); if task_queue_len > 0 { @@ -226,6 +239,16 @@ impl GlobalState { lsp_server::Message::Notification(not) => self.on_notification(not)?, lsp_server::Message::Response(resp) => self.complete_request(resp), }, + Event::QueuedTask(task) => { + let _p = + tracing::span!(tracing::Level::INFO, "GlobalState::handle_event/queued_task") + .entered(); + self.handle_queued_task(task); + // Coalesce multiple task events into one loop turn + while let Ok(task) = self.deferred_task_queue.receiver.try_recv() { + self.handle_queued_task(task); + } + } Event::Task(task) => { let _p = tracing::span!(tracing::Level::INFO, "GlobalState::handle_event/task") .entered(); @@ -273,7 +296,7 @@ impl GlobalState { self.prime_caches_queue.op_completed(()); if cancelled { self.prime_caches_queue - .request_op("restart after cancellation".to_string(), ()); + .request_op("restart after cancellation".to_owned(), ()); } } }; @@ -314,10 +337,10 @@ impl GlobalState { if became_quiescent { if self.config.check_on_save() { // Project has loaded properly, kick off initial flycheck - self.flycheck.iter().for_each(FlycheckHandle::restart); + self.flycheck.iter().for_each(FlycheckHandle::restart_workspace); } if self.config.prefill_caches() { - self.prime_caches_queue.request_op("became quiescent".to_string(), ()); + self.prime_caches_queue.request_op("became quiescent".to_owned(), ()); } } @@ -367,7 +390,7 @@ impl GlobalState { // See https://github.com/rust-lang/rust-analyzer/issues/13130 let patch_empty = |message: &mut String| { if message.is_empty() { - *message = " ".to_string(); 
+ *message = " ".to_owned(); } }; @@ -498,6 +521,9 @@ impl GlobalState { fn handle_task(&mut self, prime_caches_progress: &mut Vec, task: Task) { match task { Task::Response(response) => self.respond(response), + Task::ClientNotification(params) => { + self.send_notification::(params) + } // Only retry requests that haven't been cancelled. Otherwise we do unnecessary work. Task::Retry(req) if !self.is_completed(&req) => self.on_request(req), Task::Retry(_) => (), @@ -531,12 +557,12 @@ impl GlobalState { } let old = Arc::clone(&self.workspaces); - self.switch_workspaces("fetched workspace".to_string()); + self.switch_workspaces("fetched workspace".to_owned()); let workspaces_updated = !Arc::ptr_eq(&old, &self.workspaces); if self.config.run_build_scripts() && workspaces_updated { self.fetch_build_data_queue - .request_op("workspace updated".to_string(), ()); + .request_op("workspace updated".to_owned(), ()); } (Progress::End, None) @@ -555,7 +581,7 @@ impl GlobalState { tracing::error!("FetchBuildDataError:\n{e}"); } - self.switch_workspaces("fetched build data".to_string()); + self.switch_workspaces("fetched build data".to_owned()); self.send_hint_refresh_query = true; (Some(Progress::End), None) @@ -638,6 +664,31 @@ impl GlobalState { } } + fn handle_queued_task(&mut self, task: QueuedTask) { + match task { + QueuedTask::CheckIfIndexed(uri) => { + let snap = self.snapshot(); + + self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, move |sender| { + let _p = tracing::span!(tracing::Level::INFO, "GlobalState::check_if_indexed") + .entered(); + tracing::debug!(?uri, "handling uri"); + let id = from_proto::file_id(&snap, &uri).expect("unable to get FileId"); + if let Ok(crates) = &snap.analysis.crates_for(id) { + if crates.is_empty() { + let params = ext::UnindexedProjectParams { + text_documents: vec![lsp_types::TextDocumentIdentifier { uri }], + }; + sender.send(Task::ClientNotification(params)).unwrap(); + } else { + tracing::debug!(?uri, "is indexed"); + 
} + } + }); + } + } + } + fn handle_flycheck_msg(&mut self, message: flycheck::Message) { match message { flycheck::Message::AddDiagnostic { id, workspace_root, diagnostic } => { diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 65c00cc08d1a..7bd2877b00cb 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -83,7 +83,7 @@ impl GlobalState { } if self.config.linked_or_discovered_projects() != old_config.linked_or_discovered_projects() { - self.fetch_workspaces_queue.request_op("linked projects changed".to_string(), false) + self.fetch_workspaces_queue.request_op("linked projects changed".to_owned(), false) } else if self.config.flycheck() != old_config.flycheck() { self.reload_flycheck(); } @@ -440,8 +440,8 @@ impl GlobalState { .collect(), }; let registration = lsp_types::Registration { - id: "workspace/didChangeWatchedFiles".to_string(), - method: "workspace/didChangeWatchedFiles".to_string(), + id: "workspace/didChangeWatchedFiles".to_owned(), + method: "workspace/didChangeWatchedFiles".to_owned(), register_options: Some(serde_json::to_value(registration_options).unwrap()), }; self.send_request::( @@ -503,7 +503,7 @@ impl GlobalState { let mut crate_graph_file_dependencies = FxHashSet::default(); let mut load = |path: &AbsPath| { - let _p = tracing::span!(tracing::Level::INFO, "switch_workspaces::load").entered(); + let _p = tracing::span!(tracing::Level::DEBUG, "switch_workspaces::load").entered(); let vfs_path = vfs::VfsPath::from(path.to_path_buf()); crate_graph_file_dependencies.insert(vfs_path.clone()); match vfs.file_id(&vfs_path) { @@ -528,10 +528,16 @@ impl GlobalState { (crate_graph, proc_macros, crate_graph_file_dependencies) }; + let mut change = Change::new(); if self.config.expand_proc_macros() { + change.set_proc_macros( + crate_graph + .iter() + .map(|id| (id, Err("Proc-macros have not been built yet".to_owned()))) + .collect(), + ); 
self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths); } - let mut change = Change::new(); change.set_crate_graph(crate_graph); self.analysis_host.apply_change(change); self.crate_graph_file_dependencies = crate_graph_file_dependencies; diff --git a/crates/rust-analyzer/src/task_pool.rs b/crates/rust-analyzer/src/task_pool.rs index a5a10e86914e..f7de5fb2ff1d 100644 --- a/crates/rust-analyzer/src/task_pool.rs +++ b/crates/rust-analyzer/src/task_pool.rs @@ -4,6 +4,8 @@ use crossbeam_channel::Sender; use stdx::thread::{Pool, ThreadIntent}; +use crate::main_loop::QueuedTask; + pub(crate) struct TaskPool { sender: Sender, pool: Pool, @@ -40,3 +42,12 @@ impl TaskPool { self.pool.len() } } + +/// `TaskQueue`, like its name suggests, queues tasks. +/// +/// This should only be used if a task must run after [`GlobalState::process_changes`] +/// has been called. +pub(crate) struct TaskQueue { + pub(crate) sender: crossbeam_channel::Sender, + pub(crate) receiver: crossbeam_channel::Receiver, +} diff --git a/crates/rust-analyzer/src/tracing/hprof.rs b/crates/rust-analyzer/src/tracing/hprof.rs index c99b551df852..906498732977 100644 --- a/crates/rust-analyzer/src/tracing/hprof.rs +++ b/crates/rust-analyzer/src/tracing/hprof.rs @@ -179,6 +179,7 @@ impl Node { self.go(0, filter) } + #[allow(clippy::print_stderr)] fn go(&self, level: usize, filter: &WriteFilter) { if self.duration > filter.longer_than && level < filter.depth { let duration = ms(self.duration); diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs index 19890110d53d..79ae0c30cfc4 100644 --- a/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/crates/rust-analyzer/tests/slow-tests/main.rs @@ -9,6 +9,7 @@ //! be sure without a real client anyway. 
#![warn(rust_2018_idioms, unused_lifetimes)] +#![allow(clippy::disallowed_types)] #[cfg(not(feature = "in-rust-tree"))] mod sourcegen; @@ -29,7 +30,7 @@ use lsp_types::{ PartialResultParams, Position, Range, RenameFilesParams, TextDocumentItem, TextDocumentPositionParams, WorkDoneProgressParams, }; -use rust_analyzer::lsp::ext::{OnEnter, Runnables, RunnablesParams}; +use rust_analyzer::lsp::ext::{OnEnter, Runnables, RunnablesParams, UnindexedProject}; use serde_json::json; use stdx::format_to_acc; use test_utils::skip_slow_tests; @@ -586,6 +587,66 @@ fn main() {{}} ); } +#[test] +fn test_opening_a_file_outside_of_indexed_workspace() { + if skip_slow_tests() { + return; + } + + let tmp_dir = TestDir::new(); + let path = tmp_dir.path(); + + let project = json!({ + "roots": [path], + "crates": [ { + "root_module": path.join("src/crate_one/lib.rs"), + "deps": [], + "edition": "2015", + "cfg": [ "cfg_atom_1", "feature=\"cfg_1\""], + } ] + }); + + let code = format!( + r#" +//- /rust-project.json +{project} + +//- /src/crate_one/lib.rs +mod bar; + +fn main() {{}} +"#, + ); + + let server = Project::with_fixture(&code) + .tmp_dir(tmp_dir) + .with_config(serde_json::json!({ + "notifications": { + "unindexedProject": true + }, + })) + .server() + .wait_until_workspace_is_loaded(); + + let uri = server.doc_id("src/crate_two/lib.rs").uri; + server.notification::(DidOpenTextDocumentParams { + text_document: TextDocumentItem { + uri: uri.clone(), + language_id: "rust".to_owned(), + version: 0, + text: "/// Docs\nfn foo() {}".to_owned(), + }, + }); + let expected = json!({ + "textDocuments": [ + { + "uri": uri + } + ] + }); + server.expect_notification::(expected); +} + #[test] fn diagnostics_dont_block_typing() { if skip_slow_tests() { @@ -621,9 +682,9 @@ fn main() {{}} server.notification::(DidOpenTextDocumentParams { text_document: TextDocumentItem { uri: server.doc_id(&format!("src/m{i}.rs")).uri, - language_id: "rust".to_string(), + language_id: "rust".to_owned(), version: 
0, - text: "/// Docs\nfn foo() {}".to_string(), + text: "/// Docs\nfn foo() {}".to_owned(), }, }); } @@ -1017,15 +1078,15 @@ use crate::old_folder::nested::foo as bar; server.request::( RenameFilesParams { files: vec![FileRename { - old_uri: base_path.join("src/old_file.rs").to_str().unwrap().to_string(), - new_uri: base_path.join("src/new_file.rs").to_str().unwrap().to_string(), + old_uri: base_path.join("src/old_file.rs").to_str().unwrap().to_owned(), + new_uri: base_path.join("src/new_file.rs").to_str().unwrap().to_owned(), }], }, json!({ "documentChanges": [ { "textDocument": { - "uri": format!("file://{}", tmp_dir_path.join("src").join("lib.rs").to_str().unwrap().to_string().replace("C:\\", "/c:/").replace('\\', "/")), + "uri": format!("file://{}", tmp_dir_path.join("src").join("lib.rs").to_str().unwrap().to_owned().replace("C:\\", "/c:/").replace('\\', "/")), "version": null }, "edits": [ @@ -1052,8 +1113,8 @@ use crate::old_folder::nested::foo as bar; server.request::( RenameFilesParams { files: vec![FileRename { - old_uri: base_path.join("src/from_mod/mod.rs").to_str().unwrap().to_string(), - new_uri: base_path.join("src/from_mod/foo.rs").to_str().unwrap().to_string(), + old_uri: base_path.join("src/from_mod/mod.rs").to_str().unwrap().to_owned(), + new_uri: base_path.join("src/from_mod/foo.rs").to_str().unwrap().to_owned(), }], }, json!(null), @@ -1063,8 +1124,8 @@ use crate::old_folder::nested::foo as bar; server.request::( RenameFilesParams { files: vec![FileRename { - old_uri: base_path.join("src/to_mod/foo.rs").to_str().unwrap().to_string(), - new_uri: base_path.join("src/to_mod/mod.rs").to_str().unwrap().to_string(), + old_uri: base_path.join("src/to_mod/foo.rs").to_str().unwrap().to_owned(), + new_uri: base_path.join("src/to_mod/mod.rs").to_str().unwrap().to_owned(), }], }, json!(null), @@ -1074,15 +1135,15 @@ use crate::old_folder::nested::foo as bar; server.request::( RenameFilesParams { files: vec![FileRename { - old_uri: 
base_path.join("src/old_folder").to_str().unwrap().to_string(), - new_uri: base_path.join("src/new_folder").to_str().unwrap().to_string(), + old_uri: base_path.join("src/old_folder").to_str().unwrap().to_owned(), + new_uri: base_path.join("src/new_folder").to_str().unwrap().to_owned(), }], }, json!({ "documentChanges": [ { "textDocument": { - "uri": format!("file://{}", tmp_dir_path.join("src").join("lib.rs").to_str().unwrap().to_string().replace("C:\\", "/c:/").replace('\\', "/")), + "uri": format!("file://{}", tmp_dir_path.join("src").join("lib.rs").to_str().unwrap().to_owned().replace("C:\\", "/c:/").replace('\\', "/")), "version": null }, "edits": [ @@ -1103,7 +1164,7 @@ use crate::old_folder::nested::foo as bar; }, { "textDocument": { - "uri": format!("file://{}", tmp_dir_path.join("src").join("old_folder").join("nested.rs").to_str().unwrap().to_string().replace("C:\\", "/c:/").replace('\\', "/")), + "uri": format!("file://{}", tmp_dir_path.join("src").join("old_folder").join("nested.rs").to_str().unwrap().to_owned().replace("C:\\", "/c:/").replace('\\', "/")), "version": null }, "edits": [ diff --git a/crates/rust-analyzer/tests/slow-tests/support.rs b/crates/rust-analyzer/tests/slow-tests/support.rs index d699374f9cde..d02cb45b8e35 100644 --- a/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/crates/rust-analyzer/tests/slow-tests/support.rs @@ -9,7 +9,7 @@ use std::{ use crossbeam_channel::{after, select, Receiver}; use lsp_server::{Connection, Message, Notification, Request}; use lsp_types::{notification::Exit, request::Shutdown, TextDocumentIdentifier, Url}; -use rust_analyzer::{config::Config, lsp, main_loop, tracing}; +use rust_analyzer::{config::Config, lsp, main_loop}; use serde::Serialize; use serde_json::{json, to_string_pretty, Value}; use test_utils::FixtureWithProjectMeta; @@ -91,11 +91,11 @@ impl Project<'_> { static INIT: Once = Once::new(); INIT.call_once(|| { - let _ = tracing::Config { + let _ = rust_analyzer::tracing::Config { writer: 
TestWriter::default(), // Deliberately enable all `error` logs if the user has not set RA_LOG, as there is usually // useful information in there for debugging. - filter: std::env::var("RA_LOG").ok().unwrap_or_else(|| "error".to_string()), + filter: std::env::var("RA_LOG").ok().unwrap_or_else(|| "error".to_owned()), chalk_filter: std::env::var("CHALK_DEBUG").ok(), profile_filter: std::env::var("RA_PROFILE").ok(), }; @@ -193,7 +193,7 @@ impl Server { let (connection, client) = Connection::memory(); let _thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker) - .name("test server".to_string()) + .name("test server".to_owned()) .spawn(move || main_loop(config, connection).unwrap()) .expect("failed to spawn a thread"); @@ -210,10 +210,44 @@ impl Server { N: lsp_types::notification::Notification, N::Params: Serialize, { - let r = Notification::new(N::METHOD.to_string(), params); + let r = Notification::new(N::METHOD.to_owned(), params); self.send_notification(r) } + pub(crate) fn expect_notification(&self, expected: Value) + where + N: lsp_types::notification::Notification, + N::Params: Serialize, + { + while let Some(Message::Notification(actual)) = + recv_timeout(&self.client.receiver).unwrap_or_else(|_| panic!("timed out")) + { + if actual.method == N::METHOD { + let actual = actual + .clone() + .extract::(N::METHOD) + .expect("was not able to extract notification"); + + tracing::debug!(?actual, "got notification"); + if let Some((expected_part, actual_part)) = find_mismatch(&expected, &actual) { + panic!( + "JSON mismatch\nExpected:\n{}\nWas:\n{}\nExpected part:\n{}\nActual part:\n{}\n", + to_string_pretty(&expected).unwrap(), + to_string_pretty(&actual).unwrap(), + to_string_pretty(expected_part).unwrap(), + to_string_pretty(actual_part).unwrap(), + ); + } else { + tracing::debug!("successfully matched notification"); + return; + } + } else { + continue; + } + } + panic!("never got expected notification"); + } + #[track_caller] pub(crate) fn 
request(&self, params: R::Params, expected_resp: Value) where @@ -240,7 +274,7 @@ impl Server { let id = self.req_id.get(); self.req_id.set(id.wrapping_add(1)); - let r = Request::new(id.into(), R::METHOD.to_string(), params); + let r = Request::new(id.into(), R::METHOD.to_owned(), params); self.send_request_(r) } fn send_request_(&self, r: Request) -> Value { diff --git a/crates/rust-analyzer/tests/slow-tests/tidy.rs b/crates/rust-analyzer/tests/slow-tests/tidy.rs index d3146ab7671c..3e38fc3ebcd7 100644 --- a/crates/rust-analyzer/tests/slow-tests/tidy.rs +++ b/crates/rust-analyzer/tests/slow-tests/tidy.rs @@ -1,3 +1,4 @@ +#![allow(clippy::disallowed_types, clippy::print_stderr)] use std::{ collections::HashSet, path::{Path, PathBuf}, @@ -78,8 +79,6 @@ fn files_are_tidy() { match extension { "rs" => { let text = sh.read_file(&path).unwrap(); - check_todo(&path, &text); - check_dbg(&path, &text); check_test_attrs(&path, &text); check_trailing_ws(&path, &text); tidy_docs.visit(&path, &text); @@ -205,74 +204,6 @@ Zlib OR Apache-2.0 OR MIT assert_eq!(licenses, expected); } -fn check_todo(path: &Path, text: &str) { - let need_todo = &[ - // This file itself obviously needs to use todo (<- like this!). - "tests/tidy.rs", - // Some of our assists generate `todo!()`. - "handlers/add_turbo_fish.rs", - "handlers/generate_function.rs", - "handlers/add_missing_match_arms.rs", - "handlers/replace_derive_with_manual_impl.rs", - // To support generating `todo!()` in assists, we have `expr_todo()` in - // `ast::make`. 
- "ast/make.rs", - // The documentation in string literals may contain anything for its own purposes - "ide-db/src/generated/lints.rs", - "ide-assists/src/utils/gen_trait_fn_body.rs", - "ide-assists/src/tests/generated.rs", - // The tests for missing fields - "ide-diagnostics/src/handlers/missing_fields.rs", - ]; - if need_todo.iter().any(|p| path.ends_with(p)) { - return; - } - if text.contains("TODO") || text.contains("TOOD") || text.contains("todo!") { - // Generated by an assist - if text.contains("${0:todo!()}") { - return; - } - - panic!( - "\nTODO markers or todo! macros should not be committed to the master branch,\n\ - use FIXME instead\n\ - {}\n", - path.display(), - ) - } -} - -fn check_dbg(path: &Path, text: &str) { - let need_dbg = &[ - // This file itself obviously needs to use dbg. - "slow-tests/tidy.rs", - // Assists to remove `dbg!()` - "handlers/remove_dbg.rs", - // We have .dbg postfix - "ide-completion/src/completions/postfix.rs", - "ide-completion/src/completions/keyword.rs", - "ide-completion/src/tests/expression.rs", - "ide-completion/src/tests/proc_macros.rs", - // The documentation in string literals may contain anything for its own purposes - "ide-completion/src/lib.rs", - "ide-db/src/generated/lints.rs", - // test for doc test for remove_dbg - "src/tests/generated.rs", - // `expect!` string can contain `dbg!` (due to .dbg postfix) - "ide-completion/src/tests/special.rs", - ]; - if need_dbg.iter().any(|p| path.ends_with(p)) { - return; - } - if text.contains("dbg!") { - panic!( - "\ndbg! 
macros should not be committed to the master branch,\n\ - {}\n", - path.display(), - ) - } -} - fn check_test_attrs(path: &Path, text: &str) { let ignore_rule = "https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/style.md#ignore"; @@ -442,7 +373,7 @@ fn find_marks(set: &mut HashSet, text: &str, mark: &str) { text = stripped_text.trim_start(); if let Some(idx2) = text.find(|c: char| !(c.is_alphanumeric() || c == '_')) { let mark_text = &text[..idx2]; - set.insert(mark_text.to_string()); + set.insert(mark_text.to_owned()); text = &text[idx2..]; } } diff --git a/crates/salsa/Cargo.toml b/crates/salsa/Cargo.toml new file mode 100644 index 000000000000..4ccbc3de846d --- /dev/null +++ b/crates/salsa/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "salsa" +version = "0.0.0" +authors = ["Salsa developers"] +edition = "2021" +license = "Apache-2.0 OR MIT" +repository = "https://github.com/salsa-rs/salsa" +description = "A generic framework for on-demand, incrementalized computation (experimental)" + +rust-version.workspace = true + +[lib] +name = "salsa" + +[dependencies] +indexmap = "2.1.0" +lock_api = "0.4" +tracing = "0.1" +parking_lot = "0.12.1" +rustc-hash = "1.0" +smallvec = "1.0.0" +oorandom = "11" +triomphe = "0.1.11" + +salsa-macros = { version = "0.0.0", path = "salsa-macros" } + +[dev-dependencies] +linked-hash-map = "0.5.6" +rand = "0.8.5" +test-log = "0.2.14" +expect-test = "1.4.0" +dissimilar = "1.0.7" + +[lints] +workspace = true diff --git a/crates/salsa/FAQ.md b/crates/salsa/FAQ.md new file mode 100644 index 000000000000..9c9f6f92da99 --- /dev/null +++ b/crates/salsa/FAQ.md @@ -0,0 +1,34 @@ +# Frequently asked questions + +## Why is it called salsa? + +I like salsa! Don't you?! Well, ok, there's a bit more to it. The +underlying algorithm for figuring out which bits of code need to be +re-executed after any given change is based on the algorithm used in +rustc. 
Michael Woerister and I first described the rustc algorithm in +terms of two colors, red and green, and hence we called it the +"red-green algorithm". This made me think of the New Mexico State +Question --- ["Red or green?"][nm] --- which refers to chile +(salsa). Although this version no longer uses colors (we borrowed +revision counters from Glimmer, instead), I still like the name. + +[nm]: https://www.sos.state.nm.us/about-new-mexico/state-question/ + +## What is the relationship between salsa and an Entity-Component System (ECS)? + +You may have noticed that Salsa "feels" a lot like an ECS in some +ways. That's true -- Salsa's queries are a bit like *components* (and +the keys to the queries are a bit like *entities*). But there is one +big difference: **ECS is -- at its heart -- a mutable system**. You +can get or set a component of some entity whenever you like. In +contrast, salsa's queries **define "derived values" via pure +computations**. + +Partly as a consequence, ECS doesn't handle incremental updates for +you. When you update some component of some entity, you have to ensure +that other entities' components are updated appropriately. + +Finally, ECS offers interesting metadata and "aspect-like" facilities, +such as iterating over all entities that share certain components. +Salsa has no analogue to that. + diff --git a/crates/salsa/LICENSE-APACHE b/crates/salsa/LICENSE-APACHE new file mode 100644 index 000000000000..16fe87b06e80 --- /dev/null +++ b/crates/salsa/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the 
following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/crates/salsa/LICENSE-MIT b/crates/salsa/LICENSE-MIT new file mode 100644 index 000000000000..31aa79387f27 --- /dev/null +++ b/crates/salsa/LICENSE-MIT @@ -0,0 +1,23 @@ +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/crates/salsa/README.md b/crates/salsa/README.md new file mode 100644 index 000000000000..4a8d9f8c7317 --- /dev/null +++ b/crates/salsa/README.md @@ -0,0 +1,42 @@ +# salsa + +*A generic framework for on-demand, incrementalized computation.* + +## Obligatory warning + +This is a fork of https://github.com/salsa-rs/salsa/ adjusted to rust-analyzer's needs. + +## Credits + +This system is heavily inspired by [adapton](http://adapton.org/), [glimmer](https://github.com/glimmerjs/glimmer-vm), and rustc's query +system. So credit goes to Eduard-Mihai Burtescu, Matthew Hammer, +Yehuda Katz, and Michael Woerister. + +## Key idea + +The key idea of `salsa` is that you define your program as a set of +**queries**. Every query is used like function `K -> V` that maps from +some key of type `K` to a value of type `V`. Queries come in two basic +varieties: + +- **Inputs**: the base inputs to your system. You can change these + whenever you like. +- **Functions**: pure functions (no side effects) that transform your + inputs into other values. The results of queries is memoized to + avoid recomputing them a lot. When you make changes to the inputs, + we'll figure out (fairly intelligently) when we can re-use these + memoized values and when we have to recompute them. + +## Want to learn more? 
+ +To learn more about Salsa, try one of the following: + +- read the [heavily commented `hello_world` example](https://github.com/salsa-rs/salsa/blob/master/examples/hello_world/main.rs); +- check out the [Salsa book](https://salsa-rs.github.io/salsa); +- watch one of our [videos](https://salsa-rs.github.io/salsa/videos.html). + +## Getting in touch + +The bulk of the discussion happens in the [issues](https://github.com/salsa-rs/salsa/issues) +and [pull requests](https://github.com/salsa-rs/salsa/pulls), +but we have a [zulip chat](https://salsa.zulipchat.com/) as well. diff --git a/crates/salsa/salsa-macros/Cargo.toml b/crates/salsa/salsa-macros/Cargo.toml new file mode 100644 index 000000000000..791d2f6e9f5a --- /dev/null +++ b/crates/salsa/salsa-macros/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "salsa-macros" +version = "0.0.0" +authors = ["Salsa developers"] +edition = "2021" +license = "Apache-2.0 OR MIT" +repository = "https://github.com/salsa-rs/salsa" +description = "Procedural macros for the salsa crate" + +rust-version.workspace = true + +[lib] +proc-macro = true +name = "salsa_macros" + +[dependencies] +heck = "0.4" +proc-macro2 = "1.0" +quote = "1.0" +syn = { version = "2.0", features = ["full", "extra-traits"] } + +[lints] +workspace = true diff --git a/crates/salsa/salsa-macros/LICENSE-APACHE b/crates/salsa/salsa-macros/LICENSE-APACHE new file mode 100644 index 000000000000..0bf2cad6488f --- /dev/null +++ b/crates/salsa/salsa-macros/LICENSE-APACHE @@ -0,0 +1 @@ +../LICENSE-APACHE diff --git a/crates/salsa/salsa-macros/LICENSE-MIT b/crates/salsa/salsa-macros/LICENSE-MIT new file mode 100644 index 000000000000..d99cce5f7206 --- /dev/null +++ b/crates/salsa/salsa-macros/LICENSE-MIT @@ -0,0 +1 @@ +../LICENSE-MIT diff --git a/crates/salsa/salsa-macros/README.md b/crates/salsa/salsa-macros/README.md new file mode 100644 index 000000000000..94389aee61a0 --- /dev/null +++ b/crates/salsa/salsa-macros/README.md @@ -0,0 +1 @@ +../README.md diff --git 
a/crates/salsa/salsa-macros/src/database_storage.rs b/crates/salsa/salsa-macros/src/database_storage.rs new file mode 100644 index 000000000000..0ec75bb043db --- /dev/null +++ b/crates/salsa/salsa-macros/src/database_storage.rs @@ -0,0 +1,250 @@ +//! +use heck::ToSnakeCase; +use proc_macro::TokenStream; +use syn::parse::{Parse, ParseStream}; +use syn::punctuated::Punctuated; +use syn::{Ident, ItemStruct, Path, Token}; + +type PunctuatedQueryGroups = Punctuated; + +pub(crate) fn database(args: TokenStream, input: TokenStream) -> TokenStream { + let args = syn::parse_macro_input!(args as QueryGroupList); + let input = syn::parse_macro_input!(input as ItemStruct); + + let query_groups = &args.query_groups; + let database_name = &input.ident; + let visibility = &input.vis; + let db_storage_field = quote! { storage }; + + let mut output = proc_macro2::TokenStream::new(); + output.extend(quote! { #input }); + + let query_group_names_snake: Vec<_> = query_groups + .iter() + .map(|query_group| { + let group_name = query_group.name(); + Ident::new(&group_name.to_string().to_snake_case(), group_name.span()) + }) + .collect(); + + let query_group_storage_names: Vec<_> = query_groups + .iter() + .map(|QueryGroup { group_path }| { + quote! { + <#group_path as salsa::plumbing::QueryGroup>::GroupStorage + } + }) + .collect(); + + // For each query group `foo::MyGroup` create a link to its + // `foo::MyGroupGroupStorage` + let mut storage_fields = proc_macro2::TokenStream::new(); + let mut storage_initializers = proc_macro2::TokenStream::new(); + let mut has_group_impls = proc_macro2::TokenStream::new(); + for (((query_group, group_name_snake), group_storage), group_index) in query_groups + .iter() + .zip(&query_group_names_snake) + .zip(&query_group_storage_names) + .zip(0_u16..) + { + let group_path = &query_group.group_path; + + // rewrite the last identifier (`MyGroup`, above) to + // (e.g.) `MyGroupGroupStorage`. + storage_fields.extend(quote! 
{ + #group_name_snake: #group_storage, + }); + + // rewrite the last identifier (`MyGroup`, above) to + // (e.g.) `MyGroupGroupStorage`. + storage_initializers.extend(quote! { + #group_name_snake: #group_storage::new(#group_index), + }); + + // ANCHOR:HasQueryGroup + has_group_impls.extend(quote! { + impl salsa::plumbing::HasQueryGroup<#group_path> for #database_name { + fn group_storage(&self) -> &#group_storage { + &self.#db_storage_field.query_store().#group_name_snake + } + + fn group_storage_mut(&mut self) -> (&#group_storage, &mut salsa::Runtime) { + let (query_store_mut, runtime) = self.#db_storage_field.query_store_mut(); + (&query_store_mut.#group_name_snake, runtime) + } + } + }); + // ANCHOR_END:HasQueryGroup + } + + // create group storage wrapper struct + output.extend(quote! { + #[doc(hidden)] + #visibility struct __SalsaDatabaseStorage { + #storage_fields + } + + impl Default for __SalsaDatabaseStorage { + fn default() -> Self { + Self { + #storage_initializers + } + } + } + }); + + // Create a tuple (D1, D2, ...) where Di is the data for a given query group. + let mut database_data = vec![]; + for QueryGroup { group_path } in query_groups { + database_data.push(quote! { + <#group_path as salsa::plumbing::QueryGroup>::GroupData + }); + } + + // ANCHOR:DatabaseStorageTypes + output.extend(quote! { + impl salsa::plumbing::DatabaseStorageTypes for #database_name { + type DatabaseStorage = __SalsaDatabaseStorage; + } + }); + // ANCHOR_END:DatabaseStorageTypes + + // ANCHOR:DatabaseOps + let mut fmt_ops = proc_macro2::TokenStream::new(); + let mut maybe_changed_ops = proc_macro2::TokenStream::new(); + let mut cycle_recovery_strategy_ops = proc_macro2::TokenStream::new(); + let mut for_each_ops = proc_macro2::TokenStream::new(); + for ((QueryGroup { group_path }, group_storage), group_index) in + query_groups.iter().zip(&query_group_storage_names).zip(0_u16..) + { + fmt_ops.extend(quote! 
{ + #group_index => { + let storage: &#group_storage = + >::group_storage(self); + storage.fmt_index(self, input, fmt) + } + }); + maybe_changed_ops.extend(quote! { + #group_index => { + let storage: &#group_storage = + >::group_storage(self); + storage.maybe_changed_after(self, input, revision) + } + }); + cycle_recovery_strategy_ops.extend(quote! { + #group_index => { + let storage: &#group_storage = + >::group_storage(self); + storage.cycle_recovery_strategy(self, input) + } + }); + for_each_ops.extend(quote! { + let storage: &#group_storage = + >::group_storage(self); + storage.for_each_query(runtime, &mut op); + }); + } + output.extend(quote! { + impl salsa::plumbing::DatabaseOps for #database_name { + fn ops_database(&self) -> &dyn salsa::Database { + self + } + + fn ops_salsa_runtime(&self) -> &salsa::Runtime { + self.#db_storage_field.salsa_runtime() + } + + fn ops_salsa_runtime_mut(&mut self) -> &mut salsa::Runtime { + self.#db_storage_field.salsa_runtime_mut() + } + + fn fmt_index( + &self, + input: salsa::DatabaseKeyIndex, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + match input.group_index() { + #fmt_ops + i => panic!("salsa: invalid group index {}", i) + } + } + + fn maybe_changed_after( + &self, + input: salsa::DatabaseKeyIndex, + revision: salsa::Revision + ) -> bool { + match input.group_index() { + #maybe_changed_ops + i => panic!("salsa: invalid group index {}", i) + } + } + + fn cycle_recovery_strategy( + &self, + input: salsa::DatabaseKeyIndex, + ) -> salsa::plumbing::CycleRecoveryStrategy { + match input.group_index() { + #cycle_recovery_strategy_ops + i => panic!("salsa: invalid group index {}", i) + } + } + + fn for_each_query( + &self, + mut op: &mut dyn FnMut(&dyn salsa::plumbing::QueryStorageMassOps), + ) { + let runtime = salsa::Database::salsa_runtime(self); + #for_each_ops + } + } + }); + // ANCHOR_END:DatabaseOps + + output.extend(has_group_impls); + + output.into() +} + +#[derive(Clone, Debug)] +struct 
QueryGroupList { + query_groups: PunctuatedQueryGroups, +} + +impl Parse for QueryGroupList { + fn parse(input: ParseStream<'_>) -> syn::Result { + let query_groups: PunctuatedQueryGroups = + input.parse_terminated(QueryGroup::parse, Token![,])?; + Ok(QueryGroupList { query_groups }) + } +} + +#[derive(Clone, Debug)] +struct QueryGroup { + group_path: Path, +} + +impl QueryGroup { + /// The name of the query group trait. + fn name(&self) -> Ident { + self.group_path.segments.last().unwrap().ident.clone() + } +} + +impl Parse for QueryGroup { + /// ```ignore + /// impl HelloWorldDatabase; + /// ``` + fn parse(input: ParseStream<'_>) -> syn::Result { + let group_path: Path = input.parse()?; + Ok(QueryGroup { group_path }) + } +} + +struct Nothing; + +impl Parse for Nothing { + fn parse(_input: ParseStream<'_>) -> syn::Result { + Ok(Nothing) + } +} diff --git a/crates/salsa/salsa-macros/src/lib.rs b/crates/salsa/salsa-macros/src/lib.rs new file mode 100644 index 000000000000..8af48b1e3f83 --- /dev/null +++ b/crates/salsa/salsa-macros/src/lib.rs @@ -0,0 +1,146 @@ +//! This crate provides salsa's macros and attributes. + +#![recursion_limit = "256"] + +#[macro_use] +extern crate quote; + +use proc_macro::TokenStream; + +mod database_storage; +mod parenthesized; +mod query_group; + +/// The decorator that defines a salsa "query group" trait. This is a +/// trait that defines everything that a block of queries need to +/// execute, as well as defining the queries themselves that are +/// exported for others to use. +/// +/// This macro declares the "prototype" for a group of queries. It will +/// expand into a trait and a set of structs, one per query. +/// +/// For each query, you give the name of the accessor method to invoke +/// the query (e.g., `my_query`, below), as well as its parameter +/// types and the output type. 
You also give the name for a query type +/// (e.g., `MyQuery`, below) that represents the query, and optionally +/// other details, such as its storage. +/// +/// # Examples +/// +/// The simplest example is something like this: +/// +/// ```ignore +/// #[salsa::query_group] +/// trait TypeckDatabase { +/// #[salsa::input] // see below for other legal attributes +/// fn my_query(&self, input: u32) -> u64; +/// +/// /// Queries can have any number of inputs (including zero); if there +/// /// is not exactly one input, then the key type will be +/// /// a tuple of the input types, so in this case `(u32, f32)`. +/// fn other_query(&self, input1: u32, input2: f32) -> u64; +/// } +/// ``` +/// +/// Here is a list of legal `salsa::XXX` attributes: +/// +/// - Storage attributes: control how the query data is stored and set. These +/// are described in detail in the section below. +/// - `#[salsa::input]` +/// - `#[salsa::memoized]` +/// - `#[salsa::dependencies]` +/// - Query execution: +/// - `#[salsa::invoke(path::to::my_fn)]` -- for a non-input, this +/// indicates the function to call when a query must be +/// recomputed. The default is to call a function in the same +/// module with the same name as the query. +/// - `#[query_type(MyQueryTypeName)]` specifies the name of the +/// dummy struct created for the query. Default is the name of the +/// query, in camel case, plus the word "Query" (e.g., +/// `MyQueryQuery` and `OtherQueryQuery` in the examples above). +/// +/// # Storage attributes +/// +/// Here are the possible storage values for each query. The default +/// is `storage memoized`. +/// +/// ## Input queries +/// +/// Specifying `storage input` will give you an **input +/// query**. Unlike derived queries, whose value is given by a +/// function, input queries are explicitly set by doing +/// `db.query(QueryType).set(key, value)` (where `QueryType` is the +/// `type` specified for the query). Accessing a value that has not +/// yet been set will panic. 
Each time you invoke `set`, we assume the +/// value has changed, and so we will potentially re-execute derived +/// queries that read (transitively) from this input. +/// +/// ## Derived queries +/// +/// Derived queries are specified by a function. +/// +/// - `#[salsa::memoized]` (the default) -- The result is memoized +/// between calls. If the inputs have changed, we will recompute +/// the value, but then compare against the old memoized value, +/// which can significantly reduce the amount of recomputation +/// required in new revisions. This does require that the value +/// implements `Eq`. +/// - `#[salsa::dependencies]` -- does not cache the value, so it will +/// be recomputed every time it is needed. We do track the inputs, however, +/// so if they have not changed, then things that rely on this query +/// may be known not to have changed. +/// +/// ## Attribute combinations +/// +/// Some attributes are mutually exclusive. For example, it is an error to add +/// multiple storage specifiers: +/// +/// ```compile_fail +/// # use salsa_macros as salsa; +/// #[salsa::query_group] +/// trait CodegenDatabase { +/// #[salsa::input] +/// #[salsa::memoized] +/// fn my_query(&self, input: u32) -> u64; +/// } +/// ``` +/// +/// It is also an error to annotate a function to `invoke` on an `input` query: +/// +/// ```compile_fail +/// # use salsa_macros as salsa; +/// #[salsa::query_group] +/// trait CodegenDatabase { +/// #[salsa::input] +/// #[salsa::invoke(typeck::my_query)] +/// fn my_query(&self, input: u32) -> u64; +/// } +/// ``` +#[proc_macro_attribute] +pub fn query_group(args: TokenStream, input: TokenStream) -> TokenStream { + query_group::query_group(args, input) +} + +/// This attribute is placed on your database struct. It takes a list of the +/// query groups that your database supports. 
The format looks like so: +/// +/// ```rust,ignore +/// #[salsa::database(MyQueryGroup1, MyQueryGroup2)] +/// struct MyDatabase { +/// runtime: salsa::Runtime, // <-- your database will need this field, too +/// } +/// ``` +/// +/// Here, the struct `MyDatabase` would support the two query groups +/// `MyQueryGroup1` and `MyQueryGroup2`. In addition to the `database` +/// attribute, the struct needs to have a `runtime` field (of type +/// [`salsa::Runtime`]) and to implement the `salsa::Database` trait. +/// +/// See [the `hello_world` example][hw] for more details. +/// +/// [`salsa::Runtime`]: struct.Runtime.html +/// [hw]: https://github.com/salsa-rs/salsa/tree/master/examples/hello_world +#[proc_macro_attribute] +pub fn database(args: TokenStream, input: TokenStream) -> TokenStream { + database_storage::database(args, input) +} diff --git a/crates/salsa/salsa-macros/src/parenthesized.rs b/crates/salsa/salsa-macros/src/parenthesized.rs new file mode 100644 index 000000000000..9df41e03c162 --- /dev/null +++ b/crates/salsa/salsa-macros/src/parenthesized.rs @@ -0,0 +1,13 @@ +//! +pub(crate) struct Parenthesized(pub(crate) T); + +impl syn::parse::Parse for Parenthesized +where + T: syn::parse::Parse, +{ + fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result { + let content; + syn::parenthesized!(content in input); + content.parse::().map(Parenthesized) + } +} diff --git a/crates/salsa/salsa-macros/src/query_group.rs b/crates/salsa/salsa-macros/src/query_group.rs new file mode 100644 index 000000000000..e535d7ed0438 --- /dev/null +++ b/crates/salsa/salsa-macros/src/query_group.rs @@ -0,0 +1,737 @@ +//! 
+use std::{convert::TryFrom, iter::FromIterator}; + +use crate::parenthesized::Parenthesized; +use heck::ToUpperCamelCase; +use proc_macro::TokenStream; +use proc_macro2::Span; +use quote::ToTokens; +use syn::{ + parse_macro_input, parse_quote, spanned::Spanned, Attribute, Error, FnArg, Ident, ItemTrait, + ReturnType, TraitItem, Type, +}; + +/// Implementation for `[salsa::query_group]` decorator. +pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream { + let group_struct = parse_macro_input!(args as Ident); + let input: ItemTrait = parse_macro_input!(input as ItemTrait); + // println!("args: {:#?}", args); + // println!("input: {:#?}", input); + + let input_span = input.span(); + let (trait_attrs, salsa_attrs) = filter_attrs(input.attrs); + if !salsa_attrs.is_empty() { + return Error::new(input_span, format!("unsupported attributes: {:?}", salsa_attrs)) + .to_compile_error() + .into(); + } + + let trait_vis = input.vis; + let trait_name = input.ident; + let _generics = input.generics.clone(); + let dyn_db = quote! { dyn #trait_name }; + + // Decompose the trait into the corresponding queries. + let mut queries = vec![]; + for item in input.items { + if let TraitItem::Fn(method) = item { + let query_name = method.sig.ident.to_string(); + + let mut storage = QueryStorage::Memoized; + let mut cycle = None; + let mut invoke = None; + + let mut query_type = + format_ident!("{}Query", query_name.to_string().to_upper_camel_case()); + let mut num_storages = 0; + + // Extract attributes. 
+ let (attrs, salsa_attrs) = filter_attrs(method.attrs); + for SalsaAttr { name, tts, span } in salsa_attrs { + match name.as_str() { + "memoized" => { + storage = QueryStorage::Memoized; + num_storages += 1; + } + "dependencies" => { + storage = QueryStorage::Dependencies; + num_storages += 1; + } + "input" => { + storage = QueryStorage::Input; + num_storages += 1; + } + "interned" => { + storage = QueryStorage::Interned; + num_storages += 1; + } + "cycle" => { + cycle = Some(parse_macro_input!(tts as Parenthesized).0); + } + "invoke" => { + invoke = Some(parse_macro_input!(tts as Parenthesized).0); + } + "query_type" => { + query_type = parse_macro_input!(tts as Parenthesized).0; + } + "transparent" => { + storage = QueryStorage::Transparent; + num_storages += 1; + } + _ => { + return Error::new(span, format!("unknown salsa attribute `{}`", name)) + .to_compile_error() + .into(); + } + } + } + + let sig_span = method.sig.span(); + // Check attribute combinations. + if num_storages > 1 { + return Error::new(sig_span, "multiple storage attributes specified") + .to_compile_error() + .into(); + } + match &invoke { + Some(invoke) if storage == QueryStorage::Input => { + return Error::new( + invoke.span(), + "#[salsa::invoke] cannot be set on #[salsa::input] queries", + ) + .to_compile_error() + .into(); + } + _ => {} + } + + // Extract keys. + let mut iter = method.sig.inputs.iter(); + let self_receiver = match iter.next() { + Some(FnArg::Receiver(sr)) if sr.mutability.is_none() => sr, + _ => { + return Error::new( + sig_span, + format!("first argument of query `{}` must be `&self`", query_name), + ) + .to_compile_error() + .into(); + } + }; + let mut keys: Vec<(Ident, Type)> = vec![]; + for (idx, arg) in iter.enumerate() { + match arg { + FnArg::Typed(syn::PatType { pat, ty, .. 
}) => keys.push(( + match pat.as_ref() { + syn::Pat::Ident(ident_pat) => ident_pat.ident.clone(), + _ => format_ident!("key{}", idx), + }, + Type::clone(ty), + )), + arg => { + return Error::new( + arg.span(), + format!("unsupported argument `{:?}` of `{}`", arg, query_name,), + ) + .to_compile_error() + .into(); + } + } + } + + // Extract value. + let value = match method.sig.output { + ReturnType::Type(_, ref ty) => ty.as_ref().clone(), + ref ret => { + return Error::new( + ret.span(), + format!("unsupported return type `{:?}` of `{}`", ret, query_name), + ) + .to_compile_error() + .into(); + } + }; + + // For `#[salsa::interned]` keys, we create a "lookup key" automatically. + // + // For a query like: + // + // fn foo(&self, x: Key1, y: Key2) -> u32 + // + // we would create + // + // fn lookup_foo(&self, x: u32) -> (Key1, Key2) + let lookup_query = if let QueryStorage::Interned = storage { + let lookup_query_type = + format_ident!("{}LookupQuery", query_name.to_string().to_upper_camel_case()); + let lookup_fn_name = format_ident!("lookup_{}", query_name); + let keys = keys.iter().map(|(_, ty)| ty); + let lookup_value: Type = parse_quote!((#(#keys),*)); + let lookup_keys = vec![(parse_quote! { key }, value.clone())]; + Some(Query { + query_type: lookup_query_type, + query_name: format!("{}", lookup_fn_name), + fn_name: lookup_fn_name, + receiver: self_receiver.clone(), + attrs: vec![], // FIXME -- some automatically generated docs on this method? 
+ storage: QueryStorage::InternedLookup { intern_query_type: query_type.clone() }, + keys: lookup_keys, + value: lookup_value, + invoke: None, + cycle: cycle.clone(), + }) + } else { + None + }; + + queries.push(Query { + query_type, + query_name, + fn_name: method.sig.ident, + receiver: self_receiver.clone(), + attrs, + storage, + keys, + value, + invoke, + cycle, + }); + + queries.extend(lookup_query); + } + } + + let group_storage = format_ident!("{}GroupStorage__", trait_name, span = Span::call_site()); + + let mut query_fn_declarations = proc_macro2::TokenStream::new(); + let mut query_fn_definitions = proc_macro2::TokenStream::new(); + let mut storage_fields = proc_macro2::TokenStream::new(); + let mut queries_with_storage = vec![]; + for query in &queries { + #[allow(clippy::map_identity)] + // clippy is incorrect here, this is not the identity function due to match ergonomics + let (key_names, keys): (Vec<_>, Vec<_>) = query.keys.iter().map(|(a, b)| (a, b)).unzip(); + let value = &query.value; + let fn_name = &query.fn_name; + let qt = &query.query_type; + let attrs = &query.attrs; + let self_receiver = &query.receiver; + + query_fn_declarations.extend(quote! { + #(#attrs)* + fn #fn_name(#self_receiver, #(#key_names: #keys),*) -> #value; + }); + + // Special case: transparent queries don't create actual storage, + // just inline the definition + if let QueryStorage::Transparent = query.storage { + let invoke = query.invoke_tt(); + query_fn_definitions.extend(quote! { + fn #fn_name(&self, #(#key_names: #keys),*) -> #value { + #invoke(self, #(#key_names),*) + } + }); + continue; + } + + queries_with_storage.push(fn_name); + + query_fn_definitions.extend(quote! { + fn #fn_name(&self, #(#key_names: #keys),*) -> #value { + // Create a shim to force the code to be monomorphized in the + // query crate. Our experiments revealed that this makes a big + // difference in total compilation time in rust-analyzer, though + // it's not totally obvious why that should be. 
+ fn __shim(db: &(dyn #trait_name + '_), #(#key_names: #keys),*) -> #value { + salsa::plumbing::get_query_table::<#qt>(db).get((#(#key_names),*)) + } + __shim(self, #(#key_names),*) + + } + }); + + // For input queries, we need `set_foo` etc + if let QueryStorage::Input = query.storage { + let set_fn_name = format_ident!("set_{}", fn_name); + let set_with_durability_fn_name = format_ident!("set_{}_with_durability", fn_name); + + let set_fn_docs = format!( + " + Set the value of the `{fn_name}` input. + + See `{fn_name}` for details. + + *Note:* Setting values will trigger cancellation + of any ongoing queries; this method blocks until + those queries have been cancelled. + ", + fn_name = fn_name + ); + + let set_constant_fn_docs = format!( + " + Set the value of the `{fn_name}` input with a + specific durability instead of the default of + `Durability::LOW`. You can use `Durability::MAX` + to promise that its value will never change again. + + See `{fn_name}` for details. + + *Note:* Setting values will trigger cancellation + of any ongoing queries; this method blocks until + those queries have been cancelled. + ", + fn_name = fn_name + ); + + query_fn_declarations.extend(quote! { + # [doc = #set_fn_docs] + fn #set_fn_name(&mut self, #(#key_names: #keys,)* value__: #value); + + + # [doc = #set_constant_fn_docs] + fn #set_with_durability_fn_name(&mut self, #(#key_names: #keys,)* value__: #value, durability__: salsa::Durability); + }); + + query_fn_definitions.extend(quote! 
{ + fn #set_fn_name(&mut self, #(#key_names: #keys,)* value__: #value) { + fn __shim(db: &mut dyn #trait_name, #(#key_names: #keys,)* value__: #value) { + salsa::plumbing::get_query_table_mut::<#qt>(db).set((#(#key_names),*), value__) + } + __shim(self, #(#key_names,)* value__) + } + + fn #set_with_durability_fn_name(&mut self, #(#key_names: #keys,)* value__: #value, durability__: salsa::Durability) { + fn __shim(db: &mut dyn #trait_name, #(#key_names: #keys,)* value__: #value, durability__: salsa::Durability) { + salsa::plumbing::get_query_table_mut::<#qt>(db).set_with_durability((#(#key_names),*), value__, durability__) + } + __shim(self, #(#key_names,)* value__ ,durability__) + } + }); + } + + // A field for the storage struct + storage_fields.extend(quote! { + #fn_name: std::sync::Arc<<#qt as salsa::Query>::Storage>, + }); + } + + // Emit the trait itself. + let mut output = { + let bounds = &input.supertraits; + quote! { + #(#trait_attrs)* + #trait_vis trait #trait_name : + salsa::Database + + salsa::plumbing::HasQueryGroup<#group_struct> + + #bounds + { + #query_fn_declarations + } + } + }; + + // Emit the query group struct and impl of `QueryGroup`. + output.extend(quote! { + /// Representative struct for the query group. + #trait_vis struct #group_struct { } + + impl salsa::plumbing::QueryGroup for #group_struct + { + type DynDb = #dyn_db; + type GroupStorage = #group_storage; + } + }); + + // Emit an impl of the trait + output.extend({ + let bounds = input.supertraits; + quote! { + impl #trait_name for DB + where + DB: #bounds, + DB: salsa::Database, + DB: salsa::plumbing::HasQueryGroup<#group_struct>, + { + #query_fn_definitions + } + } + }); + + let non_transparent_queries = + || queries.iter().filter(|q| !matches!(q.storage, QueryStorage::Transparent)); + + // Emit the query types. + for (query, query_index) in non_transparent_queries().zip(0_u16..) 
{ + let fn_name = &query.fn_name; + let qt = &query.query_type; + + let storage = match &query.storage { + QueryStorage::Memoized => quote!(salsa::plumbing::MemoizedStorage), + QueryStorage::Dependencies => { + quote!(salsa::plumbing::DependencyStorage) + } + QueryStorage::Input if query.keys.is_empty() => { + quote!(salsa::plumbing::UnitInputStorage) + } + QueryStorage::Input => quote!(salsa::plumbing::InputStorage), + QueryStorage::Interned => quote!(salsa::plumbing::InternedStorage), + QueryStorage::InternedLookup { intern_query_type } => { + quote!(salsa::plumbing::LookupInternedStorage) + } + QueryStorage::Transparent => panic!("should have been filtered"), + }; + let keys = query.keys.iter().map(|(_, ty)| ty); + let value = &query.value; + let query_name = &query.query_name; + + // Emit the query struct and implement the Query trait on it. + output.extend(quote! { + #[derive(Default, Debug)] + #trait_vis struct #qt; + }); + + output.extend(quote! { + impl #qt { + /// Get access to extra methods pertaining to this query. + /// You can also use it to invoke this query. + #trait_vis fn in_db(self, db: &#dyn_db) -> salsa::QueryTable<'_, Self> + { + salsa::plumbing::get_query_table::<#qt>(db) + } + } + }); + + output.extend(quote! { + impl #qt { + /// Like `in_db`, but gives access to methods for setting the + /// value of an input. Not applicable to derived queries. + /// + /// # Threads, cancellation, and blocking + /// + /// Mutating the value of a query cannot be done while there are + /// still other queries executing. If you are using your database + /// within a single thread, this is not a problem: you only have + /// `&self` access to the database, but this method requires `&mut + /// self`. + /// + /// However, if you have used `snapshot` to create other threads, + /// then attempts to `set` will **block the current thread** until + /// those snapshots are dropped (usually when those threads + /// complete). 
This also implies that if you create a snapshot but + /// do not send it to another thread, then invoking `set` will + /// deadlock. + /// + /// Before blocking, the thread that is attempting to `set` will + /// also set a cancellation flag. This will cause any query + /// invocations in other threads to unwind with a `Cancelled` + /// sentinel value and eventually let the `set` succeed once all + /// threads have unwound past the salsa invocation. + /// + /// If your query implementations are performing expensive + /// operations without invoking another query, you can also use + /// the `Runtime::unwind_if_cancelled` method to check for an + /// ongoing cancellation and bring those operations to a close, + /// thus allowing the `set` to succeed. Otherwise, long-running + /// computations may lead to "starvation", meaning that the + /// thread attempting to `set` has to wait a long, long time. =) + #trait_vis fn in_db_mut(self, db: &mut #dyn_db) -> salsa::QueryTableMut<'_, Self> + { + salsa::plumbing::get_query_table_mut::<#qt>(db) + } + } + + impl<'d> salsa::QueryDb<'d> for #qt + { + type DynDb = #dyn_db + 'd; + type Group = #group_struct; + type GroupStorage = #group_storage; + } + + // ANCHOR:Query_impl + impl salsa::Query for #qt + { + type Key = (#(#keys),*); + type Value = #value; + type Storage = #storage; + + const QUERY_INDEX: u16 = #query_index; + + const QUERY_NAME: &'static str = #query_name; + + fn query_storage<'a>( + group_storage: &'a >::GroupStorage, + ) -> &'a std::sync::Arc { + &group_storage.#fn_name + } + + fn query_storage_mut<'a>( + group_storage: &'a >::GroupStorage, + ) -> &'a std::sync::Arc { + &group_storage.#fn_name + } + } + // ANCHOR_END:Query_impl + }); + + // Implement the QueryFunction trait for queries which need it. + if query.storage.needs_query_function() { + let span = query.fn_name.span(); + + let key_names: Vec<_> = query.keys.iter().map(|(pat, _)| pat).collect(); + let key_pattern = if query.keys.len() == 1 { + quote! 
{ #(#key_names),* } + } else { + quote! { (#(#key_names),*) } + }; + let invoke = query.invoke_tt(); + + let recover = if let Some(cycle_recovery_fn) = &query.cycle { + quote! { + const CYCLE_STRATEGY: salsa::plumbing::CycleRecoveryStrategy = + salsa::plumbing::CycleRecoveryStrategy::Fallback; + fn cycle_fallback(db: &>::DynDb, cycle: &salsa::Cycle, #key_pattern: &::Key) + -> ::Value { + #cycle_recovery_fn( + db, + cycle, + #(#key_names),* + ) + } + } + } else { + quote! { + const CYCLE_STRATEGY: salsa::plumbing::CycleRecoveryStrategy = + salsa::plumbing::CycleRecoveryStrategy::Panic; + } + }; + + output.extend(quote_spanned! {span=> + // ANCHOR:QueryFunction_impl + impl salsa::plumbing::QueryFunction for #qt + { + fn execute(db: &>::DynDb, #key_pattern: ::Key) + -> ::Value { + #invoke(db, #(#key_names),*) + } + + #recover + } + // ANCHOR_END:QueryFunction_impl + }); + } + } + + let mut fmt_ops = proc_macro2::TokenStream::new(); + for (Query { fn_name, .. }, query_index) in non_transparent_queries().zip(0_u16..) { + fmt_ops.extend(quote! { + #query_index => { + salsa::plumbing::QueryStorageOps::fmt_index( + &*self.#fn_name, db, input, fmt, + ) + } + }); + } + + let mut maybe_changed_ops = proc_macro2::TokenStream::new(); + for (Query { fn_name, .. }, query_index) in non_transparent_queries().zip(0_u16..) { + maybe_changed_ops.extend(quote! { + #query_index => { + salsa::plumbing::QueryStorageOps::maybe_changed_after( + &*self.#fn_name, db, input, revision + ) + } + }); + } + + let mut cycle_recovery_strategy_ops = proc_macro2::TokenStream::new(); + for (Query { fn_name, .. }, query_index) in non_transparent_queries().zip(0_u16..) { + cycle_recovery_strategy_ops.extend(quote! { + #query_index => { + salsa::plumbing::QueryStorageOps::cycle_recovery_strategy( + &*self.#fn_name + ) + } + }); + } + + let mut for_each_ops = proc_macro2::TokenStream::new(); + for Query { fn_name, .. } in non_transparent_queries() { + for_each_ops.extend(quote! 
{ + op(&*self.#fn_name); + }); + } + + // Emit query group storage struct + output.extend(quote! { + #trait_vis struct #group_storage { + #storage_fields + } + + // ANCHOR:group_storage_new + impl #group_storage { + #trait_vis fn new(group_index: u16) -> Self { + #group_storage { + #( + #queries_with_storage: + std::sync::Arc::new(salsa::plumbing::QueryStorageOps::new(group_index)), + )* + } + } + } + // ANCHOR_END:group_storage_new + + // ANCHOR:group_storage_methods + impl #group_storage { + #trait_vis fn fmt_index( + &self, + db: &(#dyn_db + '_), + input: salsa::DatabaseKeyIndex, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + match input.query_index() { + #fmt_ops + i => panic!("salsa: impossible query index {}", i), + } + } + + #trait_vis fn maybe_changed_after( + &self, + db: &(#dyn_db + '_), + input: salsa::DatabaseKeyIndex, + revision: salsa::Revision, + ) -> bool { + match input.query_index() { + #maybe_changed_ops + i => panic!("salsa: impossible query index {}", i), + } + } + + #trait_vis fn cycle_recovery_strategy( + &self, + db: &(#dyn_db + '_), + input: salsa::DatabaseKeyIndex, + ) -> salsa::plumbing::CycleRecoveryStrategy { + match input.query_index() { + #cycle_recovery_strategy_ops + i => panic!("salsa: impossible query index {}", i), + } + } + + #trait_vis fn for_each_query( + &self, + _runtime: &salsa::Runtime, + mut op: &mut dyn FnMut(&dyn salsa::plumbing::QueryStorageMassOps), + ) { + #for_each_ops + } + } + // ANCHOR_END:group_storage_methods + }); + output.into() +} + +struct SalsaAttr { + name: String, + tts: TokenStream, + span: Span, +} + +impl std::fmt::Debug for SalsaAttr { + fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(fmt, "{:?}", self.name) + } +} + +impl TryFrom for SalsaAttr { + type Error = syn::Attribute; + + fn try_from(attr: syn::Attribute) -> Result { + if is_not_salsa_attr_path(attr.path()) { + return Err(attr); + } + + let span = attr.span(); + let name = 
attr.path().segments[1].ident.to_string(); + let tts = match attr.meta { + syn::Meta::Path(path) => path.into_token_stream(), + syn::Meta::List(ref list) => { + let tts = list + .into_token_stream() + .into_iter() + .skip(attr.path().to_token_stream().into_iter().count()); + proc_macro2::TokenStream::from_iter(tts) + } + syn::Meta::NameValue(nv) => nv.into_token_stream(), + } + .into(); + + Ok(SalsaAttr { name, tts, span }) + } +} + +fn is_not_salsa_attr_path(path: &syn::Path) -> bool { + path.segments.first().map(|s| s.ident != "salsa").unwrap_or(true) || path.segments.len() != 2 +} + +fn filter_attrs(attrs: Vec) -> (Vec, Vec) { + let mut other = vec![]; + let mut salsa = vec![]; + // Leave non-salsa attributes untouched. These are + // attributes that don't start with `salsa::` or don't have + // exactly two segments in their path. + // Keep the salsa attributes around. + for attr in attrs { + match SalsaAttr::try_from(attr) { + Ok(it) => salsa.push(it), + Err(it) => other.push(it), + } + } + (other, salsa) +} + +#[derive(Debug)] +struct Query { + fn_name: Ident, + receiver: syn::Receiver, + query_name: String, + attrs: Vec, + query_type: Ident, + storage: QueryStorage, + keys: Vec<(Ident, syn::Type)>, + value: syn::Type, + invoke: Option, + cycle: Option, +} + +impl Query { + fn invoke_tt(&self) -> proc_macro2::TokenStream { + match &self.invoke { + Some(i) => i.into_token_stream(), + None => self.fn_name.clone().into_token_stream(), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +enum QueryStorage { + Memoized, + Dependencies, + Input, + Interned, + InternedLookup { intern_query_type: Ident }, + Transparent, +} + +impl QueryStorage { + /// Do we need a `QueryFunction` impl for this type of query? + fn needs_query_function(&self) -> bool { + match self { + QueryStorage::Input + | QueryStorage::Interned + | QueryStorage::InternedLookup { .. 
} + | QueryStorage::Transparent => false, + QueryStorage::Memoized | QueryStorage::Dependencies => true, + } + } +} diff --git a/crates/salsa/src/debug.rs b/crates/salsa/src/debug.rs new file mode 100644 index 000000000000..0925ddb3d85b --- /dev/null +++ b/crates/salsa/src/debug.rs @@ -0,0 +1,66 @@ +//! Debugging APIs: these are meant for use when unit-testing or +//! debugging your application but aren't ordinarily needed. + +use crate::durability::Durability; +use crate::plumbing::QueryStorageOps; +use crate::Query; +use crate::QueryTable; +use std::iter::FromIterator; + +/// Additional methods on queries that can be used to "peek into" +/// their current state. These methods are meant for debugging and +/// observing the effects of garbage collection etc. +pub trait DebugQueryTable { + /// Key of this query. + type Key; + + /// Value of this query. + type Value; + + /// Returns a lower bound on the durability for the given key. + /// This is typically the minimum durability of all values that + /// the query accessed, but we may return a lower durability in + /// some cases. + fn durability(&self, key: Self::Key) -> Durability; + + /// Get the (current) set of the entries in the query table. + fn entries(&self) -> C + where + C: FromIterator>; +} + +/// An entry from a query table, for debugging and inspecting the table state. 
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] +#[non_exhaustive] +pub struct TableEntry { + /// key of the query + pub key: K, + /// value of the query, if it is stored + pub value: Option, +} + +impl TableEntry { + pub(crate) fn new(key: K, value: Option) -> TableEntry { + TableEntry { key, value } + } +} + +impl DebugQueryTable for QueryTable<'_, Q> +where + Q: Query, + Q::Storage: QueryStorageOps, +{ + type Key = Q::Key; + type Value = Q::Value; + + fn durability(&self, key: Q::Key) -> Durability { + self.storage.durability(self.db, &key) + } + + fn entries(&self) -> C + where + C: FromIterator>, + { + self.storage.entries(self.db) + } +} diff --git a/crates/salsa/src/derived.rs b/crates/salsa/src/derived.rs new file mode 100644 index 000000000000..c381e66e087b --- /dev/null +++ b/crates/salsa/src/derived.rs @@ -0,0 +1,233 @@ +//! +use crate::debug::TableEntry; +use crate::durability::Durability; +use crate::hash::FxIndexMap; +use crate::lru::Lru; +use crate::plumbing::DerivedQueryStorageOps; +use crate::plumbing::LruQueryStorageOps; +use crate::plumbing::QueryFunction; +use crate::plumbing::QueryStorageMassOps; +use crate::plumbing::QueryStorageOps; +use crate::runtime::StampedValue; +use crate::Runtime; +use crate::{Database, DatabaseKeyIndex, QueryDb, Revision}; +use parking_lot::RwLock; +use std::borrow::Borrow; +use std::convert::TryFrom; +use std::hash::Hash; +use std::marker::PhantomData; +use triomphe::Arc; + +mod slot; +use slot::Slot; + +/// Memoized queries store the result plus a list of the other queries +/// that they invoked. This means we can avoid recomputing them when +/// none of those inputs have changed. +pub type MemoizedStorage = DerivedStorage; + +/// "Dependency" queries just track their dependencies and not the +/// actual value (which they produce on demand). This lessens the +/// storage requirements. 
+pub type DependencyStorage = DerivedStorage; + +/// Handles storage where the value is 'derived' by executing a +/// function (in contrast to "inputs"). +pub struct DerivedStorage +where + Q: QueryFunction, + MP: MemoizationPolicy, +{ + group_index: u16, + lru_list: Lru>, + slot_map: RwLock>>>, + policy: PhantomData, +} + +impl std::panic::RefUnwindSafe for DerivedStorage +where + Q: QueryFunction, + MP: MemoizationPolicy, + Q::Key: std::panic::RefUnwindSafe, + Q::Value: std::panic::RefUnwindSafe, +{ +} + +pub trait MemoizationPolicy: Send + Sync +where + Q: QueryFunction, +{ + fn should_memoize_value(key: &Q::Key) -> bool; + + fn memoized_value_eq(old_value: &Q::Value, new_value: &Q::Value) -> bool; +} + +pub enum AlwaysMemoizeValue {} +impl MemoizationPolicy for AlwaysMemoizeValue +where + Q: QueryFunction, + Q::Value: Eq, +{ + fn should_memoize_value(_key: &Q::Key) -> bool { + true + } + + fn memoized_value_eq(old_value: &Q::Value, new_value: &Q::Value) -> bool { + old_value == new_value + } +} + +pub enum NeverMemoizeValue {} +impl MemoizationPolicy for NeverMemoizeValue +where + Q: QueryFunction, +{ + fn should_memoize_value(_key: &Q::Key) -> bool { + false + } + + fn memoized_value_eq(_old_value: &Q::Value, _new_value: &Q::Value) -> bool { + panic!("cannot reach since we never memoize") + } +} + +impl DerivedStorage +where + Q: QueryFunction, + MP: MemoizationPolicy, +{ + fn slot(&self, key: &Q::Key) -> Arc> { + if let Some(v) = self.slot_map.read().get(key) { + return v.clone(); + } + + let mut write = self.slot_map.write(); + let entry = write.entry(key.clone()); + let key_index = u32::try_from(entry.index()).unwrap(); + let database_key_index = DatabaseKeyIndex { + group_index: self.group_index, + query_index: Q::QUERY_INDEX, + key_index, + }; + entry.or_insert_with(|| Arc::new(Slot::new(key.clone(), database_key_index))).clone() + } +} + +impl QueryStorageOps for DerivedStorage +where + Q: QueryFunction, + MP: MemoizationPolicy, +{ + const 
CYCLE_STRATEGY: crate::plumbing::CycleRecoveryStrategy = Q::CYCLE_STRATEGY; + + fn new(group_index: u16) -> Self { + DerivedStorage { + group_index, + slot_map: RwLock::new(FxIndexMap::default()), + lru_list: Default::default(), + policy: PhantomData, + } + } + + fn fmt_index( + &self, + _db: &>::DynDb, + index: DatabaseKeyIndex, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + assert_eq!(index.group_index, self.group_index); + assert_eq!(index.query_index, Q::QUERY_INDEX); + let slot_map = self.slot_map.read(); + let key = slot_map.get_index(index.key_index as usize).unwrap().0; + write!(fmt, "{}({:?})", Q::QUERY_NAME, key) + } + + fn maybe_changed_after( + &self, + db: &>::DynDb, + input: DatabaseKeyIndex, + revision: Revision, + ) -> bool { + assert_eq!(input.group_index, self.group_index); + assert_eq!(input.query_index, Q::QUERY_INDEX); + debug_assert!(revision < db.salsa_runtime().current_revision()); + let slot = self.slot_map.read().get_index(input.key_index as usize).unwrap().1.clone(); + slot.maybe_changed_after(db, revision) + } + + fn fetch(&self, db: &>::DynDb, key: &Q::Key) -> Q::Value { + db.unwind_if_cancelled(); + + let slot = self.slot(key); + let StampedValue { value, durability, changed_at } = slot.read(db); + + if let Some(evicted) = self.lru_list.record_use(&slot) { + evicted.evict(); + } + + db.salsa_runtime().report_query_read_and_unwind_if_cycle_resulted( + slot.database_key_index(), + durability, + changed_at, + ); + + value + } + + fn durability(&self, db: &>::DynDb, key: &Q::Key) -> Durability { + self.slot(key).durability(db) + } + + fn entries(&self, _db: &>::DynDb) -> C + where + C: std::iter::FromIterator>, + { + let slot_map = self.slot_map.read(); + slot_map.values().filter_map(|slot| slot.as_table_entry()).collect() + } +} + +impl QueryStorageMassOps for DerivedStorage +where + Q: QueryFunction, + MP: MemoizationPolicy, +{ + fn purge(&self) { + self.lru_list.purge(); + *self.slot_map.write() = Default::default(); + 
} +} + +impl LruQueryStorageOps for DerivedStorage +where + Q: QueryFunction, + MP: MemoizationPolicy, +{ + fn set_lru_capacity(&self, new_capacity: usize) { + self.lru_list.set_lru_capacity(new_capacity); + } +} + +impl DerivedQueryStorageOps for DerivedStorage +where + Q: QueryFunction, + MP: MemoizationPolicy, +{ + fn invalidate(&self, runtime: &mut Runtime, key: &S) + where + S: Eq + Hash, + Q::Key: Borrow, + { + runtime.with_incremented_revision(|new_revision| { + let map_read = self.slot_map.read(); + + if let Some(slot) = map_read.get(key) { + if let Some(durability) = slot.invalidate(new_revision) { + return Some(durability); + } + } + + None + }) + } +} diff --git a/crates/salsa/src/derived/slot.rs b/crates/salsa/src/derived/slot.rs new file mode 100644 index 000000000000..4fad791a26ae --- /dev/null +++ b/crates/salsa/src/derived/slot.rs @@ -0,0 +1,833 @@ +//! +use crate::debug::TableEntry; +use crate::derived::MemoizationPolicy; +use crate::durability::Durability; +use crate::lru::LruIndex; +use crate::lru::LruNode; +use crate::plumbing::{DatabaseOps, QueryFunction}; +use crate::revision::Revision; +use crate::runtime::local_state::ActiveQueryGuard; +use crate::runtime::local_state::QueryInputs; +use crate::runtime::local_state::QueryRevisions; +use crate::runtime::Runtime; +use crate::runtime::RuntimeId; +use crate::runtime::StampedValue; +use crate::runtime::WaitResult; +use crate::Cycle; +use crate::{Database, DatabaseKeyIndex, Event, EventKind, QueryDb}; +use parking_lot::{RawRwLock, RwLock}; +use std::marker::PhantomData; +use std::ops::Deref; +use std::sync::atomic::{AtomicBool, Ordering}; +use tracing::{debug, info}; + +pub(super) struct Slot +where + Q: QueryFunction, + MP: MemoizationPolicy, +{ + key: Q::Key, + database_key_index: DatabaseKeyIndex, + state: RwLock>, + policy: PhantomData, + lru_index: LruIndex, +} + +/// Defines the "current state" of query's memoized results. 
+enum QueryState +where + Q: QueryFunction, +{ + NotComputed, + + /// The runtime with the given id is currently computing the + /// result of this query. + InProgress { + id: RuntimeId, + + /// Set to true if any other queries are blocked, + /// waiting for this query to complete. + anyone_waiting: AtomicBool, + }, + + /// We have computed the query already, and here is the result. + Memoized(Memo), +} + +struct Memo { + /// The result of the query, if we decide to memoize it. + value: Option, + + /// Last revision when this memo was verified; this begins + /// as the current revision. + pub(crate) verified_at: Revision, + + /// Revision information + revisions: QueryRevisions, +} + +/// Return value of `probe` helper. +enum ProbeState { + /// Another thread was active but has completed. + /// Try again! + Retry, + + /// No entry for this key at all. + NotComputed(G), + + /// There is an entry, but its contents have not been + /// verified in this revision. + Stale(G), + + /// There is an entry, and it has been verified + /// in this revision, but it has no cached + /// value. The `Revision` is the revision where the + /// value last changed (if we were to recompute it). + NoValue(G, Revision), + + /// There is an entry which has been verified, + /// and it has the following value-- or, we blocked + /// on another thread, and that resulted in a cycle. + UpToDate(V), +} + +/// Return value of `maybe_changed_after_probe` helper. +enum MaybeChangedSinceProbeState { + /// Another thread was active but has completed. + /// Try again! + Retry, + + /// Value may have changed in the given revision. + ChangedAt(Revision), + + /// There is a stale cache entry that has not been + /// verified in this revision, so we can't say. 
+ Stale(G), +} + +impl Slot +where + Q: QueryFunction, + MP: MemoizationPolicy, +{ + pub(super) fn new(key: Q::Key, database_key_index: DatabaseKeyIndex) -> Self { + Self { + key, + database_key_index, + state: RwLock::new(QueryState::NotComputed), + lru_index: LruIndex::default(), + policy: PhantomData, + } + } + + pub(super) fn database_key_index(&self) -> DatabaseKeyIndex { + self.database_key_index + } + + pub(super) fn read(&self, db: &>::DynDb) -> StampedValue { + let runtime = db.salsa_runtime(); + + // NB: We don't need to worry about people modifying the + // revision out from under our feet. Either `db` is a frozen + // database, in which case there is a lock, or the mutator + // thread is the current thread, and it will be prevented from + // doing any `set` invocations while the query function runs. + let revision_now = runtime.current_revision(); + + info!("{:?}: invoked at {:?}", self, revision_now,); + + // First, do a check with a read-lock. + loop { + match self.probe(db, self.state.read(), runtime, revision_now) { + ProbeState::UpToDate(v) => return v, + ProbeState::Stale(..) | ProbeState::NoValue(..) | ProbeState::NotComputed(..) => { + break + } + ProbeState::Retry => continue, + } + } + + self.read_upgrade(db, revision_now) + } + + /// Second phase of a read operation: acquires an upgradable-read + /// and -- if needed -- validates whether inputs have changed, + /// recomputes value, etc. This is invoked after our initial probe + /// shows a potentially out of date value. + fn read_upgrade( + &self, + db: &>::DynDb, + revision_now: Revision, + ) -> StampedValue { + let runtime = db.salsa_runtime(); + + debug!("{:?}: read_upgrade(revision_now={:?})", self, revision_now,); + + // Check with an upgradable read to see if there is a value + // already. (This permits other readers but prevents anyone + // else from running `read_upgrade` at the same time.) 
+ let mut old_memo = loop { + match self.probe(db, self.state.upgradable_read(), runtime, revision_now) { + ProbeState::UpToDate(v) => return v, + ProbeState::Stale(state) + | ProbeState::NotComputed(state) + | ProbeState::NoValue(state, _) => { + type RwLockUpgradableReadGuard<'a, T> = + lock_api::RwLockUpgradableReadGuard<'a, RawRwLock, T>; + + let mut state = RwLockUpgradableReadGuard::upgrade(state); + match std::mem::replace(&mut *state, QueryState::in_progress(runtime.id())) { + QueryState::Memoized(old_memo) => break Some(old_memo), + QueryState::InProgress { .. } => unreachable!(), + QueryState::NotComputed => break None, + } + } + ProbeState::Retry => continue, + } + }; + + let panic_guard = PanicGuard::new(self.database_key_index, self, runtime); + let active_query = runtime.push_query(self.database_key_index); + + // If we have an old-value, it *may* now be stale, since there + // has been a new revision since the last time we checked. So, + // first things first, let's walk over each of our previous + // inputs and check whether they are out of date. 
+ if let Some(memo) = &mut old_memo { + if let Some(value) = memo.verify_value(db.ops_database(), revision_now, &active_query) { + info!("{:?}: validated old memoized value", self,); + + db.salsa_event(Event { + runtime_id: runtime.id(), + kind: EventKind::DidValidateMemoizedValue { + database_key: self.database_key_index, + }, + }); + + panic_guard.proceed(old_memo); + + return value; + } + } + + self.execute(db, runtime, revision_now, active_query, panic_guard, old_memo) + } + + fn execute( + &self, + db: &>::DynDb, + runtime: &Runtime, + revision_now: Revision, + active_query: ActiveQueryGuard<'_>, + panic_guard: PanicGuard<'_, Q, MP>, + old_memo: Option>, + ) -> StampedValue { + tracing::info!("{:?}: executing query", self.database_key_index.debug(db)); + + db.salsa_event(Event { + runtime_id: db.salsa_runtime().id(), + kind: EventKind::WillExecute { database_key: self.database_key_index }, + }); + + // Query was not previously executed, or value is potentially + // stale, or value is absent. Let's execute! + let value = match Cycle::catch(|| Q::execute(db, self.key.clone())) { + Ok(v) => v, + Err(cycle) => { + tracing::debug!( + "{:?}: caught cycle {:?}, have strategy {:?}", + self.database_key_index.debug(db), + cycle, + Q::CYCLE_STRATEGY, + ); + match Q::CYCLE_STRATEGY { + crate::plumbing::CycleRecoveryStrategy::Panic => { + panic_guard.proceed(None); + cycle.throw() + } + crate::plumbing::CycleRecoveryStrategy::Fallback => { + if let Some(c) = active_query.take_cycle() { + assert!(c.is(&cycle)); + Q::cycle_fallback(db, &cycle, &self.key) + } else { + // we are not a participant in this cycle + debug_assert!(!cycle + .participant_keys() + .any(|k| k == self.database_key_index)); + cycle.throw() + } + } + } + } + }; + + let mut revisions = active_query.pop(); + + // We assume that query is side-effect free -- that is, does + // not mutate the "inputs" to the query system. Sanity check + // that assumption here, at least to the best of our ability. 
+ assert_eq!( + runtime.current_revision(), + revision_now, + "revision altered during query execution", + ); + + // If the new value is equal to the old one, then it didn't + // really change, even if some of its inputs have. So we can + // "backdate" its `changed_at` revision to be the same as the + // old value. + if let Some(old_memo) = &old_memo { + if let Some(old_value) = &old_memo.value { + // Careful: if the value became less durable than it + // used to be, that is a "breaking change" that our + // consumers must be aware of. Becoming *more* durable + // is not. See the test `constant_to_non_constant`. + if revisions.durability >= old_memo.revisions.durability + && MP::memoized_value_eq(old_value, &value) + { + debug!( + "read_upgrade({:?}): value is equal, back-dating to {:?}", + self, old_memo.revisions.changed_at, + ); + + assert!(old_memo.revisions.changed_at <= revisions.changed_at); + revisions.changed_at = old_memo.revisions.changed_at; + } + } + } + + let new_value = StampedValue { + value, + durability: revisions.durability, + changed_at: revisions.changed_at, + }; + + let memo_value = + if self.should_memoize_value(&self.key) { Some(new_value.value.clone()) } else { None }; + + debug!("read_upgrade({:?}): result.revisions = {:#?}", self, revisions,); + + panic_guard.proceed(Some(Memo { value: memo_value, verified_at: revision_now, revisions })); + + new_value + } + + /// Helper for `read` that does a shallow check (not recursive) if we have an up-to-date value. + /// + /// Invoked with the guard `state` corresponding to the `QueryState` of some `Slot` (the guard + /// can be either read or write). Returns a suitable `ProbeState`: + /// + /// - `ProbeState::UpToDate(r)` if the table has an up-to-date value (or we blocked on another + /// thread that produced such a value). 
+ /// - `ProbeState::StaleOrAbsent(g)` if either (a) there is no memo for this key, (b) the memo + /// has no value; or (c) the memo has not been verified at the current revision. + /// + /// Note that in case `ProbeState::UpToDate`, the lock will have been released. + fn probe( + &self, + db: &>::DynDb, + state: StateGuard, + runtime: &Runtime, + revision_now: Revision, + ) -> ProbeState, StateGuard> + where + StateGuard: Deref>, + { + match &*state { + QueryState::NotComputed => ProbeState::NotComputed(state), + + QueryState::InProgress { id, anyone_waiting } => { + let other_id = *id; + + // NB: `Ordering::Relaxed` is sufficient here, + // as there are no loads that are "gated" on this + // value. Everything that is written is also protected + // by a lock that must be acquired. The role of this + // boolean is to decide *whether* to acquire the lock, + // not to gate future atomic reads. + anyone_waiting.store(true, Ordering::Relaxed); + + self.block_on_or_unwind(db, runtime, other_id, state); + + // Other thread completely normally, so our value may be available now. + ProbeState::Retry + } + + QueryState::Memoized(memo) => { + debug!( + "{:?}: found memoized value, verified_at={:?}, changed_at={:?}", + self, memo.verified_at, memo.revisions.changed_at, + ); + + if memo.verified_at < revision_now { + return ProbeState::Stale(state); + } + + if let Some(value) = &memo.value { + let value = StampedValue { + durability: memo.revisions.durability, + changed_at: memo.revisions.changed_at, + value: value.clone(), + }; + + info!("{:?}: returning memoized value changed at {:?}", self, value.changed_at); + + ProbeState::UpToDate(value) + } else { + let changed_at = memo.revisions.changed_at; + ProbeState::NoValue(state, changed_at) + } + } + } + } + + pub(super) fn durability(&self, db: &>::DynDb) -> Durability { + match &*self.state.read() { + QueryState::NotComputed => Durability::LOW, + QueryState::InProgress { .. 
} => panic!("query in progress"), + QueryState::Memoized(memo) => { + if memo.check_durability(db.salsa_runtime()) { + memo.revisions.durability + } else { + Durability::LOW + } + } + } + } + + pub(super) fn as_table_entry(&self) -> Option> { + match &*self.state.read() { + QueryState::NotComputed => None, + QueryState::InProgress { .. } => Some(TableEntry::new(self.key.clone(), None)), + QueryState::Memoized(memo) => { + Some(TableEntry::new(self.key.clone(), memo.value.clone())) + } + } + } + + pub(super) fn evict(&self) { + let mut state = self.state.write(); + if let QueryState::Memoized(memo) = &mut *state { + // Evicting a value with an untracked input could + // lead to inconsistencies. Note that we can't check + // `has_untracked_input` when we add the value to the cache, + // because inputs can become untracked in the next revision. + if memo.has_untracked_input() { + return; + } + memo.value = None; + } + } + + pub(super) fn invalidate(&self, new_revision: Revision) -> Option { + tracing::debug!("Slot::invalidate(new_revision = {:?})", new_revision); + match &mut *self.state.write() { + QueryState::Memoized(memo) => { + memo.revisions.inputs = QueryInputs::Untracked; + memo.revisions.changed_at = new_revision; + Some(memo.revisions.durability) + } + QueryState::NotComputed => None, + QueryState::InProgress { .. } => unreachable!(), + } + } + + pub(super) fn maybe_changed_after( + &self, + db: &>::DynDb, + revision: Revision, + ) -> bool { + let runtime = db.salsa_runtime(); + let revision_now = runtime.current_revision(); + + db.unwind_if_cancelled(); + + debug!( + "maybe_changed_after({:?}) called with revision={:?}, revision_now={:?}", + self, revision, revision_now, + ); + + // Do an initial probe with just the read-lock. + // + // If we find that a cache entry for the value is present + // but hasn't been verified in this revision, we'll have to + // do more. 
+ loop { + match self.maybe_changed_after_probe(db, self.state.read(), runtime, revision_now) { + MaybeChangedSinceProbeState::Retry => continue, + MaybeChangedSinceProbeState::ChangedAt(changed_at) => return changed_at > revision, + MaybeChangedSinceProbeState::Stale(state) => { + drop(state); + return self.maybe_changed_after_upgrade(db, revision); + } + } + } + } + + fn maybe_changed_after_probe( + &self, + db: &>::DynDb, + state: StateGuard, + runtime: &Runtime, + revision_now: Revision, + ) -> MaybeChangedSinceProbeState + where + StateGuard: Deref>, + { + match self.probe(db, state, runtime, revision_now) { + ProbeState::Retry => MaybeChangedSinceProbeState::Retry, + + ProbeState::Stale(state) => MaybeChangedSinceProbeState::Stale(state), + + // If we know when value last changed, we can return right away. + // Note that we don't need the actual value to be available. + ProbeState::NoValue(_, changed_at) + | ProbeState::UpToDate(StampedValue { value: _, durability: _, changed_at }) => { + MaybeChangedSinceProbeState::ChangedAt(changed_at) + } + + // If we have nothing cached, then value may have changed. + ProbeState::NotComputed(_) => MaybeChangedSinceProbeState::ChangedAt(revision_now), + } + } + + fn maybe_changed_after_upgrade( + &self, + db: &>::DynDb, + revision: Revision, + ) -> bool { + let runtime = db.salsa_runtime(); + let revision_now = runtime.current_revision(); + + // Get an upgradable read lock, which permits other reads but no writers. + // Probe again. If the value is stale (needs to be verified), then upgrade + // to a write lock and swap it with InProgress while we work. + let mut old_memo = match self.maybe_changed_after_probe( + db, + self.state.upgradable_read(), + runtime, + revision_now, + ) { + MaybeChangedSinceProbeState::ChangedAt(changed_at) => return changed_at > revision, + + // If another thread was active, then the cache line is going to be + // either verified or cleared out. Just recurse to figure out which. 
+ // Note that we don't need an upgradable read. + MaybeChangedSinceProbeState::Retry => return self.maybe_changed_after(db, revision), + + MaybeChangedSinceProbeState::Stale(state) => { + type RwLockUpgradableReadGuard<'a, T> = + lock_api::RwLockUpgradableReadGuard<'a, RawRwLock, T>; + + let mut state = RwLockUpgradableReadGuard::upgrade(state); + match std::mem::replace(&mut *state, QueryState::in_progress(runtime.id())) { + QueryState::Memoized(old_memo) => old_memo, + QueryState::NotComputed | QueryState::InProgress { .. } => unreachable!(), + } + } + }; + + let panic_guard = PanicGuard::new(self.database_key_index, self, runtime); + let active_query = runtime.push_query(self.database_key_index); + + if old_memo.verify_revisions(db.ops_database(), revision_now, &active_query) { + let maybe_changed = old_memo.revisions.changed_at > revision; + panic_guard.proceed(Some(old_memo)); + maybe_changed + } else if old_memo.value.is_some() { + // We found that this memoized value may have changed + // but we have an old value. We can re-run the code and + // actually *check* if it has changed. + let StampedValue { changed_at, .. } = + self.execute(db, runtime, revision_now, active_query, panic_guard, Some(old_memo)); + changed_at > revision + } else { + // We found that inputs to this memoized value may have changed + // but we don't have an old value to compare against or re-use. + // No choice but to drop the memo and say that its value may have changed. + panic_guard.proceed(None); + true + } + } + + /// Helper: see [`Runtime::try_block_on_or_unwind`]. 
+ fn block_on_or_unwind( + &self, + db: &>::DynDb, + runtime: &Runtime, + other_id: RuntimeId, + mutex_guard: MutexGuard, + ) { + runtime.block_on_or_unwind( + db.ops_database(), + self.database_key_index, + other_id, + mutex_guard, + ) + } + + fn should_memoize_value(&self, key: &Q::Key) -> bool { + MP::should_memoize_value(key) + } +} + +impl QueryState +where + Q: QueryFunction, +{ + fn in_progress(id: RuntimeId) -> Self { + QueryState::InProgress { id, anyone_waiting: Default::default() } + } +} + +struct PanicGuard<'me, Q, MP> +where + Q: QueryFunction, + MP: MemoizationPolicy, +{ + database_key_index: DatabaseKeyIndex, + slot: &'me Slot, + runtime: &'me Runtime, +} + +impl<'me, Q, MP> PanicGuard<'me, Q, MP> +where + Q: QueryFunction, + MP: MemoizationPolicy, +{ + fn new( + database_key_index: DatabaseKeyIndex, + slot: &'me Slot, + runtime: &'me Runtime, + ) -> Self { + Self { database_key_index, slot, runtime } + } + + /// Indicates that we have concluded normally (without panicking). + /// If `opt_memo` is some, then this memo is installed as the new + /// memoized value. If `opt_memo` is `None`, then the slot is cleared + /// and has no value. + fn proceed(mut self, opt_memo: Option>) { + self.overwrite_placeholder(WaitResult::Completed, opt_memo); + std::mem::forget(self) + } + + /// Overwrites the `InProgress` placeholder for `key` that we + /// inserted; if others were blocked, waiting for us to finish, + /// then notify them. + fn overwrite_placeholder(&mut self, wait_result: WaitResult, opt_memo: Option>) { + let mut write = self.slot.state.write(); + + let old_value = match opt_memo { + // Replace the `InProgress` marker that we installed with the new + // memo, thus releasing our unique access to this key. + Some(memo) => std::mem::replace(&mut *write, QueryState::Memoized(memo)), + + // We had installed an `InProgress` marker, but we panicked before + // it could be removed. 
At this point, we therefore "own" unique + // access to our slot, so we can just remove the key. + None => std::mem::replace(&mut *write, QueryState::NotComputed), + }; + + match old_value { + QueryState::InProgress { id, anyone_waiting } => { + assert_eq!(id, self.runtime.id()); + + // NB: As noted on the `store`, `Ordering::Relaxed` is + // sufficient here. This boolean signals us on whether to + // acquire a mutex; the mutex will guarantee that all writes + // we are interested in are visible. + if anyone_waiting.load(Ordering::Relaxed) { + self.runtime.unblock_queries_blocked_on(self.database_key_index, wait_result); + } + } + _ => panic!( + "\ +Unexpected panic during query evaluation, aborting the process. + +Please report this bug to https://github.com/salsa-rs/salsa/issues." + ), + } + } +} + +impl<'me, Q, MP> Drop for PanicGuard<'me, Q, MP> +where + Q: QueryFunction, + MP: MemoizationPolicy, +{ + fn drop(&mut self) { + if std::thread::panicking() { + // We panicked before we could proceed and need to remove `key`. + self.overwrite_placeholder(WaitResult::Panicked, None) + } else { + // If no panic occurred, then panic guard ought to be + // "forgotten" and so this Drop code should never run. + panic!(".forget() was not called") + } + } +} + +impl Memo +where + V: Clone, +{ + /// Determines whether the value stored in this memo (if any) is still + /// valid in the current revision. If so, returns a stamped value. + /// + /// If needed, this will walk each dependency and + /// recursively invoke `maybe_changed_after`, which may in turn + /// re-execute the dependency. This can cause cycles to occur, + /// so the current query must be pushed onto the + /// stack to permit cycle detection and recovery: therefore, + /// takes the `active_query` argument as evidence. + fn verify_value( + &mut self, + db: &dyn Database, + revision_now: Revision, + active_query: &ActiveQueryGuard<'_>, + ) -> Option> { + // If we don't have a memoized value, nothing to validate. 
+ if self.value.is_none() { + return None; + } + if self.verify_revisions(db, revision_now, active_query) { + Some(StampedValue { + durability: self.revisions.durability, + changed_at: self.revisions.changed_at, + value: self.value.as_ref().unwrap().clone(), + }) + } else { + None + } + } + + /// Determines whether the value represented by this memo is still + /// valid in the current revision; note that the value itself is + /// not needed for this check. If needed, this will walk each + /// dependency and recursively invoke `maybe_changed_after`, which + /// may in turn re-execute the dependency. This can cause cycles to occur, + /// so the current query must be pushed onto the + /// stack to permit cycle detection and recovery: therefore, + /// takes the `active_query` argument as evidence. + fn verify_revisions( + &mut self, + db: &dyn Database, + revision_now: Revision, + _active_query: &ActiveQueryGuard<'_>, + ) -> bool { + assert!(self.verified_at != revision_now); + let verified_at = self.verified_at; + + debug!( + "verify_revisions: verified_at={:?}, revision_now={:?}, inputs={:#?}", + verified_at, revision_now, self.revisions.inputs + ); + + if self.check_durability(db.salsa_runtime()) { + return self.mark_value_as_verified(revision_now); + } + + match &self.revisions.inputs { + // We can't validate values that had untracked inputs; just have to + // re-execute. + QueryInputs::Untracked => { + return false; + } + + QueryInputs::NoInputs => {} + + // Check whether any of our inputs changed since the + // **last point where we were verified** (not since we + // last changed). This is important: if we have + // memoized values, then an input may have changed in + // revision R2, but we found that *our* value was the + // same regardless, so our change date is still + // R1. But our *verification* date will be R2, and we + // are only interested in finding out whether the + // input changed *again*. 
+ QueryInputs::Tracked { inputs } => { + let changed_input = + inputs.iter().find(|&&input| db.maybe_changed_after(input, verified_at)); + if let Some(input) = changed_input { + debug!("validate_memoized_value: `{:?}` may have changed", input); + + return false; + } + } + }; + + self.mark_value_as_verified(revision_now) + } + + /// True if this memo is known not to have changed based on its durability. + fn check_durability(&self, runtime: &Runtime) -> bool { + let last_changed = runtime.last_changed_revision(self.revisions.durability); + debug!( + "check_durability(last_changed={:?} <= verified_at={:?}) = {:?}", + last_changed, + self.verified_at, + last_changed <= self.verified_at, + ); + last_changed <= self.verified_at + } + + fn mark_value_as_verified(&mut self, revision_now: Revision) -> bool { + self.verified_at = revision_now; + true + } + + fn has_untracked_input(&self) -> bool { + matches!(self.revisions.inputs, QueryInputs::Untracked) + } +} + +impl std::fmt::Debug for Slot +where + Q: QueryFunction, + MP: MemoizationPolicy, +{ + fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(fmt, "{:?}({:?})", Q::default(), self.key) + } +} + +impl LruNode for Slot +where + Q: QueryFunction, + MP: MemoizationPolicy, +{ + fn lru_index(&self) -> &LruIndex { + &self.lru_index + } +} + +/// Check that `Slot: Send + Sync` as long as +/// `DB::DatabaseData: Send + Sync`, which in turn implies that +/// `Q::Key: Send + Sync`, `Q::Value: Send + Sync`. +#[allow(dead_code)] +fn check_send_sync() +where + Q: QueryFunction, + MP: MemoizationPolicy, + Q::Key: Send + Sync, + Q::Value: Send + Sync, +{ + fn is_send_sync() {} + is_send_sync::>(); +} + +/// Check that `Slot: 'static` as long as +/// `DB::DatabaseData: 'static`, which in turn implies that +/// `Q::Key: 'static`, `Q::Value: 'static`. 
+#[allow(dead_code)] +fn check_static() +where + Q: QueryFunction + 'static, + MP: MemoizationPolicy + 'static, + Q::Key: 'static, + Q::Value: 'static, +{ + fn is_static() {} + is_static::>(); +} diff --git a/crates/salsa/src/doctest.rs b/crates/salsa/src/doctest.rs new file mode 100644 index 000000000000..29a80663567f --- /dev/null +++ b/crates/salsa/src/doctest.rs @@ -0,0 +1,115 @@ +//! +#![allow(dead_code)] + +/// Test that a database with a key/value that is not `Send` will, +/// indeed, not be `Send`. +/// +/// ```compile_fail,E0277 +/// use std::rc::Rc; +/// +/// #[salsa::query_group(NoSendSyncStorage)] +/// trait NoSendSyncDatabase: salsa::Database { +/// fn no_send_sync_value(&self, key: bool) -> Rc; +/// fn no_send_sync_key(&self, key: Rc) -> bool; +/// } +/// +/// fn no_send_sync_value(_db: &dyn NoSendSyncDatabase, key: bool) -> Rc { +/// Rc::new(key) +/// } +/// +/// fn no_send_sync_key(_db: &dyn NoSendSyncDatabase, key: Rc) -> bool { +/// *key +/// } +/// +/// #[salsa::database(NoSendSyncStorage)] +/// #[derive(Default)] +/// struct DatabaseImpl { +/// storage: salsa::Storage, +/// } +/// +/// impl salsa::Database for DatabaseImpl { +/// } +/// +/// fn is_send(_: T) { } +/// +/// fn assert_send() { +/// is_send(DatabaseImpl::default()); +/// } +/// ``` +fn test_key_not_send_db_not_send() {} + +/// Test that a database with a key/value that is not `Sync` will not +/// be `Send`. 
+/// +/// ```compile_fail,E0277 +/// use std::rc::Rc; +/// use std::cell::Cell; +/// +/// #[salsa::query_group(NoSendSyncStorage)] +/// trait NoSendSyncDatabase: salsa::Database { +/// fn no_send_sync_value(&self, key: bool) -> Cell; +/// fn no_send_sync_key(&self, key: Cell) -> bool; +/// } +/// +/// fn no_send_sync_value(_db: &dyn NoSendSyncDatabase, key: bool) -> Cell { +/// Cell::new(key) +/// } +/// +/// fn no_send_sync_key(_db: &dyn NoSendSyncDatabase, key: Cell) -> bool { +/// *key +/// } +/// +/// #[salsa::database(NoSendSyncStorage)] +/// #[derive(Default)] +/// struct DatabaseImpl { +/// runtime: salsa::Storage, +/// } +/// +/// impl salsa::Database for DatabaseImpl { +/// } +/// +/// fn is_send(_: T) { } +/// +/// fn assert_send() { +/// is_send(DatabaseImpl::default()); +/// } +/// ``` +fn test_key_not_sync_db_not_send() {} + +/// Test that a database with a key/value that is not `Sync` will +/// not be `Sync`. +/// +/// ```compile_fail,E0277 +/// use std::cell::Cell; +/// use std::rc::Rc; +/// +/// #[salsa::query_group(NoSendSyncStorage)] +/// trait NoSendSyncDatabase: salsa::Database { +/// fn no_send_sync_value(&self, key: bool) -> Cell; +/// fn no_send_sync_key(&self, key: Cell) -> bool; +/// } +/// +/// fn no_send_sync_value(_db: &dyn NoSendSyncDatabase, key: bool) -> Cell { +/// Cell::new(key) +/// } +/// +/// fn no_send_sync_key(_db: &dyn NoSendSyncDatabase, key: Cell) -> bool { +/// *key +/// } +/// +/// #[salsa::database(NoSendSyncStorage)] +/// #[derive(Default)] +/// struct DatabaseImpl { +/// runtime: salsa::Storage, +/// } +/// +/// impl salsa::Database for DatabaseImpl { +/// } +/// +/// fn is_sync(_: T) { } +/// +/// fn assert_send() { +/// is_sync(DatabaseImpl::default()); +/// } +/// ``` +fn test_key_not_sync_db_not_sync() {} diff --git a/crates/salsa/src/durability.rs b/crates/salsa/src/durability.rs new file mode 100644 index 000000000000..0c82f6345ab6 --- /dev/null +++ b/crates/salsa/src/durability.rs @@ -0,0 +1,50 @@ +//! 
+/// Describes how likely a value is to change -- how "durable" it is. +/// By default, inputs have `Durability::LOW` and interned values have +/// `Durability::HIGH`. But inputs can be explicitly set with other +/// durabilities. +/// +/// We use durabilities to optimize the work of "revalidating" a query +/// after some input has changed. Ordinarily, in a new revision, +/// queries have to trace all their inputs back to the base inputs to +/// determine if any of those inputs have changed. But if we know that +/// the only changes were to inputs of low durability (the common +/// case), and we know that the query only used inputs of medium +/// durability or higher, then we can skip that enumeration. +/// +/// Typically, one assigns low durabilities to inputs that the user is +/// frequently editing. Medium or high durabilities are used for +/// configuration, the source from library crates, or other things +/// that are unlikely to be edited. +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub struct Durability(u8); + +impl Durability { + /// Low durability: things that change frequently. + /// + /// Example: part of the crate being edited + pub const LOW: Durability = Durability(0); + + /// Medium durability: things that change sometimes, but rarely. + /// + /// Example: a Cargo.toml file + pub const MEDIUM: Durability = Durability(1); + + /// High durability: things that are not expected to change under + /// common usage. + /// + /// Example: the standard library or something from crates.io + pub const HIGH: Durability = Durability(2); + + /// The maximum possible durability; equivalent to HIGH but + /// "conceptually" distinct (i.e., if we add more durability + /// levels, this could change). + pub(crate) const MAX: Durability = Self::HIGH; + + /// Number of durability levels. 
+ pub(crate) const LEN: usize = 3; + + pub(crate) fn index(self) -> usize { + self.0 as usize + } +} diff --git a/crates/salsa/src/hash.rs b/crates/salsa/src/hash.rs new file mode 100644 index 000000000000..47a2dd1ce0c0 --- /dev/null +++ b/crates/salsa/src/hash.rs @@ -0,0 +1,4 @@ +//! +pub(crate) type FxHasher = std::hash::BuildHasherDefault; +pub(crate) type FxIndexSet = indexmap::IndexSet; +pub(crate) type FxIndexMap = indexmap::IndexMap; diff --git a/crates/salsa/src/input.rs b/crates/salsa/src/input.rs new file mode 100644 index 000000000000..4e8fca6149b7 --- /dev/null +++ b/crates/salsa/src/input.rs @@ -0,0 +1,384 @@ +//! +use crate::debug::TableEntry; +use crate::durability::Durability; +use crate::hash::FxIndexMap; +use crate::plumbing::CycleRecoveryStrategy; +use crate::plumbing::InputQueryStorageOps; +use crate::plumbing::QueryStorageMassOps; +use crate::plumbing::QueryStorageOps; +use crate::revision::Revision; +use crate::runtime::StampedValue; +use crate::Database; +use crate::Query; +use crate::Runtime; +use crate::{DatabaseKeyIndex, QueryDb}; +use indexmap::map::Entry; +use parking_lot::RwLock; +use std::convert::TryFrom; +use std::iter; +use tracing::debug; + +/// Input queries store the result plus a list of the other queries +/// that they invoked. This means we can avoid recomputing them when +/// none of those inputs have changed. 
+pub struct InputStorage +where + Q: Query, +{ + group_index: u16, + slots: RwLock>>, +} + +struct Slot { + database_key_index: DatabaseKeyIndex, + stamped_value: RwLock>, +} + +impl std::panic::RefUnwindSafe for InputStorage +where + Q: Query, + Q::Key: std::panic::RefUnwindSafe, + Q::Value: std::panic::RefUnwindSafe, +{ +} + +impl QueryStorageOps for InputStorage +where + Q: Query, +{ + const CYCLE_STRATEGY: crate::plumbing::CycleRecoveryStrategy = CycleRecoveryStrategy::Panic; + + fn new(group_index: u16) -> Self { + InputStorage { group_index, slots: Default::default() } + } + + fn fmt_index( + &self, + _db: &>::DynDb, + index: DatabaseKeyIndex, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + assert_eq!(index.group_index, self.group_index); + assert_eq!(index.query_index, Q::QUERY_INDEX); + let slot_map = self.slots.read(); + let key = slot_map.get_index(index.key_index as usize).unwrap().0; + write!(fmt, "{}({:?})", Q::QUERY_NAME, key) + } + + fn maybe_changed_after( + &self, + db: &>::DynDb, + input: DatabaseKeyIndex, + revision: Revision, + ) -> bool { + assert_eq!(input.group_index, self.group_index); + assert_eq!(input.query_index, Q::QUERY_INDEX); + debug_assert!(revision < db.salsa_runtime().current_revision()); + let slots = &self.slots.read(); + let slot = slots.get_index(input.key_index as usize).unwrap().1; + + debug!("maybe_changed_after(slot={:?}, revision={:?})", Q::default(), revision,); + + let changed_at = slot.stamped_value.read().changed_at; + + debug!("maybe_changed_after: changed_at = {:?}", changed_at); + + changed_at > revision + } + + fn fetch(&self, db: &>::DynDb, key: &Q::Key) -> Q::Value { + db.unwind_if_cancelled(); + + let slots = &self.slots.read(); + let slot = slots + .get(key) + .unwrap_or_else(|| panic!("no value set for {:?}({:?})", Q::default(), key)); + + let StampedValue { value, durability, changed_at } = slot.stamped_value.read().clone(); + + 
db.salsa_runtime().report_query_read_and_unwind_if_cycle_resulted( + slot.database_key_index, + durability, + changed_at, + ); + + value + } + + fn durability(&self, _db: &>::DynDb, key: &Q::Key) -> Durability { + match self.slots.read().get(key) { + Some(slot) => slot.stamped_value.read().durability, + None => panic!("no value set for {:?}({:?})", Q::default(), key), + } + } + + fn entries(&self, _db: &>::DynDb) -> C + where + C: std::iter::FromIterator>, + { + let slots = self.slots.read(); + slots + .iter() + .map(|(key, slot)| { + TableEntry::new(key.clone(), Some(slot.stamped_value.read().value.clone())) + }) + .collect() + } +} + +impl QueryStorageMassOps for InputStorage +where + Q: Query, +{ + fn purge(&self) { + *self.slots.write() = Default::default(); + } +} + +impl InputQueryStorageOps for InputStorage +where + Q: Query, +{ + fn set(&self, runtime: &mut Runtime, key: &Q::Key, value: Q::Value, durability: Durability) { + tracing::debug!("{:?}({:?}) = {:?} ({:?})", Q::default(), key, value, durability); + + // The value is changing, so we need a new revision (*). We also + // need to update the 'last changed' revision by invoking + // `guard.mark_durability_as_changed`. + // + // CAREFUL: This will block until the global revision lock can + // be acquired. If there are still queries executing, they may + // need to read from this input. Therefore, we wait to acquire + // the lock on `map` until we also hold the global query write + // lock. + // + // (*) Technically, since you can't presently access an input + // for a non-existent key, and you can't enumerate the set of + // keys, we only need a new revision if the key used to + // exist. But we may add such methods in the future and this + // case doesn't generally seem worth optimizing for. + runtime.with_incremented_revision(|next_revision| { + let mut slots = self.slots.write(); + + // Do this *after* we acquire the lock, so that we are not + // racing with somebody else to modify this same cell. 
+ // (Otherwise, someone else might write a *newer* revision + // into the same cell while we block on the lock.) + let stamped_value = StampedValue { value, durability, changed_at: next_revision }; + + match slots.entry(key.clone()) { + Entry::Occupied(entry) => { + let mut slot_stamped_value = entry.get().stamped_value.write(); + let old_durability = slot_stamped_value.durability; + *slot_stamped_value = stamped_value; + Some(old_durability) + } + + Entry::Vacant(entry) => { + let key_index = u32::try_from(entry.index()).unwrap(); + let database_key_index = DatabaseKeyIndex { + group_index: self.group_index, + query_index: Q::QUERY_INDEX, + key_index, + }; + entry.insert(Slot { + database_key_index, + stamped_value: RwLock::new(stamped_value), + }); + None + } + } + }); + } +} + +/// Same as `InputStorage`, but optimized for queries that take no inputs. +pub struct UnitInputStorage +where + Q: Query, +{ + group_index: u16, + slot: UnitSlot, +} + +struct UnitSlot { + database_key_index: DatabaseKeyIndex, + stamped_value: RwLock>>, +} + +impl std::panic::RefUnwindSafe for UnitInputStorage +where + Q: Query, + Q::Key: std::panic::RefUnwindSafe, + Q::Value: std::panic::RefUnwindSafe, +{ +} + +impl QueryStorageOps for UnitInputStorage +where + Q: Query, +{ + const CYCLE_STRATEGY: crate::plumbing::CycleRecoveryStrategy = CycleRecoveryStrategy::Panic; + + fn new(group_index: u16) -> Self { + let database_key_index = + DatabaseKeyIndex { group_index, query_index: Q::QUERY_INDEX, key_index: 0 }; + UnitInputStorage { + group_index, + slot: UnitSlot { database_key_index, stamped_value: RwLock::new(None) }, + } + } + + fn fmt_index( + &self, + _db: &>::DynDb, + index: DatabaseKeyIndex, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + assert_eq!(index.group_index, self.group_index); + assert_eq!(index.query_index, Q::QUERY_INDEX); + write!(fmt, "{}", Q::QUERY_NAME) + } + + fn maybe_changed_after( + &self, + db: &>::DynDb, + input: DatabaseKeyIndex, + revision: 
Revision, + ) -> bool { + assert_eq!(input.group_index, self.group_index); + assert_eq!(input.query_index, Q::QUERY_INDEX); + debug_assert!(revision < db.salsa_runtime().current_revision()); + + debug!("maybe_changed_after(slot={:?}, revision={:?})", Q::default(), revision,); + + let changed_at = self.slot.stamped_value.read().as_ref().unwrap().changed_at; + + debug!("maybe_changed_after: changed_at = {:?}", changed_at); + + changed_at > revision + } + + fn fetch(&self, db: &>::DynDb, &(): &Q::Key) -> Q::Value { + db.unwind_if_cancelled(); + + let StampedValue { value, durability, changed_at } = self + .slot + .stamped_value + .read() + .clone() + .unwrap_or_else(|| panic!("no value set for {:?}", Q::default())); + + db.salsa_runtime().report_query_read_and_unwind_if_cycle_resulted( + self.slot.database_key_index, + durability, + changed_at, + ); + + value + } + + fn durability(&self, _db: &>::DynDb, &(): &Q::Key) -> Durability { + match &*self.slot.stamped_value.read() { + Some(stamped_value) => stamped_value.durability, + None => panic!("no value set for {:?}", Q::default(),), + } + } + + fn entries(&self, _db: &>::DynDb) -> C + where + C: std::iter::FromIterator>, + { + iter::once(TableEntry::new( + (), + self.slot.stamped_value.read().as_ref().map(|it| it.value.clone()), + )) + .collect() + } +} + +impl QueryStorageMassOps for UnitInputStorage +where + Q: Query, +{ + fn purge(&self) { + *self.slot.stamped_value.write() = Default::default(); + } +} + +impl InputQueryStorageOps for UnitInputStorage +where + Q: Query, +{ + fn set(&self, runtime: &mut Runtime, (): &Q::Key, value: Q::Value, durability: Durability) { + tracing::debug!("{:?} = {:?} ({:?})", Q::default(), value, durability); + + // The value is changing, so we need a new revision (*). We also + // need to update the 'last changed' revision by invoking + // `guard.mark_durability_as_changed`. + // + // CAREFUL: This will block until the global revision lock can + // be acquired. 
If there are still queries executing, they may + // need to read from this input. Therefore, we wait to acquire + // the lock on `map` until we also hold the global query write + // lock. + // + // (*) Technically, since you can't presently access an input + // for a non-existent key, and you can't enumerate the set of + // keys, we only need a new revision if the key used to + // exist. But we may add such methods in the future and this + // case doesn't generally seem worth optimizing for. + runtime.with_incremented_revision(|next_revision| { + let mut stamped_value_slot = self.slot.stamped_value.write(); + + // Do this *after* we acquire the lock, so that we are not + // racing with somebody else to modify this same cell. + // (Otherwise, someone else might write a *newer* revision + // into the same cell while we block on the lock.) + let stamped_value = StampedValue { value, durability, changed_at: next_revision }; + + match &mut *stamped_value_slot { + Some(slot_stamped_value) => { + let old_durability = slot_stamped_value.durability; + *slot_stamped_value = stamped_value; + Some(old_durability) + } + + stamped_value_slot @ None => { + *stamped_value_slot = Some(stamped_value); + None + } + } + }); + } +} + +/// Check that `Slot: Send + Sync` as long as +/// `DB::DatabaseData: Send + Sync`, which in turn implies that +/// `Q::Key: Send + Sync`, `Q::Value: Send + Sync`. +#[allow(dead_code)] +fn check_send_sync() +where + Q: Query, + Q::Key: Send + Sync, + Q::Value: Send + Sync, +{ + fn is_send_sync() {} + is_send_sync::>(); + is_send_sync::>(); +} + +/// Check that `Slot: 'static` as long as +/// `DB::DatabaseData: 'static`, which in turn implies that +/// `Q::Key: 'static`, `Q::Value: 'static`. 
+#[allow(dead_code)] +fn check_static() +where + Q: Query + 'static, + Q::Key: 'static, + Q::Value: 'static, +{ + fn is_static() {} + is_static::>(); + is_static::>(); +} diff --git a/crates/salsa/src/intern_id.rs b/crates/salsa/src/intern_id.rs new file mode 100644 index 000000000000..a7bbc088f9c0 --- /dev/null +++ b/crates/salsa/src/intern_id.rs @@ -0,0 +1,131 @@ +//! +use std::fmt; +use std::num::NonZeroU32; + +/// The "raw-id" is used for interned keys in salsa -- it is basically +/// a newtype'd u32. Typically, it is wrapped in a type of your own +/// devising. For more information about interned keys, see [the +/// interned key RFC][rfc]. +/// +/// # Creating a `InternId` +// +/// InternId values can be constructed using the `From` impls, +/// which are implemented for `u32` and `usize`: +/// +/// ``` +/// # use salsa::InternId; +/// let intern_id1 = InternId::from(22_u32); +/// let intern_id2 = InternId::from(22_usize); +/// assert_eq!(intern_id1, intern_id2); +/// ``` +/// +/// # Converting to a u32 or usize +/// +/// Normally, there should be no need to access the underlying integer +/// in a `InternId`. But if you do need to do so, you can convert to a +/// `usize` using the `as_u32` or `as_usize` methods or the `From` impls. +/// +/// ``` +/// # use salsa::InternId; +/// let intern_id = InternId::from(22_u32); +/// let value = u32::from(intern_id); +/// assert_eq!(value, 22); +/// ``` +/// +/// ## Illegal values +/// +/// Be warned, however, that `InternId` values cannot be created from +/// *arbitrary* values -- in particular large values greater than +/// `InternId::MAX` will panic. Those large values are reserved so that +/// the Rust compiler can use them as sentinel values, which means +/// that (for example) `Option` is represented in a single +/// word. 
+/// +/// ```should_panic +/// # use salsa::InternId; +/// InternId::from(InternId::MAX); +/// ``` +/// +/// [rfc]: https://github.com/salsa-rs/salsa-rfcs/pull/2 +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct InternId { + value: NonZeroU32, +} + +impl InternId { + /// The maximum allowed `InternId`. This value can grow between + /// releases without affecting semver. + pub const MAX: u32 = 0xFFFF_FF00; + + /// Creates a new InternId. + /// + /// # Safety + /// + /// `value` must be less than `MAX` + pub const unsafe fn new_unchecked(value: u32) -> Self { + debug_assert!(value < InternId::MAX); + InternId { value: NonZeroU32::new_unchecked(value + 1) } + } + + /// Convert this raw-id into a u32 value. + /// + /// ``` + /// # use salsa::InternId; + /// let intern_id = InternId::from(22_u32); + /// let value = intern_id.as_usize(); + /// assert_eq!(value, 22); + /// ``` + pub fn as_u32(self) -> u32 { + self.value.get() - 1 + } + + /// Convert this raw-id into a usize value. 
+ /// + /// ``` + /// # use salsa::InternId; + /// let intern_id = InternId::from(22_u32); + /// let value = intern_id.as_usize(); + /// assert_eq!(value, 22); + /// ``` + pub fn as_usize(self) -> usize { + self.as_u32() as usize + } +} + +impl From for u32 { + fn from(raw: InternId) -> u32 { + raw.as_u32() + } +} + +impl From for usize { + fn from(raw: InternId) -> usize { + raw.as_usize() + } +} + +impl From for InternId { + fn from(id: u32) -> InternId { + assert!(id < InternId::MAX); + unsafe { InternId::new_unchecked(id) } + } +} + +impl From for InternId { + fn from(id: usize) -> InternId { + assert!(id < (InternId::MAX as usize)); + unsafe { InternId::new_unchecked(id as u32) } + } +} + +impl fmt::Debug for InternId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.as_usize().fmt(f) + } +} + +impl fmt::Display for InternId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.as_usize().fmt(f) + } +} diff --git a/crates/salsa/src/interned.rs b/crates/salsa/src/interned.rs new file mode 100644 index 000000000000..731839e9598c --- /dev/null +++ b/crates/salsa/src/interned.rs @@ -0,0 +1,508 @@ +//! +use crate::debug::TableEntry; +use crate::durability::Durability; +use crate::intern_id::InternId; +use crate::plumbing::CycleRecoveryStrategy; +use crate::plumbing::HasQueryGroup; +use crate::plumbing::QueryStorageMassOps; +use crate::plumbing::QueryStorageOps; +use crate::revision::Revision; +use crate::Query; +use crate::QueryTable; +use crate::{Database, DatabaseKeyIndex, QueryDb}; +use parking_lot::RwLock; +use rustc_hash::FxHashMap; +use std::collections::hash_map::Entry; +use std::convert::From; +use std::fmt::Debug; +use std::hash::Hash; +use triomphe::Arc; + +const INTERN_DURABILITY: Durability = Durability::HIGH; + +/// Handles storage where the value is 'derived' by executing a +/// function (in contrast to "inputs"). 
+pub struct InternedStorage +where + Q: Query, + Q::Key: InternValue, + Q::Value: InternKey, +{ + group_index: u16, + tables: RwLock, Q::Key>>, +} + +/// Storage for looking up interned things. +pub struct LookupInternedStorage +where + Q: Query, + Q::Key: InternKey, + Q::Value: InternValue, +{ + phantom: std::marker::PhantomData<(Q::Key, IQ)>, +} + +struct InternTables { + /// Map from the key to the corresponding intern-index. + map: FxHashMap, + + /// For each valid intern-index, stores the interned value. + values: Vec>>, +} + +/// Trait implemented for the "key" that results from a +/// `#[salsa::intern]` query. This is basically meant to be a +/// "newtype"'d `u32`. +pub trait InternKey { + /// Create an instance of the intern-key from a `u32` value. + fn from_intern_id(v: InternId) -> Self; + + /// Extract the `u32` with which the intern-key was created. + fn as_intern_id(&self) -> InternId; +} + +impl InternKey for InternId { + fn from_intern_id(v: InternId) -> InternId { + v + } + + fn as_intern_id(&self) -> InternId { + *self + } +} + +/// Trait implemented for the "value" that is being interned. +pub trait InternValue { + /// The key used to intern this value by. + type Key: Eq + Hash + Debug + Clone; + /// Maps the value to a key that will be used to intern it. + fn into_key(&self) -> Self::Key; + /// Calls the given function with the key that was used to intern this value. + /// + /// This is mainly used to prevent frequent cloning of the key when doing a lookup. + #[inline] + fn with_key T, T>(&self, f: F) -> T { + f(&self.into_key()) + } +} + +impl + InternValue for (A, B) +{ + type Key = Self; + #[inline] + fn into_key(&self) -> Self::Key { + self.clone() + } + #[inline] + fn with_key T, T>(&self, f: F) -> T { + f(self) + } +} + +/// Implement [`InternValue`] trivially, that is without actually mapping at all. +#[macro_export] +macro_rules! 
impl_intern_value_trivial { + ($($ty:ty),*) => { + $( + impl $crate::InternValue for $ty { + type Key = $ty; + #[inline] + fn into_key(&self) -> Self::Key { + self.clone() + } + #[inline] + fn with_key T, T>(&self, f: F) -> T { + f(self) + } + } + )* + }; +} +impl_intern_value_trivial!(String); +#[derive(Debug)] +struct Slot { + /// DatabaseKeyIndex for this slot. + database_key_index: DatabaseKeyIndex, + + /// Value that was interned. + value: V, + + /// When was this intern'd? + /// + /// (This informs the "changed-at" result) + interned_at: Revision, +} + +impl std::panic::RefUnwindSafe for InternedStorage +where + Q: Query, + Q::Key: InternValue, + Q::Key: std::panic::RefUnwindSafe, + Q::Value: InternKey, + Q::Value: std::panic::RefUnwindSafe, +{ +} + +impl InternTables { + /// Returns the slot for the given key. + fn slot_for_key(&self, key: &K) -> Option<(Arc>, InternId)> { + let &index = self.map.get(key)?; + Some((self.slot_for_index(index), index)) + } + + /// Returns the slot at the given index. + fn slot_for_index(&self, index: InternId) -> Arc> { + let slot = &self.values[index.as_usize()]; + slot.clone() + } +} + +impl Default for InternTables +where + K: Eq + Hash, +{ + fn default() -> Self { + Self { map: Default::default(), values: Default::default() } + } +} + +type MappedKey = <::Key as InternValue>::Key; + +impl InternedStorage +where + Q: Query, + Q::Key: InternValue, + Q::Value: InternKey, +{ + /// Creates a new slot. + fn intern_index( + &self, + db: &>::DynDb, + mapped_key: MappedKey, + insert: impl FnOnce(Q::Value) -> Q::Key, + ) -> (Arc>, InternId) { + let revision_now = db.salsa_runtime().current_revision(); + + let mut tables = self.tables.write(); + let tables = &mut *tables; + let entry = match tables.map.entry(mapped_key) { + Entry::Vacant(entry) => entry, + Entry::Occupied(entry) => { + // Somebody inserted this key while we were waiting + // for the write lock. 
In this case, we don't need to + // update the `accessed_at` field because they should + // have already done so! + let index = *entry.get(); + let slot = &tables.values[index.as_usize()]; + return (slot.clone(), index); + } + }; + + let create_slot = |index: InternId| { + let database_key_index = DatabaseKeyIndex { + group_index: self.group_index, + query_index: Q::QUERY_INDEX, + key_index: index.as_u32(), + }; + Arc::new(Slot { + database_key_index, + value: insert(Q::Value::from_intern_id(index)), + interned_at: revision_now, + }) + }; + + let index = InternId::from(tables.values.len()); + let slot = create_slot(index); + tables.values.push(slot.clone()); + entry.insert(index); + + (slot, index) + } + + fn intern_check(&self, key: &MappedKey) -> Option<(Arc>, InternId)> { + self.tables.read().slot_for_key(key) + } + + /// Given an index, lookup and clone its value, updating the + /// `accessed_at` time if necessary. + fn lookup_value(&self, index: InternId) -> Arc> { + self.tables.read().slot_for_index(index) + } + + fn fetch_or_insert( + &self, + db: &>::DynDb, + key: MappedKey, + insert: impl FnOnce(Q::Value) -> Q::Key, + ) -> Q::Value { + db.unwind_if_cancelled(); + let (slot, index) = match self.intern_check(&key) { + Some(i) => i, + None => self.intern_index(db, key, insert), + }; + let changed_at = slot.interned_at; + db.salsa_runtime().report_query_read_and_unwind_if_cycle_resulted( + slot.database_key_index, + INTERN_DURABILITY, + changed_at, + ); + ::from_intern_id(index) + } +} + +impl QueryStorageOps for InternedStorage +where + Q: Query, + Q::Key: InternValue, + Q::Value: InternKey, +{ + const CYCLE_STRATEGY: crate::plumbing::CycleRecoveryStrategy = CycleRecoveryStrategy::Panic; + + fn new(group_index: u16) -> Self { + InternedStorage { group_index, tables: RwLock::new(InternTables::default()) } + } + + fn fmt_index( + &self, + _db: &>::DynDb, + index: DatabaseKeyIndex, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + 
assert_eq!(index.group_index, self.group_index); + assert_eq!(index.query_index, Q::QUERY_INDEX); + let intern_id = InternId::from(index.key_index); + let slot = self.lookup_value(intern_id); + write!(fmt, "{}({:?})", Q::QUERY_NAME, slot.value) + } + + fn maybe_changed_after( + &self, + db: &>::DynDb, + input: DatabaseKeyIndex, + revision: Revision, + ) -> bool { + assert_eq!(input.group_index, self.group_index); + assert_eq!(input.query_index, Q::QUERY_INDEX); + debug_assert!(revision < db.salsa_runtime().current_revision()); + let intern_id = InternId::from(input.key_index); + let slot = self.lookup_value(intern_id); + slot.maybe_changed_after(revision) + } + + fn fetch(&self, db: &>::DynDb, key: &Q::Key) -> Q::Value { + db.unwind_if_cancelled(); + + let (slot, index) = match key.with_key(|key| self.intern_check(key)) { + Some(i) => i, + None => self.intern_index(db, key.into_key(), |_| key.clone()), + }; + let changed_at = slot.interned_at; + db.salsa_runtime().report_query_read_and_unwind_if_cycle_resulted( + slot.database_key_index, + INTERN_DURABILITY, + changed_at, + ); + ::from_intern_id(index) + } + + fn durability(&self, _db: &>::DynDb, _key: &Q::Key) -> Durability { + INTERN_DURABILITY + } + + fn entries(&self, _db: &>::DynDb) -> C + where + C: std::iter::FromIterator>, + { + let tables = self.tables.read(); + tables + .map + .values() + .map(|index| { + TableEntry::new( + tables.values[index.as_usize()].value.clone(), + Some(::from_intern_id(*index)), + ) + }) + .collect() + } +} + +impl QueryStorageMassOps for InternedStorage +where + Q: Query, + Q::Key: InternValue, + Q::Value: InternKey, +{ + fn purge(&self) { + *self.tables.write() = Default::default(); + } +} + +// Workaround for +// ``` +// IQ: for<'d> QueryDb< +// 'd, +// DynDb = >::DynDb, +// Group = >::Group, +// GroupStorage = >::GroupStorage, +// >, +// ``` +// not working to make rustc know DynDb, Group and GroupStorage being the same in `Q` and `IQ` +#[doc(hidden)] +pub trait EqualDynDb<'d, 
IQ>: QueryDb<'d> +where + IQ: QueryDb<'d>, +{ + fn convert_db(d: &Self::DynDb) -> &IQ::DynDb; + fn convert_group_storage(d: &Self::GroupStorage) -> &IQ::GroupStorage; +} + +impl<'d, IQ, Q> EqualDynDb<'d, IQ> for Q +where + Q: QueryDb<'d, DynDb = IQ::DynDb, Group = IQ::Group, GroupStorage = IQ::GroupStorage>, + Q::DynDb: HasQueryGroup, + IQ: QueryDb<'d>, +{ + fn convert_db(d: &Self::DynDb) -> &IQ::DynDb { + d + } + fn convert_group_storage(d: &Self::GroupStorage) -> &IQ::GroupStorage { + d + } +} + +impl QueryStorageOps for LookupInternedStorage +where + Q: Query, + Q::Key: InternKey, + Q::Value: InternValue, + IQ: Query>, + for<'d> Q: EqualDynDb<'d, IQ>, +{ + const CYCLE_STRATEGY: CycleRecoveryStrategy = CycleRecoveryStrategy::Panic; + + fn new(_group_index: u16) -> Self { + LookupInternedStorage { phantom: std::marker::PhantomData } + } + + fn fmt_index( + &self, + db: &>::DynDb, + index: DatabaseKeyIndex, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + let group_storage = + <>::DynDb as HasQueryGroup>::group_storage(db); + let interned_storage = IQ::query_storage(Q::convert_group_storage(group_storage)); + interned_storage.fmt_index(Q::convert_db(db), index, fmt) + } + + fn maybe_changed_after( + &self, + db: &>::DynDb, + input: DatabaseKeyIndex, + revision: Revision, + ) -> bool { + let group_storage = + <>::DynDb as HasQueryGroup>::group_storage(db); + let interned_storage = IQ::query_storage(Q::convert_group_storage(group_storage)); + interned_storage.maybe_changed_after(Q::convert_db(db), input, revision) + } + + fn fetch(&self, db: &>::DynDb, key: &Q::Key) -> Q::Value { + let index = key.as_intern_id(); + let group_storage = + <>::DynDb as HasQueryGroup>::group_storage(db); + let interned_storage = IQ::query_storage(Q::convert_group_storage(group_storage)); + let slot = interned_storage.lookup_value(index); + let value = slot.value.clone(); + let interned_at = slot.interned_at; + 
db.salsa_runtime().report_query_read_and_unwind_if_cycle_resulted( + slot.database_key_index, + INTERN_DURABILITY, + interned_at, + ); + value + } + + fn durability(&self, _db: &>::DynDb, _key: &Q::Key) -> Durability { + INTERN_DURABILITY + } + + fn entries(&self, db: &>::DynDb) -> C + where + C: std::iter::FromIterator>, + { + let group_storage = + <>::DynDb as HasQueryGroup>::group_storage(db); + let interned_storage = IQ::query_storage(Q::convert_group_storage(group_storage)); + let tables = interned_storage.tables.read(); + tables + .map + .values() + .map(|index| { + TableEntry::new( + ::from_intern_id(*index), + Some(tables.values[index.as_usize()].value.clone()), + ) + }) + .collect() + } +} + +impl QueryStorageMassOps for LookupInternedStorage +where + Q: Query, + Q::Key: InternKey, + Q::Value: InternValue, + IQ: Query, +{ + fn purge(&self) {} +} + +impl Slot { + fn maybe_changed_after(&self, revision: Revision) -> bool { + self.interned_at > revision + } +} + +/// Check that `Slot: Send + Sync` as long as +/// `DB::DatabaseData: Send + Sync`, which in turn implies that +/// `Q::Key: Send + Sync`, `Q::Value: Send + Sync`. +#[allow(dead_code)] +fn check_send_sync() +where + K: Send + Sync, +{ + fn is_send_sync() {} + is_send_sync::>(); +} + +/// Check that `Slot: 'static` as long as +/// `DB::DatabaseData: 'static`, which in turn implies that +/// `Q::Key: 'static`, `Q::Value: 'static`. +#[allow(dead_code)] +fn check_static() +where + K: 'static, +{ + fn is_static() {} + is_static::>(); +} + +impl<'me, Q> QueryTable<'me, Q> +where + Q: Query>, + Q::Key: InternValue, + Q::Value: InternKey, +{ + /// Fetches the intern id for the given key or inserts it if it does not exist. 
+ pub fn get_or_insert( + &self, + key: MappedKey, + insert: impl FnOnce(Q::Value) -> Q::Key, + ) -> Q::Value { + self.storage.fetch_or_insert(self.db, key, insert) + } +} diff --git a/crates/salsa/src/lib.rs b/crates/salsa/src/lib.rs new file mode 100644 index 000000000000..2d58beafb2a0 --- /dev/null +++ b/crates/salsa/src/lib.rs @@ -0,0 +1,742 @@ +//! +#![allow(clippy::type_complexity)] +#![allow(clippy::question_mark)] +#![warn(rust_2018_idioms)] +#![warn(missing_docs)] + +//! The salsa crate is a crate for incremental recomputation. It +//! permits you to define a "database" of queries with both inputs and +//! values derived from those inputs; as you set the inputs, you can +//! re-execute the derived queries and it will try to re-use results +//! from previous invocations as appropriate. + +mod derived; +mod doctest; +mod durability; +mod hash; +mod input; +mod intern_id; +mod interned; +mod lru; +mod revision; +mod runtime; +mod storage; + +pub mod debug; +/// Items in this module are public for implementation reasons, +/// and are exempt from the SemVer guarantees. +#[doc(hidden)] +pub mod plumbing; + +use crate::plumbing::CycleRecoveryStrategy; +use crate::plumbing::DerivedQueryStorageOps; +use crate::plumbing::InputQueryStorageOps; +use crate::plumbing::LruQueryStorageOps; +use crate::plumbing::QueryStorageMassOps; +use crate::plumbing::QueryStorageOps; +pub use crate::revision::Revision; +use std::fmt::{self, Debug}; +use std::hash::Hash; +use std::panic::AssertUnwindSafe; +use std::panic::{self, UnwindSafe}; + +pub use crate::durability::Durability; +pub use crate::intern_id::InternId; +pub use crate::interned::{InternKey, InternValue}; +pub use crate::runtime::Runtime; +pub use crate::runtime::RuntimeId; +pub use crate::storage::Storage; + +/// The base trait which your "query context" must implement. Gives +/// access to the salsa runtime, which you must embed into your query +/// context (along with whatever other state you may require). 
+pub trait Database: plumbing::DatabaseOps { + /// This function is invoked at key points in the salsa + /// runtime. It permits the database to be customized and to + /// inject logging or other custom behavior. + fn salsa_event(&self, event_fn: Event) { + #![allow(unused_variables)] + } + + /// Starts unwinding the stack if the current revision is cancelled. + /// + /// This method can be called by query implementations that perform + /// potentially expensive computations, in order to speed up propagation of + /// cancellation. + /// + /// Cancellation will automatically be triggered by salsa on any query + /// invocation. + /// + /// This method should not be overridden by `Database` implementors. A + /// `salsa_event` is emitted when this method is called, so that should be + /// used instead. + #[inline] + fn unwind_if_cancelled(&self) { + let runtime = self.salsa_runtime(); + self.salsa_event(Event { + runtime_id: runtime.id(), + kind: EventKind::WillCheckCancellation, + }); + + let current_revision = runtime.current_revision(); + let pending_revision = runtime.pending_revision(); + tracing::debug!( + "unwind_if_cancelled: current_revision={:?}, pending_revision={:?}", + current_revision, + pending_revision + ); + if pending_revision > current_revision { + runtime.unwind_cancelled(); + } + } + + /// Gives access to the underlying salsa runtime. + /// + /// This method should not be overridden by `Database` implementors. + fn salsa_runtime(&self) -> &Runtime { + self.ops_salsa_runtime() + } + + /// Gives access to the underlying salsa runtime. + /// + /// This method should not be overridden by `Database` implementors. + fn salsa_runtime_mut(&mut self) -> &mut Runtime { + self.ops_salsa_runtime_mut() + } +} + +/// The `Event` struct identifies various notable things that can +/// occur during salsa execution. Instances of this struct are given +/// to `salsa_event`. +pub struct Event { + /// The id of the snapshot that triggered the event. 
Usually + /// 1-to-1 with a thread, as well. + pub runtime_id: RuntimeId, + + /// What sort of event was it. + pub kind: EventKind, +} + +impl Event { + /// Returns a type that gives a user-readable debug output. + /// Use like `println!("{:?}", index.debug(db))`. + pub fn debug<'me, D: ?Sized>(&'me self, db: &'me D) -> impl std::fmt::Debug + 'me + where + D: plumbing::DatabaseOps, + { + EventDebug { event: self, db } + } +} + +impl fmt::Debug for Event { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt.debug_struct("Event") + .field("runtime_id", &self.runtime_id) + .field("kind", &self.kind) + .finish() + } +} + +struct EventDebug<'me, D: ?Sized> +where + D: plumbing::DatabaseOps, +{ + event: &'me Event, + db: &'me D, +} + +impl<'me, D: ?Sized> fmt::Debug for EventDebug<'me, D> +where + D: plumbing::DatabaseOps, +{ + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt.debug_struct("Event") + .field("runtime_id", &self.event.runtime_id) + .field("kind", &self.event.kind.debug(self.db)) + .finish() + } +} + +/// An enum identifying the various kinds of events that can occur. +pub enum EventKind { + /// Occurs when we found that all inputs to a memoized value are + /// up-to-date and hence the value can be re-used without + /// executing the closure. + /// + /// Executes before the "re-used" value is returned. + DidValidateMemoizedValue { + /// The database-key for the affected value. Implements `Debug`. + database_key: DatabaseKeyIndex, + }, + + /// Indicates that another thread (with id `other_runtime_id`) is processing the + /// given query (`database_key`), so we will block until they + /// finish. + /// + /// Executes after we have registered with the other thread but + /// before they have answered us. + /// + /// (NB: you can find the `id` of the current thread via the + /// `salsa_runtime`) + WillBlockOn { + /// The id of the runtime we will block on. 
+ other_runtime_id: RuntimeId, + + /// The database-key for the affected value. Implements `Debug`. + database_key: DatabaseKeyIndex, + }, + + /// Indicates that the function for this query will be executed. + /// This is either because it has never executed before or because + /// its inputs may be out of date. + WillExecute { + /// The database-key for the affected value. Implements `Debug`. + database_key: DatabaseKeyIndex, + }, + + /// Indicates that `unwind_if_cancelled` was called and salsa will check if + /// the current revision has been cancelled. + WillCheckCancellation, +} + +impl EventKind { + /// Returns a type that gives a user-readable debug output. + /// Use like `println!("{:?}", index.debug(db))`. + pub fn debug<'me, D: ?Sized>(&'me self, db: &'me D) -> impl std::fmt::Debug + 'me + where + D: plumbing::DatabaseOps, + { + EventKindDebug { kind: self, db } + } +} + +impl fmt::Debug for EventKind { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + EventKind::DidValidateMemoizedValue { database_key } => fmt + .debug_struct("DidValidateMemoizedValue") + .field("database_key", database_key) + .finish(), + EventKind::WillBlockOn { other_runtime_id, database_key } => fmt + .debug_struct("WillBlockOn") + .field("other_runtime_id", other_runtime_id) + .field("database_key", database_key) + .finish(), + EventKind::WillExecute { database_key } => { + fmt.debug_struct("WillExecute").field("database_key", database_key).finish() + } + EventKind::WillCheckCancellation => fmt.debug_struct("WillCheckCancellation").finish(), + } + } +} + +struct EventKindDebug<'me, D: ?Sized> +where + D: plumbing::DatabaseOps, +{ + kind: &'me EventKind, + db: &'me D, +} + +impl<'me, D: ?Sized> fmt::Debug for EventKindDebug<'me, D> +where + D: plumbing::DatabaseOps, +{ + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.kind { + EventKind::DidValidateMemoizedValue { database_key } => fmt + .debug_struct("DidValidateMemoizedValue") 
+ .field("database_key", &database_key.debug(self.db)) + .finish(), + EventKind::WillBlockOn { other_runtime_id, database_key } => fmt + .debug_struct("WillBlockOn") + .field("other_runtime_id", &other_runtime_id) + .field("database_key", &database_key.debug(self.db)) + .finish(), + EventKind::WillExecute { database_key } => fmt + .debug_struct("WillExecute") + .field("database_key", &database_key.debug(self.db)) + .finish(), + EventKind::WillCheckCancellation => fmt.debug_struct("WillCheckCancellation").finish(), + } + } +} + +/// Indicates a database that also supports parallel query +/// evaluation. All of Salsa's base query support is capable of +/// parallel execution, but for it to work, your query key/value types +/// must also be `Send`, as must any additional data in your database. +pub trait ParallelDatabase: Database + Send { + /// Creates a second handle to the database that holds the + /// database fixed at a particular revision. So long as this + /// "frozen" handle exists, any attempt to [`set`] an input will + /// block. + /// + /// [`set`]: struct.QueryTable.html#method.set + /// + /// This is the method you are meant to use most of the time in a + /// parallel setting where modifications may arise asynchronously + /// (e.g., a language server). In this context, it is common to + /// wish to "fork off" a snapshot of the database performing some + /// series of queries in parallel and arranging the results. Using + /// this method for that purpose ensures that those queries will + /// see a consistent view of the database (it is also advisable + /// for those queries to use the [`Runtime::unwind_if_cancelled`] + /// method to check for cancellation). + /// + /// # Panics + /// + /// It is not permitted to create a snapshot from inside of a + /// query. Attepting to do so will panic. + /// + /// # Deadlock warning + /// + /// The intended pattern for snapshots is that, once created, they + /// are sent to another thread and used from there. 
As such, the + /// `snapshot` acquires a "read lock" on the database -- + /// therefore, so long as the `snapshot` is not dropped, any + /// attempt to `set` a value in the database will block. If the + /// `snapshot` is owned by the same thread that is attempting to + /// `set`, this will cause a problem. + /// + /// # How to implement this + /// + /// Typically, this method will create a second copy of your + /// database type (`MyDatabaseType`, in the example below), + /// cloning over each of the fields from `self` into this new + /// copy. For the field that stores the salsa runtime, you should + /// use [the `Runtime::snapshot` method][rfm] to create a snapshot of the + /// runtime. Finally, package up the result using `Snapshot::new`, + /// which is a simple wrapper type that only gives `&self` access + /// to the database within (thus preventing the use of methods + /// that may mutate the inputs): + /// + /// [rfm]: struct.Runtime.html#method.snapshot + /// + /// ```rust,ignore + /// impl ParallelDatabase for MyDatabaseType { + /// fn snapshot(&self) -> Snapshot { + /// Snapshot::new( + /// MyDatabaseType { + /// runtime: self.runtime.snapshot(self), + /// other_field: self.other_field.clone(), + /// } + /// ) + /// } + /// } + /// ``` + fn snapshot(&self) -> Snapshot; +} + +/// Simple wrapper struct that takes ownership of a database `DB` and +/// only gives `&self` access to it. See [the `snapshot` method][fm] +/// for more details. +/// +/// [fm]: trait.ParallelDatabase.html#method.snapshot +#[derive(Debug)] +pub struct Snapshot +where + DB: ParallelDatabase, +{ + db: DB, +} + +impl Snapshot +where + DB: ParallelDatabase, +{ + /// Creates a `Snapshot` that wraps the given database handle + /// `db`. From this point forward, only shared references to `db` + /// will be possible. 
+ pub fn new(db: DB) -> Self { + Snapshot { db } + } +} + +impl std::ops::Deref for Snapshot +where + DB: ParallelDatabase, +{ + type Target = DB; + + fn deref(&self) -> &DB { + &self.db + } +} + +/// An integer that uniquely identifies a particular query instance within the +/// database. Used to track dependencies between queries. Fully ordered and +/// equatable but those orderings are arbitrary, and meant to be used only for +/// inserting into maps and the like. +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] +pub struct DatabaseKeyIndex { + group_index: u16, + query_index: u16, + key_index: u32, +} + +impl DatabaseKeyIndex { + /// Returns the index of the query group containing this key. + #[inline] + pub fn group_index(self) -> u16 { + self.group_index + } + + /// Returns the index of the query within its query group. + #[inline] + pub fn query_index(self) -> u16 { + self.query_index + } + + /// Returns the index of this particular query key within the query. + #[inline] + pub fn key_index(self) -> u32 { + self.key_index + } + + /// Returns a type that gives a user-readable debug output. + /// Use like `println!("{:?}", index.debug(db))`. + pub fn debug(self, db: &D) -> impl std::fmt::Debug + '_ + where + D: plumbing::DatabaseOps, + { + DatabaseKeyIndexDebug { index: self, db } + } +} + +/// Helper type for `DatabaseKeyIndex::debug` +struct DatabaseKeyIndexDebug<'me, D: ?Sized> +where + D: plumbing::DatabaseOps, +{ + index: DatabaseKeyIndex, + db: &'me D, +} + +impl std::fmt::Debug for DatabaseKeyIndexDebug<'_, D> +where + D: plumbing::DatabaseOps, +{ + fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.db.fmt_index(self.index, fmt) + } +} + +/// Trait implements by all of the "special types" associated with +/// each of your queries. +/// +/// Base trait of `Query` that has a lifetime parameter to allow the `DynDb` to be non-'static. 
+pub trait QueryDb<'d>: Sized { + /// Dyn version of the associated trait for this query group. + type DynDb: ?Sized + Database + HasQueryGroup + 'd; + + /// Associate query group struct. + type Group: plumbing::QueryGroup; + + /// Generated struct that contains storage for all queries in a group. + type GroupStorage; +} + +/// Trait implements by all of the "special types" associated with +/// each of your queries. +pub trait Query: Debug + Default + Sized + for<'d> QueryDb<'d> { + /// Type that you you give as a parameter -- for queries with zero + /// or more than one input, this will be a tuple. + type Key: Clone + Debug + Hash + Eq; + + /// What value does the query return? + type Value: Clone + Debug; + + /// Internal struct storing the values for the query. + // type Storage: plumbing::QueryStorageOps; + type Storage; + + /// A unique index identifying this query within the group. + const QUERY_INDEX: u16; + + /// Name of the query method (e.g., `foo`) + const QUERY_NAME: &'static str; + + /// Extact storage for this query from the storage for its group. + fn query_storage<'a>( + group_storage: &'a >::GroupStorage, + ) -> &'a std::sync::Arc; + + /// Extact storage for this query from the storage for its group. + fn query_storage_mut<'a>( + group_storage: &'a >::GroupStorage, + ) -> &'a std::sync::Arc; +} + +/// Return value from [the `query` method] on `Database`. +/// Gives access to various less common operations on queries. +/// +/// [the `query` method]: trait.Database.html#method.query +pub struct QueryTable<'me, Q> +where + Q: Query, +{ + db: &'me >::DynDb, + storage: &'me Q::Storage, +} + +impl<'me, Q> QueryTable<'me, Q> +where + Q: Query, + Q::Storage: QueryStorageOps, +{ + /// Constructs a new `QueryTable`. + pub fn new(db: &'me >::DynDb, storage: &'me Q::Storage) -> Self { + Self { db, storage } + } + + /// Execute the query on a given input. Usually it's easier to + /// invoke the trait method directly. 
Note that for variadic + /// queries (those with no inputs, or those with more than one + /// input) the key will be a tuple. + pub fn get(&self, key: Q::Key) -> Q::Value { + self.storage.fetch(self.db, &key) + } + + /// Completely clears the storage for this query. + /// + /// This method breaks internal invariants of salsa, so any further queries + /// might return nonsense results. It is useful only in very specific + /// circumstances -- for example, when one wants to observe which values + /// dropped together with the table + pub fn purge(&self) + where + Q::Storage: plumbing::QueryStorageMassOps, + { + self.storage.purge(); + } +} + +/// Return value from [the `query_mut` method] on `Database`. +/// Gives access to the `set` method, notably, that is used to +/// set the value of an input query. +/// +/// [the `query_mut` method]: trait.Database.html#method.query_mut +pub struct QueryTableMut<'me, Q> +where + Q: Query + 'me, +{ + runtime: &'me mut Runtime, + storage: &'me Q::Storage, +} + +impl<'me, Q> QueryTableMut<'me, Q> +where + Q: Query, +{ + /// Constructs a new `QueryTableMut`. + pub fn new(runtime: &'me mut Runtime, storage: &'me Q::Storage) -> Self { + Self { runtime, storage } + } + + /// Assign a value to an "input query". Must be used outside of + /// an active query computation. + /// + /// If you are using `snapshot`, see the notes on blocking + /// and cancellation on [the `query_mut` method]. + /// + /// [the `query_mut` method]: trait.Database.html#method.query_mut + pub fn set(&mut self, key: Q::Key, value: Q::Value) + where + Q::Storage: plumbing::InputQueryStorageOps, + { + self.set_with_durability(key, value, Durability::LOW); + } + + /// Assign a value to an "input query", with the additional + /// promise that this value will **never change**. Must be used + /// outside of an active query computation. + /// + /// If you are using `snapshot`, see the notes on blocking + /// and cancellation on [the `query_mut` method]. 
+ /// + /// [the `query_mut` method]: trait.Database.html#method.query_mut + pub fn set_with_durability(&mut self, key: Q::Key, value: Q::Value, durability: Durability) + where + Q::Storage: plumbing::InputQueryStorageOps, + { + self.storage.set(self.runtime, &key, value, durability); + } + + /// Sets the size of LRU cache of values for this query table. + /// + /// That is, at most `cap` values will be preset in the table at the same + /// time. This helps with keeping maximum memory usage under control, at the + /// cost of potential extra recalculations of evicted values. + /// + /// If `cap` is zero, all values are preserved, this is the default. + pub fn set_lru_capacity(&self, cap: usize) + where + Q::Storage: plumbing::LruQueryStorageOps, + { + self.storage.set_lru_capacity(cap); + } + + /// Marks the computed value as outdated. + /// + /// This causes salsa to re-execute the query function on the next access to + /// the query, even if all dependencies are up to date. + /// + /// This is most commonly used as part of the [on-demand input + /// pattern](https://salsa-rs.github.io/salsa/common_patterns/on_demand_inputs.html). + pub fn invalidate(&mut self, key: &Q::Key) + where + Q::Storage: plumbing::DerivedQueryStorageOps, + { + self.storage.invalidate(self.runtime, key) + } +} + +/// A panic payload indicating that execution of a salsa query was cancelled. +/// +/// This can occur for a few reasons: +/// * +/// * +/// * +#[derive(Debug)] +#[non_exhaustive] +pub enum Cancelled { + /// The query was operating on revision R, but there is a pending write to move to revision R+1. + #[non_exhaustive] + PendingWrite, + + /// The query was blocked on another thread, and that thread panicked. + #[non_exhaustive] + PropagatedPanic, +} + +impl Cancelled { + fn throw(self) -> ! { + // We use resume and not panic here to avoid running the panic + // hook (that is, to avoid collecting and printing backtrace). 
+ std::panic::resume_unwind(Box::new(self)); + } + + /// Runs `f`, and catches any salsa cancellation. + pub fn catch(f: F) -> Result + where + F: FnOnce() -> T + UnwindSafe, + { + match panic::catch_unwind(f) { + Ok(t) => Ok(t), + Err(payload) => match payload.downcast() { + Ok(cancelled) => Err(*cancelled), + Err(payload) => panic::resume_unwind(payload), + }, + } + } +} + +impl std::fmt::Display for Cancelled { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let why = match self { + Cancelled::PendingWrite => "pending write", + Cancelled::PropagatedPanic => "propagated panic", + }; + f.write_str("cancelled because of ")?; + f.write_str(why) + } +} + +impl std::error::Error for Cancelled {} + +/// Captures the participants of a cycle that occurred when executing a query. +/// +/// This type is meant to be used to help give meaningful error messages to the +/// user or to help salsa developers figure out why their program is resulting +/// in a computation cycle. +/// +/// It is used in a few ways: +/// +/// * During [cycle recovery](https://https://salsa-rs.github.io/salsa/cycles/fallback.html), +/// where it is given to the fallback function. +/// * As the panic value when an unexpected cycle (i.e., a cycle where one or more participants +/// lacks cycle recovery information) occurs. +/// +/// You can read more about cycle handling in +/// the [salsa book](https://https://salsa-rs.github.io/salsa/cycles.html). +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Cycle { + participants: plumbing::CycleParticipants, +} + +impl Cycle { + pub(crate) fn new(participants: plumbing::CycleParticipants) -> Self { + Self { participants } + } + + /// True if two `Cycle` values represent the same cycle. + pub(crate) fn is(&self, cycle: &Cycle) -> bool { + triomphe::Arc::ptr_eq(&self.participants, &cycle.participants) + } + + pub(crate) fn throw(self) -> ! 
{ + tracing::debug!("throwing cycle {:?}", self); + std::panic::resume_unwind(Box::new(self)) + } + + pub(crate) fn catch(execute: impl FnOnce() -> T) -> Result { + match std::panic::catch_unwind(AssertUnwindSafe(execute)) { + Ok(v) => Ok(v), + Err(err) => match err.downcast::() { + Ok(cycle) => Err(*cycle), + Err(other) => std::panic::resume_unwind(other), + }, + } + } + + /// Iterate over the [`DatabaseKeyIndex`] for each query participating + /// in the cycle. The start point of this iteration within the cycle + /// is arbitrary but deterministic, but the ordering is otherwise determined + /// by the execution. + pub fn participant_keys(&self) -> impl Iterator + '_ { + self.participants.iter().copied() + } + + /// Returns a vector with the debug information for + /// all the participants in the cycle. + pub fn all_participants(&self, db: &DB) -> Vec { + self.participant_keys().map(|d| format!("{:?}", d.debug(db))).collect() + } + + /// Returns a vector with the debug information for + /// those participants in the cycle that lacked recovery + /// information. + pub fn unexpected_participants(&self, db: &DB) -> Vec { + self.participant_keys() + .filter(|&d| db.cycle_recovery_strategy(d) == CycleRecoveryStrategy::Panic) + .map(|d| format!("{:?}", d.debug(db))) + .collect() + } + + /// Returns a "debug" view onto this strict that can be used to print out information. 
+ pub fn debug<'me, DB: ?Sized + Database>(&'me self, db: &'me DB) -> impl std::fmt::Debug + 'me { + struct UnexpectedCycleDebug<'me> { + c: &'me Cycle, + db: &'me dyn Database, + } + + impl<'me> std::fmt::Debug for UnexpectedCycleDebug<'me> { + fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + fmt.debug_struct("UnexpectedCycle") + .field("all_participants", &self.c.all_participants(self.db)) + .field("unexpected_participants", &self.c.unexpected_participants(self.db)) + .finish() + } + } + + UnexpectedCycleDebug { c: self, db: db.ops_database() } + } +} + +// Re-export the procedural macros. +#[allow(unused_imports)] +#[macro_use] +extern crate salsa_macros; +use plumbing::HasQueryGroup; +pub use salsa_macros::*; diff --git a/crates/salsa/src/lru.rs b/crates/salsa/src/lru.rs new file mode 100644 index 000000000000..c6b9778f20ad --- /dev/null +++ b/crates/salsa/src/lru.rs @@ -0,0 +1,325 @@ +//! +use oorandom::Rand64; +use parking_lot::Mutex; +use std::fmt::Debug; +use std::sync::atomic::AtomicUsize; +use std::sync::atomic::Ordering; +use triomphe::Arc; + +/// A simple and approximate concurrent lru list. +/// +/// We assume but do not verify that each node is only used with one +/// list. If this is not the case, it is not *unsafe*, but panics and +/// weird results will ensue. +/// +/// Each "node" in the list is of type `Node` and must implement +/// `LruNode`, which is a trait that gives access to a field that +/// stores the index in the list. This index gives us a rough idea of +/// how recently the node has been used. 
+#[derive(Debug)] +pub(crate) struct Lru +where + Node: LruNode, +{ + green_zone: AtomicUsize, + data: Mutex>, +} + +#[derive(Debug)] +struct LruData { + end_red_zone: usize, + end_yellow_zone: usize, + end_green_zone: usize, + rng: Rand64, + entries: Vec>, +} + +pub(crate) trait LruNode: Sized + Debug { + fn lru_index(&self) -> &LruIndex; +} + +#[derive(Debug)] +pub(crate) struct LruIndex { + /// Index in the approprate LRU list, or std::usize::MAX if not a + /// member. + index: AtomicUsize, +} + +impl Default for Lru +where + Node: LruNode, +{ + fn default() -> Self { + Lru::new() + } +} + +// We always use a fixed seed for our randomness so that we have +// predictable results. +const LRU_SEED: &str = "Hello, Rustaceans"; + +impl Lru +where + Node: LruNode, +{ + /// Creates a new LRU list where LRU caching is disabled. + pub(crate) fn new() -> Self { + Self::with_seed(LRU_SEED) + } + + #[cfg_attr(not(test), allow(dead_code))] + fn with_seed(seed: &str) -> Self { + Lru { green_zone: AtomicUsize::new(0), data: Mutex::new(LruData::with_seed(seed)) } + } + + /// Adjust the total number of nodes permitted to have a value at + /// once. If `len` is zero, this disables LRU caching completely. + pub(crate) fn set_lru_capacity(&self, len: usize) { + let mut data = self.data.lock(); + + // We require each zone to have at least 1 slot. Therefore, + // the length cannot be just 1 or 2. + if len == 0 { + self.green_zone.store(0, Ordering::Release); + data.resize(0, 0, 0); + } else { + let len = std::cmp::max(len, 3); + + // Top 10% is the green zone. This must be at least length 1. + let green_zone = std::cmp::max(len / 10, 1); + + // Next 20% is the yellow zone. + let yellow_zone = std::cmp::max(len / 5, 1); + + // Remaining 70% is the red zone. + let red_zone = len - yellow_zone - green_zone; + + // We need quick access to the green zone. + self.green_zone.store(green_zone, Ordering::Release); + + // Resize existing array. 
+ data.resize(green_zone, yellow_zone, red_zone); + } + } + + /// Records that `node` was used. This may displace an old node (if the LRU limits are + pub(crate) fn record_use(&self, node: &Arc) -> Option> { + tracing::debug!("record_use(node={:?})", node); + + // Load green zone length and check if the LRU cache is even enabled. + let green_zone = self.green_zone.load(Ordering::Acquire); + tracing::debug!("record_use: green_zone={}", green_zone); + if green_zone == 0 { + return None; + } + + // Find current index of list (if any) and the current length + // of our green zone. + let index = node.lru_index().load(); + tracing::debug!("record_use: index={}", index); + + // Already a member of the list, and in the green zone -- nothing to do! + if index < green_zone { + return None; + } + + self.data.lock().record_use(node) + } + + pub(crate) fn purge(&self) { + self.green_zone.store(0, Ordering::SeqCst); + *self.data.lock() = LruData::with_seed(LRU_SEED); + } +} + +impl LruData +where + Node: LruNode, +{ + fn with_seed(seed_str: &str) -> Self { + Self::with_rng(rng_with_seed(seed_str)) + } + + fn with_rng(rng: Rand64) -> Self { + LruData { end_yellow_zone: 0, end_green_zone: 0, end_red_zone: 0, entries: Vec::new(), rng } + } + + fn green_zone(&self) -> std::ops::Range { + 0..self.end_green_zone + } + + fn yellow_zone(&self) -> std::ops::Range { + self.end_green_zone..self.end_yellow_zone + } + + fn red_zone(&self) -> std::ops::Range { + self.end_yellow_zone..self.end_red_zone + } + + fn resize(&mut self, len_green_zone: usize, len_yellow_zone: usize, len_red_zone: usize) { + self.end_green_zone = len_green_zone; + self.end_yellow_zone = self.end_green_zone + len_yellow_zone; + self.end_red_zone = self.end_yellow_zone + len_red_zone; + let entries = std::mem::replace(&mut self.entries, Vec::with_capacity(self.end_red_zone)); + + tracing::debug!("green_zone = {:?}", self.green_zone()); + tracing::debug!("yellow_zone = {:?}", self.yellow_zone()); + 
tracing::debug!("red_zone = {:?}", self.red_zone()); + + // We expect to resize when the LRU cache is basically empty. + // So just forget all the old LRU indices to start. + for entry in entries { + entry.lru_index().clear(); + } + } + + /// Records that a node was used. If it is already a member of the + /// LRU list, it is promoted to the green zone (unless it's + /// already there). Otherwise, it is added to the list first and + /// *then* promoted to the green zone. Adding a new node to the + /// list may displace an old member of the red zone, in which case + /// that is returned. + fn record_use(&mut self, node: &Arc) -> Option> { + tracing::debug!("record_use(node={:?})", node); + + // NB: When this is invoked, we have typically already loaded + // the LRU index (to check if it is in green zone). But that + // check was done outside the lock and -- for all we know -- + // the index may have changed since. So we always reload. + let index = node.lru_index().load(); + + if index < self.end_green_zone { + None + } else if index < self.end_yellow_zone { + self.promote_yellow_to_green(node, index); + None + } else if index < self.end_red_zone { + self.promote_red_to_green(node, index); + None + } else { + self.insert_new(node) + } + } + + /// Inserts a node that is not yet a member of the LRU list. If + /// the list is at capacity, this can displace an existing member. + fn insert_new(&mut self, node: &Arc) -> Option> { + debug_assert!(!node.lru_index().is_in_lru()); + + // Easy case: we still have capacity. Push it, and then promote + // it up to the appropriate zone. + let len = self.entries.len(); + if len < self.end_red_zone { + self.entries.push(node.clone()); + node.lru_index().store(len); + tracing::debug!("inserted node {:?} at {}", node, len); + return self.record_use(node); + } + + // Harder case: no capacity. Create some by evicting somebody from red + // zone and then promoting. 
+ let victim_index = self.pick_index(self.red_zone()); + let victim_node = std::mem::replace(&mut self.entries[victim_index], node.clone()); + tracing::debug!("evicting red node {:?} from {}", victim_node, victim_index); + victim_node.lru_index().clear(); + self.promote_red_to_green(node, victim_index); + Some(victim_node) + } + + /// Promotes the node `node`, stored at `red_index` (in the red + /// zone), into a green index, demoting yellow/green nodes at + /// random. + /// + /// NB: It is not required that `node.lru_index()` is up-to-date + /// when entering this method. + fn promote_red_to_green(&mut self, node: &Arc, red_index: usize) { + debug_assert!(self.red_zone().contains(&red_index)); + + // Pick a yellow at random and switch places with it. + // + // Subtle: we do not update `node.lru_index` *yet* -- we're + // going to invoke `self.promote_yellow` next, and it will get + // updated then. + let yellow_index = self.pick_index(self.yellow_zone()); + tracing::debug!( + "demoting yellow node {:?} from {} to red at {}", + self.entries[yellow_index], + yellow_index, + red_index, + ); + self.entries.swap(yellow_index, red_index); + self.entries[red_index].lru_index().store(red_index); + + // Now move ourselves up into the green zone. + self.promote_yellow_to_green(node, yellow_index); + } + + /// Promotes the node `node`, stored at `yellow_index` (in the + /// yellow zone), into a green index, demoting a green node at + /// random to replace it. + /// + /// NB: It is not required that `node.lru_index()` is up-to-date + /// when entering this method. + fn promote_yellow_to_green(&mut self, node: &Arc, yellow_index: usize) { + debug_assert!(self.yellow_zone().contains(&yellow_index)); + + // Pick a yellow at random and switch places with it. 
+ let green_index = self.pick_index(self.green_zone()); + tracing::debug!( + "demoting green node {:?} from {} to yellow at {}", + self.entries[green_index], + green_index, + yellow_index + ); + self.entries.swap(green_index, yellow_index); + self.entries[yellow_index].lru_index().store(yellow_index); + node.lru_index().store(green_index); + + tracing::debug!("promoted {:?} to green index {}", node, green_index); + } + + fn pick_index(&mut self, zone: std::ops::Range) -> usize { + let end_index = std::cmp::min(zone.end, self.entries.len()); + self.rng.rand_range(zone.start as u64..end_index as u64) as usize + } +} + +impl Default for LruIndex { + fn default() -> Self { + Self { index: AtomicUsize::new(std::usize::MAX) } + } +} + +impl LruIndex { + fn load(&self) -> usize { + self.index.load(Ordering::Acquire) // see note on ordering below + } + + fn store(&self, value: usize) { + self.index.store(value, Ordering::Release) // see note on ordering below + } + + fn clear(&self) { + self.store(std::usize::MAX); + } + + fn is_in_lru(&self) -> bool { + self.load() != std::usize::MAX + } +} + +fn rng_with_seed(seed_str: &str) -> Rand64 { + let mut seed: [u8; 16] = [0; 16]; + for (i, &b) in seed_str.as_bytes().iter().take(16).enumerate() { + seed[i] = b; + } + Rand64::new(u128::from_le_bytes(seed)) +} + +// A note on ordering: +// +// I chose to use AcqRel for the ordering but I don't think it's +// strictly needed. All writes occur under a lock, so they should be +// ordered w/r/t one another. As for the reads, they can occur +// outside the lock, but they don't themselves enable dependent reads +// -- if the reads are out of bounds, we would acquire a lock. diff --git a/crates/salsa/src/plumbing.rs b/crates/salsa/src/plumbing.rs new file mode 100644 index 000000000000..71332e39cadb --- /dev/null +++ b/crates/salsa/src/plumbing.rs @@ -0,0 +1,238 @@ +//! 
+#![allow(missing_docs)] + +use crate::debug::TableEntry; +use crate::durability::Durability; +use crate::Cycle; +use crate::Database; +use crate::Query; +use crate::QueryTable; +use crate::QueryTableMut; +use std::borrow::Borrow; +use std::fmt::Debug; +use std::hash::Hash; +use triomphe::Arc; + +pub use crate::derived::DependencyStorage; +pub use crate::derived::MemoizedStorage; +pub use crate::input::{InputStorage, UnitInputStorage}; +pub use crate::interned::InternedStorage; +pub use crate::interned::LookupInternedStorage; +pub use crate::{revision::Revision, DatabaseKeyIndex, QueryDb, Runtime}; + +/// Defines various associated types. An impl of this +/// should be generated for your query-context type automatically by +/// the `database_storage` macro, so you shouldn't need to mess +/// with this trait directly. +pub trait DatabaseStorageTypes: Database { + /// Defines the "storage type", where all the query data is kept. + /// This type is defined by the `database_storage` macro. + type DatabaseStorage: Default; +} + +/// Internal operations that the runtime uses to operate on the database. +pub trait DatabaseOps { + /// Upcast this type to a `dyn Database`. + fn ops_database(&self) -> &dyn Database; + + /// Gives access to the underlying salsa runtime. + fn ops_salsa_runtime(&self) -> &Runtime; + + /// Gives access to the underlying salsa runtime. + fn ops_salsa_runtime_mut(&mut self) -> &mut Runtime; + + /// Formats a database key index in a human readable fashion. + fn fmt_index( + &self, + index: DatabaseKeyIndex, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result; + + /// True if the computed value for `input` may have changed since `revision`. + fn maybe_changed_after(&self, input: DatabaseKeyIndex, revision: Revision) -> bool; + + /// Find the `CycleRecoveryStrategy` for a given input. + fn cycle_recovery_strategy(&self, input: DatabaseKeyIndex) -> CycleRecoveryStrategy; + + /// Executes the callback for each kind of query. 
+ fn for_each_query(&self, op: &mut dyn FnMut(&dyn QueryStorageMassOps)); +} + +/// Internal operations performed on the query storage as a whole +/// (note that these ops do not need to know the identity of the +/// query, unlike `QueryStorageOps`). +pub trait QueryStorageMassOps { + fn purge(&self); +} + +pub trait DatabaseKey: Clone + Debug + Eq + Hash {} + +pub trait QueryFunction: Query { + /// See `CycleRecoveryStrategy` + const CYCLE_STRATEGY: CycleRecoveryStrategy; + + fn execute(db: &>::DynDb, key: Self::Key) -> Self::Value; + + fn cycle_fallback( + db: &>::DynDb, + cycle: &Cycle, + key: &Self::Key, + ) -> Self::Value { + let _ = (db, cycle, key); + panic!("query `{:?}` doesn't support cycle fallback", Self::default()) + } +} + +/// Cycle recovery strategy: Is this query capable of recovering from +/// a cycle that results from executing the function? If so, how? +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum CycleRecoveryStrategy { + /// Cannot recover from cycles: panic. + /// + /// This is the default. It is also what happens if a cycle + /// occurs and the queries involved have different recovery + /// strategies. + /// + /// In the case of a failure due to a cycle, the panic + /// value will be XXX (FIXME). + Panic, + + /// Recovers from cycles by storing a sentinel value. + /// + /// This value is computed by the `QueryFunction::cycle_fallback` + /// function. + Fallback, +} + +/// Create a query table, which has access to the storage for the query +/// and offers methods like `get`. +pub fn get_query_table<'me, Q>(db: &'me >::DynDb) -> QueryTable<'me, Q> +where + Q: Query + 'me, + Q::Storage: QueryStorageOps, +{ + let group_storage: &Q::GroupStorage = HasQueryGroup::group_storage(db); + let query_storage: &Q::Storage = Q::query_storage(group_storage); + QueryTable::new(db, query_storage) +} + +/// Create a mutable query table, which has access to the storage +/// for the query and offers methods like `set`. 
+pub fn get_query_table_mut<'me, Q>(db: &'me mut >::DynDb) -> QueryTableMut<'me, Q> +where + Q: Query, +{ + let (group_storage, runtime) = HasQueryGroup::group_storage_mut(db); + let query_storage = Q::query_storage_mut(group_storage); + QueryTableMut::new(runtime, &**query_storage) +} + +pub trait QueryGroup: Sized { + type GroupStorage; + + /// Dyn version of the associated database trait. + type DynDb: ?Sized + Database + HasQueryGroup; +} + +/// Trait implemented by a database for each group that it supports. +/// `S` and `K` are the types for *group storage* and *group key*, respectively. +pub trait HasQueryGroup: Database +where + G: QueryGroup, +{ + /// Access the group storage struct from the database. + fn group_storage(&self) -> &G::GroupStorage; + + /// Access the group storage struct from the database. + /// Also returns a ref to the `Runtime`, since otherwise + /// the database is borrowed and one cannot get access to it. + fn group_storage_mut(&mut self) -> (&G::GroupStorage, &mut Runtime); +} + +// ANCHOR:QueryStorageOps +pub trait QueryStorageOps +where + Self: QueryStorageMassOps, + Q: Query, +{ + // ANCHOR_END:QueryStorageOps + + /// See CycleRecoveryStrategy + const CYCLE_STRATEGY: CycleRecoveryStrategy; + + fn new(group_index: u16) -> Self; + + /// Format a database key index in a suitable way. + fn fmt_index( + &self, + db: &>::DynDb, + index: DatabaseKeyIndex, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result; + + // ANCHOR:maybe_changed_after + /// True if the value of `input`, which must be from this query, may have + /// changed after the given revision ended. + /// + /// This function should only be invoked with a revision less than the current + /// revision. 
+ fn maybe_changed_after( + &self, + db: &>::DynDb, + input: DatabaseKeyIndex, + revision: Revision, + ) -> bool; + // ANCHOR_END:maybe_changed_after + + fn cycle_recovery_strategy(&self) -> CycleRecoveryStrategy { + Self::CYCLE_STRATEGY + } + + // ANCHOR:fetch + /// Execute the query, returning the result (often, the result + /// will be memoized). This is the "main method" for + /// queries. + /// + /// Returns `Err` in the event of a cycle, meaning that computing + /// the value for this `key` is recursively attempting to fetch + /// itself. + fn fetch(&self, db: &>::DynDb, key: &Q::Key) -> Q::Value; + // ANCHOR_END:fetch + + /// Returns the durability associated with a given key. + fn durability(&self, db: &>::DynDb, key: &Q::Key) -> Durability; + + /// Get the (current) set of the entries in the query storage + fn entries(&self, db: &>::DynDb) -> C + where + C: std::iter::FromIterator>; +} + +/// An optional trait that is implemented for "user mutable" storage: +/// that is, storage whose value is not derived from other storage but +/// is set independently. +pub trait InputQueryStorageOps +where + Q: Query, +{ + fn set(&self, runtime: &mut Runtime, key: &Q::Key, new_value: Q::Value, durability: Durability); +} + +/// An optional trait that is implemented for "user mutable" storage: +/// that is, storage whose value is not derived from other storage but +/// is set independently. +pub trait LruQueryStorageOps { + fn set_lru_capacity(&self, new_capacity: usize); +} + +pub trait DerivedQueryStorageOps +where + Q: Query, +{ + fn invalidate(&self, runtime: &mut Runtime, key: &S) + where + S: Eq + Hash, + Q::Key: Borrow; +} + +pub type CycleParticipants = Arc>; diff --git a/crates/salsa/src/revision.rs b/crates/salsa/src/revision.rs new file mode 100644 index 000000000000..d97aaf9debab --- /dev/null +++ b/crates/salsa/src/revision.rs @@ -0,0 +1,67 @@ +//! 
+use std::num::NonZeroU32; +use std::sync::atomic::{AtomicU32, Ordering}; + +/// Value of the initial revision, as a u32. We don't use 0 +/// because we want to use a `NonZeroU32`. +const START: u32 = 1; + +/// A unique identifier for the current version of the database; each +/// time an input is changed, the revision number is incremented. +/// `Revision` is used internally to track which values may need to be +/// recomputed, but is not something you should have to interact with +/// directly as a user of salsa. +#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct Revision { + generation: NonZeroU32, +} + +impl Revision { + pub(crate) fn start() -> Self { + Self::from(START) + } + + pub(crate) fn from(g: u32) -> Self { + Self { generation: NonZeroU32::new(g).unwrap() } + } + + pub(crate) fn next(self) -> Revision { + Self::from(self.generation.get() + 1) + } + + fn as_u32(self) -> u32 { + self.generation.get() + } +} + +impl std::fmt::Debug for Revision { + fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(fmt, "R{}", self.generation) + } +} + +#[derive(Debug)] +pub(crate) struct AtomicRevision { + data: AtomicU32, +} + +impl AtomicRevision { + pub(crate) fn start() -> Self { + Self { data: AtomicU32::new(START) } + } + + pub(crate) fn load(&self) -> Revision { + Revision::from(self.data.load(Ordering::SeqCst)) + } + + pub(crate) fn store(&self, r: Revision) { + self.data.store(r.as_u32(), Ordering::SeqCst); + } + + /// Increment by 1, returning previous value. + pub(crate) fn fetch_then_increment(&self) -> Revision { + let v = self.data.fetch_add(1, Ordering::SeqCst); + assert!(v != u32::max_value(), "revision overflow"); + Revision::from(v) + } +} diff --git a/crates/salsa/src/runtime.rs b/crates/salsa/src/runtime.rs new file mode 100644 index 000000000000..40b8856991f9 --- /dev/null +++ b/crates/salsa/src/runtime.rs @@ -0,0 +1,667 @@ +//! 
+use crate::durability::Durability; +use crate::hash::FxIndexSet; +use crate::plumbing::CycleRecoveryStrategy; +use crate::revision::{AtomicRevision, Revision}; +use crate::{Cancelled, Cycle, Database, DatabaseKeyIndex, Event, EventKind}; +use parking_lot::lock_api::{RawRwLock, RawRwLockRecursive}; +use parking_lot::{Mutex, RwLock}; +use std::hash::Hash; +use std::panic::panic_any; +use std::sync::atomic::{AtomicUsize, Ordering}; +use tracing::debug; +use triomphe::Arc; + +mod dependency_graph; +use dependency_graph::DependencyGraph; + +pub(crate) mod local_state; +use local_state::LocalState; + +use self::local_state::{ActiveQueryGuard, QueryInputs, QueryRevisions}; + +/// The salsa runtime stores the storage for all queries as well as +/// tracking the query stack and dependencies between cycles. +/// +/// Each new runtime you create (e.g., via `Runtime::new` or +/// `Runtime::default`) will have an independent set of query storage +/// associated with it. Normally, therefore, you only do this once, at +/// the start of your application. +pub struct Runtime { + /// Our unique runtime id. + id: RuntimeId, + + /// If this is a "forked" runtime, then the `revision_guard` will + /// be `Some`; this guard holds a read-lock on the global query + /// lock. + revision_guard: Option, + + /// Local state that is specific to this runtime (thread). + local_state: LocalState, + + /// Shared state that is accessible via all runtimes. 
+ shared_state: Arc, +} + +#[derive(Clone, Debug)] +pub(crate) enum WaitResult { + Completed, + Panicked, + Cycle(Cycle), +} + +impl Default for Runtime { + fn default() -> Self { + Runtime { + id: RuntimeId { counter: 0 }, + revision_guard: None, + shared_state: Default::default(), + local_state: Default::default(), + } + } +} + +impl std::fmt::Debug for Runtime { + fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + fmt.debug_struct("Runtime") + .field("id", &self.id()) + .field("forked", &self.revision_guard.is_some()) + .field("shared_state", &self.shared_state) + .finish() + } +} + +impl Runtime { + /// Create a new runtime; equivalent to `Self::default`. This is + /// used when creating a new database. + pub fn new() -> Self { + Self::default() + } + + /// See [`crate::storage::Storage::snapshot`]. + pub(crate) fn snapshot(&self) -> Self { + if self.local_state.query_in_progress() { + panic!("it is not legal to `snapshot` during a query (see salsa-rs/salsa#80)"); + } + + let revision_guard = RevisionGuard::new(&self.shared_state); + + let id = RuntimeId { counter: self.shared_state.next_id.fetch_add(1, Ordering::SeqCst) }; + + Runtime { + id, + revision_guard: Some(revision_guard), + shared_state: self.shared_state.clone(), + local_state: Default::default(), + } + } + + /// A "synthetic write" causes the system to act *as though* some + /// input of durability `durability` has changed. This is mostly + /// useful for profiling scenarios. + /// + /// **WARNING:** Just like an ordinary write, this method triggers + /// cancellation. If you invoke it while a snapshot exists, it + /// will block until that snapshot is dropped -- if that snapshot + /// is owned by the current thread, this could trigger deadlock. + pub fn synthetic_write(&mut self, durability: Durability) { + self.with_incremented_revision(|_next_revision| Some(durability)); + } + + /// The unique identifier attached to this `SalsaRuntime`. 
Each + /// snapshotted runtime has a distinct identifier. + #[inline] + pub fn id(&self) -> RuntimeId { + self.id + } + + /// Returns the database-key for the query that this thread is + /// actively executing (if any). + pub fn active_query(&self) -> Option { + self.local_state.active_query() + } + + /// Read current value of the revision counter. + #[inline] + pub(crate) fn current_revision(&self) -> Revision { + self.shared_state.revisions[0].load() + } + + /// The revision in which values with durability `d` may have last + /// changed. For D0, this is just the current revision. But for + /// higher levels of durability, this value may lag behind the + /// current revision. If we encounter a value of durability Di, + /// then, we can check this function to get a "bound" on when the + /// value may have changed, which allows us to skip walking its + /// dependencies. + #[inline] + pub(crate) fn last_changed_revision(&self, d: Durability) -> Revision { + self.shared_state.revisions[d.index()].load() + } + + /// Read current value of the revision counter. + #[inline] + pub(crate) fn pending_revision(&self) -> Revision { + self.shared_state.pending_revision.load() + } + + #[cold] + pub(crate) fn unwind_cancelled(&self) { + self.report_untracked_read(); + Cancelled::PendingWrite.throw(); + } + + /// Acquires the **global query write lock** (ensuring that no queries are + /// executing) and then increments the current revision counter; invokes + /// `op` with the global query write lock still held. + /// + /// While we wait to acquire the global query write lock, this method will + /// also increment `pending_revision_increments`, thus signalling to queries + /// that their results are "cancelled" and they should abort as expeditiously + /// as possible. + /// + /// The `op` closure should actually perform the writes needed. 
It is given + /// the new revision as an argument, and its return value indicates whether + /// any pre-existing value was modified: + /// + /// - returning `None` means that no pre-existing value was modified (this + /// could occur e.g. when setting some key on an input that was never set + /// before) + /// - returning `Some(d)` indicates that a pre-existing value was modified + /// and it had the durability `d`. This will update the records for when + /// values with each durability were modified. + /// + /// Note that, given our writer model, we can assume that only one thread is + /// attempting to increment the global revision at a time. + pub(crate) fn with_incremented_revision(&mut self, op: F) + where + F: FnOnce(Revision) -> Option, + { + tracing::debug!("increment_revision()"); + + if !self.permits_increment() { + panic!("increment_revision invoked during a query computation"); + } + + // Set the `pending_revision` field so that people + // know current revision is cancelled. + let current_revision = self.shared_state.pending_revision.fetch_then_increment(); + + // To modify the revision, we need the lock. + let shared_state = self.shared_state.clone(); + let _lock = shared_state.query_lock.write(); + + let old_revision = self.shared_state.revisions[0].fetch_then_increment(); + assert_eq!(current_revision, old_revision); + + let new_revision = current_revision.next(); + + debug!("increment_revision: incremented to {:?}", new_revision); + + if let Some(d) = op(new_revision) { + for rev in &self.shared_state.revisions[1..=d.index()] { + rev.store(new_revision); + } + } + } + + pub(crate) fn permits_increment(&self) -> bool { + self.revision_guard.is_none() && !self.local_state.query_in_progress() + } + + #[inline] + pub(crate) fn push_query(&self, database_key_index: DatabaseKeyIndex) -> ActiveQueryGuard<'_> { + self.local_state.push_query(database_key_index) + } + + /// Reports that the currently active query read the result from + /// another query. 
+ /// + /// Also checks whether the "cycle participant" flag is set on + /// the current stack frame -- if so, panics with `CycleParticipant` + /// value, which should be caught by the code executing the query. + /// + /// # Parameters + /// + /// - `database_key`: the query whose result was read + /// - `changed_revision`: the last revision in which the result of that + /// query had changed + pub(crate) fn report_query_read_and_unwind_if_cycle_resulted( + &self, + input: DatabaseKeyIndex, + durability: Durability, + changed_at: Revision, + ) { + self.local_state + .report_query_read_and_unwind_if_cycle_resulted(input, durability, changed_at); + } + + /// Reports that the query depends on some state unknown to salsa. + /// + /// Queries which report untracked reads will be re-executed in the next + /// revision. + pub fn report_untracked_read(&self) { + self.local_state.report_untracked_read(self.current_revision()); + } + + /// Acts as though the current query had read an input with the given durability; this will force the current query's durability to be at most `durability`. + /// + /// This is mostly useful to control the durability level for [on-demand inputs](https://salsa-rs.github.io/salsa/common_patterns/on_demand_inputs.html). + pub fn report_synthetic_read(&self, durability: Durability) { + let changed_at = self.last_changed_revision(durability); + self.local_state.report_synthetic_read(durability, changed_at); + } + + /// Handles a cycle in the dependency graph that was detected when the + /// current thread tried to block on `database_key_index` which is being + /// executed by `to_id`. If this function returns, then `to_id` no longer + /// depends on the current thread, and so we should continue executing + /// as normal. Otherwise, the function will throw a `Cycle` which is expected + /// to be caught by some frame on our stack. This occurs either if there is + /// a frame on our stack with cycle recovery (possibly the top one!) 
or if there + /// is no cycle recovery at all. + fn unblock_cycle_and_maybe_throw( + &self, + db: &dyn Database, + dg: &mut DependencyGraph, + database_key_index: DatabaseKeyIndex, + to_id: RuntimeId, + ) { + debug!("unblock_cycle_and_maybe_throw(database_key={:?})", database_key_index); + + let mut from_stack = self.local_state.take_query_stack(); + let from_id = self.id(); + + // Make a "dummy stack frame". As we iterate through the cycle, we will collect the + // inputs from each participant. Then, if we are participating in cycle recovery, we + // will propagate those results to all participants. + let mut cycle_query = ActiveQuery::new(database_key_index); + + // Identify the cycle participants: + let cycle = { + let mut v = vec![]; + dg.for_each_cycle_participant( + from_id, + &mut from_stack, + database_key_index, + to_id, + |aqs| { + aqs.iter_mut().for_each(|aq| { + cycle_query.add_from(aq); + v.push(aq.database_key_index); + }); + }, + ); + + // We want to give the participants in a deterministic order + // (at least for this execution, not necessarily across executions), + // no matter where it started on the stack. Find the minimum + // key and rotate it to the front. + let min = v.iter().min().unwrap(); + let index = v.iter().position(|p| p == min).unwrap(); + v.rotate_left(index); + + // No need to store extra memory. + v.shrink_to_fit(); + + Cycle::new(Arc::new(v)) + }; + debug!("cycle {:?}, cycle_query {:#?}", cycle.debug(db), cycle_query,); + + // We can remove the cycle participants from the list of dependencies; + // they are a strongly connected component (SCC) and we only care about + // dependencies to things outside the SCC that control whether it will + // form again. + cycle_query.remove_cycle_participants(&cycle); + + // Mark each cycle participant that has recovery set, along with + // any frames that come after them on the same thread. Those frames + // are going to be unwound so that fallback can occur. 
+ dg.for_each_cycle_participant(from_id, &mut from_stack, database_key_index, to_id, |aqs| { + aqs.iter_mut() + .skip_while(|aq| match db.cycle_recovery_strategy(aq.database_key_index) { + CycleRecoveryStrategy::Panic => true, + CycleRecoveryStrategy::Fallback => false, + }) + .for_each(|aq| { + debug!("marking {:?} for fallback", aq.database_key_index.debug(db)); + aq.take_inputs_from(&cycle_query); + assert!(aq.cycle.is_none()); + aq.cycle = Some(cycle.clone()); + }); + }); + + // Unblock every thread that has cycle recovery with a `WaitResult::Cycle`. + // They will throw the cycle, which will be caught by the frame that has + // cycle recovery so that it can execute that recovery. + let (me_recovered, others_recovered) = + dg.maybe_unblock_runtimes_in_cycle(from_id, &from_stack, database_key_index, to_id); + + self.local_state.restore_query_stack(from_stack); + + if me_recovered { + // If the current thread has recovery, we want to throw + // so that it can begin. + cycle.throw() + } else if others_recovered { + // If other threads have recovery but we didn't: return and we will block on them. + } else { + // if nobody has recover, then we panic + panic_any(cycle); + } + } + + /// Block until `other_id` completes executing `database_key`; + /// panic or unwind in the case of a cycle. + /// + /// `query_mutex_guard` is the guard for the current query's state; + /// it will be dropped after we have successfully registered the + /// dependency. + /// + /// # Propagating panics + /// + /// If the thread `other_id` panics, then our thread is considered + /// cancelled, so this function will panic with a `Cancelled` value. + /// + /// # Cycle handling + /// + /// If the thread `other_id` already depends on the current thread, + /// and hence there is a cycle in the query graph, then this function + /// will unwind instead of returning normally. 
The method of unwinding + /// depends on the [`Self::mutual_cycle_recovery_strategy`] + /// of the cycle participants: + /// + /// * [`CycleRecoveryStrategy::Panic`]: panic with the [`Cycle`] as the value. + /// * [`CycleRecoveryStrategy::Fallback`]: initiate unwinding with [`CycleParticipant::unwind`]. + pub(crate) fn block_on_or_unwind( + &self, + db: &dyn Database, + database_key: DatabaseKeyIndex, + other_id: RuntimeId, + query_mutex_guard: QueryMutexGuard, + ) { + let mut dg = self.shared_state.dependency_graph.lock(); + + if dg.depends_on(other_id, self.id()) { + self.unblock_cycle_and_maybe_throw(db, &mut dg, database_key, other_id); + + // If the above fn returns, then (via cycle recovery) it has unblocked the + // cycle, so we can continue. + assert!(!dg.depends_on(other_id, self.id())); + } + + db.salsa_event(Event { + runtime_id: self.id(), + kind: EventKind::WillBlockOn { other_runtime_id: other_id, database_key }, + }); + + let stack = self.local_state.take_query_stack(); + + let (stack, result) = DependencyGraph::block_on( + dg, + self.id(), + database_key, + other_id, + stack, + query_mutex_guard, + ); + + self.local_state.restore_query_stack(stack); + + match result { + WaitResult::Completed => (), + + // If the other thread panicked, then we consider this thread + // cancelled. The assumption is that the panic will be detected + // by the other thread and responded to appropriately. + WaitResult::Panicked => Cancelled::PropagatedPanic.throw(), + + WaitResult::Cycle(c) => c.throw(), + } + } + + /// Invoked when this runtime completed computing `database_key` with + /// the given result `wait_result` (`wait_result` should be `None` if + /// computing `database_key` panicked and could not complete). + /// This function unblocks any dependent queries and allows them + /// to continue executing. 
+ pub(crate) fn unblock_queries_blocked_on( + &self, + database_key: DatabaseKeyIndex, + wait_result: WaitResult, + ) { + self.shared_state + .dependency_graph + .lock() + .unblock_runtimes_blocked_on(database_key, wait_result); + } +} + +/// State that will be common to all threads (when we support multiple threads) +struct SharedState { + /// Stores the next id to use for a snapshotted runtime (starts at 1). + next_id: AtomicUsize, + + /// Whenever derived queries are executing, they acquire this lock + /// in read mode. Mutating inputs (and thus creating a new + /// revision) requires a write lock (thus guaranteeing that no + /// derived queries are in progress). Note that this is not needed + /// to prevent **race conditions** -- the revision counter itself + /// is stored in an `AtomicUsize` so it can be cheaply read + /// without acquiring the lock. Rather, the `query_lock` is used + /// to ensure a higher-level consistency property. + query_lock: RwLock<()>, + + /// This is typically equal to `revision` -- set to `revision+1` + /// when a new revision is pending (which implies that the current + /// revision is cancelled). + pending_revision: AtomicRevision, + + /// Stores the "last change" revision for values of each duration. + /// This vector is always of length at least 1 (for Durability 0) + /// but its total length depends on the number of durations. The + /// element at index 0 is special as it represents the "current + /// revision". In general, we have the invariant that revisions + /// in here are *declining* -- that is, `revisions[i] >= + /// revisions[i + 1]`, for all `i`. This is because when you + /// modify a value with durability D, that implies that values + /// with durability less than D may have changed too. + revisions: Vec, + + /// The dependency graph tracks which runtimes are blocked on one + /// another, waiting for queries to terminate. 
+ dependency_graph: Mutex, +} + +impl SharedState { + fn with_durabilities(durabilities: usize) -> Self { + SharedState { + next_id: AtomicUsize::new(1), + query_lock: Default::default(), + revisions: (0..durabilities).map(|_| AtomicRevision::start()).collect(), + pending_revision: AtomicRevision::start(), + dependency_graph: Default::default(), + } + } +} + +impl std::panic::RefUnwindSafe for SharedState {} + +impl Default for SharedState { + fn default() -> Self { + Self::with_durabilities(Durability::LEN) + } +} + +impl std::fmt::Debug for SharedState { + fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let query_lock = if self.query_lock.try_write().is_some() { + "" + } else if self.query_lock.try_read().is_some() { + "" + } else { + "" + }; + fmt.debug_struct("SharedState") + .field("query_lock", &query_lock) + .field("revisions", &self.revisions) + .field("pending_revision", &self.pending_revision) + .finish() + } +} + +#[derive(Debug)] +struct ActiveQuery { + /// What query is executing + database_key_index: DatabaseKeyIndex, + + /// Minimum durability of inputs observed so far. + durability: Durability, + + /// Maximum revision of all inputs observed. If we observe an + /// untracked read, this will be set to the most recent revision. + changed_at: Revision, + + /// Set of subqueries that were accessed thus far, or `None` if + /// there was an untracked the read. + dependencies: Option>, + + /// Stores the entire cycle, if one is found and this query is part of it. 
+ cycle: Option, +} + +impl ActiveQuery { + fn new(database_key_index: DatabaseKeyIndex) -> Self { + ActiveQuery { + database_key_index, + durability: Durability::MAX, + changed_at: Revision::start(), + dependencies: Some(FxIndexSet::default()), + cycle: None, + } + } + + fn add_read(&mut self, input: DatabaseKeyIndex, durability: Durability, revision: Revision) { + if let Some(set) = &mut self.dependencies { + set.insert(input); + } + + self.durability = self.durability.min(durability); + self.changed_at = self.changed_at.max(revision); + } + + fn add_untracked_read(&mut self, changed_at: Revision) { + self.dependencies = None; + self.durability = Durability::LOW; + self.changed_at = changed_at; + } + + fn add_synthetic_read(&mut self, durability: Durability, revision: Revision) { + self.dependencies = None; + self.durability = self.durability.min(durability); + self.changed_at = self.changed_at.max(revision); + } + + pub(crate) fn revisions(&self) -> QueryRevisions { + let inputs = match &self.dependencies { + None => QueryInputs::Untracked, + + Some(dependencies) => { + if dependencies.is_empty() { + QueryInputs::NoInputs + } else { + QueryInputs::Tracked { inputs: dependencies.iter().copied().collect() } + } + } + }; + + QueryRevisions { changed_at: self.changed_at, inputs, durability: self.durability } + } + + /// Adds any dependencies from `other` into `self`. + /// Used during cycle recovery, see [`Runtime::create_cycle_error`]. + fn add_from(&mut self, other: &ActiveQuery) { + self.changed_at = self.changed_at.max(other.changed_at); + self.durability = self.durability.min(other.durability); + if let Some(other_dependencies) = &other.dependencies { + if let Some(my_dependencies) = &mut self.dependencies { + my_dependencies.extend(other_dependencies.iter().copied()); + } + } else { + self.dependencies = None; + } + } + + /// Removes the participants in `cycle` from my dependencies. + /// Used during cycle recovery, see [`Runtime::create_cycle_error`]. 
+ fn remove_cycle_participants(&mut self, cycle: &Cycle) { + if let Some(my_dependencies) = &mut self.dependencies { + for p in cycle.participant_keys() { + my_dependencies.remove(&p); + } + } + } + + /// Copy the changed-at, durability, and dependencies from `cycle_query`. + /// Used during cycle recovery, see [`Runtime::create_cycle_error`]. + pub(crate) fn take_inputs_from(&mut self, cycle_query: &ActiveQuery) { + self.changed_at = cycle_query.changed_at; + self.durability = cycle_query.durability; + self.dependencies = cycle_query.dependencies.clone(); + } +} + +/// A unique identifier for a particular runtime. Each time you create +/// a snapshot, a fresh `RuntimeId` is generated. Once a snapshot is +/// complete, its `RuntimeId` may potentially be re-used. +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct RuntimeId { + counter: usize, +} + +#[derive(Clone, Debug)] +pub(crate) struct StampedValue { + pub(crate) value: V, + pub(crate) durability: Durability, + pub(crate) changed_at: Revision, +} + +struct RevisionGuard { + shared_state: Arc, +} + +impl RevisionGuard { + fn new(shared_state: &Arc) -> Self { + // Subtle: we use a "recursive" lock here so that it is not an + // error to acquire a read-lock when one is already held (this + // happens when a query uses `snapshot` to spawn off parallel + // workers, for example). + // + // This has the side-effect that we are responsible to ensure + // that people contending for the write lock do not starve, + // but this is what we achieve via the cancellation mechanism. + // + // (In particular, since we only ever have one "mutating + // handle" to the database, the only contention for the global + // query lock occurs when there are "futures" evaluating + // queries in parallel, and those futures hold a read-lock + // already, so the starvation problem is more about them bring + // themselves to a close, versus preventing other people from + // *starting* work). 
+ unsafe { + shared_state.query_lock.raw().lock_shared_recursive(); + } + + Self { shared_state: shared_state.clone() } + } +} + +impl Drop for RevisionGuard { + fn drop(&mut self) { + // Release our read-lock without using RAII. As documented in + // `Snapshot::new` above, this requires the unsafe keyword. + unsafe { + self.shared_state.query_lock.raw().unlock_shared(); + } + } +} diff --git a/crates/salsa/src/runtime/dependency_graph.rs b/crates/salsa/src/runtime/dependency_graph.rs new file mode 100644 index 000000000000..e41eb280deee --- /dev/null +++ b/crates/salsa/src/runtime/dependency_graph.rs @@ -0,0 +1,251 @@ +//! +use triomphe::Arc; + +use crate::{DatabaseKeyIndex, RuntimeId}; +use parking_lot::{Condvar, MutexGuard}; +use rustc_hash::FxHashMap; +use smallvec::SmallVec; + +use super::{ActiveQuery, WaitResult}; + +type QueryStack = Vec; + +#[derive(Debug, Default)] +pub(super) struct DependencyGraph { + /// A `(K -> V)` pair in this map indicates that the the runtime + /// `K` is blocked on some query executing in the runtime `V`. + /// This encodes a graph that must be acyclic (or else deadlock + /// will result). + edges: FxHashMap, + + /// Encodes the `RuntimeId` that are blocked waiting for the result + /// of a given query. + query_dependents: FxHashMap>, + + /// When a key K completes which had dependent queries Qs blocked on it, + /// it stores its `WaitResult` here. As they wake up, each query Q in Qs will + /// come here to fetch their results. + wait_results: FxHashMap, +} + +#[derive(Debug)] +struct Edge { + blocked_on_id: RuntimeId, + blocked_on_key: DatabaseKeyIndex, + stack: QueryStack, + + /// Signalled whenever a query with dependents completes. + /// Allows those dependents to check if they are ready to unblock. + condvar: Arc, +} + +impl DependencyGraph { + /// True if `from_id` depends on `to_id`. + /// + /// (i.e., there is a path from `from_id` to `to_id` in the graph.) 
+ pub(super) fn depends_on(&mut self, from_id: RuntimeId, to_id: RuntimeId) -> bool { + let mut p = from_id; + while let Some(q) = self.edges.get(&p).map(|edge| edge.blocked_on_id) { + if q == to_id { + return true; + } + + p = q; + } + p == to_id + } + + /// Invokes `closure` with a `&mut ActiveQuery` for each query that participates in the cycle. + /// The cycle runs as follows: + /// + /// 1. The runtime `from_id`, which has the stack `from_stack`, would like to invoke `database_key`... + /// 2. ...but `database_key` is already being executed by `to_id`... + /// 3. ...and `to_id` is transitively dependent on something which is present on `from_stack`. + pub(super) fn for_each_cycle_participant( + &mut self, + from_id: RuntimeId, + from_stack: &mut QueryStack, + database_key: DatabaseKeyIndex, + to_id: RuntimeId, + mut closure: impl FnMut(&mut [ActiveQuery]), + ) { + debug_assert!(self.depends_on(to_id, from_id)); + + // To understand this algorithm, consider this [drawing](https://is.gd/TGLI9v): + // + // database_key = QB2 + // from_id = A + // to_id = B + // from_stack = [QA1, QA2, QA3] + // + // self.edges[B] = { C, QC2, [QB1..QB3] } + // self.edges[C] = { A, QA2, [QC1..QC3] } + // + // The cyclic + // edge we have + // failed to add. + // : + // A : B C + // : + // QA1 v QB1 QC1 + // ┌► QA2 ┌──► QB2 ┌─► QC2 + // │ QA3 ───┘ QB3 ──┘ QC3 ───┐ + // │ │ + // └───────────────────────────────┘ + // + // Final output: [QB2, QB3, QC2, QC3, QA2, QA3] + + let mut id = to_id; + let mut key = database_key; + while id != from_id { + // Looking at the diagram above, the idea is to + // take the edge from `to_id` starting at `key` + // (inclusive) and down to the end. We can then + // load up the next thread (i.e., we start at B/QB2, + // and then load up the dependency on C/QC2). 
+ let edge = self.edges.get_mut(&id).unwrap(); + let prefix = edge.stack.iter_mut().take_while(|p| p.database_key_index != key).count(); + closure(&mut edge.stack[prefix..]); + id = edge.blocked_on_id; + key = edge.blocked_on_key; + } + + // Finally, we copy in the results from `from_stack`. + let prefix = from_stack.iter_mut().take_while(|p| p.database_key_index != key).count(); + closure(&mut from_stack[prefix..]); + } + + /// Unblock each blocked runtime (excluding the current one) if some + /// query executing in that runtime is participating in cycle fallback. + /// + /// Returns a boolean (Current, Others) where: + /// * Current is true if the current runtime has cycle participants + /// with fallback; + /// * Others is true if other runtimes were unblocked. + pub(super) fn maybe_unblock_runtimes_in_cycle( + &mut self, + from_id: RuntimeId, + from_stack: &QueryStack, + database_key: DatabaseKeyIndex, + to_id: RuntimeId, + ) -> (bool, bool) { + // See diagram in `for_each_cycle_participant`. 
+ let mut id = to_id; + let mut key = database_key; + let mut others_unblocked = false; + while id != from_id { + let edge = self.edges.get(&id).unwrap(); + let prefix = edge.stack.iter().take_while(|p| p.database_key_index != key).count(); + let next_id = edge.blocked_on_id; + let next_key = edge.blocked_on_key; + + if let Some(cycle) = edge.stack[prefix..].iter().rev().find_map(|aq| aq.cycle.clone()) { + // Remove `id` from the list of runtimes blocked on `next_key`: + self.query_dependents.get_mut(&next_key).unwrap().retain(|r| *r != id); + + // Unblock runtime so that it can resume execution once lock is released: + self.unblock_runtime(id, WaitResult::Cycle(cycle)); + + others_unblocked = true; + } + + id = next_id; + key = next_key; + } + + let prefix = from_stack.iter().take_while(|p| p.database_key_index != key).count(); + let this_unblocked = from_stack[prefix..].iter().any(|aq| aq.cycle.is_some()); + + (this_unblocked, others_unblocked) + } + + /// Modifies the graph so that `from_id` is blocked + /// on `database_key`, which is being computed by + /// `to_id`. + /// + /// For this to be reasonable, the lock on the + /// results table for `database_key` must be held. + /// This ensures that computing `database_key` doesn't + /// complete before `block_on` executes. + /// + /// Preconditions: + /// * No path from `to_id` to `from_id` + /// (i.e., `me.depends_on(to_id, from_id)` is false) + /// * `held_mutex` is a read lock (or stronger) on `database_key` + pub(super) fn block_on( + mut me: MutexGuard<'_, Self>, + from_id: RuntimeId, + database_key: DatabaseKeyIndex, + to_id: RuntimeId, + from_stack: QueryStack, + query_mutex_guard: QueryMutexGuard, + ) -> (QueryStack, WaitResult) { + let condvar = me.add_edge(from_id, database_key, to_id, from_stack); + + // Release the mutex that prevents `database_key` + // from completing, now that the edge has been added. 
+ drop(query_mutex_guard); + + loop { + if let Some(stack_and_result) = me.wait_results.remove(&from_id) { + debug_assert!(!me.edges.contains_key(&from_id)); + return stack_and_result; + } + condvar.wait(&mut me); + } + } + + /// Helper for `block_on`: performs actual graph modification + /// to add a dependency edge from `from_id` to `to_id`, which is + /// computing `database_key`. + fn add_edge( + &mut self, + from_id: RuntimeId, + database_key: DatabaseKeyIndex, + to_id: RuntimeId, + from_stack: QueryStack, + ) -> Arc { + assert_ne!(from_id, to_id); + debug_assert!(!self.edges.contains_key(&from_id)); + debug_assert!(!self.depends_on(to_id, from_id)); + + let condvar = Arc::new(Condvar::new()); + self.edges.insert( + from_id, + Edge { + blocked_on_id: to_id, + blocked_on_key: database_key, + stack: from_stack, + condvar: condvar.clone(), + }, + ); + self.query_dependents.entry(database_key).or_default().push(from_id); + condvar + } + + /// Invoked when runtime `to_id` completes executing + /// `database_key`. + pub(super) fn unblock_runtimes_blocked_on( + &mut self, + database_key: DatabaseKeyIndex, + wait_result: WaitResult, + ) { + let dependents = self.query_dependents.remove(&database_key).unwrap_or_default(); + + for from_id in dependents { + self.unblock_runtime(from_id, wait_result.clone()); + } + } + + /// Unblock the runtime with the given id with the given wait-result. + /// This will cause it resume execution (though it will have to grab + /// the lock on this data structure first, to recover the wait result). + fn unblock_runtime(&mut self, id: RuntimeId, wait_result: WaitResult) { + let edge = self.edges.remove(&id).expect("not blocked"); + self.wait_results.insert(id, (edge.stack, wait_result)); + + // Now that we have inserted the `wait_results`, + // notify the thread. 
+ edge.condvar.notify_one(); + } +} diff --git a/crates/salsa/src/runtime/local_state.rs b/crates/salsa/src/runtime/local_state.rs new file mode 100644 index 000000000000..91b95dffe78a --- /dev/null +++ b/crates/salsa/src/runtime/local_state.rs @@ -0,0 +1,214 @@ +//! +use tracing::debug; + +use crate::durability::Durability; +use crate::runtime::ActiveQuery; +use crate::runtime::Revision; +use crate::Cycle; +use crate::DatabaseKeyIndex; +use std::cell::RefCell; +use triomphe::Arc; + +/// State that is specific to a single execution thread. +/// +/// Internally, this type uses ref-cells. +/// +/// **Note also that all mutations to the database handle (and hence +/// to the local-state) must be undone during unwinding.** +pub(super) struct LocalState { + /// Vector of active queries. + /// + /// This is normally `Some`, but it is set to `None` + /// while the query is blocked waiting for a result. + /// + /// Unwinding note: pushes onto this vector must be popped -- even + /// during unwinding. + query_stack: RefCell>>, +} + +/// Summarizes "all the inputs that a query used" +#[derive(Debug, Clone)] +pub(crate) struct QueryRevisions { + /// The most revision in which some input changed. + pub(crate) changed_at: Revision, + + /// Minimum durability of the inputs to this query. + pub(crate) durability: Durability, + + /// The inputs that went into our query, if we are tracking them. + pub(crate) inputs: QueryInputs, +} + +/// Every input. +#[derive(Debug, Clone)] +pub(crate) enum QueryInputs { + /// Non-empty set of inputs, fully known + Tracked { inputs: Arc<[DatabaseKeyIndex]> }, + + /// Empty set of inputs, fully known. 
+ NoInputs, + + /// Unknown quantity of inputs + Untracked, +} + +impl Default for LocalState { + fn default() -> Self { + LocalState { query_stack: RefCell::new(Some(Vec::new())) } + } +} + +impl LocalState { + #[inline] + pub(super) fn push_query(&self, database_key_index: DatabaseKeyIndex) -> ActiveQueryGuard<'_> { + let mut query_stack = self.query_stack.borrow_mut(); + let query_stack = query_stack.as_mut().expect("local stack taken"); + query_stack.push(ActiveQuery::new(database_key_index)); + ActiveQueryGuard { local_state: self, database_key_index, push_len: query_stack.len() } + } + + fn with_query_stack(&self, c: impl FnOnce(&mut Vec) -> R) -> R { + c(self.query_stack.borrow_mut().as_mut().expect("query stack taken")) + } + + pub(super) fn query_in_progress(&self) -> bool { + self.with_query_stack(|stack| !stack.is_empty()) + } + + pub(super) fn active_query(&self) -> Option { + self.with_query_stack(|stack| { + stack.last().map(|active_query| active_query.database_key_index) + }) + } + + pub(super) fn report_query_read_and_unwind_if_cycle_resulted( + &self, + input: DatabaseKeyIndex, + durability: Durability, + changed_at: Revision, + ) { + debug!( + "report_query_read_and_unwind_if_cycle_resulted(input={:?}, durability={:?}, changed_at={:?})", + input, durability, changed_at + ); + self.with_query_stack(|stack| { + if let Some(top_query) = stack.last_mut() { + top_query.add_read(input, durability, changed_at); + + // We are a cycle participant: + // + // C0 --> ... --> Ci --> Ci+1 -> ... -> Cn --> C0 + // ^ ^ + // : | + // This edge -----+ | + // | + // | + // N0 + // + // In this case, the value we have just read from `Ci+1` + // is actually the cycle fallback value and not especially + // interesting. We unwind now with `CycleParticipant` to avoid + // executing the rest of our query function. This unwinding + // will be caught and our own fallback value will be used. 
Note that `Ci+1` may have *other* callers
+pub(crate) struct ActiveQueryGuard<'me> { + local_state: &'me LocalState, + push_len: usize, + database_key_index: DatabaseKeyIndex, +} + +impl ActiveQueryGuard<'_> { + fn pop_helper(&self) -> ActiveQuery { + self.local_state.with_query_stack(|stack| { + // Sanity check: pushes and pops should be balanced. + assert_eq!(stack.len(), self.push_len); + debug_assert_eq!(stack.last().unwrap().database_key_index, self.database_key_index); + stack.pop().unwrap() + }) + } + + /// Invoked when the query has successfully completed execution. + pub(super) fn complete(self) -> ActiveQuery { + let query = self.pop_helper(); + std::mem::forget(self); + query + } + + /// Pops an active query from the stack. Returns the [`QueryRevisions`] + /// which summarizes the other queries that were accessed during this + /// query's execution. + #[inline] + pub(crate) fn pop(self) -> QueryRevisions { + // Extract accumulated inputs. + let popped_query = self.complete(); + + // If this frame were a cycle participant, it would have unwound. + assert!(popped_query.cycle.is_none()); + + popped_query.revisions() + } + + /// If the active query is registered as a cycle participant, remove and + /// return that cycle. + pub(crate) fn take_cycle(&self) -> Option { + self.local_state.with_query_stack(|stack| stack.last_mut()?.cycle.take()) + } +} + +impl Drop for ActiveQueryGuard<'_> { + fn drop(&mut self) { + self.pop_helper(); + } +} diff --git a/crates/salsa/src/storage.rs b/crates/salsa/src/storage.rs new file mode 100644 index 000000000000..c0e6416f4a33 --- /dev/null +++ b/crates/salsa/src/storage.rs @@ -0,0 +1,54 @@ +//! +use crate::{plumbing::DatabaseStorageTypes, Runtime}; +use triomphe::Arc; + +/// Stores the cached results and dependency information for all the queries +/// defined on your salsa database. Also embeds a [`Runtime`] which is used to +/// manage query execution. Every database must include a `storage: +/// Storage` field. 
+pub struct Storage { + query_store: Arc, + runtime: Runtime, +} + +impl Default for Storage { + fn default() -> Self { + Self { query_store: Default::default(), runtime: Default::default() } + } +} + +impl Storage { + /// Gives access to the underlying salsa runtime. + pub fn salsa_runtime(&self) -> &Runtime { + &self.runtime + } + + /// Gives access to the underlying salsa runtime. + pub fn salsa_runtime_mut(&mut self) -> &mut Runtime { + &mut self.runtime + } + + /// Access the query storage tables. Not meant to be used directly by end + /// users. + pub fn query_store(&self) -> &DB::DatabaseStorage { + &self.query_store + } + + /// Access the query storage tables. Not meant to be used directly by end + /// users. + pub fn query_store_mut(&mut self) -> (&DB::DatabaseStorage, &mut Runtime) { + (&self.query_store, &mut self.runtime) + } + + /// Returns a "snapshotted" storage, suitable for use in a forked database. + /// This snapshot hold a read-lock on the global state, which means that any + /// attempt to `set` an input will block until the forked runtime is + /// dropped. See `ParallelDatabase::snapshot` for more information. + /// + /// **Warning.** This second handle is intended to be used from a separate + /// thread. Using two database handles from the **same thread** can lead to + /// deadlock. 
+ pub fn snapshot(&self) -> Self { + Storage { query_store: self.query_store.clone(), runtime: self.runtime.snapshot() } + } +} diff --git a/crates/salsa/tests/cycles.rs b/crates/salsa/tests/cycles.rs new file mode 100644 index 000000000000..00ca5332440e --- /dev/null +++ b/crates/salsa/tests/cycles.rs @@ -0,0 +1,493 @@ +use std::panic::UnwindSafe; + +use expect_test::expect; +use salsa::{Durability, ParallelDatabase, Snapshot}; +use test_log::test; + +// Axes: +// +// Threading +// * Intra-thread +// * Cross-thread -- part of cycle is on one thread, part on another +// +// Recovery strategies: +// * Panic +// * Fallback +// * Mixed -- multiple strategies within cycle participants +// +// Across revisions: +// * N/A -- only one revision +// * Present in new revision, not old +// * Present in old revision, not new +// * Present in both revisions +// +// Dependencies +// * Tracked +// * Untracked -- cycle participant(s) contain untracked reads +// +// Layers +// * Direct -- cycle participant is directly invoked from test +// * Indirect -- invoked a query that invokes the cycle +// +// +// | Thread | Recovery | Old, New | Dep style | Layers | Test Name | +// | ------ | -------- | -------- | --------- | ------ | --------- | +// | Intra | Panic | N/A | Tracked | direct | cycle_memoized | +// | Intra | Panic | N/A | Untracked | direct | cycle_volatile | +// | Intra | Fallback | N/A | Tracked | direct | cycle_cycle | +// | Intra | Fallback | N/A | Tracked | indirect | inner_cycle | +// | Intra | Fallback | Both | Tracked | direct | cycle_revalidate | +// | Intra | Fallback | New | Tracked | direct | cycle_appears | +// | Intra | Fallback | Old | Tracked | direct | cycle_disappears | +// | Intra | Fallback | Old | Tracked | direct | cycle_disappears_durability | +// | Intra | Mixed | N/A | Tracked | direct | cycle_mixed_1 | +// | Intra | Mixed | N/A | Tracked | direct | cycle_mixed_2 | +// | Cross | Fallback | N/A | Tracked | both | parallel/cycles.rs: 
recover_parallel_cycle | +// | Cross | Panic | N/A | Tracked | both | parallel/cycles.rs: panic_parallel_cycle | + +#[derive(PartialEq, Eq, Hash, Clone, Debug)] +struct Error { + cycle: Vec, +} + +#[salsa::database(GroupStruct)] +#[derive(Default)] +struct DatabaseImpl { + storage: salsa::Storage, +} + +impl salsa::Database for DatabaseImpl {} + +impl ParallelDatabase for DatabaseImpl { + fn snapshot(&self) -> Snapshot { + Snapshot::new(DatabaseImpl { storage: self.storage.snapshot() }) + } +} + +/// The queries A, B, and C in `Database` can be configured +/// to invoke one another in arbitrary ways using this +/// enum. +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +enum CycleQuery { + None, + A, + B, + C, + AthenC, +} + +#[salsa::query_group(GroupStruct)] +trait Database: salsa::Database { + // `a` and `b` depend on each other and form a cycle + fn memoized_a(&self) -> (); + fn memoized_b(&self) -> (); + fn volatile_a(&self) -> (); + fn volatile_b(&self) -> (); + + #[salsa::input] + fn a_invokes(&self) -> CycleQuery; + + #[salsa::input] + fn b_invokes(&self) -> CycleQuery; + + #[salsa::input] + fn c_invokes(&self) -> CycleQuery; + + #[salsa::cycle(recover_a)] + fn cycle_a(&self) -> Result<(), Error>; + + #[salsa::cycle(recover_b)] + fn cycle_b(&self) -> Result<(), Error>; + + fn cycle_c(&self) -> Result<(), Error>; +} + +fn recover_a(db: &dyn Database, cycle: &salsa::Cycle) -> Result<(), Error> { + Err(Error { cycle: cycle.all_participants(db) }) +} + +fn recover_b(db: &dyn Database, cycle: &salsa::Cycle) -> Result<(), Error> { + Err(Error { cycle: cycle.all_participants(db) }) +} + +fn memoized_a(db: &dyn Database) { + db.memoized_b() +} + +fn memoized_b(db: &dyn Database) { + db.memoized_a() +} + +fn volatile_a(db: &dyn Database) { + db.salsa_runtime().report_untracked_read(); + db.volatile_b() +} + +fn volatile_b(db: &dyn Database) { + db.salsa_runtime().report_untracked_read(); + db.volatile_a() +} + +impl CycleQuery { + fn invoke(self, db: &dyn Database) -> 
Result<(), Error> { + match self { + CycleQuery::A => db.cycle_a(), + CycleQuery::B => db.cycle_b(), + CycleQuery::C => db.cycle_c(), + CycleQuery::AthenC => { + let _ = db.cycle_a(); + db.cycle_c() + } + CycleQuery::None => Ok(()), + } + } +} + +fn cycle_a(db: &dyn Database) -> Result<(), Error> { + db.a_invokes().invoke(db) +} + +fn cycle_b(db: &dyn Database) -> Result<(), Error> { + db.b_invokes().invoke(db) +} + +fn cycle_c(db: &dyn Database) -> Result<(), Error> { + db.c_invokes().invoke(db) +} + +#[track_caller] +fn extract_cycle(f: impl FnOnce() + UnwindSafe) -> salsa::Cycle { + let v = std::panic::catch_unwind(f); + if let Err(d) = &v { + if let Some(cycle) = d.downcast_ref::() { + return cycle.clone(); + } + } + panic!("unexpected value: {:?}", v) +} + +#[test] +fn cycle_memoized() { + let db = DatabaseImpl::default(); + let cycle = extract_cycle(|| db.memoized_a()); + expect![[r#" + [ + "memoized_a(())", + "memoized_b(())", + ] + "#]] + .assert_debug_eq(&cycle.unexpected_participants(&db)); +} + +#[test] +fn cycle_volatile() { + let db = DatabaseImpl::default(); + let cycle = extract_cycle(|| db.volatile_a()); + expect![[r#" + [ + "volatile_a(())", + "volatile_b(())", + ] + "#]] + .assert_debug_eq(&cycle.unexpected_participants(&db)); +} + +#[test] +fn cycle_cycle() { + let mut query = DatabaseImpl::default(); + + // A --> B + // ^ | + // +-----+ + + query.set_a_invokes(CycleQuery::B); + query.set_b_invokes(CycleQuery::A); + + assert!(query.cycle_a().is_err()); +} + +#[test] +fn inner_cycle() { + let mut query = DatabaseImpl::default(); + + // A --> B <-- C + // ^ | + // +-----+ + + query.set_a_invokes(CycleQuery::B); + query.set_b_invokes(CycleQuery::A); + query.set_c_invokes(CycleQuery::B); + + let err = query.cycle_c(); + assert!(err.is_err()); + let cycle = err.unwrap_err().cycle; + expect![[r#" + [ + "cycle_a(())", + "cycle_b(())", + ] + "#]] + .assert_debug_eq(&cycle); +} + +#[test] +fn cycle_revalidate() { + let mut db = DatabaseImpl::default(); + 
+ // A --> B + // ^ | + // +-----+ + db.set_a_invokes(CycleQuery::B); + db.set_b_invokes(CycleQuery::A); + + assert!(db.cycle_a().is_err()); + db.set_b_invokes(CycleQuery::A); // same value as default + assert!(db.cycle_a().is_err()); +} + +#[test] +fn cycle_revalidate_unchanged_twice() { + let mut db = DatabaseImpl::default(); + + // A --> B + // ^ | + // +-----+ + db.set_a_invokes(CycleQuery::B); + db.set_b_invokes(CycleQuery::A); + + assert!(db.cycle_a().is_err()); + db.set_c_invokes(CycleQuery::A); // force new revisi5on + + // on this run + expect![[r#" + Err( + Error { + cycle: [ + "cycle_a(())", + "cycle_b(())", + ], + }, + ) + "#]] + .assert_debug_eq(&db.cycle_a()); +} + +#[test] +fn cycle_appears() { + let mut db = DatabaseImpl::default(); + + // A --> B + db.set_a_invokes(CycleQuery::B); + db.set_b_invokes(CycleQuery::None); + assert!(db.cycle_a().is_ok()); + + // A --> B + // ^ | + // +-----+ + db.set_b_invokes(CycleQuery::A); + tracing::debug!("Set Cycle Leaf"); + assert!(db.cycle_a().is_err()); +} + +#[test] +fn cycle_disappears() { + let mut db = DatabaseImpl::default(); + + // A --> B + // ^ | + // +-----+ + db.set_a_invokes(CycleQuery::B); + db.set_b_invokes(CycleQuery::A); + assert!(db.cycle_a().is_err()); + + // A --> B + db.set_b_invokes(CycleQuery::None); + assert!(db.cycle_a().is_ok()); +} + +/// A variant on `cycle_disappears` in which the values of +/// `a_invokes` and `b_invokes` are set with durability values. +/// If we are not careful, this could cause us to overlook +/// the fact that the cycle will no longer occur. +#[test] +fn cycle_disappears_durability() { + let mut db = DatabaseImpl::default(); + db.set_a_invokes_with_durability(CycleQuery::B, Durability::LOW); + db.set_b_invokes_with_durability(CycleQuery::A, Durability::HIGH); + + let res = db.cycle_a(); + assert!(res.is_err()); + + // At this point, `a` read `LOW` input, and `b` read `HIGH` input. 
However, + // because `b` participates in the same cycle as `a`, its final durability + // should be `LOW`. + // + // Check that setting a `LOW` input causes us to re-execute `b` query, and + // observe that the cycle goes away. + db.set_a_invokes_with_durability(CycleQuery::None, Durability::LOW); + + let res = db.cycle_b(); + assert!(res.is_ok()); +} + +#[test] +fn cycle_mixed_1() { + let mut db = DatabaseImpl::default(); + // A --> B <-- C + // | ^ + // +-----+ + db.set_a_invokes(CycleQuery::B); + db.set_b_invokes(CycleQuery::C); + db.set_c_invokes(CycleQuery::B); + + let u = db.cycle_c(); + expect![[r#" + Err( + Error { + cycle: [ + "cycle_b(())", + "cycle_c(())", + ], + }, + ) + "#]] + .assert_debug_eq(&u); +} + +#[test] +fn cycle_mixed_2() { + let mut db = DatabaseImpl::default(); + + // Configuration: + // + // A --> B --> C + // ^ | + // +-----------+ + db.set_a_invokes(CycleQuery::B); + db.set_b_invokes(CycleQuery::C); + db.set_c_invokes(CycleQuery::A); + + let u = db.cycle_a(); + expect![[r#" + Err( + Error { + cycle: [ + "cycle_a(())", + "cycle_b(())", + "cycle_c(())", + ], + }, + ) + "#]] + .assert_debug_eq(&u); +} + +#[test] +fn cycle_deterministic_order() { + // No matter whether we start from A or B, we get the same set of participants: + let db = || { + let mut db = DatabaseImpl::default(); + // A --> B + // ^ | + // +-----+ + db.set_a_invokes(CycleQuery::B); + db.set_b_invokes(CycleQuery::A); + db + }; + let a = db().cycle_a(); + let b = db().cycle_b(); + expect![[r#" + ( + Err( + Error { + cycle: [ + "cycle_a(())", + "cycle_b(())", + ], + }, + ), + Err( + Error { + cycle: [ + "cycle_a(())", + "cycle_b(())", + ], + }, + ), + ) + "#]] + .assert_debug_eq(&(a, b)); +} + +#[test] +fn cycle_multiple() { + // No matter whether we start from A or B, we get the same set of participants: + let mut db = DatabaseImpl::default(); + + // Configuration: + // + // A --> B <-- C + // ^ | ^ + // +-----+ | + // | | + // +-----+ + // + // Here, conceptually, B 
encounters a cycle with A and then + // recovers. + db.set_a_invokes(CycleQuery::B); + db.set_b_invokes(CycleQuery::AthenC); + db.set_c_invokes(CycleQuery::B); + + let c = db.cycle_c(); + let b = db.cycle_b(); + let a = db.cycle_a(); + expect![[r#" + ( + Err( + Error { + cycle: [ + "cycle_a(())", + "cycle_b(())", + ], + }, + ), + Err( + Error { + cycle: [ + "cycle_a(())", + "cycle_b(())", + ], + }, + ), + Err( + Error { + cycle: [ + "cycle_a(())", + "cycle_b(())", + ], + }, + ), + ) + "#]] + .assert_debug_eq(&(a, b, c)); +} + +#[test] +fn cycle_recovery_set_but_not_participating() { + let mut db = DatabaseImpl::default(); + + // A --> C -+ + // ^ | + // +--+ + db.set_a_invokes(CycleQuery::C); + db.set_c_invokes(CycleQuery::C); + + // Here we expect C to panic and A not to recover: + let r = extract_cycle(|| drop(db.cycle_a())); + expect![[r#" + [ + "cycle_c(())", + ] + "#]] + .assert_debug_eq(&r.all_participants(&db)); +} diff --git a/crates/salsa/tests/dyn_trait.rs b/crates/salsa/tests/dyn_trait.rs new file mode 100644 index 000000000000..09ebc5c4ce4d --- /dev/null +++ b/crates/salsa/tests/dyn_trait.rs @@ -0,0 +1,28 @@ +//! Test that you can implement a query using a `dyn Trait` setup. 
+ +#[salsa::database(DynTraitStorage)] +#[derive(Default)] +struct DynTraitDatabase { + storage: salsa::Storage, +} + +impl salsa::Database for DynTraitDatabase {} + +#[salsa::query_group(DynTraitStorage)] +trait DynTrait { + #[salsa::input] + fn input(&self, x: u32) -> u32; + + fn output(&self, x: u32) -> u32; +} + +fn output(db: &dyn DynTrait, x: u32) -> u32 { + db.input(x) * 2 +} + +#[test] +fn dyn_trait() { + let mut query = DynTraitDatabase::default(); + query.set_input(22, 23); + assert_eq!(query.output(22), 46); +} diff --git a/crates/salsa/tests/incremental/constants.rs b/crates/salsa/tests/incremental/constants.rs new file mode 100644 index 000000000000..ea0eb8197860 --- /dev/null +++ b/crates/salsa/tests/incremental/constants.rs @@ -0,0 +1,145 @@ +use crate::implementation::{TestContext, TestContextImpl}; +use salsa::debug::DebugQueryTable; +use salsa::Durability; + +#[salsa::query_group(Constants)] +pub(crate) trait ConstantsDatabase: TestContext { + #[salsa::input] + fn input(&self, key: char) -> usize; + + fn add(&self, key1: char, key2: char) -> usize; + + fn add3(&self, key1: char, key2: char, key3: char) -> usize; +} + +fn add(db: &dyn ConstantsDatabase, key1: char, key2: char) -> usize { + db.log().add(format!("add({}, {})", key1, key2)); + db.input(key1) + db.input(key2) +} + +fn add3(db: &dyn ConstantsDatabase, key1: char, key2: char, key3: char) -> usize { + db.log().add(format!("add3({}, {}, {})", key1, key2, key3)); + db.add(key1, key2) + db.input(key3) +} + +// Test we can assign a constant and things will be correctly +// recomputed afterwards. 
+#[test] +fn invalidate_constant() { + let db = &mut TestContextImpl::default(); + db.set_input_with_durability('a', 44, Durability::HIGH); + db.set_input_with_durability('b', 22, Durability::HIGH); + assert_eq!(db.add('a', 'b'), 66); + + db.set_input_with_durability('a', 66, Durability::HIGH); + assert_eq!(db.add('a', 'b'), 88); +} + +#[test] +fn invalidate_constant_1() { + let db = &mut TestContextImpl::default(); + + // Not constant: + db.set_input('a', 44); + assert_eq!(db.add('a', 'a'), 88); + + // Becomes constant: + db.set_input_with_durability('a', 44, Durability::HIGH); + assert_eq!(db.add('a', 'a'), 88); + + // Invalidates: + db.set_input_with_durability('a', 33, Durability::HIGH); + assert_eq!(db.add('a', 'a'), 66); +} + +// Test cases where we assign same value to 'a' after declaring it a +// constant. +#[test] +fn set_after_constant_same_value() { + let db = &mut TestContextImpl::default(); + db.set_input_with_durability('a', 44, Durability::HIGH); + db.set_input_with_durability('a', 44, Durability::HIGH); + db.set_input('a', 44); +} + +#[test] +fn not_constant() { + let mut db = TestContextImpl::default(); + + db.set_input('a', 22); + db.set_input('b', 44); + assert_eq!(db.add('a', 'b'), 66); + assert_eq!(Durability::LOW, AddQuery.in_db(&db).durability(('a', 'b'))); +} + +#[test] +fn durability() { + let mut db = TestContextImpl::default(); + + db.set_input_with_durability('a', 22, Durability::HIGH); + db.set_input_with_durability('b', 44, Durability::HIGH); + assert_eq!(db.add('a', 'b'), 66); + assert_eq!(Durability::HIGH, AddQuery.in_db(&db).durability(('a', 'b'))); +} + +#[test] +fn mixed_constant() { + let mut db = TestContextImpl::default(); + + db.set_input_with_durability('a', 22, Durability::HIGH); + db.set_input('b', 44); + assert_eq!(db.add('a', 'b'), 66); + assert_eq!(Durability::LOW, AddQuery.in_db(&db).durability(('a', 'b'))); +} + +#[test] +fn becomes_constant_with_change() { + let mut db = TestContextImpl::default(); + + 
db.set_input('a', 22); + db.set_input('b', 44); + assert_eq!(db.add('a', 'b'), 66); + assert_eq!(Durability::LOW, AddQuery.in_db(&db).durability(('a', 'b'))); + + db.set_input_with_durability('a', 23, Durability::HIGH); + assert_eq!(db.add('a', 'b'), 67); + assert_eq!(Durability::LOW, AddQuery.in_db(&db).durability(('a', 'b'))); + + db.set_input_with_durability('b', 45, Durability::HIGH); + assert_eq!(db.add('a', 'b'), 68); + assert_eq!(Durability::HIGH, AddQuery.in_db(&db).durability(('a', 'b'))); + + db.set_input_with_durability('b', 45, Durability::MEDIUM); + assert_eq!(db.add('a', 'b'), 68); + assert_eq!(Durability::MEDIUM, AddQuery.in_db(&db).durability(('a', 'b'))); +} + +// Test a subtle case in which an input changes from constant to +// non-constant, but its value doesn't change. If we're not careful, +// this can cause us to incorrectly consider derived values as still +// being constant. +#[test] +fn constant_to_non_constant() { + let mut db = TestContextImpl::default(); + + db.set_input_with_durability('a', 11, Durability::HIGH); + db.set_input_with_durability('b', 22, Durability::HIGH); + db.set_input_with_durability('c', 33, Durability::HIGH); + + // Here, `add3` invokes `add`, which yields 33. Both calls are + // constant. + assert_eq!(db.add3('a', 'b', 'c'), 66); + + db.set_input('a', 11); + + // Here, `add3` invokes `add`, which *still* yields 33, but which + // is no longer constant. Since value didn't change, we might + // preserve `add3` unchanged, not noticing that it is no longer + // constant. + assert_eq!(db.add3('a', 'b', 'c'), 66); + + // In that case, we would not get the correct result here, when + // 'a' changes *again*. 
+ db.set_input('a', 22); + assert_eq!(db.add3('a', 'b', 'c'), 77); +} diff --git a/crates/salsa/tests/incremental/counter.rs b/crates/salsa/tests/incremental/counter.rs new file mode 100644 index 000000000000..c04857e24c9e --- /dev/null +++ b/crates/salsa/tests/incremental/counter.rs @@ -0,0 +1,14 @@ +use std::cell::Cell; + +#[derive(Default)] +pub(crate) struct Counter { + value: Cell, +} + +impl Counter { + pub(crate) fn increment(&self) -> usize { + let v = self.value.get(); + self.value.set(v + 1); + v + } +} diff --git a/crates/salsa/tests/incremental/implementation.rs b/crates/salsa/tests/incremental/implementation.rs new file mode 100644 index 000000000000..19752bba005a --- /dev/null +++ b/crates/salsa/tests/incremental/implementation.rs @@ -0,0 +1,59 @@ +use crate::constants; +use crate::counter::Counter; +use crate::log::Log; +use crate::memoized_dep_inputs; +use crate::memoized_inputs; +use crate::memoized_volatile; + +pub(crate) trait TestContext: salsa::Database { + fn clock(&self) -> &Counter; + fn log(&self) -> &Log; +} + +#[salsa::database( + constants::Constants, + memoized_dep_inputs::MemoizedDepInputs, + memoized_inputs::MemoizedInputs, + memoized_volatile::MemoizedVolatile +)] +#[derive(Default)] +pub(crate) struct TestContextImpl { + storage: salsa::Storage, + clock: Counter, + log: Log, +} + +impl TestContextImpl { + #[track_caller] + pub(crate) fn assert_log(&self, expected_log: &[&str]) { + let expected_text = &format!("{:#?}", expected_log); + let actual_text = &format!("{:#?}", self.log().take()); + + if expected_text == actual_text { + return; + } + + #[allow(clippy::print_stdout)] + for diff in dissimilar::diff(expected_text, actual_text) { + match diff { + dissimilar::Chunk::Delete(l) => println!("-{}", l), + dissimilar::Chunk::Equal(l) => println!(" {}", l), + dissimilar::Chunk::Insert(r) => println!("+{}", r), + } + } + + panic!("incorrect log results"); + } +} + +impl TestContext for TestContextImpl { + fn clock(&self) -> &Counter { + 
&self.clock + } + + fn log(&self) -> &Log { + &self.log + } +} + +impl salsa::Database for TestContextImpl {} diff --git a/crates/salsa/tests/incremental/log.rs b/crates/salsa/tests/incremental/log.rs new file mode 100644 index 000000000000..1ee57fe667d5 --- /dev/null +++ b/crates/salsa/tests/incremental/log.rs @@ -0,0 +1,16 @@ +use std::cell::RefCell; + +#[derive(Default)] +pub(crate) struct Log { + data: RefCell>, +} + +impl Log { + pub(crate) fn add(&self, text: impl Into) { + self.data.borrow_mut().push(text.into()); + } + + pub(crate) fn take(&self) -> Vec { + self.data.take() + } +} diff --git a/crates/salsa/tests/incremental/main.rs b/crates/salsa/tests/incremental/main.rs new file mode 100644 index 000000000000..bcd13c75f715 --- /dev/null +++ b/crates/salsa/tests/incremental/main.rs @@ -0,0 +1,9 @@ +mod constants; +mod counter; +mod implementation; +mod log; +mod memoized_dep_inputs; +mod memoized_inputs; +mod memoized_volatile; + +fn main() {} diff --git a/crates/salsa/tests/incremental/memoized_dep_inputs.rs b/crates/salsa/tests/incremental/memoized_dep_inputs.rs new file mode 100644 index 000000000000..4ea33e0c1a03 --- /dev/null +++ b/crates/salsa/tests/incremental/memoized_dep_inputs.rs @@ -0,0 +1,60 @@ +use crate::implementation::{TestContext, TestContextImpl}; + +#[salsa::query_group(MemoizedDepInputs)] +pub(crate) trait MemoizedDepInputsContext: TestContext { + fn dep_memoized2(&self) -> usize; + fn dep_memoized1(&self) -> usize; + #[salsa::dependencies] + fn dep_derived1(&self) -> usize; + #[salsa::input] + fn dep_input1(&self) -> usize; + #[salsa::input] + fn dep_input2(&self) -> usize; +} + +fn dep_memoized2(db: &dyn MemoizedDepInputsContext) -> usize { + db.log().add("Memoized2 invoked"); + db.dep_memoized1() +} + +fn dep_memoized1(db: &dyn MemoizedDepInputsContext) -> usize { + db.log().add("Memoized1 invoked"); + db.dep_derived1() * 2 +} + +fn dep_derived1(db: &dyn MemoizedDepInputsContext) -> usize { + db.log().add("Derived1 invoked"); + 
db.dep_input1() / 2 +} + +#[test] +fn revalidate() { + let db = &mut TestContextImpl::default(); + + db.set_dep_input1(0); + + // Initial run starts from Memoized2: + let v = db.dep_memoized2(); + assert_eq!(v, 0); + db.assert_log(&["Memoized2 invoked", "Memoized1 invoked", "Derived1 invoked"]); + + // After that, we first try to validate Memoized1 but wind up + // running Memoized2. Note that we don't try to validate + // Derived1, so it is invoked by Memoized1. + db.set_dep_input1(44); + let v = db.dep_memoized2(); + assert_eq!(v, 44); + db.assert_log(&["Memoized1 invoked", "Derived1 invoked", "Memoized2 invoked"]); + + // Here validation of Memoized1 succeeds so Memoized2 never runs. + db.set_dep_input1(45); + let v = db.dep_memoized2(); + assert_eq!(v, 44); + db.assert_log(&["Memoized1 invoked", "Derived1 invoked"]); + + // Here, a change to input2 doesn't affect us, so nothing runs. + db.set_dep_input2(45); + let v = db.dep_memoized2(); + assert_eq!(v, 44); + db.assert_log(&[]); +} diff --git a/crates/salsa/tests/incremental/memoized_inputs.rs b/crates/salsa/tests/incremental/memoized_inputs.rs new file mode 100644 index 000000000000..53d2ace8871d --- /dev/null +++ b/crates/salsa/tests/incremental/memoized_inputs.rs @@ -0,0 +1,76 @@ +use crate::implementation::{TestContext, TestContextImpl}; + +#[salsa::query_group(MemoizedInputs)] +pub(crate) trait MemoizedInputsContext: TestContext { + fn max(&self) -> usize; + #[salsa::input] + fn input1(&self) -> usize; + #[salsa::input] + fn input2(&self) -> usize; +} + +fn max(db: &dyn MemoizedInputsContext) -> usize { + db.log().add("Max invoked"); + std::cmp::max(db.input1(), db.input2()) +} + +#[test] +fn revalidate() { + let db = &mut TestContextImpl::default(); + + db.set_input1(0); + db.set_input2(0); + + let v = db.max(); + assert_eq!(v, 0); + db.assert_log(&["Max invoked"]); + + let v = db.max(); + assert_eq!(v, 0); + db.assert_log(&[]); + + db.set_input1(44); + db.assert_log(&[]); + + let v = db.max(); + 
assert_eq!(v, 44); + db.assert_log(&["Max invoked"]); + + let v = db.max(); + assert_eq!(v, 44); + db.assert_log(&[]); + + db.set_input1(44); + db.assert_log(&[]); + db.set_input2(66); + db.assert_log(&[]); + db.set_input1(64); + db.assert_log(&[]); + + let v = db.max(); + assert_eq!(v, 66); + db.assert_log(&["Max invoked"]); + + let v = db.max(); + assert_eq!(v, 66); + db.assert_log(&[]); +} + +/// Test that invoking `set` on an input with the same value still +/// triggers a new revision. +#[test] +fn set_after_no_change() { + let db = &mut TestContextImpl::default(); + + db.set_input2(0); + + db.set_input1(44); + let v = db.max(); + assert_eq!(v, 44); + db.assert_log(&["Max invoked"]); + + db.set_input1(44); + let v = db.max(); + assert_eq!(v, 44); + db.assert_log(&["Max invoked"]); +} diff --git a/crates/salsa/tests/incremental/memoized_volatile.rs b/crates/salsa/tests/incremental/memoized_volatile.rs new file mode 100644 index 000000000000..6dc5030063b7 --- /dev/null +++ b/crates/salsa/tests/incremental/memoized_volatile.rs @@ -0,0 +1,77 @@ +use crate::implementation::{TestContext, TestContextImpl}; +use salsa::{Database, Durability}; + +#[salsa::query_group(MemoizedVolatile)] +pub(crate) trait MemoizedVolatileContext: TestContext { + // Queries for testing a "volatile" value wrapped by + // memoization. 
+ fn memoized2(&self) -> usize; + fn memoized1(&self) -> usize; + fn volatile(&self) -> usize; +} + +fn memoized2(db: &dyn MemoizedVolatileContext) -> usize { + db.log().add("Memoized2 invoked"); + db.memoized1() +} + +fn memoized1(db: &dyn MemoizedVolatileContext) -> usize { + db.log().add("Memoized1 invoked"); + let v = db.volatile(); + v / 2 +} + +fn volatile(db: &dyn MemoizedVolatileContext) -> usize { + db.log().add("Volatile invoked"); + db.salsa_runtime().report_untracked_read(); + db.clock().increment() +} + +#[test] +fn volatile_x2() { + let query = TestContextImpl::default(); + + // Invoking volatile twice doesn't execute twice, because volatile + // queries are memoized by default. + query.volatile(); + query.volatile(); + query.assert_log(&["Volatile invoked"]); +} + +/// Test that: +/// +/// - On the first run of R0, we recompute everything. +/// - On the second run of R1, we recompute nothing. +/// - On the first run of R1, we recompute Memoized1 but not Memoized2 (since Memoized1 result +/// did not change). +/// - On the second run of R1, we recompute nothing. +/// - On the first run of R2, we recompute everything (since Memoized1 result *did* change). +#[test] +fn revalidate() { + let mut query = TestContextImpl::default(); + + query.memoized2(); + query.assert_log(&["Memoized2 invoked", "Memoized1 invoked", "Volatile invoked"]); + + query.memoized2(); + query.assert_log(&[]); + + // Second generation: volatile will change (to 1) but memoized1 + // will not (still 0, as 1/2 = 0) + query.salsa_runtime_mut().synthetic_write(Durability::LOW); + query.memoized2(); + query.assert_log(&["Volatile invoked", "Memoized1 invoked"]); + query.memoized2(); + query.assert_log(&[]); + + // Third generation: volatile will change (to 2) and memoized1 + // will too (to 1). Therefore, after validating that Memoized1 + // changed, we now invoke Memoized2. 
+ query.salsa_runtime_mut().synthetic_write(Durability::LOW); + + query.memoized2(); + query.assert_log(&["Volatile invoked", "Memoized1 invoked", "Memoized2 invoked"]); + + query.memoized2(); + query.assert_log(&[]); +} diff --git a/crates/salsa/tests/interned.rs b/crates/salsa/tests/interned.rs new file mode 100644 index 000000000000..d097e41cfd63 --- /dev/null +++ b/crates/salsa/tests/interned.rs @@ -0,0 +1,90 @@ +//! Test that you can implement a query using a `dyn Trait` setup. + +use salsa::InternId; + +#[salsa::database(InternStorage)] +#[derive(Default)] +struct Database { + storage: salsa::Storage, +} + +impl salsa::Database for Database {} + +impl salsa::ParallelDatabase for Database { + fn snapshot(&self) -> salsa::Snapshot { + salsa::Snapshot::new(Database { storage: self.storage.snapshot() }) + } +} + +#[salsa::query_group(InternStorage)] +trait Intern { + #[salsa::interned] + fn intern1(&self, x: String) -> InternId; + + #[salsa::interned] + fn intern2(&self, x: String, y: String) -> InternId; + + #[salsa::interned] + fn intern_key(&self, x: String) -> InternKey; +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub struct InternKey(InternId); + +impl salsa::InternKey for InternKey { + fn from_intern_id(v: InternId) -> Self { + InternKey(v) + } + + fn as_intern_id(&self) -> InternId { + self.0 + } +} + +#[test] +fn test_intern1() { + let db = Database::default(); + let foo0 = db.intern1("foo".to_owned()); + let bar0 = db.intern1("bar".to_owned()); + let foo1 = db.intern1("foo".to_owned()); + let bar1 = db.intern1("bar".to_owned()); + + assert_eq!(foo0, foo1); + assert_eq!(bar0, bar1); + assert_ne!(foo0, bar0); + + assert_eq!("foo".to_owned(), db.lookup_intern1(foo0)); + assert_eq!("bar".to_owned(), db.lookup_intern1(bar0)); +} + +#[test] +fn test_intern2() { + let db = Database::default(); + let foo0 = db.intern2("x".to_owned(), "foo".to_owned()); + let bar0 = db.intern2("x".to_owned(), "bar".to_owned()); + let foo1 = 
db.intern2("x".to_owned(), "foo".to_owned()); + let bar1 = db.intern2("x".to_owned(), "bar".to_owned()); + + assert_eq!(foo0, foo1); + assert_eq!(bar0, bar1); + assert_ne!(foo0, bar0); + + assert_eq!(("x".to_owned(), "foo".to_owned()), db.lookup_intern2(foo0)); + assert_eq!(("x".to_owned(), "bar".to_owned()), db.lookup_intern2(bar0)); +} + +#[test] +fn test_intern_key() { + let db = Database::default(); + let foo0 = db.intern_key("foo".to_owned()); + let bar0 = db.intern_key("bar".to_owned()); + let foo1 = db.intern_key("foo".to_owned()); + let bar1 = db.intern_key("bar".to_owned()); + + assert_eq!(foo0, foo1); + assert_eq!(bar0, bar1); + assert_ne!(foo0, bar0); + + assert_eq!("foo".to_owned(), db.lookup_intern_key(foo0)); + assert_eq!("bar".to_owned(), db.lookup_intern_key(bar0)); +} diff --git a/crates/salsa/tests/lru.rs b/crates/salsa/tests/lru.rs new file mode 100644 index 000000000000..3da8519b0818 --- /dev/null +++ b/crates/salsa/tests/lru.rs @@ -0,0 +1,102 @@ +//! Test setting LRU actually limits the number of things in the database; +use std::sync::{ + atomic::{AtomicUsize, Ordering}, + Arc, +}; + +#[derive(Debug, PartialEq, Eq)] +struct HotPotato(u32); + +static N_POTATOES: AtomicUsize = AtomicUsize::new(0); + +impl HotPotato { + fn new(id: u32) -> HotPotato { + N_POTATOES.fetch_add(1, Ordering::SeqCst); + HotPotato(id) + } +} + +impl Drop for HotPotato { + fn drop(&mut self) { + N_POTATOES.fetch_sub(1, Ordering::SeqCst); + } +} + +#[salsa::query_group(QueryGroupStorage)] +trait QueryGroup: salsa::Database { + fn get(&self, x: u32) -> Arc; + fn get_volatile(&self, x: u32) -> usize; +} + +fn get(_db: &dyn QueryGroup, x: u32) -> Arc { + Arc::new(HotPotato::new(x)) +} + +fn get_volatile(db: &dyn QueryGroup, _x: u32) -> usize { + static COUNTER: AtomicUsize = AtomicUsize::new(0); + db.salsa_runtime().report_untracked_read(); + COUNTER.fetch_add(1, Ordering::SeqCst) +} + +#[salsa::database(QueryGroupStorage)] +#[derive(Default)] +struct Database { + storage: 
salsa::Storage, +} + +impl salsa::Database for Database {} + +#[test] +fn lru_works() { + let mut db = Database::default(); + GetQuery.in_db_mut(&mut db).set_lru_capacity(32); + assert_eq!(N_POTATOES.load(Ordering::SeqCst), 0); + + for i in 0..128u32 { + let p = db.get(i); + assert_eq!(p.0, i) + } + assert_eq!(N_POTATOES.load(Ordering::SeqCst), 32); + + for i in 0..128u32 { + let p = db.get(i); + assert_eq!(p.0, i) + } + assert_eq!(N_POTATOES.load(Ordering::SeqCst), 32); + + GetQuery.in_db_mut(&mut db).set_lru_capacity(32); + assert_eq!(N_POTATOES.load(Ordering::SeqCst), 32); + + GetQuery.in_db_mut(&mut db).set_lru_capacity(64); + assert_eq!(N_POTATOES.load(Ordering::SeqCst), 32); + for i in 0..128u32 { + let p = db.get(i); + assert_eq!(p.0, i) + } + assert_eq!(N_POTATOES.load(Ordering::SeqCst), 64); + + // Special case: setting capacity to zero disables LRU + GetQuery.in_db_mut(&mut db).set_lru_capacity(0); + assert_eq!(N_POTATOES.load(Ordering::SeqCst), 64); + for i in 0..128u32 { + let p = db.get(i); + assert_eq!(p.0, i) + } + assert_eq!(N_POTATOES.load(Ordering::SeqCst), 128); + + drop(db); + assert_eq!(N_POTATOES.load(Ordering::SeqCst), 0); +} + +#[test] +fn lru_doesnt_break_volatile_queries() { + let mut db = Database::default(); + GetVolatileQuery.in_db_mut(&mut db).set_lru_capacity(32); + // Here, we check that we execute each volatile query at most once, despite + // LRU. That does mean that we have more values in DB than the LRU capacity, + // but it's much better than inconsistent results from volatile queries! 
+ for i in (0..3).flat_map(|_| 0..128usize) { + let x = db.get_volatile(i as u32); + assert_eq!(x, i) + } +} diff --git a/crates/salsa/tests/macros.rs b/crates/salsa/tests/macros.rs new file mode 100644 index 000000000000..3d818e53c8e9 --- /dev/null +++ b/crates/salsa/tests/macros.rs @@ -0,0 +1,11 @@ +#[salsa::query_group(MyStruct)] +trait MyDatabase: salsa::Database { + #[salsa::invoke(another_module::another_name)] + fn my_query(&self, key: ()) -> (); +} + +mod another_module { + pub(crate) fn another_name(_: &dyn crate::MyDatabase, (): ()) {} +} + +fn main() {} diff --git a/crates/salsa/tests/no_send_sync.rs b/crates/salsa/tests/no_send_sync.rs new file mode 100644 index 000000000000..2a25c437c3e5 --- /dev/null +++ b/crates/salsa/tests/no_send_sync.rs @@ -0,0 +1,31 @@ +use std::rc::Rc; + +#[salsa::query_group(NoSendSyncStorage)] +trait NoSendSyncDatabase: salsa::Database { + fn no_send_sync_value(&self, key: bool) -> Rc; + fn no_send_sync_key(&self, key: Rc) -> bool; +} + +fn no_send_sync_value(_db: &dyn NoSendSyncDatabase, key: bool) -> Rc { + Rc::new(key) +} + +fn no_send_sync_key(_db: &dyn NoSendSyncDatabase, key: Rc) -> bool { + *key +} + +#[salsa::database(NoSendSyncStorage)] +#[derive(Default)] +struct DatabaseImpl { + storage: salsa::Storage, +} + +impl salsa::Database for DatabaseImpl {} + +#[test] +fn no_send_sync() { + let db = DatabaseImpl::default(); + + assert_eq!(db.no_send_sync_value(true), Rc::new(true)); + assert!(!db.no_send_sync_key(Rc::new(false))); +} diff --git a/crates/salsa/tests/on_demand_inputs.rs b/crates/salsa/tests/on_demand_inputs.rs new file mode 100644 index 000000000000..5d0e4866442e --- /dev/null +++ b/crates/salsa/tests/on_demand_inputs.rs @@ -0,0 +1,147 @@ +//! Test that "on-demand" input pattern works. +//! +//! On-demand inputs are inputs computed lazily on the fly. They are simulated +//! via a b query with zero inputs, which uses `add_synthetic_read` to +//! tweak durability and `invalidate` to clear the input. 
+ +#![allow(clippy::disallowed_types, clippy::type_complexity)] + +use std::{cell::RefCell, collections::HashMap, rc::Rc}; + +use salsa::{Database as _, Durability, EventKind}; + +#[salsa::query_group(QueryGroupStorage)] +trait QueryGroup: salsa::Database + AsRef> { + fn a(&self, x: u32) -> u32; + fn b(&self, x: u32) -> u32; + fn c(&self, x: u32) -> u32; +} + +fn a(db: &dyn QueryGroup, x: u32) -> u32 { + let durability = if x % 2 == 0 { Durability::LOW } else { Durability::HIGH }; + db.salsa_runtime().report_synthetic_read(durability); + let external_state: &HashMap = db.as_ref(); + external_state[&x] +} + +fn b(db: &dyn QueryGroup, x: u32) -> u32 { + db.a(x) +} + +fn c(db: &dyn QueryGroup, x: u32) -> u32 { + db.b(x) +} + +#[salsa::database(QueryGroupStorage)] +#[derive(Default)] +struct Database { + storage: salsa::Storage, + external_state: HashMap, + on_event: Option>, +} + +impl salsa::Database for Database { + fn salsa_event(&self, event: salsa::Event) { + if let Some(cb) = &self.on_event { + cb(self, event) + } + } +} + +impl AsRef> for Database { + fn as_ref(&self) -> &HashMap { + &self.external_state + } +} + +#[test] +fn on_demand_input_works() { + let mut db = Database::default(); + + db.external_state.insert(1, 10); + assert_eq!(db.b(1), 10); + assert_eq!(db.a(1), 10); + + // We changed external state, but haven't signaled about this yet, + // so we expect to see the old answer + db.external_state.insert(1, 92); + assert_eq!(db.b(1), 10); + assert_eq!(db.a(1), 10); + + AQuery.in_db_mut(&mut db).invalidate(&1); + assert_eq!(db.b(1), 92); + assert_eq!(db.a(1), 92); + + // Downstream queries should also be rerun if we call `a` first. 
+ db.external_state.insert(1, 50); + AQuery.in_db_mut(&mut db).invalidate(&1); + assert_eq!(db.a(1), 50); + assert_eq!(db.b(1), 50); +} + +#[test] +fn on_demand_input_durability() { + let mut db = Database::default(); + + let events = Rc::new(RefCell::new(vec![])); + db.on_event = Some(Box::new({ + let events = events.clone(); + move |db, event| { + if let EventKind::WillCheckCancellation = event.kind { + // these events are not interesting + } else { + events.borrow_mut().push(format!("{:?}", event.debug(db))) + } + } + })); + + events.replace(vec![]); + db.external_state.insert(1, 10); + db.external_state.insert(2, 20); + assert_eq!(db.b(1), 10); + assert_eq!(db.b(2), 20); + expect_test::expect![[r#" + RefCell { + value: [ + "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: b(1) } }", + "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: a(1) } }", + "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: b(2) } }", + "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: a(2) } }", + ], + } + "#]].assert_debug_eq(&events); + + db.salsa_runtime_mut().synthetic_write(Durability::LOW); + events.replace(vec![]); + assert_eq!(db.c(1), 10); + assert_eq!(db.c(2), 20); + // Re-execute `a(2)` because that has low durability, but not `a(1)` + expect_test::expect![[r#" + RefCell { + value: [ + "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: c(1) } }", + "Event { runtime_id: RuntimeId { counter: 0 }, kind: DidValidateMemoizedValue { database_key: b(1) } }", + "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: c(2) } }", + "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: a(2) } }", + "Event { runtime_id: RuntimeId { counter: 0 }, kind: DidValidateMemoizedValue { database_key: b(2) } }", + ], + } + "#]].assert_debug_eq(&events); + + 
db.salsa_runtime_mut().synthetic_write(Durability::HIGH); + events.replace(vec![]); + assert_eq!(db.c(1), 10); + assert_eq!(db.c(2), 20); + // Re-execute both `a(1)` and `a(2)`, but we don't re-execute any `b` queries as the + // result didn't actually change. + expect_test::expect![[r#" + RefCell { + value: [ + "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: a(1) } }", + "Event { runtime_id: RuntimeId { counter: 0 }, kind: DidValidateMemoizedValue { database_key: c(1) } }", + "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: a(2) } }", + "Event { runtime_id: RuntimeId { counter: 0 }, kind: DidValidateMemoizedValue { database_key: c(2) } }", + ], + } + "#]].assert_debug_eq(&events); +} diff --git a/crates/salsa/tests/panic_safely.rs b/crates/salsa/tests/panic_safely.rs new file mode 100644 index 000000000000..c11ae9c2144a --- /dev/null +++ b/crates/salsa/tests/panic_safely.rs @@ -0,0 +1,93 @@ +use salsa::{Database, ParallelDatabase, Snapshot}; +use std::panic::{self, AssertUnwindSafe}; +use std::sync::atomic::{AtomicU32, Ordering::SeqCst}; + +#[salsa::query_group(PanicSafelyStruct)] +trait PanicSafelyDatabase: salsa::Database { + #[salsa::input] + fn one(&self) -> usize; + + fn panic_safely(&self) -> (); + + fn outer(&self) -> (); +} + +fn panic_safely(db: &dyn PanicSafelyDatabase) { + assert_eq!(db.one(), 1); +} + +static OUTER_CALLS: AtomicU32 = AtomicU32::new(0); + +fn outer(db: &dyn PanicSafelyDatabase) { + OUTER_CALLS.fetch_add(1, SeqCst); + db.panic_safely(); +} + +#[salsa::database(PanicSafelyStruct)] +#[derive(Default)] +struct DatabaseStruct { + storage: salsa::Storage, +} + +impl salsa::Database for DatabaseStruct {} + +impl salsa::ParallelDatabase for DatabaseStruct { + fn snapshot(&self) -> Snapshot { + Snapshot::new(DatabaseStruct { storage: self.storage.snapshot() }) + } +} + +#[test] +fn should_panic_safely() { + let mut db = DatabaseStruct::default(); + db.set_one(0); + + // Invoke 
`db.panic_safely()` without having set `db.one`. `db.one` will + // return 0 and we should catch the panic. + let result = panic::catch_unwind(AssertUnwindSafe({ + let db = db.snapshot(); + move || db.panic_safely() + })); + assert!(result.is_err()); + + // Set `db.one` to 1 and assert ok + db.set_one(1); + let result = panic::catch_unwind(AssertUnwindSafe(|| db.panic_safely())); + assert!(result.is_ok()); + + // Check that memoized outer is not invalidated by a panic + { + assert_eq!(OUTER_CALLS.load(SeqCst), 0); + db.outer(); + assert_eq!(OUTER_CALLS.load(SeqCst), 1); + + db.set_one(0); + let result = panic::catch_unwind(AssertUnwindSafe(|| db.outer())); + assert!(result.is_err()); + assert_eq!(OUTER_CALLS.load(SeqCst), 1); + + db.set_one(1); + db.outer(); + assert_eq!(OUTER_CALLS.load(SeqCst), 2); + } +} + +#[test] +fn storages_are_unwind_safe() { + fn check_unwind_safe() {} + check_unwind_safe::<&DatabaseStruct>(); +} + +#[test] +fn panics_clear_query_stack() { + let db = DatabaseStruct::default(); + + // Invoke `db.panic_if_not_one()` without having set `db.input`. `db.input` + // will default to 0 and we should catch the panic. + let result = panic::catch_unwind(AssertUnwindSafe(|| db.panic_safely())); + assert!(result.is_err()); + + // The database has been poisoned and any attempt to increment the + // revision should panic. + assert_eq!(db.salsa_runtime().active_query(), None); +} diff --git a/crates/salsa/tests/parallel/cancellation.rs b/crates/salsa/tests/parallel/cancellation.rs new file mode 100644 index 000000000000..9a92e5cc1ff4 --- /dev/null +++ b/crates/salsa/tests/parallel/cancellation.rs @@ -0,0 +1,132 @@ +use crate::setup::{CancellationFlag, Knobs, ParDatabase, ParDatabaseImpl, WithValue}; +use salsa::{Cancelled, ParallelDatabase}; + +macro_rules! 
assert_cancelled { + ($thread:expr) => { + match $thread.join() { + Ok(value) => panic!("expected cancellation, got {:?}", value), + Err(payload) => match payload.downcast::() { + Ok(_) => {} + Err(payload) => ::std::panic::resume_unwind(payload), + }, + } + }; +} + +/// Add test where a call to `sum` is cancelled by a simultaneous +/// write. Check that we recompute the result in next revision, even +/// though none of the inputs have changed. +#[test] +fn in_par_get_set_cancellation_immediate() { + let mut db = ParDatabaseImpl::default(); + + db.set_input('a', 100); + db.set_input('b', 10); + db.set_input('c', 1); + db.set_input('d', 0); + + let thread1 = std::thread::spawn({ + let db = db.snapshot(); + move || { + // This will not return until it sees cancellation is + // signaled. + db.knobs().sum_signal_on_entry.with_value(1, || { + db.knobs() + .sum_wait_for_cancellation + .with_value(CancellationFlag::Panic, || db.sum("abc")) + }) + } + }); + + // Wait until we have entered `sum` in the other thread. + db.wait_for(1); + + // Try to set the input. This will signal cancellation. + db.set_input('d', 1000); + + // This should re-compute the value (even though no input has changed). + let thread2 = std::thread::spawn({ + let db = db.snapshot(); + move || db.sum("abc") + }); + + assert_eq!(db.sum("d"), 1000); + assert_cancelled!(thread1); + assert_eq!(thread2.join().unwrap(), 111); +} + +/// Here, we check that `sum`'s cancellation is propagated +/// to `sum2` properly. +#[test] +fn in_par_get_set_cancellation_transitive() { + let mut db = ParDatabaseImpl::default(); + + db.set_input('a', 100); + db.set_input('b', 10); + db.set_input('c', 1); + db.set_input('d', 0); + + let thread1 = std::thread::spawn({ + let db = db.snapshot(); + move || { + // This will not return until it sees cancellation is + // signaled. 
+ db.knobs().sum_signal_on_entry.with_value(1, || { + db.knobs() + .sum_wait_for_cancellation + .with_value(CancellationFlag::Panic, || db.sum2("abc")) + }) + } + }); + + // Wait until we have entered `sum` in the other thread. + db.wait_for(1); + + // Try to set the input. This will signal cancellation. + db.set_input('d', 1000); + + // This should re-compute the value (even though no input has changed). + let thread2 = std::thread::spawn({ + let db = db.snapshot(); + move || db.sum2("abc") + }); + + assert_eq!(db.sum2("d"), 1000); + assert_cancelled!(thread1); + assert_eq!(thread2.join().unwrap(), 111); +} + +/// https://github.com/salsa-rs/salsa/issues/66 +#[test] +fn no_back_dating_in_cancellation() { + let mut db = ParDatabaseImpl::default(); + + db.set_input('a', 1); + let thread1 = std::thread::spawn({ + let db = db.snapshot(); + move || { + // Here we compute a long-chain of queries, + // but the last one gets cancelled. + db.knobs().sum_signal_on_entry.with_value(1, || { + db.knobs() + .sum_wait_for_cancellation + .with_value(CancellationFlag::Panic, || db.sum3("a")) + }) + } + }); + + db.wait_for(1); + + // Set unrelated input to bump revision + db.set_input('b', 2); + + // Here we should recompute the whole chain again, clearing the cancellation + // state. If we get `usize::max()` here, it is a bug! + assert_eq!(db.sum3("a"), 1); + + assert_cancelled!(thread1); + + db.set_input('a', 3); + db.set_input('a', 4); + assert_eq!(db.sum3("ab"), 6); +} diff --git a/crates/salsa/tests/parallel/frozen.rs b/crates/salsa/tests/parallel/frozen.rs new file mode 100644 index 000000000000..5359a8820e24 --- /dev/null +++ b/crates/salsa/tests/parallel/frozen.rs @@ -0,0 +1,57 @@ +use crate::setup::{ParDatabase, ParDatabaseImpl}; +use crate::signal::Signal; +use salsa::{Database, ParallelDatabase}; +use std::{ + panic::{catch_unwind, AssertUnwindSafe}, + sync::Arc, +}; + +/// Add test where a call to `sum` is cancelled by a simultaneous +/// write. 
Check that we recompute the result in next revision, even +/// though none of the inputs have changed. +#[test] +fn in_par_get_set_cancellation() { + let mut db = ParDatabaseImpl::default(); + + db.set_input('a', 1); + + let signal = Arc::new(Signal::default()); + + let thread1 = std::thread::spawn({ + let db = db.snapshot(); + let signal = signal.clone(); + move || { + // Check that cancellation flag is not yet set, because + // `set` cannot have been called yet. + catch_unwind(AssertUnwindSafe(|| db.unwind_if_cancelled())).unwrap(); + + // Signal other thread to proceed. + signal.signal(1); + + // Wait for other thread to signal cancellation + catch_unwind(AssertUnwindSafe(|| loop { + db.unwind_if_cancelled(); + std::thread::yield_now(); + })) + .unwrap_err(); + } + }); + + let thread2 = std::thread::spawn({ + move || { + // Wait until thread 1 has asserted that they are not cancelled + // before we invoke `set.` + signal.wait_for(1); + + // This will block until thread1 drops the revision lock. + db.set_input('a', 2); + + db.input('a') + } + }); + + thread1.join().unwrap(); + + let c = thread2.join().unwrap(); + assert_eq!(c, 2); +} diff --git a/crates/salsa/tests/parallel/independent.rs b/crates/salsa/tests/parallel/independent.rs new file mode 100644 index 000000000000..bd6ba3bf931c --- /dev/null +++ b/crates/salsa/tests/parallel/independent.rs @@ -0,0 +1,29 @@ +use crate::setup::{ParDatabase, ParDatabaseImpl}; +use salsa::ParallelDatabase; + +/// Test two `sum` queries (on distinct keys) executing in different +/// threads. Really just a test that `snapshot` etc compiles. 
+#[test] +fn in_par_two_independent_queries() { + let mut db = ParDatabaseImpl::default(); + + db.set_input('a', 100); + db.set_input('b', 10); + db.set_input('c', 1); + db.set_input('d', 200); + db.set_input('e', 20); + db.set_input('f', 2); + + let thread1 = std::thread::spawn({ + let db = db.snapshot(); + move || db.sum("abc") + }); + + let thread2 = std::thread::spawn({ + let db = db.snapshot(); + move || db.sum("def") + }); + + assert_eq!(thread1.join().unwrap(), 111); + assert_eq!(thread2.join().unwrap(), 222); +} diff --git a/crates/salsa/tests/parallel/main.rs b/crates/salsa/tests/parallel/main.rs new file mode 100644 index 000000000000..31c0da183757 --- /dev/null +++ b/crates/salsa/tests/parallel/main.rs @@ -0,0 +1,13 @@ +mod setup; + +mod cancellation; +mod frozen; +mod independent; +mod parallel_cycle_all_recover; +mod parallel_cycle_mid_recover; +mod parallel_cycle_none_recover; +mod parallel_cycle_one_recovers; +mod race; +mod signal; +mod stress; +mod true_parallel; diff --git a/crates/salsa/tests/parallel/parallel_cycle_all_recover.rs b/crates/salsa/tests/parallel/parallel_cycle_all_recover.rs new file mode 100644 index 000000000000..cee51b4db75e --- /dev/null +++ b/crates/salsa/tests/parallel/parallel_cycle_all_recover.rs @@ -0,0 +1,110 @@ +//! Test for cycle recover spread across two threads. +//! See `../cycles.rs` for a complete listing of cycle tests, +//! both intra and cross thread. + +use crate::setup::{Knobs, ParDatabaseImpl}; +use salsa::ParallelDatabase; +use test_log::test; + +// Recover cycle test: +// +// The pattern is as follows. 
+// +// Thread A Thread B +// -------- -------- +// a1 b1 +// | wait for stage 1 (blocks) +// signal stage 1 | +// wait for stage 2 (blocks) (unblocked) +// | signal stage 2 +// (unblocked) wait for stage 3 (blocks) +// a2 | +// b1 (blocks -> stage 3) | +// | (unblocked) +// | b2 +// | a1 (cycle detected, recovers) +// | b2 completes, recovers +// | b1 completes, recovers +// a2 sees cycle, recovers +// a1 completes, recovers + +#[test] +fn parallel_cycle_all_recover() { + let db = ParDatabaseImpl::default(); + db.knobs().signal_on_will_block.set(3); + + let thread_a = std::thread::spawn({ + let db = db.snapshot(); + move || db.a1(1) + }); + + let thread_b = std::thread::spawn({ + let db = db.snapshot(); + move || db.b1(1) + }); + + assert_eq!(thread_a.join().unwrap(), 11); + assert_eq!(thread_b.join().unwrap(), 21); +} + +#[salsa::query_group(ParallelCycleAllRecover)] +pub(crate) trait TestDatabase: Knobs { + #[salsa::cycle(recover_a1)] + fn a1(&self, key: i32) -> i32; + + #[salsa::cycle(recover_a2)] + fn a2(&self, key: i32) -> i32; + + #[salsa::cycle(recover_b1)] + fn b1(&self, key: i32) -> i32; + + #[salsa::cycle(recover_b2)] + fn b2(&self, key: i32) -> i32; +} + +fn recover_a1(_db: &dyn TestDatabase, _cycle: &salsa::Cycle, key: &i32) -> i32 { + tracing::debug!("recover_a1"); + key * 10 + 1 +} + +fn recover_a2(_db: &dyn TestDatabase, _cycle: &salsa::Cycle, key: &i32) -> i32 { + tracing::debug!("recover_a2"); + key * 10 + 2 +} + +fn recover_b1(_db: &dyn TestDatabase, _cycle: &salsa::Cycle, key: &i32) -> i32 { + tracing::debug!("recover_b1"); + key * 20 + 1 +} + +fn recover_b2(_db: &dyn TestDatabase, _cycle: &salsa::Cycle, key: &i32) -> i32 { + tracing::debug!("recover_b2"); + key * 20 + 2 +} + +fn a1(db: &dyn TestDatabase, key: i32) -> i32 { + // Wait to create the cycle until both threads have entered + db.signal(1); + db.wait_for(2); + + db.a2(key) +} + +fn a2(db: &dyn TestDatabase, key: i32) -> i32 { + db.b1(key) +} + +fn b1(db: &dyn TestDatabase, key: i32) -> 
i32 { + // Wait to create the cycle until both threads have entered + db.wait_for(1); + db.signal(2); + + // Wait for thread A to block on this thread + db.wait_for(3); + + db.b2(key) +} + +fn b2(db: &dyn TestDatabase, key: i32) -> i32 { + db.a1(key) +} diff --git a/crates/salsa/tests/parallel/parallel_cycle_mid_recover.rs b/crates/salsa/tests/parallel/parallel_cycle_mid_recover.rs new file mode 100644 index 000000000000..f78c05c5593c --- /dev/null +++ b/crates/salsa/tests/parallel/parallel_cycle_mid_recover.rs @@ -0,0 +1,110 @@ +//! Test for cycle recover spread across two threads. +//! See `../cycles.rs` for a complete listing of cycle tests, +//! both intra and cross thread. + +use crate::setup::{Knobs, ParDatabaseImpl}; +use salsa::ParallelDatabase; +use test_log::test; + +// Recover cycle test: +// +// The pattern is as follows. +// +// Thread A Thread B +// -------- -------- +// a1 b1 +// | wait for stage 1 (blocks) +// signal stage 1 | +// wait for stage 2 (blocks) (unblocked) +// | | +// | b2 +// | b3 +// | a1 (blocks -> stage 2) +// (unblocked) | +// a2 (cycle detected) | +// b3 recovers +// b2 resumes +// b1 panics because bug + +#[test] +fn parallel_cycle_mid_recovers() { + let db = ParDatabaseImpl::default(); + db.knobs().signal_on_will_block.set(2); + + let thread_a = std::thread::spawn({ + let db = db.snapshot(); + move || db.a1(1) + }); + + let thread_b = std::thread::spawn({ + let db = db.snapshot(); + move || db.b1(1) + }); + + // We expect that the recovery function yields + // `1 * 20 + 2`, which is returned (and forwarded) + // to b1, and from there to a2 and a1. 
+ assert_eq!(thread_a.join().unwrap(), 22); + assert_eq!(thread_b.join().unwrap(), 22); +} + +#[salsa::query_group(ParallelCycleMidRecovers)] +pub(crate) trait TestDatabase: Knobs { + fn a1(&self, key: i32) -> i32; + + fn a2(&self, key: i32) -> i32; + + #[salsa::cycle(recover_b1)] + fn b1(&self, key: i32) -> i32; + + fn b2(&self, key: i32) -> i32; + + #[salsa::cycle(recover_b3)] + fn b3(&self, key: i32) -> i32; +} + +fn recover_b1(_db: &dyn TestDatabase, _cycle: &salsa::Cycle, key: &i32) -> i32 { + tracing::debug!("recover_b1"); + key * 20 + 2 +} + +fn recover_b3(_db: &dyn TestDatabase, _cycle: &salsa::Cycle, key: &i32) -> i32 { + tracing::debug!("recover_b1"); + key * 200 + 2 +} + +fn a1(db: &dyn TestDatabase, key: i32) -> i32 { + // tell thread b we have started + db.signal(1); + + // wait for thread b to block on a1 + db.wait_for(2); + + db.a2(key) +} + +fn a2(db: &dyn TestDatabase, key: i32) -> i32 { + // create the cycle + db.b1(key) +} + +fn b1(db: &dyn TestDatabase, key: i32) -> i32 { + // wait for thread a to have started + db.wait_for(1); + + db.b2(key); + + 0 +} + +fn b2(db: &dyn TestDatabase, key: i32) -> i32 { + // will encounter a cycle but recover + db.b3(key); + db.b1(key); // hasn't recovered yet + 0 +} + +fn b3(db: &dyn TestDatabase, key: i32) -> i32 { + // will block on thread a, signaling stage 2 + db.a1(key) +} diff --git a/crates/salsa/tests/parallel/parallel_cycle_none_recover.rs b/crates/salsa/tests/parallel/parallel_cycle_none_recover.rs new file mode 100644 index 000000000000..35fe3791182e --- /dev/null +++ b/crates/salsa/tests/parallel/parallel_cycle_none_recover.rs @@ -0,0 +1,69 @@ +//! Test a cycle where no queries recover that occurs across threads. +//! See the `../cycles.rs` for a complete listing of cycle tests, +//! both intra and cross thread. 
+ +use crate::setup::{Knobs, ParDatabaseImpl}; +use expect_test::expect; +use salsa::ParallelDatabase; +use test_log::test; + +#[test] +fn parallel_cycle_none_recover() { + let db = ParDatabaseImpl::default(); + db.knobs().signal_on_will_block.set(3); + + let thread_a = std::thread::spawn({ + let db = db.snapshot(); + move || db.a(-1) + }); + + let thread_b = std::thread::spawn({ + let db = db.snapshot(); + move || db.b(-1) + }); + + // We expect B to panic because it detects a cycle (it is the one that calls A, ultimately). + // Right now, it panics with a string. + let err_b = thread_b.join().unwrap_err(); + if let Some(c) = err_b.downcast_ref::() { + expect![[r#" + [ + "a(-1)", + "b(-1)", + ] + "#]] + .assert_debug_eq(&c.unexpected_participants(&db)); + } else { + panic!("b failed in an unexpected way: {:?}", err_b); + } + + // We expect A to propagate a panic, which causes us to use the sentinel + // type `Canceled`. + assert!(thread_a.join().unwrap_err().downcast_ref::().is_some()); +} + +#[salsa::query_group(ParallelCycleNoneRecover)] +pub(crate) trait TestDatabase: Knobs { + fn a(&self, key: i32) -> i32; + fn b(&self, key: i32) -> i32; +} + +fn a(db: &dyn TestDatabase, key: i32) -> i32 { + // Wait to create the cycle until both threads have entered + db.signal(1); + db.wait_for(2); + + db.b(key) +} + +fn b(db: &dyn TestDatabase, key: i32) -> i32 { + // Wait to create the cycle until both threads have entered + db.wait_for(1); + db.signal(2); + + // Wait for thread A to block on this thread + db.wait_for(3); + + // Now try to execute A + db.a(key) +} diff --git a/crates/salsa/tests/parallel/parallel_cycle_one_recovers.rs b/crates/salsa/tests/parallel/parallel_cycle_one_recovers.rs new file mode 100644 index 000000000000..7d3944714aef --- /dev/null +++ b/crates/salsa/tests/parallel/parallel_cycle_one_recovers.rs @@ -0,0 +1,95 @@ +//! Test for cycle recover spread across two threads. +//! See `../cycles.rs` for a complete listing of cycle tests, +//! 
both intra and cross thread. + +use crate::setup::{Knobs, ParDatabaseImpl}; +use salsa::ParallelDatabase; +use test_log::test; + +// Recover cycle test: +// +// The pattern is as follows. +// +// Thread A Thread B +// -------- -------- +// a1 b1 +// | wait for stage 1 (blocks) +// signal stage 1 | +// wait for stage 2 (blocks) (unblocked) +// | signal stage 2 +// (unblocked) wait for stage 3 (blocks) +// a2 | +// b1 (blocks -> stage 3) | +// | (unblocked) +// | b2 +// | a1 (cycle detected) +// a2 recovery fn executes | +// a1 completes normally | +// b2 completes, recovers +// b1 completes, recovers + +#[test] +fn parallel_cycle_one_recovers() { + let db = ParDatabaseImpl::default(); + db.knobs().signal_on_will_block.set(3); + + let thread_a = std::thread::spawn({ + let db = db.snapshot(); + move || db.a1(1) + }); + + let thread_b = std::thread::spawn({ + let db = db.snapshot(); + move || db.b1(1) + }); + + // We expect that the recovery function yields + // `1 * 20 + 2`, which is returned (and forwarded) + // to b1, and from there to a2 and a1. 
+ assert_eq!(thread_a.join().unwrap(), 22); + assert_eq!(thread_b.join().unwrap(), 22); +} + +#[salsa::query_group(ParallelCycleOneRecovers)] +pub(crate) trait TestDatabase: Knobs { + fn a1(&self, key: i32) -> i32; + + #[salsa::cycle(recover)] + fn a2(&self, key: i32) -> i32; + + fn b1(&self, key: i32) -> i32; + + fn b2(&self, key: i32) -> i32; +} + +fn recover(_db: &dyn TestDatabase, _cycle: &salsa::Cycle, key: &i32) -> i32 { + tracing::debug!("recover"); + key * 20 + 2 +} + +fn a1(db: &dyn TestDatabase, key: i32) -> i32 { + // Wait to create the cycle until both threads have entered + db.signal(1); + db.wait_for(2); + + db.a2(key) +} + +fn a2(db: &dyn TestDatabase, key: i32) -> i32 { + db.b1(key) +} + +fn b1(db: &dyn TestDatabase, key: i32) -> i32 { + // Wait to create the cycle until both threads have entered + db.wait_for(1); + db.signal(2); + + // Wait for thread A to block on this thread + db.wait_for(3); + + db.b2(key) +} + +fn b2(db: &dyn TestDatabase, key: i32) -> i32 { + db.a1(key) +} diff --git a/crates/salsa/tests/parallel/race.rs b/crates/salsa/tests/parallel/race.rs new file mode 100644 index 000000000000..e875de998f8d --- /dev/null +++ b/crates/salsa/tests/parallel/race.rs @@ -0,0 +1,37 @@ +use std::panic::AssertUnwindSafe; + +use crate::setup::{ParDatabase, ParDatabaseImpl}; +use salsa::{Cancelled, ParallelDatabase}; + +/// Test where a read and a set are racing with one another. +/// Should be atomic. +#[test] +fn in_par_get_set_race() { + let mut db = ParDatabaseImpl::default(); + + db.set_input('a', 100); + db.set_input('b', 10); + db.set_input('c', 1); + + let thread1 = std::thread::spawn({ + let db = db.snapshot(); + move || Cancelled::catch(AssertUnwindSafe(|| db.sum("abc"))) + }); + + let thread2 = std::thread::spawn(move || { + db.set_input('a', 1000); + db.sum("a") + }); + + // If the 1st thread runs first, you get 111, otherwise you get + // 1011; if they run concurrently and the 1st thread observes the + // cancellation, it'll unwind. 
+ let result1 = thread1.join().unwrap(); + if let Ok(value1) = result1 { + assert!(value1 == 111 || value1 == 1011, "illegal result {}", value1); + } + + // thread2 can not observe a cancellation because it performs a + // database write before running any other queries. + assert_eq!(thread2.join().unwrap(), 1000); +} diff --git a/crates/salsa/tests/parallel/setup.rs b/crates/salsa/tests/parallel/setup.rs new file mode 100644 index 000000000000..0a35902b4350 --- /dev/null +++ b/crates/salsa/tests/parallel/setup.rs @@ -0,0 +1,197 @@ +use crate::signal::Signal; +use salsa::Database; +use salsa::ParallelDatabase; +use salsa::Snapshot; +use std::sync::Arc; +use std::{ + cell::Cell, + panic::{catch_unwind, resume_unwind, AssertUnwindSafe}, +}; + +#[salsa::query_group(Par)] +pub(crate) trait ParDatabase: Knobs { + #[salsa::input] + fn input(&self, key: char) -> usize; + + fn sum(&self, key: &'static str) -> usize; + + /// Invokes `sum` + fn sum2(&self, key: &'static str) -> usize; + + /// Invokes `sum` but doesn't really care about the result. + fn sum2_drop_sum(&self, key: &'static str) -> usize; + + /// Invokes `sum2` + fn sum3(&self, key: &'static str) -> usize; + + /// Invokes `sum2_drop_sum` + fn sum3_drop_sum(&self, key: &'static str) -> usize; +} + +/// Various "knobs" and utilities used by tests to force +/// a certain behavior. 
+pub(crate) trait Knobs { + fn knobs(&self) -> &KnobsStruct; + + fn signal(&self, stage: usize); + + fn wait_for(&self, stage: usize); +} + +pub(crate) trait WithValue { + fn with_value(&self, value: T, closure: impl FnOnce() -> R) -> R; +} + +impl WithValue for Cell { + fn with_value(&self, value: T, closure: impl FnOnce() -> R) -> R { + let old_value = self.replace(value); + + let result = catch_unwind(AssertUnwindSafe(closure)); + + self.set(old_value); + + match result { + Ok(r) => r, + Err(payload) => resume_unwind(payload), + } + } +} + +#[derive(Default, Clone, Copy, PartialEq, Eq)] +pub(crate) enum CancellationFlag { + #[default] + Down, + Panic, +} + +/// Various "knobs" that can be used to customize how the queries +/// behave on one specific thread. Note that this state is +/// intentionally thread-local (apart from `signal`). +#[derive(Clone, Default)] +pub(crate) struct KnobsStruct { + /// A kind of flexible barrier used to coordinate execution across + /// threads to ensure we reach various weird states. + pub(crate) signal: Arc, + + /// When this database is about to block, send a signal. + pub(crate) signal_on_will_block: Cell, + + /// Invocations of `sum` will signal this stage on entry. + pub(crate) sum_signal_on_entry: Cell, + + /// Invocations of `sum` will wait for this stage on entry. + pub(crate) sum_wait_for_on_entry: Cell, + + /// If true, invocations of `sum` will panic before they exit. + pub(crate) sum_should_panic: Cell, + + /// If true, invocations of `sum` will wait for cancellation before + /// they exit. + pub(crate) sum_wait_for_cancellation: Cell, + + /// Invocations of `sum` will wait for this stage prior to exiting. + pub(crate) sum_wait_for_on_exit: Cell, + + /// Invocations of `sum` will signal this stage prior to exiting. 
+ pub(crate) sum_signal_on_exit: Cell, + + /// Invocations of `sum3_drop_sum` will panic unconditionally + pub(crate) sum3_drop_sum_should_panic: Cell, +} + +fn sum(db: &dyn ParDatabase, key: &'static str) -> usize { + let mut sum = 0; + + db.signal(db.knobs().sum_signal_on_entry.get()); + + db.wait_for(db.knobs().sum_wait_for_on_entry.get()); + + if db.knobs().sum_should_panic.get() { + panic!("query set to panic before exit") + } + + for ch in key.chars() { + sum += db.input(ch); + } + + match db.knobs().sum_wait_for_cancellation.get() { + CancellationFlag::Down => (), + CancellationFlag::Panic => { + tracing::debug!("waiting for cancellation"); + loop { + db.unwind_if_cancelled(); + std::thread::yield_now(); + } + } + } + + db.wait_for(db.knobs().sum_wait_for_on_exit.get()); + + db.signal(db.knobs().sum_signal_on_exit.get()); + + sum +} + +fn sum2(db: &dyn ParDatabase, key: &'static str) -> usize { + db.sum(key) +} + +fn sum2_drop_sum(db: &dyn ParDatabase, key: &'static str) -> usize { + let _ = db.sum(key); + 22 +} + +fn sum3(db: &dyn ParDatabase, key: &'static str) -> usize { + db.sum2(key) +} + +fn sum3_drop_sum(db: &dyn ParDatabase, key: &'static str) -> usize { + if db.knobs().sum3_drop_sum_should_panic.get() { + panic!("sum3_drop_sum executed") + } + db.sum2_drop_sum(key) +} + +#[salsa::database( + Par, + crate::parallel_cycle_all_recover::ParallelCycleAllRecover, + crate::parallel_cycle_none_recover::ParallelCycleNoneRecover, + crate::parallel_cycle_mid_recover::ParallelCycleMidRecovers, + crate::parallel_cycle_one_recovers::ParallelCycleOneRecovers +)] +#[derive(Default)] +pub(crate) struct ParDatabaseImpl { + storage: salsa::Storage, + knobs: KnobsStruct, +} + +impl Database for ParDatabaseImpl { + fn salsa_event(&self, event: salsa::Event) { + if let salsa::EventKind::WillBlockOn { .. 
} = event.kind { + self.signal(self.knobs().signal_on_will_block.get()); + } + } +} + +impl ParallelDatabase for ParDatabaseImpl { + fn snapshot(&self) -> Snapshot { + Snapshot::new(ParDatabaseImpl { + storage: self.storage.snapshot(), + knobs: self.knobs.clone(), + }) + } +} + +impl Knobs for ParDatabaseImpl { + fn knobs(&self) -> &KnobsStruct { + &self.knobs + } + + fn signal(&self, stage: usize) { + self.knobs.signal.signal(stage); + } + + fn wait_for(&self, stage: usize) { + self.knobs.signal.wait_for(stage); + } +} diff --git a/crates/salsa/tests/parallel/signal.rs b/crates/salsa/tests/parallel/signal.rs new file mode 100644 index 000000000000..0af7b66e4826 --- /dev/null +++ b/crates/salsa/tests/parallel/signal.rs @@ -0,0 +1,40 @@ +use parking_lot::{Condvar, Mutex}; + +#[derive(Default)] +pub(crate) struct Signal { + value: Mutex, + cond_var: Condvar, +} + +impl Signal { + pub(crate) fn signal(&self, stage: usize) { + tracing::debug!("signal({})", stage); + + // This check avoids acquiring the lock for things that will + // clearly be a no-op. Not *necessary* but helps to ensure we + // are more likely to encounter weird race conditions; + // otherwise calls to `sum` will tend to be unnecessarily + // synchronous. + if stage > 0 { + let mut v = self.value.lock(); + if stage > *v { + *v = stage; + self.cond_var.notify_all(); + } + } + } + + /// Waits until the given condition is true; the fn is invoked + /// with the current stage. + pub(crate) fn wait_for(&self, stage: usize) { + tracing::debug!("wait_for({})", stage); + + // As above, avoid lock if clearly a no-op. 
+ if stage > 0 { + let mut v = self.value.lock(); + while *v < stage { + self.cond_var.wait(&mut v); + } + } + } +} diff --git a/crates/salsa/tests/parallel/stress.rs b/crates/salsa/tests/parallel/stress.rs new file mode 100644 index 000000000000..2fa317b2b908 --- /dev/null +++ b/crates/salsa/tests/parallel/stress.rs @@ -0,0 +1,168 @@ +use rand::seq::SliceRandom; +use rand::Rng; + +use salsa::ParallelDatabase; +use salsa::Snapshot; +use salsa::{Cancelled, Database}; + +// Number of operations a reader performs +const N_MUTATOR_OPS: usize = 100; +const N_READER_OPS: usize = 100; + +#[salsa::query_group(Stress)] +trait StressDatabase: salsa::Database { + #[salsa::input] + fn a(&self, key: usize) -> usize; + + fn b(&self, key: usize) -> usize; + + fn c(&self, key: usize) -> usize; +} + +fn b(db: &dyn StressDatabase, key: usize) -> usize { + db.unwind_if_cancelled(); + db.a(key) +} + +fn c(db: &dyn StressDatabase, key: usize) -> usize { + db.b(key) +} + +#[salsa::database(Stress)] +#[derive(Default)] +struct StressDatabaseImpl { + storage: salsa::Storage, +} + +impl salsa::Database for StressDatabaseImpl {} + +impl salsa::ParallelDatabase for StressDatabaseImpl { + fn snapshot(&self) -> Snapshot { + Snapshot::new(StressDatabaseImpl { storage: self.storage.snapshot() }) + } +} + +#[derive(Clone, Copy, Debug)] +enum Query { + A, + B, + C, +} + +enum MutatorOp { + WriteOp(WriteOp), + LaunchReader { ops: Vec, check_cancellation: bool }, +} + +#[derive(Debug)] +enum WriteOp { + SetA(usize, usize), +} + +#[derive(Debug)] +enum ReadOp { + Get(Query, usize), +} + +impl rand::distributions::Distribution for rand::distributions::Standard { + fn sample(&self, rng: &mut R) -> Query { + *[Query::A, Query::B, Query::C].choose(rng).unwrap() + } +} + +impl rand::distributions::Distribution for rand::distributions::Standard { + fn sample(&self, rng: &mut R) -> MutatorOp { + if rng.gen_bool(0.5) { + MutatorOp::WriteOp(rng.gen()) + } else { + MutatorOp::LaunchReader { + ops: 
(0..N_READER_OPS).map(|_| rng.gen()).collect(), + check_cancellation: rng.gen(), + } + } + } +} + +impl rand::distributions::Distribution for rand::distributions::Standard { + fn sample(&self, rng: &mut R) -> WriteOp { + let key = rng.gen::() % 10; + let value = rng.gen::() % 10; + WriteOp::SetA(key, value) + } +} + +impl rand::distributions::Distribution for rand::distributions::Standard { + fn sample(&self, rng: &mut R) -> ReadOp { + let query = rng.gen::(); + let key = rng.gen::() % 10; + ReadOp::Get(query, key) + } +} + +fn db_reader_thread(db: &StressDatabaseImpl, ops: Vec, check_cancellation: bool) { + for op in ops { + if check_cancellation { + db.unwind_if_cancelled(); + } + op.execute(db); + } +} + +impl WriteOp { + fn execute(self, db: &mut StressDatabaseImpl) { + match self { + WriteOp::SetA(key, value) => { + db.set_a(key, value); + } + } + } +} + +impl ReadOp { + fn execute(self, db: &StressDatabaseImpl) { + match self { + ReadOp::Get(query, key) => match query { + Query::A => { + db.a(key); + } + Query::B => { + let _ = db.b(key); + } + Query::C => { + let _ = db.c(key); + } + }, + } + } +} + +#[test] +fn stress_test() { + let mut db = StressDatabaseImpl::default(); + for i in 0..10 { + db.set_a(i, i); + } + + let mut rng = rand::thread_rng(); + + // generate the ops that the mutator thread will perform + let write_ops: Vec = (0..N_MUTATOR_OPS).map(|_| rng.gen()).collect(); + + // execute the "main thread", which sometimes snapshots off other threads + let mut all_threads = vec![]; + for op in write_ops { + match op { + MutatorOp::WriteOp(w) => w.execute(&mut db), + MutatorOp::LaunchReader { ops, check_cancellation } => { + all_threads.push(std::thread::spawn({ + let db = db.snapshot(); + move || Cancelled::catch(|| db_reader_thread(&db, ops, check_cancellation)) + })) + } + } + } + + for thread in all_threads { + thread.join().unwrap().ok(); + } +} diff --git a/crates/salsa/tests/parallel/true_parallel.rs 
b/crates/salsa/tests/parallel/true_parallel.rs new file mode 100644 index 000000000000..d0e58efd1ac9 --- /dev/null +++ b/crates/salsa/tests/parallel/true_parallel.rs @@ -0,0 +1,125 @@ +use crate::setup::{Knobs, ParDatabase, ParDatabaseImpl, WithValue}; +use salsa::ParallelDatabase; +use std::panic::{self, AssertUnwindSafe}; + +/// Test where two threads are executing sum. We show that they can +/// both be executing sum in parallel by having thread1 wait for +/// thread2 to send a signal before it leaves (similarly, thread2 +/// waits for thread1 to send a signal before it enters). +#[test] +fn true_parallel_different_keys() { + let mut db = ParDatabaseImpl::default(); + + db.set_input('a', 100); + db.set_input('b', 10); + db.set_input('c', 1); + + // Thread 1 will signal stage 1 when it enters and wait for stage 2. + let thread1 = std::thread::spawn({ + let db = db.snapshot(); + move || { + let v = db + .knobs() + .sum_signal_on_entry + .with_value(1, || db.knobs().sum_wait_for_on_exit.with_value(2, || db.sum("a"))); + v + } + }); + + // Thread 2 will wait_for stage 1 when it enters and signal stage 2 + // when it leaves. + let thread2 = std::thread::spawn({ + let db = db.snapshot(); + move || { + let v = db + .knobs() + .sum_wait_for_on_entry + .with_value(1, || db.knobs().sum_signal_on_exit.with_value(2, || db.sum("b"))); + v + } + }); + + assert_eq!(thread1.join().unwrap(), 100); + assert_eq!(thread2.join().unwrap(), 10); +} + +/// Add a test that tries to trigger a conflict, where we fetch +/// `sum("abc")` from two threads simultaneously, and one of them +/// therefore has to block. 
+#[test] +fn true_parallel_same_keys() { + let mut db = ParDatabaseImpl::default(); + + db.set_input('a', 100); + db.set_input('b', 10); + db.set_input('c', 1); + + // Thread 1 will wait_for a barrier in the start of `sum` + let thread1 = std::thread::spawn({ + let db = db.snapshot(); + move || { + let v = db + .knobs() + .sum_signal_on_entry + .with_value(1, || db.knobs().sum_wait_for_on_entry.with_value(2, || db.sum("abc"))); + v + } + }); + + // Thread 2 will wait until Thread 1 has entered sum and then -- + // once it has set itself to block -- signal Thread 1 to + // continue. This way, we test out the mechanism of one thread + // blocking on another. + let thread2 = std::thread::spawn({ + let db = db.snapshot(); + move || { + db.knobs().signal.wait_for(1); + db.knobs().signal_on_will_block.set(2); + db.sum("abc") + } + }); + + assert_eq!(thread1.join().unwrap(), 111); + assert_eq!(thread2.join().unwrap(), 111); +} + +/// Add a test that tries to trigger a conflict, where we fetch `sum("a")` +/// from two threads simultaneously. After `thread2` begins blocking, +/// we force `thread1` to panic and should see that propagate to `thread2`. +#[test] +fn true_parallel_propagate_panic() { + let mut db = ParDatabaseImpl::default(); + + db.set_input('a', 1); + + // `thread1` will wait_for a barrier in the start of `sum`. Once it can + // continue, it will panic. + let thread1 = std::thread::spawn({ + let db = db.snapshot(); + move || { + let v = db.knobs().sum_signal_on_entry.with_value(1, || { + db.knobs() + .sum_wait_for_on_entry + .with_value(2, || db.knobs().sum_should_panic.with_value(true, || db.sum("a"))) + }); + v + } + }); + + // `thread2` will wait until `thread1` has entered sum and then -- once it + // has set itself to block -- signal `thread1` to continue. 
+ let thread2 = std::thread::spawn({ + let db = db.snapshot(); + move || { + db.knobs().signal.wait_for(1); + db.knobs().signal_on_will_block.set(2); + db.sum("a") + } + }); + + let result1 = panic::catch_unwind(AssertUnwindSafe(|| thread1.join().unwrap())); + let result2 = panic::catch_unwind(AssertUnwindSafe(|| thread2.join().unwrap())); + + assert!(result1.is_err()); + assert!(result2.is_err()); +} diff --git a/crates/salsa/tests/storage_varieties/implementation.rs b/crates/salsa/tests/storage_varieties/implementation.rs new file mode 100644 index 000000000000..2843660f1544 --- /dev/null +++ b/crates/salsa/tests/storage_varieties/implementation.rs @@ -0,0 +1,19 @@ +use crate::queries; +use std::cell::Cell; + +#[salsa::database(queries::GroupStruct)] +#[derive(Default)] +pub(crate) struct DatabaseImpl { + storage: salsa::Storage, + counter: Cell, +} + +impl queries::Counter for DatabaseImpl { + fn increment(&self) -> usize { + let v = self.counter.get(); + self.counter.set(v + 1); + v + } +} + +impl salsa::Database for DatabaseImpl {} diff --git a/crates/salsa/tests/storage_varieties/main.rs b/crates/salsa/tests/storage_varieties/main.rs new file mode 100644 index 000000000000..e92c61740e0c --- /dev/null +++ b/crates/salsa/tests/storage_varieties/main.rs @@ -0,0 +1,5 @@ +mod implementation; +mod queries; +mod tests; + +fn main() {} diff --git a/crates/salsa/tests/storage_varieties/queries.rs b/crates/salsa/tests/storage_varieties/queries.rs new file mode 100644 index 000000000000..0847fadefb0f --- /dev/null +++ b/crates/salsa/tests/storage_varieties/queries.rs @@ -0,0 +1,22 @@ +pub(crate) trait Counter: salsa::Database { + fn increment(&self) -> usize; +} + +#[salsa::query_group(GroupStruct)] +pub(crate) trait Database: Counter { + fn memoized(&self) -> usize; + fn volatile(&self) -> usize; +} + +/// Because this query is memoized, we only increment the counter +/// the first time it is invoked. 
+fn memoized(db: &dyn Database) -> usize { + db.volatile() +} + +/// Because this query is volatile, each time it is invoked, +/// we will increment the counter. +fn volatile(db: &dyn Database) -> usize { + db.salsa_runtime().report_untracked_read(); + db.increment() +} diff --git a/crates/salsa/tests/storage_varieties/tests.rs b/crates/salsa/tests/storage_varieties/tests.rs new file mode 100644 index 000000000000..f75c7c142feb --- /dev/null +++ b/crates/salsa/tests/storage_varieties/tests.rs @@ -0,0 +1,49 @@ +#![cfg(test)] + +use crate::implementation::DatabaseImpl; +use crate::queries::Database; +use salsa::Database as _Database; +use salsa::Durability; + +#[test] +fn memoized_twice() { + let db = DatabaseImpl::default(); + let v1 = db.memoized(); + let v2 = db.memoized(); + assert_eq!(v1, v2); +} + +#[test] +fn volatile_twice() { + let mut db = DatabaseImpl::default(); + let v1 = db.volatile(); + let v2 = db.volatile(); // volatiles are cached, so 2nd read returns the same + assert_eq!(v1, v2); + + db.salsa_runtime_mut().synthetic_write(Durability::LOW); // clears volatile caches + + let v3 = db.volatile(); // will re-increment the counter + let v4 = db.volatile(); // second call will be cached + assert_eq!(v1 + 1, v3); + assert_eq!(v3, v4); +} + +#[test] +fn intermingled() { + let mut db = DatabaseImpl::default(); + let v1 = db.volatile(); + let v2 = db.memoized(); + let v3 = db.volatile(); // cached + let v4 = db.memoized(); // cached + + assert_eq!(v1, v2); + assert_eq!(v1, v3); + assert_eq!(v2, v4); + + db.salsa_runtime_mut().synthetic_write(Durability::LOW); // clears volatile caches + + let v5 = db.memoized(); // re-executes volatile, caches new result + let v6 = db.memoized(); // re-use cached result + assert_eq!(v4 + 1, v5); + assert_eq!(v5, v6); +} diff --git a/crates/salsa/tests/transparent.rs b/crates/salsa/tests/transparent.rs new file mode 100644 index 000000000000..2e6dd4267b2e --- /dev/null +++ b/crates/salsa/tests/transparent.rs @@ -0,0 +1,39 @@ 
+//! Test that transparent (uncached) queries work + +#[salsa::query_group(QueryGroupStorage)] +trait QueryGroup { + #[salsa::input] + fn input(&self, x: u32) -> u32; + #[salsa::transparent] + fn wrap(&self, x: u32) -> u32; + fn get(&self, x: u32) -> u32; +} + +fn wrap(db: &dyn QueryGroup, x: u32) -> u32 { + db.input(x) +} + +fn get(db: &dyn QueryGroup, x: u32) -> u32 { + db.wrap(x) +} + +#[salsa::database(QueryGroupStorage)] +#[derive(Default)] +struct Database { + storage: salsa::Storage, +} + +impl salsa::Database for Database {} + +#[test] +fn transparent_queries_work() { + let mut db = Database::default(); + + db.set_input(1, 10); + assert_eq!(db.get(1), 10); + assert_eq!(db.get(1), 10); + + db.set_input(1, 92); + assert_eq!(db.get(1), 92); + assert_eq!(db.get(1), 92); +} diff --git a/crates/salsa/tests/variadic.rs b/crates/salsa/tests/variadic.rs new file mode 100644 index 000000000000..cb857844eb70 --- /dev/null +++ b/crates/salsa/tests/variadic.rs @@ -0,0 +1,51 @@ +#[salsa::query_group(HelloWorld)] +trait HelloWorldDatabase: salsa::Database { + #[salsa::input] + fn input(&self, a: u32, b: u32) -> u32; + + fn none(&self) -> u32; + + fn one(&self, k: u32) -> u32; + + fn two(&self, a: u32, b: u32) -> u32; + + fn trailing(&self, a: u32, b: u32) -> u32; +} + +fn none(_db: &dyn HelloWorldDatabase) -> u32 { + 22 +} + +fn one(_db: &dyn HelloWorldDatabase, k: u32) -> u32 { + k * 2 +} + +fn two(_db: &dyn HelloWorldDatabase, a: u32, b: u32) -> u32 { + a * b +} + +fn trailing(_db: &dyn HelloWorldDatabase, a: u32, b: u32) -> u32 { + a - b +} + +#[salsa::database(HelloWorld)] +#[derive(Default)] +struct DatabaseStruct { + storage: salsa::Storage, +} + +impl salsa::Database for DatabaseStruct {} + +#[test] +fn execute() { + let mut db = DatabaseStruct::default(); + + // test what happens with inputs: + db.set_input(1, 2, 3); + assert_eq!(db.input(1, 2), 3); + + assert_eq!(db.none(), 22); + assert_eq!(db.one(11), 22); + assert_eq!(db.two(11, 2), 22); + 
assert_eq!(db.trailing(24, 2), 22); +} diff --git a/crates/sourcegen/src/lib.rs b/crates/sourcegen/src/lib.rs index 18fa77fd9743..295b716b4e90 100644 --- a/crates/sourcegen/src/lib.rs +++ b/crates/sourcegen/src/lib.rs @@ -69,7 +69,7 @@ impl CommentBlock { panic!("Use plain (non-doc) comments with tags like {tag}:\n {first}"); } - block.id = id.trim().to_string(); + block.id = id.trim().to_owned(); true }); blocks @@ -93,7 +93,7 @@ impl CommentBlock { if let Some(' ') = contents.chars().next() { contents = &contents[1..]; } - block.contents.push(contents.to_string()); + block.contents.push(contents.to_owned()); } None => { if !block.contents.is_empty() { @@ -167,6 +167,7 @@ pub fn add_preamble(generator: &'static str, mut text: String) -> String { /// Checks that the `file` has the specified `contents`. If that is not the /// case, updates the file and then fails the test. +#[allow(clippy::print_stderr)] pub fn ensure_file_contents(file: &Path, contents: &str) { if let Ok(old_contents) = fs::read_to_string(file) { if normalize_newlines(&old_contents) == normalize_newlines(contents) { diff --git a/crates/span/Cargo.toml b/crates/span/Cargo.toml index a4abba29bb97..7093f3a691e1 100644 --- a/crates/span/Cargo.toml +++ b/crates/span/Cargo.toml @@ -11,7 +11,7 @@ authors.workspace = true [dependencies] la-arena.workspace = true -rust-analyzer-salsa.workspace = true +salsa.workspace = true # local deps diff --git a/crates/span/src/lib.rs b/crates/span/src/lib.rs index 6796dc41886f..7763d75cc92f 100644 --- a/crates/span/src/lib.rs +++ b/crates/span/src/lib.rs @@ -68,26 +68,9 @@ impl fmt::Display for Span { } } -#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct SyntaxContextId(InternId); -impl fmt::Debug for SyntaxContextId { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if *self == Self::SELF_REF { - f.debug_tuple("SyntaxContextId") - .field(&{ - #[derive(Debug)] - 
#[allow(non_camel_case_types)] - struct SELF_REF; - SELF_REF - }) - .finish() - } else { - f.debug_tuple("SyntaxContextId").field(&self.0).finish() - } - } -} - impl salsa::InternKey for SyntaxContextId { fn from_intern_id(v: salsa::InternId) -> Self { SyntaxContextId(v) @@ -106,10 +89,6 @@ impl fmt::Display for SyntaxContextId { // inherent trait impls please tyvm impl SyntaxContextId { pub const ROOT: Self = SyntaxContextId(unsafe { InternId::new_unchecked(0) }); - // veykril(HACK): FIXME salsa doesn't allow us fetching the id of the current input to be allocated so - // we need a special value that behaves as the current context. - pub const SELF_REF: Self = - SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 1) }); pub fn is_root(self) -> bool { self == Self::ROOT diff --git a/crates/stdx/Cargo.toml b/crates/stdx/Cargo.toml index 2e3f9113b066..6cca11633539 100644 --- a/crates/stdx/Cargo.toml +++ b/crates/stdx/Cargo.toml @@ -29,4 +29,4 @@ winapi = { version = "0.3.9", features = ["winerror"] } # default = [ "backtrace" ] [lints] -workspace = true \ No newline at end of file +workspace = true diff --git a/crates/stdx/src/anymap.rs b/crates/stdx/src/anymap.rs index 9990f8b08609..899cd8ac6bbe 100644 --- a/crates/stdx/src/anymap.rs +++ b/crates/stdx/src/anymap.rs @@ -54,12 +54,13 @@ use core::any::{Any, TypeId}; use core::hash::BuildHasherDefault; use core::marker::PhantomData; -use ::std::collections::hash_map::{self, HashMap}; +use ::std::collections::hash_map; /// Raw access to the underlying `HashMap`. /// /// This alias is provided for convenience because of the ugly third generic parameter. -pub type RawMap = HashMap, BuildHasherDefault>; +#[allow(clippy::disallowed_types)] // Uses a custom hasher +pub type RawMap = hash_map::HashMap, BuildHasherDefault>; /// A collection containing zero or one values for any given type and allowing convenient, /// type-safe access to those values. 
diff --git a/crates/stdx/src/lib.rs b/crates/stdx/src/lib.rs index 07b782722818..9a9ebae74e8c 100644 --- a/crates/stdx/src/lib.rs +++ b/crates/stdx/src/lib.rs @@ -23,12 +23,14 @@ pub fn is_ci() -> bool { } #[must_use] +#[allow(clippy::print_stderr)] pub fn timeit(label: &'static str) -> impl Drop { let start = Instant::now(); defer(move || eprintln!("{}: {:.2?}", label, start.elapsed())) } /// Prints backtrace to stderr, useful for debugging. +#[allow(clippy::print_stderr)] pub fn print_backtrace() { #[cfg(feature = "backtrace")] eprintln!("{:?}", backtrace::Backtrace::new()); diff --git a/crates/stdx/src/panic_context.rs b/crates/stdx/src/panic_context.rs index c3e8813b0e81..cf3d85b4da39 100644 --- a/crates/stdx/src/panic_context.rs +++ b/crates/stdx/src/panic_context.rs @@ -18,6 +18,7 @@ pub struct PanicContext { } impl PanicContext { + #[allow(clippy::print_stderr)] fn init() { let default_hook = panic::take_hook(); let hook = move |panic_info: &panic::PanicInfo<'_>| { @@ -43,7 +44,7 @@ impl Drop for PanicContext { fn with_ctx(f: impl FnOnce(&mut Vec)) { thread_local! 
{ - static CTX: RefCell> = RefCell::new(Vec::new()); + static CTX: RefCell> = const { RefCell::new(Vec::new()) }; } CTX.with(|ctx| f(&mut ctx.borrow_mut())); } diff --git a/crates/stdx/src/rand.rs b/crates/stdx/src/rand.rs index 64aa57eae09c..115a073dab33 100644 --- a/crates/stdx/src/rand.rs +++ b/crates/stdx/src/rand.rs @@ -14,8 +14,7 @@ pub fn shuffle(slice: &mut [T], mut rand_index: impl FnMut(usize) -> usize) { } pub fn seed() -> u64 { - use std::collections::hash_map::RandomState; use std::hash::{BuildHasher, Hasher}; - - RandomState::new().build_hasher().finish() + #[allow(clippy::disallowed_types)] + std::collections::hash_map::RandomState::new().build_hasher().finish() } diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs index c9944b75b096..bc9c54d0b73e 100644 --- a/crates/syntax/src/ast/edit_in_place.rs +++ b/crates/syntax/src/ast/edit_in_place.rs @@ -627,6 +627,8 @@ impl ast::Impl { } impl ast::AssocItemList { + /// Adds a new associated item after all of the existing associated items. + /// /// Attention! This function does align the first line of `item` with respect to `self`, /// but it does _not_ change indentation of other lines (if any). pub fn add_item(&self, item: ast::AssocItem) { @@ -650,6 +652,46 @@ impl ast::AssocItemList { ]; ted::insert_all(position, elements); } + + /// Adds a new associated item at the start of the associated item list. + /// + /// Attention! This function does align the first line of `item` with respect to `self`, + /// but it does _not_ change indentation of other lines (if any). 
+ pub fn add_item_at_start(&self, item: ast::AssocItem) { + match self.assoc_items().next() { + Some(first_item) => { + let indent = IndentLevel::from_node(first_item.syntax()); + let before = Position::before(first_item.syntax()); + + ted::insert_all( + before, + vec![ + item.syntax().clone().into(), + make::tokens::whitespace(&format!("\n\n{indent}")).into(), + ], + ) + } + None => { + let (indent, position, whitespace) = match self.l_curly_token() { + Some(l_curly) => { + normalize_ws_between_braces(self.syntax()); + (IndentLevel::from_token(&l_curly) + 1, Position::after(&l_curly), "\n") + } + None => (IndentLevel::single(), Position::first_child_of(self.syntax()), ""), + }; + + let mut elements = vec![]; + + // Avoid pushing an empty whitespace token + if !indent.is_zero() || !whitespace.is_empty() { + elements.push(make::tokens::whitespace(&format!("{whitespace}{indent}")).into()) + } + elements.push(item.syntax().clone().into()); + + ted::insert_all(position, elements) + } + }; + } } impl ast::Fn { diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs index d5eda8f15e4a..120d801c8d17 100644 --- a/crates/syntax/src/ast/make.rs +++ b/crates/syntax/src/ast/make.rs @@ -68,6 +68,9 @@ pub mod ext { pub fn expr_ty_new(ty: &ast::Type) -> ast::Expr { expr_from_text(&format!("{ty}::new()")) } + pub fn expr_self() -> ast::Expr { + expr_from_text("self") + } pub fn zero_number() -> ast::Expr { expr_from_text("0") @@ -236,24 +239,21 @@ fn merge_where_clause( pub fn impl_( generic_params: Option, - generic_args: Option, + generic_args: Option, path_type: ast::Type, where_clause: Option, body: Option>>, ) -> ast::Impl { - let (gen_params, tr_gen_args) = match (generic_params, generic_args) { - (None, None) => (String::new(), String::new()), - (None, Some(args)) => (String::new(), args.to_generic_args().to_string()), - (Some(params), None) => (params.to_string(), params.to_generic_args().to_string()), - (Some(params), Some(args)) => match 
merge_gen_params(Some(params.clone()), Some(args)) { - Some(merged) => (params.to_string(), merged.to_generic_args().to_string()), - None => (params.to_string(), String::new()), - }, - }; + let gen_args = generic_args.map_or_else(String::new, |it| it.to_string()); + + let gen_params = generic_params.map_or_else(String::new, |it| it.to_string()); + + let body_newline = + if where_clause.is_some() && body.is_none() { "\n".to_owned() } else { String::new() }; let where_clause = match where_clause { - Some(pr) => pr.to_string(), - None => " ".to_string(), + Some(pr) => format!("\n{pr}\n"), + None => " ".to_owned(), }; let body = match body { @@ -261,7 +261,9 @@ pub fn impl_( None => String::new(), }; - ast_from_text(&format!("impl{gen_params} {path_type}{tr_gen_args}{where_clause}{{{}}}", body)) + ast_from_text(&format!( + "impl{gen_params} {path_type}{gen_args}{where_clause}{{{body_newline}{body}}}" + )) } pub fn impl_trait( @@ -282,22 +284,27 @@ pub fn impl_trait( let trait_gen_args = trait_gen_args.map(|args| args.to_string()).unwrap_or_default(); let type_gen_args = type_gen_args.map(|args| args.to_string()).unwrap_or_default(); - let gen_params = match merge_gen_params(trait_gen_params, type_gen_params) { - Some(pars) => pars.to_string(), - None => String::new(), - }; + let gen_params = merge_gen_params(trait_gen_params, type_gen_params) + .map_or_else(String::new, |it| it.to_string()); let is_negative = if is_negative { "! 
" } else { "" }; + let body_newline = + if (ty_where_clause.is_some() || trait_where_clause.is_some()) && body.is_none() { + "\n".to_owned() + } else { + String::new() + }; + let where_clause = merge_where_clause(ty_where_clause, trait_where_clause) - .map_or_else(|| " ".to_string(), |wc| format!("\n{}\n", wc)); + .map_or_else(|| " ".to_owned(), |wc| format!("\n{}\n", wc)); let body = match body { Some(bd) => bd.iter().map(|elem| elem.to_string()).join(""), None => String::new(), }; - ast_from_text(&format!("{is_unsafe}impl{gen_params} {is_negative}{path_type}{trait_gen_args} for {ty}{type_gen_args}{where_clause}{{{}}}" , body)) + ast_from_text(&format!("{is_unsafe}impl{gen_params} {is_negative}{path_type}{trait_gen_args} for {ty}{type_gen_args}{where_clause}{{{body_newline}{body}}}")) } pub fn impl_trait_type(bounds: ast::TypeBoundList) -> ast::ImplTraitType { @@ -371,7 +378,7 @@ pub fn use_tree( alias: Option, add_star: bool, ) -> ast::UseTree { - let mut buf = "use ".to_string(); + let mut buf = "use ".to_owned(); buf += &path.syntax().to_string(); if let Some(use_tree_list) = use_tree_list { format_to!(buf, "::{use_tree_list}"); @@ -437,7 +444,7 @@ pub fn block_expr( stmts: impl IntoIterator, tail_expr: Option, ) -> ast::BlockExpr { - let mut buf = "{\n".to_string(); + let mut buf = "{\n".to_owned(); for stmt in stmts.into_iter() { format_to!(buf, " {stmt}\n"); } @@ -452,7 +459,7 @@ pub fn async_move_block_expr( stmts: impl IntoIterator, tail_expr: Option, ) -> ast::BlockExpr { - let mut buf = "async move {\n".to_string(); + let mut buf = "async move {\n".to_owned(); for stmt in stmts.into_iter() { format_to!(buf, " {stmt}\n"); } @@ -475,7 +482,7 @@ pub fn hacky_block_expr( elements: impl IntoIterator, tail_expr: Option, ) -> ast::BlockExpr { - let mut buf = "{\n".to_string(); + let mut buf = "{\n".to_owned(); for node_or_token in elements.into_iter() { match node_or_token { rowan::NodeOrToken::Node(n) => format_to!(buf, " {n}\n"), @@ -903,7 +910,12 @@ pub fn 
trait_( ast_from_text(&text) } -pub fn type_bound(bound: &str) -> ast::TypeBound { +// FIXME: remove when no one depends on `generate_impl_text_inner` +pub fn type_bound_text(bound: &str) -> ast::TypeBound { + ast_from_text(&format!("fn f() {{ }}")) +} + +pub fn type_bound(bound: ast::Type) -> ast::TypeBound { ast_from_text(&format!("fn f() {{ }}")) } diff --git a/crates/syntax/src/fuzz.rs b/crates/syntax/src/fuzz.rs index 239a89f9b2d5..28738671790c 100644 --- a/crates/syntax/src/fuzz.rs +++ b/crates/syntax/src/fuzz.rs @@ -34,7 +34,7 @@ impl CheckReparse { let mut lines = data.lines(); let delete_start = usize::from_str(lines.next()?).ok()? + PREFIX.len(); let delete_len = usize::from_str(lines.next()?).ok()?; - let insert = lines.next()?.to_string(); + let insert = lines.next()?.to_owned(); let text = lines.collect::>().join("\n"); let text = format!("{PREFIX}{text}{SUFFIX}"); text.get(delete_start..delete_start.checked_add(delete_len)?)?; // make sure delete is a valid range @@ -46,6 +46,7 @@ impl CheckReparse { Some(CheckReparse { text, edit, edited_text }) } + #[allow(clippy::print_stderr)] pub fn run(&self) { let parse = SourceFile::parse(&self.text); let new_parse = parse.reparse(&self.edit); diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs index 62a0261d7a45..960889b74211 100644 --- a/crates/syntax/src/lib.rs +++ b/crates/syntax/src/lib.rs @@ -432,7 +432,7 @@ fn api_walkthrough() { WalkEvent::Enter(node) => { let text = match &node { NodeOrToken::Node(it) => it.text().to_string(), - NodeOrToken::Token(it) => it.text().to_string(), + NodeOrToken::Token(it) => it.text().to_owned(), }; format_to!(buf, "{:indent$}{:?} {:?}\n", " ", text, node.kind(), indent = indent); indent += 2; diff --git a/crates/syntax/src/parsing.rs b/crates/syntax/src/parsing.rs index 047e670c9f45..1250b5274c18 100644 --- a/crates/syntax/src/parsing.rs +++ b/crates/syntax/src/parsing.rs @@ -28,7 +28,7 @@ pub(crate) fn build_tree( parser::StrStep::Enter { kind } => 
builder.start_node(kind), parser::StrStep::Exit => builder.finish_node(), parser::StrStep::Error { msg, pos } => { - builder.error(msg.to_string(), pos.try_into().unwrap()) + builder.error(msg.to_owned(), pos.try_into().unwrap()) } }); diff --git a/crates/syntax/src/parsing/reparsing.rs b/crates/syntax/src/parsing/reparsing.rs index 0ddc641711fb..14715b572534 100644 --- a/crates/syntax/src/parsing/reparsing.rs +++ b/crates/syntax/src/parsing/reparsing.rs @@ -105,7 +105,7 @@ fn get_text_after_edit(element: SyntaxElement, edit: &Indel) -> String { let edit = Indel::replace(edit.delete - element.text_range().start(), edit.insert.clone()); let mut text = match element { - NodeOrToken::Token(token) => token.text().to_string(), + NodeOrToken::Token(token) => token.text().to_owned(), NodeOrToken::Node(node) => node.text().to_string(), }; edit.apply(&mut text); diff --git a/crates/syntax/src/ptr.rs b/crates/syntax/src/ptr.rs index b716d3670667..fb8aee9c3b0d 100644 --- a/crates/syntax/src/ptr.rs +++ b/crates/syntax/src/ptr.rs @@ -36,7 +36,7 @@ impl std::fmt::Debug for AstPtr { impl Copy for AstPtr {} impl Clone for AstPtr { fn clone(&self) -> AstPtr { - AstPtr { raw: self.raw, _ty: PhantomData } + *self } } diff --git a/crates/syntax/src/tests/sourcegen_ast.rs b/crates/syntax/src/tests/sourcegen_ast.rs index c2e921e4b6f3..2fd7a4734989 100644 --- a/crates/syntax/src/tests/sourcegen_ast.rs +++ b/crates/syntax/src/tests/sourcegen_ast.rs @@ -3,14 +3,12 @@ //! Specifically, it generates the `SyntaxKind` enum and a number of newtype //! wrappers around `SyntaxNode` which implement `syntax::AstNode`. 
-use std::{ - collections::{BTreeSet, HashSet}, - fmt::Write, -}; +use std::{collections::BTreeSet, fmt::Write}; use itertools::Itertools; use proc_macro2::{Punct, Spacing}; use quote::{format_ident, quote}; +use rustc_hash::FxHashSet; use ungrammar::{Grammar, Rule}; use crate::tests::ast_src::{ @@ -278,7 +276,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { } }); - let defined_nodes: HashSet<_> = node_names.collect(); + let defined_nodes: FxHashSet<_> = node_names.collect(); for node in kinds .nodes @@ -575,7 +573,7 @@ fn lower(grammar: &Grammar) -> AstSrc { tokens: "Whitespace Comment String ByteString CString IntNumber FloatNumber Char Byte Ident" .split_ascii_whitespace() - .map(|it| it.to_string()) + .map(|it| it.to_owned()) .collect::>(), ..Default::default() }; @@ -818,7 +816,7 @@ fn extract_struct_trait(node: &mut AstNodeSrc, trait_name: &str, methods: &[&str } } if to_remove.len() == methods.len() { - node.traits.push(trait_name.to_string()); + node.traits.push(trait_name.to_owned()); node.remove_field(to_remove); } } diff --git a/crates/test-fixture/src/lib.rs b/crates/test-fixture/src/lib.rs index b5ff7a1bf51b..28e757e81bb2 100644 --- a/crates/test-fixture/src/lib.rs +++ b/crates/test-fixture/src/lib.rs @@ -260,7 +260,7 @@ impl ChangeFixture { file_id = FileId::from_raw(file_id.index() + 1); let mut fs = FileSet::default(); - fs.insert(core_file, VfsPath::new_virtual_path("/sysroot/core/lib.rs".to_string())); + fs.insert(core_file, VfsPath::new_virtual_path("/sysroot/core/lib.rs".to_owned())); roots.push(SourceRoot::new_library(fs)); source_change.change_file(core_file, Some(mini_core.source_code().into())); @@ -270,7 +270,7 @@ impl ChangeFixture { let core_crate = crate_graph.add_crate_root( core_file, Edition::Edition2021, - Some(CrateDisplayName::from_canonical_name("core".to_string())), + Some(CrateDisplayName::from_canonical_name("core".to_owned())), None, Default::default(), Default::default(), @@ -304,7 +304,7 @@ impl 
ChangeFixture { let mut fs = FileSet::default(); fs.insert( proc_lib_file, - VfsPath::new_virtual_path("/sysroot/proc_macros/lib.rs".to_string()), + VfsPath::new_virtual_path("/sysroot/proc_macros/lib.rs".to_owned()), ); roots.push(SourceRoot::new_library(fs)); @@ -315,7 +315,7 @@ impl ChangeFixture { let proc_macros_crate = crate_graph.add_crate_root( proc_lib_file, Edition::Edition2021, - Some(CrateDisplayName::from_canonical_name("proc_macros".to_string())), + Some(CrateDisplayName::from_canonical_name("proc_macros".to_owned())), None, Default::default(), Default::default(), @@ -598,7 +598,7 @@ impl ProcMacroExpander for MirrorProcMacroExpander { }; token_trees.push(tt); } - Subtree { delimiter: input.delimiter, token_trees } + Subtree { delimiter: input.delimiter, token_trees: token_trees.into_boxed_slice() } } Ok(traverse(input)) } diff --git a/crates/test-utils/src/bench_fixture.rs b/crates/test-utils/src/bench_fixture.rs index 9296fd2e6835..d83f95768628 100644 --- a/crates/test-utils/src/bench_fixture.rs +++ b/crates/test-utils/src/bench_fixture.rs @@ -12,7 +12,7 @@ pub fn big_struct() -> String { } pub fn big_struct_n(n: u32) -> String { - let mut buf = "pub struct RegisterBlock {".to_string(); + let mut buf = "pub struct RegisterBlock {".to_owned(); for i in 0..n { format_to!(buf, " /// Doc comment for {}.\n", i); format_to!(buf, " pub s{}: S{},\n", i, i); diff --git a/crates/test-utils/src/fixture.rs b/crates/test-utils/src/fixture.rs index 3f8b5a089690..595281336d58 100644 --- a/crates/test-utils/src/fixture.rs +++ b/crates/test-utils/src/fixture.rs @@ -178,13 +178,13 @@ impl FixtureWithProjectMeta { if let Some(meta) = fixture.strip_prefix("//- toolchain:") { let (meta, remain) = meta.split_once('\n').unwrap(); - toolchain = Some(meta.trim().to_string()); + toolchain = Some(meta.trim().to_owned()); fixture = remain; } if let Some(meta) = fixture.strip_prefix("//- proc_macros:") { let (meta, remain) = meta.split_once('\n').unwrap(); - proc_macro_names = 
meta.split(',').map(|it| it.trim().to_string()).collect(); + proc_macro_names = meta.split(',').map(|it| it.trim().to_owned()).collect(); fixture = remain; } @@ -234,7 +234,7 @@ impl FixtureWithProjectMeta { let meta = meta["//-".len()..].trim(); let mut components = meta.split_ascii_whitespace(); - let path = components.next().expect("fixture meta must start with a path").to_string(); + let path = components.next().expect("fixture meta must start with a path").to_owned(); assert!(path.starts_with('/'), "fixture path does not start with `/`: {path:?}"); let mut krate = None; @@ -246,7 +246,7 @@ impl FixtureWithProjectMeta { let mut introduce_new_source_root = None; let mut library = false; let mut target_data_layout = Some( - "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128".to_string(), + "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128".to_owned(), ); for component in components { if component == "library" { @@ -257,22 +257,22 @@ impl FixtureWithProjectMeta { let (key, value) = component.split_once(':').unwrap_or_else(|| panic!("invalid meta line: {meta:?}")); match key { - "crate" => krate = Some(value.to_string()), - "deps" => deps = value.split(',').map(|it| it.to_string()).collect(), + "crate" => krate = Some(value.to_owned()), + "deps" => deps = value.split(',').map(|it| it.to_owned()).collect(), "extern-prelude" => { if value.is_empty() { extern_prelude = Some(Vec::new()); } else { extern_prelude = - Some(value.split(',').map(|it| it.to_string()).collect::>()); + Some(value.split(',').map(|it| it.to_owned()).collect::>()); } } - "edition" => edition = Some(value.to_string()), + "edition" => edition = Some(value.to_owned()), "cfg" => { for entry in value.split(',') { match entry.split_once('=') { - Some((k, v)) => cfgs.push((k.to_string(), Some(v.to_string()))), - None => cfgs.push((entry.to_string(), None)), + Some((k, v)) => cfgs.push((k.to_owned(), Some(v.to_owned()))), + None => cfgs.push((entry.to_owned(), 
None)), } } } @@ -283,8 +283,8 @@ impl FixtureWithProjectMeta { } } } - "new_source_root" => introduce_new_source_root = Some(value.to_string()), - "target_data_layout" => target_data_layout = Some(value.to_string()), + "new_source_root" => introduce_new_source_root = Some(value.to_owned()), + "target_data_layout" => target_data_layout = Some(value.to_owned()), _ => panic!("bad component: {component:?}"), } } @@ -381,7 +381,7 @@ impl MiniCore { let (flag, deps) = line.split_once(':').unwrap(); let flag = flag.trim(); - self.valid_flags.push(flag.to_string()); + self.valid_flags.push(flag.to_owned()); implications.extend( iter::repeat(flag) .zip(deps.split(", ").map(str::trim).filter(|dep| !dep.is_empty())), @@ -401,7 +401,7 @@ impl MiniCore { let mut changed = false; for &(u, v) in &implications { if self.has_flag(u) && !self.has_flag(v) { - self.activated_flags.push(v.to_string()); + self.activated_flags.push(v.to_owned()); changed = true; } } @@ -486,9 +486,9 @@ fn parse_fixture_gets_full_meta() { mod m; "#, ); - assert_eq!(toolchain, Some("nightly".to_string())); - assert_eq!(proc_macro_names, vec!["identity".to_string()]); - assert_eq!(mini_core.unwrap().activated_flags, vec!["coerce_unsized".to_string()]); + assert_eq!(toolchain, Some("nightly".to_owned())); + assert_eq!(proc_macro_names, vec!["identity".to_owned()]); + assert_eq!(mini_core.unwrap().activated_flags, vec!["coerce_unsized".to_owned()]); assert_eq!(1, parsed.len()); let meta = &parsed[0]; diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs index e48b27313068..b750107803a9 100644 --- a/crates/test-utils/src/lib.rs +++ b/crates/test-utils/src/lib.rs @@ -7,6 +7,7 @@ //! * marks (see the eponymous module). 
#![warn(rust_2018_idioms, unused_lifetimes)] +#![allow(clippy::print_stderr)] mod assert_linear; pub mod bench_fixture; @@ -163,7 +164,7 @@ pub fn extract_tags(mut text: &str, tag: &str) -> (Vec<(TextRange, Option').unwrap(); let attr = text[open.len()..close_open].trim(); - let attr = if attr.is_empty() { None } else { Some(attr.to_string()) }; + let attr = if attr.is_empty() { None } else { Some(attr.to_owned()) }; text = &text[close_open + '>'.len_utf8()..]; let from = TextSize::of(&res); stack.push((from, attr)); @@ -325,7 +326,7 @@ fn extract_line_annotations(mut line: &str) -> Vec { content = &content["file".len()..]; } - let content = content.trim_start().to_string(); + let content = content.trim_start().to_owned(); let annotation = if continuation { LineAnnotation::Continuation { offset: range.end(), content } @@ -424,7 +425,7 @@ pub fn format_diff(chunks: Vec>) -> String { /// /// A benchmark test looks like this: /// -/// ``` +/// ```ignore /// #[test] /// fn benchmark_foo() { /// if skip_slow_tests() { return; } diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs index 9c25d88cb842..23a3a7e0afa4 100644 --- a/crates/test-utils/src/minicore.rs +++ b/crates/test-utils/src/minicore.rs @@ -328,7 +328,6 @@ pub mod convert { } pub mod mem { - // region:drop // region:manually_drop #[lang = "manually_drop"] #[repr(transparent)] @@ -353,6 +352,7 @@ pub mod mem { // endregion:manually_drop + // region:drop pub fn drop(_x: T) {} pub const fn replace(dest: &mut T, src: T) -> T { unsafe { diff --git a/crates/text-edit/src/lib.rs b/crates/text-edit/src/lib.rs index fb52a50f0b3f..e2ff373c1baa 100644 --- a/crates/text-edit/src/lib.rs +++ b/crates/text-edit/src/lib.rs @@ -231,11 +231,11 @@ mod tests { #[test] fn test_apply() { - let mut text = "_11h1_2222_xx3333_4444_6666".to_string(); + let mut text = "_11h1_2222_xx3333_4444_6666".to_owned(); let mut builder = TextEditBuilder::default(); - builder.replace(range(3, 4), "1".to_string()); + 
builder.replace(range(3, 4), "1".to_owned()); builder.delete(range(11, 13)); - builder.insert(22.into(), "_5555".to_string()); + builder.insert(22.into(), "_5555".to_owned()); let text_edit = builder.finish(); text_edit.apply(&mut text); diff --git a/crates/tt/src/buffer.rs b/crates/tt/src/buffer.rs index cade2e9f67a2..cd41af03c613 100644 --- a/crates/tt/src/buffer.rs +++ b/crates/tt/src/buffer.rs @@ -106,7 +106,7 @@ impl<'t, Span> TokenBuffer<'t, Span> { for (child_idx, (subtree, tt)) in children { let idx = TokenBuffer::new_inner( - subtree.token_trees.as_slice(), + &*subtree.token_trees, buffers, Some(EntryPtr(EntryId(res), child_idx + 1)), ); diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs index b3b0eeda75af..9004bff53a80 100644 --- a/crates/tt/src/lib.rs +++ b/crates/tt/src/lib.rs @@ -23,10 +23,10 @@ pub enum TokenTree { } impl_from!(Leaf, Subtree for TokenTree); impl TokenTree { - pub const fn empty(span: S) -> Self { + pub fn empty(span: S) -> Self { Self::Subtree(Subtree { delimiter: Delimiter::invisible_spanned(span), - token_trees: vec![], + token_trees: Box::new([]), }) } @@ -34,7 +34,7 @@ impl TokenTree { match self { TokenTree::Leaf(_) => Subtree { delimiter: Delimiter::invisible_delim_spanned(span), - token_trees: vec![self], + token_trees: Box::new([self]), }, TokenTree::Subtree(s) => s, } @@ -69,25 +69,35 @@ impl_from!(Literal, Punct, Ident for Leaf); #[derive(Clone, PartialEq, Eq, Hash)] pub struct Subtree { pub delimiter: Delimiter, - pub token_trees: Vec>, + pub token_trees: Box<[TokenTree]>, } impl Subtree { - pub const fn empty(span: DelimSpan) -> Self { - Subtree { delimiter: Delimiter::invisible_delim_spanned(span), token_trees: vec![] } + pub fn empty(span: DelimSpan) -> Self { + Subtree { delimiter: Delimiter::invisible_delim_spanned(span), token_trees: Box::new([]) } } - pub fn visit_ids(&mut self, f: &mut impl FnMut(S) -> S) { - self.delimiter.open = f(self.delimiter.open); - self.delimiter.close = f(self.delimiter.close); - 
self.token_trees.iter_mut().for_each(|tt| match tt { - crate::TokenTree::Leaf(leaf) => match leaf { - crate::Leaf::Literal(it) => it.span = f(it.span), - crate::Leaf::Punct(it) => it.span = f(it.span), - crate::Leaf::Ident(it) => it.span = f(it.span), - }, - crate::TokenTree::Subtree(s) => s.visit_ids(f), - }) + /// This is slow, and should be avoided, as it will always reallocate! + pub fn push(&mut self, subtree: TokenTree) { + let mut mutable_trees = std::mem::take(&mut self.token_trees).into_vec(); + + // Reserve exactly space for one element, to avoid `into_boxed_slice` having to reallocate again. + mutable_trees.reserve_exact(1); + mutable_trees.push(subtree); + + self.token_trees = mutable_trees.into_boxed_slice(); + } +} + +#[derive(Clone, PartialEq, Eq, Hash)] +pub struct SubtreeBuilder { + pub delimiter: Delimiter, + pub token_trees: Vec>, +} + +impl SubtreeBuilder { + pub fn build(self) -> Subtree { + Subtree { delimiter: self.delimiter, token_trees: self.token_trees.into_boxed_slice() } } } @@ -241,7 +251,7 @@ impl fmt::Display for Subtree { }; f.write_str(l)?; let mut needs_space = false; - for tt in &self.token_trees { + for tt in self.token_trees.iter() { if needs_space { f.write_str(" ")?; } @@ -316,7 +326,7 @@ impl Subtree { let mut res = String::new(); res.push_str(delim.0); let mut last = None; - for child in &self.token_trees { + for child in self.token_trees.iter() { let s = match child { TokenTree::Leaf(it) => { let s = match it { @@ -326,11 +336,11 @@ impl Subtree { }; match (it, last) { (Leaf::Ident(_), Some(&TokenTree::Leaf(Leaf::Ident(_)))) => { - " ".to_string() + &s + " ".to_owned() + &s } (Leaf::Punct(_), Some(TokenTree::Leaf(Leaf::Punct(punct)))) => { if punct.spacing == Spacing::Alone { - " ".to_string() + &s + " ".to_owned() + &s } else { s } diff --git a/crates/vfs/src/loader.rs b/crates/vfs/src/loader.rs index e49849d23077..c3d1ff7271a9 100644 --- a/crates/vfs/src/loader.rs +++ b/crates/vfs/src/loader.rs @@ -201,7 +201,7 @@ impl 
Directories { /// ``` fn dirs(base: AbsPathBuf, exclude: &[&str]) -> Directories { let exclude = exclude.iter().map(|it| base.join(it)).collect::>(); - Directories { extensions: vec!["rs".to_string()], include: vec![base], exclude } + Directories { extensions: vec!["rs".to_owned()], include: vec![base], exclude } } impl fmt::Debug for Message { diff --git a/crates/vfs/src/vfs_path/tests.rs b/crates/vfs/src/vfs_path/tests.rs index 510e021e8903..2d89362ee069 100644 --- a/crates/vfs/src/vfs_path/tests.rs +++ b/crates/vfs/src/vfs_path/tests.rs @@ -2,29 +2,29 @@ use super::*; #[test] fn virtual_path_extensions() { - assert_eq!(VirtualPath("/".to_string()).name_and_extension(), None); + assert_eq!(VirtualPath("/".to_owned()).name_and_extension(), None); assert_eq!( - VirtualPath("/directory".to_string()).name_and_extension(), + VirtualPath("/directory".to_owned()).name_and_extension(), Some(("directory", None)) ); assert_eq!( - VirtualPath("/directory/".to_string()).name_and_extension(), + VirtualPath("/directory/".to_owned()).name_and_extension(), Some(("directory", None)) ); assert_eq!( - VirtualPath("/directory/file".to_string()).name_and_extension(), + VirtualPath("/directory/file".to_owned()).name_and_extension(), Some(("file", None)) ); assert_eq!( - VirtualPath("/directory/.file".to_string()).name_and_extension(), + VirtualPath("/directory/.file".to_owned()).name_and_extension(), Some((".file", None)) ); assert_eq!( - VirtualPath("/directory/.file.rs".to_string()).name_and_extension(), + VirtualPath("/directory/.file.rs".to_owned()).name_and_extension(), Some((".file", Some("rs"))) ); assert_eq!( - VirtualPath("/directory/file.rs".to_string()).name_and_extension(), + VirtualPath("/directory/file.rs".to_owned()).name_and_extension(), Some(("file", Some("rs"))) ); } diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md index bc558c202472..f3100ee194e6 100644 --- a/docs/dev/lsp-extensions.md +++ b/docs/dev/lsp-extensions.md @@ -1,5 +1,5 @@ " } }, - 
"wordPattern": { "pattern": "(\\p{Alphabetic}|\\p{Number}|\\p{Nonspacing_Mark})(((\\p{Alphabetic}|\\p{Number}|\\p{Nonspacing_Mark})|[_])?(\\p{Alphabetic}|\\p{Number}|\\p{Nonspacing_Mark}))*", "flags": "ug" } + "wordPattern": { + "pattern": "(\\p{Alphabetic}|\\p{Number}|\\p{Nonspacing_Mark})(((\\p{Alphabetic}|\\p{Number}|\\p{Nonspacing_Mark})|[_])?(\\p{Alphabetic}|\\p{Number}|\\p{Nonspacing_Mark}))*", + "flags": "ug" + } } diff --git a/editors/code/rustdoc-inject.json b/editors/code/rustdoc-inject.json index 45ca2fa96bc2..7a4498fea9d0 100644 --- a/editors/code/rustdoc-inject.json +++ b/editors/code/rustdoc-inject.json @@ -90,4 +90,4 @@ } }, "scopeName": "comment.markdown-cell-inject.rustdoc" -} \ No newline at end of file +} diff --git a/editors/code/rustdoc.json b/editors/code/rustdoc.json index a149af47efdc..cecfae9d753e 100644 --- a/editors/code/rustdoc.json +++ b/editors/code/rustdoc.json @@ -79,4 +79,4 @@ "name": "markup.fenced_code.block.markdown" } } -} \ No newline at end of file +} From c6bb35269c1fe4adc60cbfdab1cc819ba4cb35cf Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 12 Feb 2024 16:01:14 +0100 Subject: [PATCH 035/127] Remove autoclosing pair <> in rustdoc --- editors/code/language-configuration-rustdoc.json | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/editors/code/language-configuration-rustdoc.json b/editors/code/language-configuration-rustdoc.json index 8474180e7b0f..c905d3b60674 100644 --- a/editors/code/language-configuration-rustdoc.json +++ b/editors/code/language-configuration-rustdoc.json @@ -11,8 +11,7 @@ "autoClosingPairs": [ { "open": "{", "close": "}" }, { "open": "[", "close": "]" }, - { "open": "(", "close": ")" }, - { "open": "<", "close": ">", "notIn": ["string"] } + { "open": "(", "close": ")" } ], "surroundingPairs": [ ["(", ")"], From 2fa57d90bc7cc50a6443f17214e6cf1b14488712 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 12 Feb 2024 15:26:17 +0100 Subject: [PATCH 036/127] internal: macro_arg 
query always returns a TokenTree --- crates/hir-def/src/body/lower.rs | 4 - crates/hir-def/src/expander.rs | 6 +- .../src/macro_expansion_tests/mbe/matching.rs | 2 +- .../macro_expansion_tests/mbe/meta_syntax.rs | 42 ++--- .../macro_expansion_tests/mbe/metavar_expr.rs | 6 +- .../mbe/tt_conversion.rs | 4 +- crates/hir-expand/src/builtin_fn_macro.rs | 2 +- crates/hir-expand/src/db.rs | 154 +++++++++--------- crates/hir-expand/src/declarative.rs | 8 +- crates/hir-expand/src/lib.rs | 19 +-- crates/ide-completion/src/tests/expression.rs | 21 ++- .../src/handlers/macro_error.rs | 2 +- 12 files changed, 139 insertions(+), 131 deletions(-) diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index 29ac666277d0..8a589a6cf1b5 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -1000,10 +1000,6 @@ impl ExprCollector<'_> { krate: *krate, }); } - Some(ExpandError::RecursionOverflowPoisoned) => { - // Recursion limit has been reached in the macro expansion tree, but not in - // this very macro call. Don't add diagnostics to avoid duplication. - } Some(err) => { self.source_map.diagnostics.push(BodyDiagnostic::MacroError { node: InFile::new(outer_file, syntax_ptr), diff --git a/crates/hir-def/src/expander.rs b/crates/hir-def/src/expander.rs index b83feeedc34c..b99df1ed5934 100644 --- a/crates/hir-def/src/expander.rs +++ b/crates/hir-def/src/expander.rs @@ -140,13 +140,11 @@ impl Expander { // The overflow error should have been reported when it occurred (see the next branch), // so don't return overflow error here to avoid diagnostics duplication. 
cov_mark::hit!(overflow_but_not_me); - return ExpandResult::only_err(ExpandError::RecursionOverflowPoisoned); + return ExpandResult::ok(None); } else if self.recursion_limit.check(self.recursion_depth as usize + 1).is_err() { self.recursion_depth = u32::MAX; cov_mark::hit!(your_stack_belongs_to_me); - return ExpandResult::only_err(ExpandError::other( - "reached recursion limit during macro expansion", - )); + return ExpandResult::only_err(ExpandError::RecursionOverflow); } let ExpandResult { value, err } = op(self); diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs b/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs index 0909d8c83544..63f211022c97 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs @@ -33,7 +33,7 @@ m!(&k"); "#, expect![[r#" macro_rules! m { ($i:literal) => {}; } -/* error: invalid token tree */"#]], +/* error: mismatched delimiters */"#]], ); } diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs b/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs index e875950e4e5f..2d289b768338 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs @@ -68,26 +68,26 @@ m2!(); "#, expect![[r#" macro_rules! i1 { invalid } -/* error: invalid macro definition: expected subtree */ +/* error: macro definition has parse errors */ macro_rules! e1 { $i:ident => () } -/* error: invalid macro definition: expected subtree */ +/* error: macro definition has parse errors */ macro_rules! e2 { ($i:ident) () } -/* error: invalid macro definition: expected `=` */ +/* error: macro definition has parse errors */ macro_rules! e3 { ($(i:ident)_) => () } -/* error: invalid macro definition: invalid repeat */ +/* error: macro definition has parse errors */ macro_rules! 
f1 { ($i) => ($i) } -/* error: invalid macro definition: missing fragment specifier */ +/* error: macro definition has parse errors */ macro_rules! f2 { ($i:) => ($i) } -/* error: invalid macro definition: missing fragment specifier */ +/* error: macro definition has parse errors */ macro_rules! f3 { ($i:_) => () } -/* error: invalid macro definition: missing fragment specifier */ +/* error: macro definition has parse errors */ macro_rules! m1 { ($$i) => () } -/* error: invalid macro definition: `$$` is not allowed on the pattern side */ +/* error: macro definition has parse errors */ macro_rules! m2 { () => ( ${invalid()} ) } -/* error: invalid macro definition: invalid metavariable expression */ +/* error: macro definition has parse errors */ "#]], ) } @@ -137,18 +137,18 @@ macro_rules! m9 { ($($($($i:ident)?)*)+) => {}; } macro_rules! mA { ($($($($i:ident)+)?)*) => {}; } macro_rules! mB { ($($($($i:ident)+)*)?) => {}; } -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro 
definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ "#]], ); } diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs b/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs index 6560d0ec4664..bf7011983876 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs @@ -275,9 +275,9 @@ macro_rules! depth_too_large { } fn test() { - /* error: invalid macro definition: invalid metavariable expression */; - /* error: invalid macro definition: invalid metavariable expression */; - /* error: invalid macro definition: invalid metavariable expression */; + /* error: macro definition has parse errors */; + /* error: macro definition has parse errors */; + /* error: macro definition has parse errors */; } "#]], ); diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs b/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs index ae56934f632f..362c189f6a73 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs @@ -97,8 +97,8 @@ m2!(x macro_rules! m1 { ($x:ident) => { ($x } } macro_rules! 
m2 { ($x:ident) => {} } -/* error: invalid macro definition: expected subtree */ -/* error: invalid token tree */ +/* error: macro definition has parse errors */ +/* error: mismatched delimiters */ "#]], ) } diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index 6d3de0e55d24..90cd3af75783 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -446,7 +446,7 @@ fn compile_error_expand( ) -> ExpandResult { let err = match &*tt.token_trees { [tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) { - Some(unquoted) => ExpandError::other(unquoted), + Some(unquoted) => ExpandError::other(unquoted.into_boxed_str()), None => ExpandError::other("`compile_error!` argument must be a string"), }, _ => ExpandError::other("`compile_error!` argument must be a string"), diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 6a288cf91979..7b62eaa0289d 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -108,7 +108,7 @@ pub trait ExpandDatabase: SourceDatabase { fn macro_arg( &self, id: MacroCallId, - ) -> ValueResult, SyntaxFixupUndoInfo)>, Arc>>; + ) -> ValueResult<(Arc, SyntaxFixupUndoInfo), Arc>>; /// Fetches the expander for this macro. 
#[salsa::transparent] #[salsa::invoke(TokenExpander::macro_expander)] @@ -326,58 +326,77 @@ fn macro_arg( db: &dyn ExpandDatabase, id: MacroCallId, // FIXME: consider the following by putting fixup info into eager call info args - // ) -> ValueResult>, Arc>> { -) -> ValueResult, SyntaxFixupUndoInfo)>, Arc>> { - let mismatched_delimiters = |arg: &SyntaxNode| { - let first = arg.first_child_or_token().map_or(T![.], |it| it.kind()); - let last = arg.last_child_or_token().map_or(T![.], |it| it.kind()); - let well_formed_tt = - matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}'])); - if !well_formed_tt { - // Don't expand malformed (unbalanced) macro invocations. This is - // less than ideal, but trying to expand unbalanced macro calls - // sometimes produces pathological, deeply nested code which breaks - // all kinds of things. - // - // Some day, we'll have explicit recursion counters for all - // recursive things, at which point this code might be removed. - cov_mark::hit!(issue9358_bad_macro_stack_overflow); - Some(Arc::new(Box::new([SyntaxError::new( - "unbalanced token tree".to_owned(), - arg.text_range(), - )]) as Box<[_]>)) - } else { - None - } - }; + // ) -> ValueResult, Arc>> { +) -> ValueResult<(Arc, SyntaxFixupUndoInfo), Arc>> { let loc = db.lookup_intern_macro_call(id); if let Some(EagerCallInfo { arg, .. }) = matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) .then(|| loc.eager.as_deref()) .flatten() { - ValueResult::ok(Some((arg.clone(), SyntaxFixupUndoInfo::NONE))) + ValueResult::ok((arg.clone(), SyntaxFixupUndoInfo::NONE)) } else { let (parse, map) = parse_with_map(db, loc.kind.file_id()); let root = parse.syntax_node(); let syntax = match loc.kind { MacroCallKind::FnLike { ast_id, .. 
} => { + let dummy_tt = |kind| { + ( + Arc::new(tt::Subtree { + delimiter: tt::Delimiter { + open: loc.call_site, + close: loc.call_site, + kind, + }, + token_trees: Box::default(), + }), + SyntaxFixupUndoInfo::default(), + ) + }; + let node = &ast_id.to_ptr(db).to_node(&root); let offset = node.syntax().text_range().start(); - match node.token_tree() { - Some(tt) => { - let tt = tt.syntax(); - if let Some(e) = mismatched_delimiters(tt) { - return ValueResult::only_err(e); - } - tt.clone() - } - None => { - return ValueResult::only_err(Arc::new(Box::new([ - SyntaxError::new_at_offset("missing token tree".to_owned(), offset), - ]))); - } + let Some(tt) = node.token_tree() else { + return ValueResult::new( + dummy_tt(tt::DelimiterKind::Invisible), + Arc::new(Box::new([SyntaxError::new_at_offset( + "missing token tree".to_owned(), + offset, + )])), + ); + }; + let first = tt.left_delimiter_token().map(|it| it.kind()).unwrap_or(T!['(']); + let last = tt.right_delimiter_token().map(|it| it.kind()).unwrap_or(T![.]); + + let mismatched_delimiters = !matches!( + (first, last), + (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']) + ); + if mismatched_delimiters { + // Don't expand malformed (unbalanced) macro invocations. This is + // less than ideal, but trying to expand unbalanced macro calls + // sometimes produces pathological, deeply nested code which breaks + // all kinds of things. + // + // So instead, we'll return an empty subtree here + cov_mark::hit!(issue9358_bad_macro_stack_overflow); + + let kind = match first { + _ if loc.def.is_proc_macro() => tt::DelimiterKind::Invisible, + T!['('] => tt::DelimiterKind::Parenthesis, + T!['['] => tt::DelimiterKind::Bracket, + T!['{'] => tt::DelimiterKind::Brace, + _ => tt::DelimiterKind::Invisible, + }; + return ValueResult::new( + dummy_tt(kind), + Arc::new(Box::new([SyntaxError::new_at_offset( + "mismatched delimiters".to_owned(), + offset, + )])), + ); } + tt.syntax().clone() } MacroCallKind::Derive { ast_id, .. 
} => { ast_id.to_ptr(db).to_node(&root).syntax().clone() @@ -427,15 +446,15 @@ fn macro_arg( if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) { match parse.errors() { - [] => ValueResult::ok(Some((Arc::new(tt), undo_info))), + [] => ValueResult::ok((Arc::new(tt), undo_info)), errors => ValueResult::new( - Some((Arc::new(tt), undo_info)), + (Arc::new(tt), undo_info), // Box::<[_]>::from(res.errors()), not stable yet Arc::new(errors.to_vec().into_boxed_slice()), ), } } else { - ValueResult::ok(Some((Arc::new(tt), undo_info))) + ValueResult::ok((Arc::new(tt), undo_info)) } } } @@ -519,21 +538,20 @@ fn macro_expand( expander.expand(db, macro_call_id, &node, map.as_ref()) } _ => { - let ValueResult { value, err } = db.macro_arg(macro_call_id); - let Some((macro_arg, undo_info)) = value else { - return ExpandResult { - value: CowArc::Owned(tt::Subtree { - delimiter: tt::Delimiter::invisible_spanned(loc.call_site), - token_trees: Box::new([]), - }), - // FIXME: We should make sure to enforce an invariant that invalid macro - // calls do not reach this call path! - err: Some(ExpandError::other("invalid token tree")), - }; + let ValueResult { value: (macro_arg, undo_info), err } = db.macro_arg(macro_call_id); + let format_parse_err = |err: Arc>| { + let mut buf = String::new(); + for err in &**err { + use std::fmt::Write; + _ = write!(buf, "{}, ", err); + } + buf.pop(); + buf.pop(); + ExpandError::other(buf) }; let arg = &*macro_arg; - match loc.def.kind { + let res = match loc.def.kind { MacroDefKind::Declarative(id) => { db.decl_macro_expander(loc.def.krate, id).expand(db, arg.clone(), macro_call_id) } @@ -549,16 +567,7 @@ fn macro_expand( MacroDefKind::BuiltInEager(..) 
if loc.eager.is_none() => { return ExpandResult { value: CowArc::Arc(macro_arg.clone()), - err: err.map(|err| { - let mut buf = String::new(); - for err in &**err { - use std::fmt::Write; - _ = write!(buf, "{}, ", err); - } - buf.pop(); - buf.pop(); - ExpandError::other(buf) - }), + err: err.map(format_parse_err), }; } MacroDefKind::BuiltInEager(it, _) => { @@ -570,6 +579,11 @@ fn macro_expand( res } _ => unreachable!(), + }; + ExpandResult { + value: res.value, + // if the arg had parse errors, show them instead of the expansion errors + err: err.map(format_parse_err).or(res.err), } } }; @@ -597,17 +611,7 @@ fn macro_expand( fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult> { let loc = db.lookup_intern_macro_call(id); - let Some((macro_arg, undo_info)) = db.macro_arg(id).value else { - return ExpandResult { - value: Arc::new(tt::Subtree { - delimiter: tt::Delimiter::invisible_spanned(loc.call_site), - token_trees: Box::new([]), - }), - // FIXME: We should make sure to enforce an invariant that invalid macro - // calls do not reach this call path! - err: Some(ExpandError::other("invalid token tree")), - }; - }; + let (macro_arg, undo_info) = db.macro_arg(id).value; let expander = match loc.def.kind { MacroDefKind::ProcMacro(expander, ..) 
=> expander, diff --git a/crates/hir-expand/src/declarative.rs b/crates/hir-expand/src/declarative.rs index 37084ee8b93c..345e5cee266b 100644 --- a/crates/hir-expand/src/declarative.rs +++ b/crates/hir-expand/src/declarative.rs @@ -44,9 +44,9 @@ impl DeclarativeMacroExpander { ) }); match self.mac.err() { - Some(e) => ExpandResult::new( + Some(_) => ExpandResult::new( tt::Subtree::empty(tt::DelimSpan { open: loc.call_site, close: loc.call_site }), - ExpandError::other(format!("invalid macro definition: {e}")), + ExpandError::MacroDefinition, ), None => self .mac @@ -80,9 +80,9 @@ impl DeclarativeMacroExpander { ) }); match self.mac.err() { - Some(e) => ExpandResult::new( + Some(_) => ExpandResult::new( tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), - ExpandError::other(format!("invalid macro definition: {e}")), + ExpandError::MacroDefinition, ), None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into), } diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 42a8864c6a78..3a7febc2eb89 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -44,7 +44,6 @@ use crate::{ builtin_derive_macro::BuiltinDeriveExpander, builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander}, db::{ExpandDatabase, TokenExpander}, - fixup::SyntaxFixupUndoInfo, hygiene::SyntaxContextData, mod_path::ModPath, proc_macro::{CustomProcMacroExpander, ProcMacroKind}, @@ -131,8 +130,9 @@ pub enum ExpandError { UnresolvedProcMacro(CrateId), /// The macro expansion is disabled. 
MacroDisabled, + MacroDefinition, Mbe(mbe::ExpandError), - RecursionOverflowPoisoned, + RecursionOverflow, Other(Box>), ProcMacroPanic(Box>), } @@ -154,15 +154,14 @@ impl fmt::Display for ExpandError { match self { ExpandError::UnresolvedProcMacro(_) => f.write_str("unresolved proc-macro"), ExpandError::Mbe(it) => it.fmt(f), - ExpandError::RecursionOverflowPoisoned => { - f.write_str("overflow expanding the original macro") - } + ExpandError::RecursionOverflow => f.write_str("overflow expanding the original macro"), ExpandError::ProcMacroPanic(it) => { f.write_str("proc-macro panicked: ")?; f.write_str(it) } ExpandError::Other(it) => f.write_str(it), ExpandError::MacroDisabled => f.write_str("macro disabled"), + ExpandError::MacroDefinition => f.write_str("macro definition has parse errors"), } } } @@ -761,15 +760,7 @@ impl ExpansionInfo { let (parse, exp_map) = db.parse_macro_expansion(macro_file).value; let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() }; - let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| { - ( - Arc::new(tt::Subtree { - delimiter: tt::Delimiter::invisible_spanned(loc.call_site), - token_trees: Box::new([]), - }), - SyntaxFixupUndoInfo::NONE, - ) - }); + let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value; let def = loc.def.ast_id().left().and_then(|id| { let def_tt = match id.to_node(db) { diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs index 78907a2896c4..e167f5c9aee3 100644 --- a/crates/ide-completion/src/tests/expression.rs +++ b/crates/ide-completion/src/tests/expression.rs @@ -542,7 +542,26 @@ fn quux(x: i32) { m!(x$0 } "#, - expect![[r#""#]], + expect![[r#" + fn quux(…) fn(i32) + lc x i32 + lc y i32 + ma m!(…) macro_rules! 
m + bt u32 u32 + kw crate:: + kw false + kw for + kw if + kw if let + kw loop + kw match + kw return + kw self:: + kw true + kw unsafe + kw while + kw while let + "#]], ); } diff --git a/crates/ide-diagnostics/src/handlers/macro_error.rs b/crates/ide-diagnostics/src/handlers/macro_error.rs index e4cb53f3a2f7..6a957ac1c978 100644 --- a/crates/ide-diagnostics/src/handlers/macro_error.rs +++ b/crates/ide-diagnostics/src/handlers/macro_error.rs @@ -242,7 +242,7 @@ macro_rules! foo { fn f() { foo!(); - //^^^ error: invalid macro definition: expected subtree + //^^^ error: macro definition has parse errors } "#, From 0209c28136bd9d2684fd39d6f0e13b3e32ae0526 Mon Sep 17 00:00:00 2001 From: dfireBird Date: Mon, 12 Feb 2024 22:56:03 +0530 Subject: [PATCH 037/127] add completions to show only traits in trait `impl` statement --- crates/ide-completion/src/completions/type.rs | 10 ++++++++++ crates/ide-completion/src/tests/type_pos.rs | 18 ++++++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/crates/ide-completion/src/completions/type.rs b/crates/ide-completion/src/completions/type.rs index e6a4335c3fec..eca17bb7c7f2 100644 --- a/crates/ide-completion/src/completions/type.rs +++ b/crates/ide-completion/src/completions/type.rs @@ -184,6 +184,16 @@ pub(crate) fn complete_type_path( } } } + TypeLocation::ImplTrait => { + acc.add_nameref_keywords_with_colon(ctx); + ctx.process_all_names(&mut |name, def, doc_aliases| { + let is_trait = matches!(def, ScopeDef::ModuleDef(hir::ModuleDef::Trait(_))); + if is_trait { + acc.add_path_resolution(ctx, path_ctx, name, def, doc_aliases); + } + }); + return; + } _ => {} }; diff --git a/crates/ide-completion/src/tests/type_pos.rs b/crates/ide-completion/src/tests/type_pos.rs index c7161f82ce74..656592ebfd62 100644 --- a/crates/ide-completion/src/tests/type_pos.rs +++ b/crates/ide-completion/src/tests/type_pos.rs @@ -989,3 +989,21 @@ fn foo<'a>() { S::<'static, F$0, _, _>; } "#]], ); } + +#[test] +fn 
complete_traits_on_impl_trait_block() { + check( + r#" +trait Foo {} + +struct Bar; + +impl $0 for Bar { }"#, + expect![[r#" + tt Foo + tt Trait + kw crate:: + kw self:: + "#]], + ); +} From a7641a8f5781fce232bc100de9face29bb55788a Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 12 Feb 2024 21:29:52 +0100 Subject: [PATCH 038/127] fix: Fix target layout fetching --- crates/project-model/src/rustc_cfg.rs | 32 ++++++++------ .../project-model/src/target_data_layout.rs | 42 ++++++++++++------- 2 files changed, 45 insertions(+), 29 deletions(-) diff --git a/crates/project-model/src/rustc_cfg.rs b/crates/project-model/src/rustc_cfg.rs index efbdfffd9a4f..1d07e4a8a656 100644 --- a/crates/project-model/src/rustc_cfg.rs +++ b/crates/project-model/src/rustc_cfg.rs @@ -67,7 +67,7 @@ fn get_rust_cfgs( extra_env: &FxHashMap, config: RustcCfgConfig<'_>, ) -> anyhow::Result { - match config { + let sysroot = match config { RustcCfgConfig::Cargo(sysroot, cargo_oml) => { let cargo = Sysroot::discover_tool(sysroot, toolchain::Tool::Cargo)?; let mut cmd = Command::new(cargo); @@ -79,19 +79,25 @@ fn get_rust_cfgs( cmd.args(["--target", target]); } - utf8_stdout(cmd).context("Unable to run `cargo rustc`") - } - RustcCfgConfig::Rustc(sysroot) => { - let rustc = Sysroot::discover_tool(sysroot, toolchain::Tool::Rustc)?; - tracing::debug!(?rustc, "using explicit rustc from sysroot"); - let mut cmd = Command::new(rustc); - cmd.envs(extra_env); - cmd.args(["--print", "cfg", "-O"]); - if let Some(target) = target { - cmd.args(["--target", target]); + match utf8_stdout(cmd) { + Ok(it) => return Ok(it), + Err(e) => { + tracing::warn!("failed to run `cargo rustc --print cfg`, falling back to invoking rustc directly: {e}"); + sysroot + } } - - utf8_stdout(cmd).context("Unable to run `rustc`") } + RustcCfgConfig::Rustc(sysroot) => sysroot, + }; + + let rustc = Sysroot::discover_tool(sysroot, toolchain::Tool::Rustc)?; + tracing::debug!(?rustc, "using explicit rustc from sysroot"); + let mut 
cmd = Command::new(rustc); + cmd.envs(extra_env); + cmd.args(["--print", "cfg", "-O"]); + if let Some(target) = target { + cmd.args(["--target", target]); } + + utf8_stdout(cmd).context("unable to fetch cfgs via `rustc --print cfg -O`") } diff --git a/crates/project-model/src/target_data_layout.rs b/crates/project-model/src/target_data_layout.rs index 847829be1808..4c5200cee81b 100644 --- a/crates/project-model/src/target_data_layout.rs +++ b/crates/project-model/src/target_data_layout.rs @@ -20,31 +20,41 @@ pub fn get( target: Option<&str>, extra_env: &FxHashMap, ) -> anyhow::Result { - let output = match config { + let process = |output: String| { + (|| Some(output.split_once(r#""data-layout": ""#)?.1.split_once('"')?.0.to_owned()))() + .ok_or_else(|| { + anyhow::format_err!("could not fetch target-spec-json from command output") + }) + }; + let sysroot = match config { RustcDataLayoutConfig::Cargo(sysroot, cargo_toml) => { let cargo = Sysroot::discover_tool(sysroot, toolchain::Tool::Cargo)?; let mut cmd = Command::new(cargo); cmd.envs(extra_env); cmd.current_dir(cargo_toml.parent()) - .args(["-Z", "unstable-options", "--print", "target-spec-json"]) + .args(["rustc", "--", "-Z", "unstable-options", "--print", "target-spec-json"]) .env("RUSTC_BOOTSTRAP", "1"); if let Some(target) = target { cmd.args(["--target", target]); } - utf8_stdout(cmd) - } - RustcDataLayoutConfig::Rustc(sysroot) => { - let rustc = Sysroot::discover_tool(sysroot, toolchain::Tool::Rustc)?; - let mut cmd = Command::new(rustc); - cmd.envs(extra_env) - .args(["-Z", "unstable-options", "--print", "target-spec-json"]) - .env("RUSTC_BOOTSTRAP", "1"); - if let Some(target) = target { - cmd.args(["--target", target]); + match utf8_stdout(cmd) { + Ok(output) => return process(output), + Err(e) => { + tracing::warn!("failed to run `cargo rustc --print target-spec-json`, falling back to invoking rustc directly: {e}"); + sysroot + } } - utf8_stdout(cmd) } - }?; - (|| 
Some(output.split_once(r#""data-layout": ""#)?.1.split_once('"')?.0.to_owned()))() - .ok_or_else(|| anyhow::format_err!("could not fetch target-spec-json from command output")) + RustcDataLayoutConfig::Rustc(sysroot) => sysroot, + }; + + let rustc = Sysroot::discover_tool(sysroot, toolchain::Tool::Rustc)?; + let mut cmd = Command::new(rustc); + cmd.envs(extra_env) + .args(["-Z", "unstable-options", "--print", "target-spec-json"]) + .env("RUSTC_BOOTSTRAP", "1"); + if let Some(target) = target { + cmd.args(["--target", target]); + } + process(utf8_stdout(cmd)?) } From 24a3c42bd6b8b7aa994191ad4559cd4ec0d3ed4a Mon Sep 17 00:00:00 2001 From: Mohammad Mustakim Ali Date: Mon, 12 Feb 2024 22:34:59 +0000 Subject: [PATCH 039/127] feat: completion list suggests constructor like & builder methods first --- crates/ide-completion/src/item.rs | 48 +++ crates/ide-completion/src/render.rs | 356 +++++++++++++++++++ crates/ide-completion/src/render/function.rs | 59 ++- 3 files changed, 456 insertions(+), 7 deletions(-) diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs index 17dfcc08ca1f..6b5392b704b4 100644 --- a/crates/ide-completion/src/item.rs +++ b/crates/ide-completion/src/item.rs @@ -166,6 +166,8 @@ pub struct CompletionRelevance { pub postfix_match: Option, /// This is set for type inference results pub is_definite: bool, + /// This is set for items that are function (associated or method) + pub function: Option, } #[derive(Debug, Clone, Copy, Eq, PartialEq)] @@ -207,6 +209,24 @@ pub enum CompletionRelevancePostfixMatch { Exact, } +#[derive(Debug, Clone, Copy, Eq, PartialEq)] +pub struct CompletionRelevanceFn { + pub has_args: bool, + pub has_self_arg: bool, + pub return_type: CompletionRelevanceReturnType, +} + +#[derive(Debug, Clone, Copy, Eq, PartialEq)] +pub enum CompletionRelevanceReturnType { + Other, + /// Returns the Self type of the impl/trait + DirectConstructor, + /// Returns something that indirectly constructs the `Self` type of 
the impl/trait e.g. `Result`, `Option` + Constructor, + /// Returns a possible builder for the type + Builder, +} + impl CompletionRelevance { /// Provides a relevance score. Higher values are more relevant. /// @@ -231,6 +251,7 @@ impl CompletionRelevance { postfix_match, is_definite, is_item_from_notable_trait, + function, } = self; // lower rank private things @@ -275,6 +296,33 @@ impl CompletionRelevance { if is_definite { score += 10; } + + score += function + .map(|asf| { + let mut fn_score = match asf.return_type { + CompletionRelevanceReturnType::DirectConstructor => 15, + CompletionRelevanceReturnType::Builder => 10, + CompletionRelevanceReturnType::Constructor => 5, + CompletionRelevanceReturnType::Other => 0, + }; + + // When a fn is bumped due to return type: + // Bump Constructor or Builder methods with no arguments, + // over them tha with self arguments + if fn_score > 0 { + if !asf.has_args { + // bump associated functions + fn_score += 1; + } else if asf.has_self_arg { + // downgrade methods (below Constructor) + fn_score = 1; + } + } + + fn_score + }) + .unwrap_or_default(); + score } diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index 88dc3b5cbe7c..629f4a51c1bb 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -675,6 +675,16 @@ mod tests { expect.assert_debug_eq(&actual); } + #[track_caller] + fn check_function_relevance(ra_fixture: &str, expect: Expect) { + let actual: Vec<_> = do_completion(ra_fixture, CompletionItemKind::Method) + .into_iter() + .map(|item| (item.detail.unwrap_or_default(), item.relevance.function)) + .collect(); + + expect.assert_debug_eq(&actual); + } + #[track_caller] fn check_relevance_for_kinds(ra_fixture: &str, kinds: &[CompletionItemKind], expect: Expect) { let mut actual = get_all_items(TEST_CONFIG, ra_fixture, None); @@ -1254,6 +1264,7 @@ fn main() { let _: m::Spam = S$0 } is_private_editable: false, postfix_match: None, is_definite: 
false, + function: None, }, trigger_call_info: true, }, @@ -1281,6 +1292,7 @@ fn main() { let _: m::Spam = S$0 } is_private_editable: false, postfix_match: None, is_definite: false, + function: None, }, trigger_call_info: true, }, @@ -1360,6 +1372,7 @@ fn foo() { A { the$0 } } is_private_editable: false, postfix_match: None, is_definite: false, + function: None, }, }, ] @@ -1393,6 +1406,26 @@ impl S { documentation: Documentation( "Method docs", ), + relevance: CompletionRelevance { + exact_name_match: false, + type_match: None, + is_local: false, + is_item_from_trait: false, + is_item_from_notable_trait: false, + is_name_already_imported: false, + requires_import: false, + is_op_method: false, + is_private_editable: false, + postfix_match: None, + is_definite: false, + function: Some( + CompletionRelevanceFn { + has_args: true, + has_self_arg: true, + return_type: Other, + }, + ), + }, }, CompletionItem { label: "foo", @@ -1498,6 +1531,26 @@ fn foo(s: S) { s.$0 } kind: Method, lookup: "the_method", detail: "fn(&self)", + relevance: CompletionRelevance { + exact_name_match: false, + type_match: None, + is_local: false, + is_item_from_trait: false, + is_item_from_notable_trait: false, + is_name_already_imported: false, + requires_import: false, + is_op_method: false, + is_private_editable: false, + postfix_match: None, + is_definite: false, + function: Some( + CompletionRelevanceFn { + has_args: true, + has_self_arg: true, + return_type: Other, + }, + ), + }, }, ] "#]], @@ -2098,6 +2151,254 @@ fn main() { ); } + #[test] + fn constructor_order_simple() { + check_relevance( + r#" +struct Foo; +struct Other; +struct Option(T); + +impl Foo { + fn fn_ctr() -> Foo { unimplemented!() } + fn fn_another(n: u32) -> Other { unimplemented!() } + fn fn_ctr_self() -> Option { unimplemented!() } +} + +fn test() { + let a = Foo::$0; +} +"#, + expect![[r#" + fn fn_ctr() [type_could_unify] + fn fn_ctr_self() [type_could_unify] + fn fn_another(…) [type_could_unify] + "#]], + ); + } + 
+ #[test] + fn constructor_order_kind() { + check_function_relevance( + r#" +struct Foo; +struct Bar; +struct Option(T); +enum Result { Ok(T), Err(E) }; + +impl Foo { + fn fn_ctr(&self) -> Foo { unimplemented!() } + fn fn_ctr_with_args(&self, n: u32) -> Foo { unimplemented!() } + fn fn_another(&self, n: u32) -> Bar { unimplemented!() } + fn fn_ctr_wrapped(&self, ) -> Option { unimplemented!() } + fn fn_ctr_wrapped_2(&self, ) -> Result { unimplemented!() } + fn fn_ctr_wrapped_3(&self, ) -> Result { unimplemented!() } // Self is not the first type + fn fn_ctr_wrapped_with_args(&self, m: u32) -> Option { unimplemented!() } + fn fn_another_unit(&self) { unimplemented!() } +} + +fn test() { + let a = self::Foo::$0; +} +"#, + expect![[r#" + [ + ( + "fn(&self, u32) -> Bar", + Some( + CompletionRelevanceFn { + has_args: true, + has_self_arg: true, + return_type: Other, + }, + ), + ), + ( + "fn(&self)", + Some( + CompletionRelevanceFn { + has_args: true, + has_self_arg: true, + return_type: Other, + }, + ), + ), + ( + "fn(&self) -> Foo", + Some( + CompletionRelevanceFn { + has_args: true, + has_self_arg: true, + return_type: DirectConstructor, + }, + ), + ), + ( + "fn(&self, u32) -> Foo", + Some( + CompletionRelevanceFn { + has_args: true, + has_self_arg: true, + return_type: DirectConstructor, + }, + ), + ), + ( + "fn(&self) -> Option", + Some( + CompletionRelevanceFn { + has_args: true, + has_self_arg: true, + return_type: Constructor, + }, + ), + ), + ( + "fn(&self) -> Result", + Some( + CompletionRelevanceFn { + has_args: true, + has_self_arg: true, + return_type: Constructor, + }, + ), + ), + ( + "fn(&self) -> Result", + Some( + CompletionRelevanceFn { + has_args: true, + has_self_arg: true, + return_type: Constructor, + }, + ), + ), + ( + "fn(&self, u32) -> Option", + Some( + CompletionRelevanceFn { + has_args: true, + has_self_arg: true, + return_type: Constructor, + }, + ), + ), + ] + "#]], + ); + } + + #[test] + fn constructor_order_relevance() { + check_relevance( 
+ r#" +struct Foo; +struct FooBuilder; +struct Result(T); + +impl Foo { + fn fn_no_ret(&self) {} + fn fn_ctr_with_args(input: u32) -> Foo { unimplemented!() } + fn fn_direct_ctr() -> Self { unimplemented!() } + fn fn_ctr() -> Result { unimplemented!() } + fn fn_other() -> Result { unimplemented!() } + fn fn_builder() -> FooBuilder { unimplemented!() } +} + +fn test() { + let a = self::Foo::$0; +} +"#, + // preference: + // Direct Constructor + // Direct Constructor with args + // Builder + // Constructor + // Others + expect![[r#" + fn fn_direct_ctr() [type_could_unify] + fn fn_ctr_with_args(…) [type_could_unify] + fn fn_builder() [type_could_unify] + fn fn_ctr() [type_could_unify] + me fn_no_ret(…) [type_could_unify] + fn fn_other() [type_could_unify] + "#]], + ); + + // + } + + #[test] + fn function_relevance_generic_1() { + check_relevance( + r#" +struct Foo(T); +struct FooBuilder; +struct Option(T); +enum Result{Ok(T), Err(E)}; + +impl Foo { + fn fn_returns_unit(&self) {} + fn fn_ctr_with_args(input: T) -> Foo { unimplemented!() } + fn fn_direct_ctr() -> Self { unimplemented!() } + fn fn_ctr_wrapped() -> Option { unimplemented!() } + fn fn_ctr_wrapped_2() -> Result { unimplemented!() } + fn fn_other() -> Option { unimplemented!() } + fn fn_builder() -> FooBuilder { unimplemented!() } +} + +fn test() { + let a = self::Foo::::$0; +} + "#, + expect![[r#" + fn fn_direct_ctr() [type_could_unify] + fn fn_ctr_with_args(…) [type_could_unify] + fn fn_builder() [type_could_unify] + fn fn_ctr_wrapped() [type_could_unify] + fn fn_ctr_wrapped_2() [type_could_unify] + me fn_returns_unit(…) [type_could_unify] + fn fn_other() [type_could_unify] + "#]], + ); + } + + #[test] + fn function_relevance_generic_2() { + // Generic 2 + check_relevance( + r#" +struct Foo(T); +struct FooBuilder; +struct Option(T); +enum Result{Ok(T), Err(E)}; + +impl Foo { + fn fn_no_ret(&self) {} + fn fn_ctr_with_args(input: T) -> Foo { unimplemented!() } + fn fn_direct_ctr() -> Self { unimplemented!() 
} + fn fn_ctr() -> Option { unimplemented!() } + fn fn_ctr2() -> Result { unimplemented!() } + fn fn_other() -> Option { unimplemented!() } + fn fn_builder() -> FooBuilder { unimplemented!() } +} + +fn test() { + let a : Res> = Foo::$0; +} + "#, + expect![[r#" + fn fn_direct_ctr() [type_could_unify] + fn fn_ctr_with_args(…) [type_could_unify] + fn fn_builder() [type_could_unify] + fn fn_ctr() [type_could_unify] + fn fn_ctr2() [type_could_unify] + me fn_no_ret(…) [type_could_unify] + fn fn_other() [type_could_unify] + "#]], + ); + } + #[test] fn struct_field_method_ref() { check_kinds( @@ -2118,6 +2419,26 @@ fn foo(f: Foo) { let _: &u32 = f.b$0 } kind: Method, lookup: "baz", detail: "fn(&self) -> u32", + relevance: CompletionRelevance { + exact_name_match: false, + type_match: None, + is_local: false, + is_item_from_trait: false, + is_item_from_notable_trait: false, + is_name_already_imported: false, + requires_import: false, + is_op_method: false, + is_private_editable: false, + postfix_match: None, + is_definite: false, + function: Some( + CompletionRelevanceFn { + has_args: true, + has_self_arg: true, + return_type: Other, + }, + ), + }, ref_match: "&@107", }, CompletionItem { @@ -2192,6 +2513,7 @@ fn foo() { is_private_editable: false, postfix_match: None, is_definite: false, + function: None, }, }, ] @@ -2229,6 +2551,26 @@ fn main() { ), lookup: "foo", detail: "fn() -> S", + relevance: CompletionRelevance { + exact_name_match: false, + type_match: None, + is_local: false, + is_item_from_trait: false, + is_item_from_notable_trait: false, + is_name_already_imported: false, + requires_import: false, + is_op_method: false, + is_private_editable: false, + postfix_match: None, + is_definite: false, + function: Some( + CompletionRelevanceFn { + has_args: false, + has_self_arg: false, + return_type: Other, + }, + ), + }, ref_match: "&@92", }, ] @@ -2590,6 +2932,13 @@ fn main() { is_private_editable: false, postfix_match: None, is_definite: false, + function: Some( + 
CompletionRelevanceFn { + has_args: true, + has_self_arg: true, + return_type: Other, + }, + ), }, }, CompletionItem { @@ -2612,6 +2961,13 @@ fn main() { is_private_editable: false, postfix_match: None, is_definite: false, + function: Some( + CompletionRelevanceFn { + has_args: true, + has_self_arg: true, + return_type: Other, + }, + ), }, }, ] diff --git a/crates/ide-completion/src/render/function.rs b/crates/ide-completion/src/render/function.rs index 27186a2b7ffb..32075baef517 100644 --- a/crates/ide-completion/src/render/function.rs +++ b/crates/ide-completion/src/render/function.rs @@ -8,8 +8,13 @@ use syntax::{format_smolstr, AstNode, SmolStr}; use crate::{ context::{CompletionContext, DotAccess, DotAccessKind, PathCompletionCtx, PathKind}, - item::{Builder, CompletionItem, CompletionItemKind, CompletionRelevance}, - render::{compute_exact_name_match, compute_ref_match, compute_type_match, RenderContext}, + item::{ + Builder, CompletionItem, CompletionItemKind, CompletionRelevance, CompletionRelevanceFn, + CompletionRelevanceReturnType, + }, + render::{ + compute_exact_name_match, compute_ref_match, compute_type_match, match_types, RenderContext, + }, CallableSnippets, }; @@ -99,13 +104,20 @@ fn render( .filter(|_| !has_call_parens) .and_then(|cap| Some((cap, params(ctx.completion, func, &func_kind, has_dot_receiver)?))); + let type_match = if has_call_parens || complete_call_parens.is_some() { + compute_type_match(completion, &ret_type) + } else { + compute_type_match(completion, &func.ty(db)) + }; + + let function = assoc_item + .and_then(|assoc_item| assoc_item.implementing_ty(db)) + .and_then(|self_type| compute_function_match(db, &ctx, self_type, func, &ret_type)); + item.set_relevance(CompletionRelevance { - type_match: if has_call_parens || complete_call_parens.is_some() { - compute_type_match(completion, &ret_type) - } else { - compute_type_match(completion, &func.ty(db)) - }, + type_match, exact_name_match: compute_exact_name_match(completion, 
&call), + function, is_op_method, is_item_from_notable_trait, ..ctx.completion_relevance() @@ -156,6 +168,39 @@ fn render( item } +fn compute_function_match( + db: &dyn HirDatabase, + ctx: &RenderContext<'_>, + self_type: hir::Type, + func: hir::Function, + func_return_type: &hir::Type, +) -> Option { + let has_args = func.num_params(db) > 0; + let has_self_arg = func.self_param(db).is_some(); + + let return_type = if match_types(ctx.completion, &self_type, &func_return_type).is_some() { + // fn([..]) -> Self + CompletionRelevanceReturnType::DirectConstructor + } else if func_return_type + .type_arguments() + .any(|ret_type_arg| match_types(ctx.completion, &self_type, &ret_type_arg).is_some()) + { + // fn([..]) -> Result OR Wrapped + CompletionRelevanceReturnType::Constructor + } else if func_return_type + .as_adt() + .and_then(|adt| adt.name(db).as_str().map(|name| name.ends_with("Builder"))) + .unwrap_or(false) + { + // fn([..]) -> [..]Builder + CompletionRelevanceReturnType::Builder + } else { + CompletionRelevanceReturnType::Other + }; + + Some(CompletionRelevanceFn { return_type, has_args, has_self_arg }) +} + pub(super) fn add_call_parens<'b>( builder: &'b mut Builder, ctx: &CompletionContext<'_>, From 0af14ef8c32f2fd633023d8ff6ba47b820cde7fe Mon Sep 17 00:00:00 2001 From: Mohammad Mustakim Ali Date: Mon, 12 Feb 2024 23:11:06 +0000 Subject: [PATCH 040/127] chore: optimise --- crates/ide-completion/src/item.rs | 8 +-- crates/ide-completion/src/render.rs | 64 ++++++++------------ crates/ide-completion/src/render/function.rs | 43 ++++++------- 3 files changed, 50 insertions(+), 65 deletions(-) diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs index 6b5392b704b4..c2c0641961a6 100644 --- a/crates/ide-completion/src/item.rs +++ b/crates/ide-completion/src/item.rs @@ -211,8 +211,8 @@ pub enum CompletionRelevancePostfixMatch { #[derive(Debug, Clone, Copy, Eq, PartialEq)] pub struct CompletionRelevanceFn { - pub has_args: bool, - pub 
has_self_arg: bool, + pub has_params: bool, + pub has_self_param: bool, pub return_type: CompletionRelevanceReturnType, } @@ -310,10 +310,10 @@ impl CompletionRelevance { // Bump Constructor or Builder methods with no arguments, // over them tha with self arguments if fn_score > 0 { - if !asf.has_args { + if !asf.has_params { // bump associated functions fn_score += 1; - } else if asf.has_self_arg { + } else if asf.has_self_param { // downgrade methods (below Constructor) fn_score = 1; } diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index 629f4a51c1bb..3f374b307fbe 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -1420,8 +1420,8 @@ impl S { is_definite: false, function: Some( CompletionRelevanceFn { - has_args: true, - has_self_arg: true, + has_params: true, + has_self_param: true, return_type: Other, }, ), @@ -1545,8 +1545,8 @@ fn foo(s: S) { s.$0 } is_definite: false, function: Some( CompletionRelevanceFn { - has_args: true, - has_self_arg: true, + has_params: true, + has_self_param: true, return_type: Other, }, ), @@ -2207,8 +2207,8 @@ fn test() { "fn(&self, u32) -> Bar", Some( CompletionRelevanceFn { - has_args: true, - has_self_arg: true, + has_params: true, + has_self_param: true, return_type: Other, }, ), @@ -2217,8 +2217,8 @@ fn test() { "fn(&self)", Some( CompletionRelevanceFn { - has_args: true, - has_self_arg: true, + has_params: true, + has_self_param: true, return_type: Other, }, ), @@ -2227,8 +2227,8 @@ fn test() { "fn(&self) -> Foo", Some( CompletionRelevanceFn { - has_args: true, - has_self_arg: true, + has_params: true, + has_self_param: true, return_type: DirectConstructor, }, ), @@ -2237,8 +2237,8 @@ fn test() { "fn(&self, u32) -> Foo", Some( CompletionRelevanceFn { - has_args: true, - has_self_arg: true, + has_params: true, + has_self_param: true, return_type: DirectConstructor, }, ), @@ -2247,8 +2247,8 @@ fn test() { "fn(&self) -> Option", Some( 
CompletionRelevanceFn { - has_args: true, - has_self_arg: true, + has_params: true, + has_self_param: true, return_type: Constructor, }, ), @@ -2257,8 +2257,8 @@ fn test() { "fn(&self) -> Result", Some( CompletionRelevanceFn { - has_args: true, - has_self_arg: true, + has_params: true, + has_self_param: true, return_type: Constructor, }, ), @@ -2267,8 +2267,8 @@ fn test() { "fn(&self) -> Result", Some( CompletionRelevanceFn { - has_args: true, - has_self_arg: true, + has_params: true, + has_self_param: true, return_type: Constructor, }, ), @@ -2277,8 +2277,8 @@ fn test() { "fn(&self, u32) -> Option", Some( CompletionRelevanceFn { - has_args: true, - has_self_arg: true, + has_params: true, + has_self_param: true, return_type: Constructor, }, ), @@ -2433,8 +2433,8 @@ fn foo(f: Foo) { let _: &u32 = f.b$0 } is_definite: false, function: Some( CompletionRelevanceFn { - has_args: true, - has_self_arg: true, + has_params: true, + has_self_param: true, return_type: Other, }, ), @@ -2565,8 +2565,8 @@ fn main() { is_definite: false, function: Some( CompletionRelevanceFn { - has_args: false, - has_self_arg: false, + has_params: false, + has_self_param: false, return_type: Other, }, ), @@ -2932,13 +2932,7 @@ fn main() { is_private_editable: false, postfix_match: None, is_definite: false, - function: Some( - CompletionRelevanceFn { - has_args: true, - has_self_arg: true, - return_type: Other, - }, - ), + function: None, }, }, CompletionItem { @@ -2961,13 +2955,7 @@ fn main() { is_private_editable: false, postfix_match: None, is_definite: false, - function: Some( - CompletionRelevanceFn { - has_args: true, - has_self_arg: true, - return_type: Other, - }, - ), + function: None, }, }, ] diff --git a/crates/ide-completion/src/render/function.rs b/crates/ide-completion/src/render/function.rs index 32075baef517..dc92a127de1b 100644 --- a/crates/ide-completion/src/render/function.rs +++ b/crates/ide-completion/src/render/function.rs @@ -66,9 +66,9 @@ fn render( ), _ => 
(name.unescaped().to_smol_str(), name.to_smol_str()), }; - + let has_self_param = func.self_param(db).is_some(); let mut item = CompletionItem::new( - if func.self_param(db).is_some() { + if has_self_param { CompletionItemKind::Method } else { CompletionItemKind::SymbolKind(SymbolKind::Function) @@ -104,18 +104,21 @@ fn render( .filter(|_| !has_call_parens) .and_then(|cap| Some((cap, params(ctx.completion, func, &func_kind, has_dot_receiver)?))); - let type_match = if has_call_parens || complete_call_parens.is_some() { - compute_type_match(completion, &ret_type) - } else { - compute_type_match(completion, &func.ty(db)) - }; - let function = assoc_item .and_then(|assoc_item| assoc_item.implementing_ty(db)) - .and_then(|self_type| compute_function_match(db, &ctx, self_type, func, &ret_type)); + .map(|self_type| compute_return_type_match(db, &ctx, self_type, &ret_type)) + .map(|return_type| CompletionRelevanceFn { + has_params: has_self_param || func.num_params(db) > 0, + has_self_param, + return_type, + }); item.set_relevance(CompletionRelevance { - type_match, + type_match: if has_call_parens || complete_call_parens.is_some() { + compute_type_match(completion, &ret_type) + } else { + compute_type_match(completion, &func.ty(db)) + }, exact_name_match: compute_exact_name_match(completion, &call), function, is_op_method, @@ -168,26 +171,22 @@ fn render( item } -fn compute_function_match( +fn compute_return_type_match( db: &dyn HirDatabase, ctx: &RenderContext<'_>, self_type: hir::Type, - func: hir::Function, - func_return_type: &hir::Type, -) -> Option { - let has_args = func.num_params(db) > 0; - let has_self_arg = func.self_param(db).is_some(); - - let return_type = if match_types(ctx.completion, &self_type, &func_return_type).is_some() { + ret_type: &hir::Type, +) -> CompletionRelevanceReturnType { + if match_types(ctx.completion, &self_type, &ret_type).is_some() { // fn([..]) -> Self CompletionRelevanceReturnType::DirectConstructor - } else if func_return_type + } 
else if ret_type .type_arguments() .any(|ret_type_arg| match_types(ctx.completion, &self_type, &ret_type_arg).is_some()) { // fn([..]) -> Result OR Wrapped CompletionRelevanceReturnType::Constructor - } else if func_return_type + } else if ret_type .as_adt() .and_then(|adt| adt.name(db).as_str().map(|name| name.ends_with("Builder"))) .unwrap_or(false) @@ -196,9 +195,7 @@ fn compute_function_match( CompletionRelevanceReturnType::Builder } else { CompletionRelevanceReturnType::Other - }; - - Some(CompletionRelevanceFn { return_type, has_args, has_self_arg }) + } } pub(super) fn add_call_parens<'b>( From 45e05abf7b648409e18eb898a6612f731aa0b1d8 Mon Sep 17 00:00:00 2001 From: Davis Vaughan Date: Mon, 12 Feb 2024 18:14:10 -0500 Subject: [PATCH 041/127] Activate on top level `Cargo.toml` and `rust-project.json` files --- editors/code/package.json | 2 ++ 1 file changed, 2 insertions(+) diff --git a/editors/code/package.json b/editors/code/package.json index fea11eb56c1a..ff2c4b31cb04 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -68,7 +68,9 @@ "typescript": "^5.1.6" }, "activationEvents": [ + "workspaceContains:Cargo.toml", "workspaceContains:*/Cargo.toml", + "workspaceContains:rust-project.json", "workspaceContains:*/rust-project.json" ], "main": "./out/main", From ca6435994536f6bb01a36bb830e1e67bb70f64fc Mon Sep 17 00:00:00 2001 From: Chengxu Bian Date: Mon, 12 Feb 2024 23:54:32 -0500 Subject: [PATCH 042/127] remove eprintln! overwrite --- crates/ide-assists/src/lib.rs | 5 ----- crates/ide/src/lib.rs | 5 ----- crates/syntax/src/lib.rs | 5 ----- 3 files changed, 15 deletions(-) diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs index 287062005df3..dcc89014b956 100644 --- a/crates/ide-assists/src/lib.rs +++ b/crates/ide-assists/src/lib.rs @@ -60,11 +60,6 @@ #![warn(rust_2018_idioms, unused_lifetimes)] -#[allow(unused)] -macro_rules! 
eprintln { - ($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; -} - mod assist_config; mod assist_context; #[cfg(test)] diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index effdbf2c1f04..609cfac52755 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -12,11 +12,6 @@ #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #![recursion_limit = "128"] -#[allow(unused)] -macro_rules! eprintln { - ($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; -} - #[cfg(test)] mod fixture; diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs index 960889b74211..b755de86d32c 100644 --- a/crates/syntax/src/lib.rs +++ b/crates/syntax/src/lib.rs @@ -27,11 +27,6 @@ extern crate ra_ap_rustc_lexer as rustc_lexer; #[cfg(feature = "in-rust-tree")] extern crate rustc_lexer; -#[allow(unused)] -macro_rules! eprintln { - ($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; -} - mod parsing; mod ptr; mod syntax_error; From 68365513f36a73d9c7ea906f2f08f561a99e63c5 Mon Sep 17 00:00:00 2001 From: Victor Song Date: Tue, 30 Jan 2024 00:10:23 -0600 Subject: [PATCH 043/127] Implement `literal_from_str` for proc macro srv --- .../src/server/rust_analyzer_span.rs | 67 ++++++++++++++++++- crates/proc-macro-srv/src/server/token_id.rs | 67 ++++++++++++++++++- 2 files changed, 128 insertions(+), 6 deletions(-) diff --git a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs index c7c7bea99410..f7bbbcc09d43 100644 --- a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs +++ b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs @@ -13,6 +13,7 @@ use std::{ use ::tt::{TextRange, TextSize}; use proc_macro::bridge::{self, server}; use span::{Span, FIXUP_ERASED_FILE_AST_ID_MARKER}; +use syntax::ast::{self, HasModuleItem, IsString}; use crate::server::{ delim_to_external, delim_to_internal, token_stream::TokenStreamBuilder, LiteralFormatter, @@ -70,11 +71,71 @@ impl server::FreeFunctions for RaSpanServer { &mut self, s: 
&str, ) -> Result, ()> { - // FIXME: keep track of LitKind and Suffix + let input = s.trim(); + let source_code = format!("fn f() {{ let _ = {input}; }}"); + + let parse = ast::SourceFile::parse(&source_code); + let file = parse.tree(); + + let Some(ast::Item::Fn(func)) = file.items().next() else { return Err(()) }; + let Some(ast::Stmt::LetStmt(stmt)) = + func.body().ok_or(Err(()))?.stmt_list().ok_or(Err(()))?.statements().next() + else { + return Err(()); + }; + let Some(ast::Expr::Literal(lit)) = stmt.initializer() else { return Err(()) }; + + fn raw_delimiter_count(s: S) -> Option { + let text = s.text(); + let quote_range = s.text_range_between_quotes()?; + let range_start = s.syntax().text_range().start(); + text[TextRange::up_to((quote_range - range_start).start())] + .matches('#') + .count() + .try_into() + .ok() + } + + let mut suffix = None; + let kind = match lit.kind() { + ast::LiteralKind::String(data) => { + if data.is_raw() { + bridge::LitKind::StrRaw(raw_delimiter_count(data).ok_or(Err(()))?) + } else { + bridge::LitKind::Str + } + } + ast::LiteralKind::ByteString(data) => { + if data.is_raw() { + bridge::LitKind::ByteStrRaw(raw_delimiter_count(data).ok_or(Err(()))?) + } else { + bridge::LitKind::ByteStr + } + } + ast::LiteralKind::CString(data) => { + if data.is_raw() { + bridge::LitKind::CStrRaw(raw_delimiter_count(data).ok_or(Err(()))?) 
+ } else { + bridge::LitKind::CStr + } + } + ast::LiteralKind::IntNumber(num) => { + suffix = num.suffix(); + bridge::LitKind::Integer + } + ast::LiteralKind::FloatNumber(num) => { + suffix = num.suffix(); + bridge::LitKind::Float + } + ast::LiteralKind::Char(_) => bridge::LitKind::Char, + ast::LiteralKind::Byte(_) => bridge::LitKind::Byte, + ast::LiteralKind::Bool(_) => unreachable!(), + }; + Ok(bridge::Literal { - kind: bridge::LitKind::Err, + kind, symbol: Symbol::intern(self.interner, s), - suffix: None, + suffix, span: self.call_site, }) } diff --git a/crates/proc-macro-srv/src/server/token_id.rs b/crates/proc-macro-srv/src/server/token_id.rs index edbdc67b482f..5c74c15b360e 100644 --- a/crates/proc-macro-srv/src/server/token_id.rs +++ b/crates/proc-macro-srv/src/server/token_id.rs @@ -6,6 +6,7 @@ use std::{ }; use proc_macro::bridge::{self, server}; +use syntax::ast::{self, HasModuleItem, IsString}; use crate::server::{ delim_to_external, delim_to_internal, token_stream::TokenStreamBuilder, LiteralFormatter, @@ -62,11 +63,71 @@ impl server::FreeFunctions for TokenIdServer { &mut self, s: &str, ) -> Result, ()> { - // FIXME: keep track of LitKind and Suffix + let input = s.trim(); + let source_code = format!("fn f() {{ let _ = {input}; }}"); + + let parse = ast::SourceFile::parse(&source_code); + let file = parse.tree(); + + let Some(ast::Item::Fn(func)) = file.items().next() else { return Err(()) }; + let Some(ast::Stmt::LetStmt(stmt)) = + func.body().ok_or(Err(()))?.stmt_list().ok_or(Err(()))?.statements().next() + else { + return Err(()); + }; + let Some(ast::Expr::Literal(lit)) = stmt.initializer() else { return Err(()) }; + + fn raw_delimiter_count(s: S) -> Option { + let text = s.text(); + let quote_range = s.text_range_between_quotes()?; + let range_start = s.syntax().text_range().start(); + text[TextRange::up_to((quote_range - range_start).start())] + .matches('#') + .count() + .try_into() + .ok() + } + + let mut suffix = None; + let kind = match 
lit.kind() { + ast::LiteralKind::String(data) => { + if data.is_raw() { + bridge::LitKind::StrRaw(raw_delimiter_count(data).ok_or(Err(()))?) + } else { + bridge::LitKind::Str + } + } + ast::LiteralKind::ByteString(data) => { + if data.is_raw() { + bridge::LitKind::ByteStrRaw(raw_delimiter_count(data).ok_or(Err(()))?) + } else { + bridge::LitKind::ByteStr + } + } + ast::LiteralKind::CString(data) => { + if data.is_raw() { + bridge::LitKind::CStrRaw(raw_delimiter_count(data).ok_or(Err(()))?) + } else { + bridge::LitKind::CStr + } + } + ast::LiteralKind::IntNumber(num) => { + suffix = num.suffix(); + bridge::LitKind::Integer + } + ast::LiteralKind::FloatNumber(num) => { + suffix = num.suffix(); + bridge::LitKind::Float + } + ast::LiteralKind::Char(_) => bridge::LitKind::Char, + ast::LiteralKind::Byte(_) => bridge::LitKind::Byte, + ast::LiteralKind::Bool(_) => unreachable!(), + }; + Ok(bridge::Literal { - kind: bridge::LitKind::Err, + kind, symbol: Symbol::intern(self.interner, s), - suffix: None, + suffix, span: self.call_site, }) } From adf7adf3e834908b16e04380e6df03a077b29ec4 Mon Sep 17 00:00:00 2001 From: Victor Song Date: Tue, 30 Jan 2024 00:19:53 -0600 Subject: [PATCH 044/127] Add `syntax` crate as `proc-macro-srv` dep --- Cargo.lock | 1 + crates/proc-macro-srv/Cargo.toml | 1 + 2 files changed, 2 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index 49f39537c018..0fdb366c1f9d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1331,6 +1331,7 @@ dependencies = [ "proc-macro-test", "span", "stdx", + "syntax", "tt", ] diff --git a/crates/proc-macro-srv/Cargo.toml b/crates/proc-macro-srv/Cargo.toml index ba17ea6f7b43..d0cdc51c3c22 100644 --- a/crates/proc-macro-srv/Cargo.toml +++ b/crates/proc-macro-srv/Cargo.toml @@ -29,6 +29,7 @@ paths.workspace = true base-db.workspace = true span.workspace = true proc-macro-api.workspace = true +syntax.workspace = true [dev-dependencies] expect-test = "1.4.0" From e8c9ca2a2fdd20f9ea314671dd2cb329c82b910e Mon Sep 17 00:00:00 2001 
From: Victor Song Date: Tue, 30 Jan 2024 01:09:00 -0600 Subject: [PATCH 045/127] Refactor shared `literal_from_str` code --- crates/proc-macro-srv/src/server.rs | 55 +++++++++++++++ .../src/server/rust_analyzer_span.rs | 70 +++---------------- crates/proc-macro-srv/src/server/token_id.rs | 69 +++--------------- 3 files changed, 74 insertions(+), 120 deletions(-) diff --git a/crates/proc-macro-srv/src/server.rs b/crates/proc-macro-srv/src/server.rs index ff8fd295d884..9339d226a2cc 100644 --- a/crates/proc-macro-srv/src/server.rs +++ b/crates/proc-macro-srv/src/server.rs @@ -17,6 +17,7 @@ pub mod rust_analyzer_span; mod symbol; pub mod token_id; pub use symbol::*; +use syntax::ast::{self, HasModuleItem, IsString}; use tt::Spacing; fn delim_to_internal(d: proc_macro::Delimiter, span: bridge::DelimSpan) -> tt::Delimiter { @@ -54,6 +55,60 @@ fn spacing_to_external(spacing: Spacing) -> proc_macro::Spacing { } } +fn literal_to_external(literal: ast::LiteralKind) -> Option { + Some(match lit.kind() { + ast::LiteralKind::String(data) => { + if data.is_raw() { + bridge::LitKind::StrRaw(raw_delimiter_count(data)?) + } else { + bridge::LitKind::Str + } + } + ast::LiteralKind::ByteString(data) => { + if data.is_raw() { + bridge::LitKind::ByteStrRaw(raw_delimiter_count(data)?) + } else { + bridge::LitKind::ByteStr + } + } + ast::LiteralKind::CString(data) => { + if data.is_raw() { + bridge::LitKind::CStrRaw(raw_delimiter_count(data)?) 
+ } else { + bridge::LitKind::CStr + } + } + ast::LiteralKind::IntNumber(num) => bridge::LitKind::Integer, + ast::LiteralKind::FloatNumber(num) => bridge::LitKind::Float, + ast::LiteralKind::Char(_) => bridge::LitKind::Char, + ast::LiteralKind::Byte(_) => bridge::LitKind::Byte, + ast::LiteralKind::Bool(_) => unreachable!(), + }) +} + +fn raw_delimiter_count(s: S) -> Option { + let text = s.text(); + let quote_range = s.text_range_between_quotes()?; + let range_start = s.syntax().text_range().start(); + text[TextRange::up_to((quote_range - range_start).start())].matches('#').count().try_into().ok() +} + +fn str_to_lit_node(input: &str) -> Option { + let input = input.trim(); + let source_code = format!("fn f() {{ let _ = {input}; }}"); + + let parse = ast::SourceFile::parse(&source_code); + let file = parse.tree(); + + let ast::Item::Fn(func) = file.items().next()? else { return None }; + let ast::Stmt::LetStmt(stmt) = func.body()?.stmt_list()?.statements().next()? else { + return None; + }; + let ast::Expr::Literal(lit) = stmt.initializer()? else { return None }; + + Some(lit) +} + struct LiteralFormatter(bridge::Literal); impl LiteralFormatter { diff --git a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs index f7bbbcc09d43..4a2ad40ad995 100644 --- a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs +++ b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs @@ -4,6 +4,7 @@ //! It is an unfortunate result of how the proc-macro API works that we need to look into the //! concrete representation of the spans, and as such, RustRover cannot make use of this unless they //! change their representation to be compatible with rust-analyzer's. 
+use core::num; use std::{ collections::{HashMap, HashSet}, iter, @@ -13,11 +14,11 @@ use std::{ use ::tt::{TextRange, TextSize}; use proc_macro::bridge::{self, server}; use span::{Span, FIXUP_ERASED_FILE_AST_ID_MARKER}; -use syntax::ast::{self, HasModuleItem, IsString}; +use syntax::ast; use crate::server::{ - delim_to_external, delim_to_internal, token_stream::TokenStreamBuilder, LiteralFormatter, - Symbol, SymbolInternerRef, SYMBOL_INTERNER, + delim_to_external, delim_to_internal, literal_to_external, str_to_lit_node, + token_stream::TokenStreamBuilder, LiteralFormatter, Symbol, SymbolInternerRef, SYMBOL_INTERNER, }; mod tt { pub use ::tt::*; @@ -71,66 +72,15 @@ impl server::FreeFunctions for RaSpanServer { &mut self, s: &str, ) -> Result, ()> { - let input = s.trim(); - let source_code = format!("fn f() {{ let _ = {input}; }}"); + let literal = str_to_lit_node(s).ok_or(Err(()))?; - let parse = ast::SourceFile::parse(&source_code); - let file = parse.tree(); + let kind = literal_to_external(literal.kind()).ok_or(Err(()))?; - let Some(ast::Item::Fn(func)) = file.items().next() else { return Err(()) }; - let Some(ast::Stmt::LetStmt(stmt)) = - func.body().ok_or(Err(()))?.stmt_list().ok_or(Err(()))?.statements().next() - else { - return Err(()); - }; - let Some(ast::Expr::Literal(lit)) = stmt.initializer() else { return Err(()) }; - - fn raw_delimiter_count(s: S) -> Option { - let text = s.text(); - let quote_range = s.text_range_between_quotes()?; - let range_start = s.syntax().text_range().start(); - text[TextRange::up_to((quote_range - range_start).start())] - .matches('#') - .count() - .try_into() - .ok() + let suffix = match literal.kind() { + ast::LiteralKind::FloatNumber(num) | ast::LiteralKind::IntNumber(num) => num.suffix(), + _ => None, } - - let mut suffix = None; - let kind = match lit.kind() { - ast::LiteralKind::String(data) => { - if data.is_raw() { - bridge::LitKind::StrRaw(raw_delimiter_count(data).ok_or(Err(()))?) 
- } else { - bridge::LitKind::Str - } - } - ast::LiteralKind::ByteString(data) => { - if data.is_raw() { - bridge::LitKind::ByteStrRaw(raw_delimiter_count(data).ok_or(Err(()))?) - } else { - bridge::LitKind::ByteStr - } - } - ast::LiteralKind::CString(data) => { - if data.is_raw() { - bridge::LitKind::CStrRaw(raw_delimiter_count(data).ok_or(Err(()))?) - } else { - bridge::LitKind::CStr - } - } - ast::LiteralKind::IntNumber(num) => { - suffix = num.suffix(); - bridge::LitKind::Integer - } - ast::LiteralKind::FloatNumber(num) => { - suffix = num.suffix(); - bridge::LitKind::Float - } - ast::LiteralKind::Char(_) => bridge::LitKind::Char, - ast::LiteralKind::Byte(_) => bridge::LitKind::Byte, - ast::LiteralKind::Bool(_) => unreachable!(), - }; + .map(|suffix| Symbol::intern(self.interner, suffix)); Ok(bridge::Literal { kind, diff --git a/crates/proc-macro-srv/src/server/token_id.rs b/crates/proc-macro-srv/src/server/token_id.rs index 5c74c15b360e..4dbda7e53c26 100644 --- a/crates/proc-macro-srv/src/server/token_id.rs +++ b/crates/proc-macro-srv/src/server/token_id.rs @@ -6,11 +6,11 @@ use std::{ }; use proc_macro::bridge::{self, server}; -use syntax::ast::{self, HasModuleItem, IsString}; +use syntax::ast; use crate::server::{ - delim_to_external, delim_to_internal, token_stream::TokenStreamBuilder, LiteralFormatter, - Symbol, SymbolInternerRef, SYMBOL_INTERNER, + delim_to_external, delim_to_internal, literal_to_external, str_to_lit_node, + token_stream::TokenStreamBuilder, LiteralFormatter, Symbol, SymbolInternerRef, SYMBOL_INTERNER, }; mod tt { pub use proc_macro_api::msg::TokenId; @@ -63,66 +63,15 @@ impl server::FreeFunctions for TokenIdServer { &mut self, s: &str, ) -> Result, ()> { - let input = s.trim(); - let source_code = format!("fn f() {{ let _ = {input}; }}"); + let literal = str_to_lit_node(s).ok_or(Err(()))?; - let parse = ast::SourceFile::parse(&source_code); - let file = parse.tree(); + let kind = literal_to_external(literal.kind()).ok_or(Err(()))?; - let 
Some(ast::Item::Fn(func)) = file.items().next() else { return Err(()) }; - let Some(ast::Stmt::LetStmt(stmt)) = - func.body().ok_or(Err(()))?.stmt_list().ok_or(Err(()))?.statements().next() - else { - return Err(()); - }; - let Some(ast::Expr::Literal(lit)) = stmt.initializer() else { return Err(()) }; - - fn raw_delimiter_count(s: S) -> Option { - let text = s.text(); - let quote_range = s.text_range_between_quotes()?; - let range_start = s.syntax().text_range().start(); - text[TextRange::up_to((quote_range - range_start).start())] - .matches('#') - .count() - .try_into() - .ok() + let suffix = match literal.kind() { + ast::LiteralKind::FloatNumber(num) | ast::LiteralKind::IntNumber(num) => num.suffix(), + _ => None, } - - let mut suffix = None; - let kind = match lit.kind() { - ast::LiteralKind::String(data) => { - if data.is_raw() { - bridge::LitKind::StrRaw(raw_delimiter_count(data).ok_or(Err(()))?) - } else { - bridge::LitKind::Str - } - } - ast::LiteralKind::ByteString(data) => { - if data.is_raw() { - bridge::LitKind::ByteStrRaw(raw_delimiter_count(data).ok_or(Err(()))?) - } else { - bridge::LitKind::ByteStr - } - } - ast::LiteralKind::CString(data) => { - if data.is_raw() { - bridge::LitKind::CStrRaw(raw_delimiter_count(data).ok_or(Err(()))?) 
- } else { - bridge::LitKind::CStr - } - } - ast::LiteralKind::IntNumber(num) => { - suffix = num.suffix(); - bridge::LitKind::Integer - } - ast::LiteralKind::FloatNumber(num) => { - suffix = num.suffix(); - bridge::LitKind::Float - } - ast::LiteralKind::Char(_) => bridge::LitKind::Char, - ast::LiteralKind::Byte(_) => bridge::LitKind::Byte, - ast::LiteralKind::Bool(_) => unreachable!(), - }; + .map(|suffix| Symbol::intern(self.interner, suffix)); Ok(bridge::Literal { kind, From 6cd458f3d08030cddcd488ef28c2a8b581bd7928 Mon Sep 17 00:00:00 2001 From: Victor Song Date: Tue, 30 Jan 2024 01:13:17 -0600 Subject: [PATCH 046/127] Move `raw_delimiter_count` to `syntax` crate --- crates/proc-macro-srv/src/server.rs | 13 +++---------- crates/syntax/src/ast/token_ext.rs | 10 ++++++++++ 2 files changed, 13 insertions(+), 10 deletions(-) diff --git a/crates/proc-macro-srv/src/server.rs b/crates/proc-macro-srv/src/server.rs index 9339d226a2cc..9961c5910a3e 100644 --- a/crates/proc-macro-srv/src/server.rs +++ b/crates/proc-macro-srv/src/server.rs @@ -59,21 +59,21 @@ fn literal_to_external(literal: ast::LiteralKind) -> Option { if data.is_raw() { - bridge::LitKind::StrRaw(raw_delimiter_count(data)?) + bridge::LitKind::StrRaw(data.raw_delimiter_count()?) } else { bridge::LitKind::Str } } ast::LiteralKind::ByteString(data) => { if data.is_raw() { - bridge::LitKind::ByteStrRaw(raw_delimiter_count(data)?) + bridge::LitKind::ByteStrRaw(data.raw_delimiter_count()?) } else { bridge::LitKind::ByteStr } } ast::LiteralKind::CString(data) => { if data.is_raw() { - bridge::LitKind::CStrRaw(raw_delimiter_count(data)?) + bridge::LitKind::CStrRaw(data.raw_delimiter_count()?) 
} else { bridge::LitKind::CStr } @@ -86,13 +86,6 @@ fn literal_to_external(literal: ast::LiteralKind) -> Option(s: S) -> Option { - let text = s.text(); - let quote_range = s.text_range_between_quotes()?; - let range_start = s.syntax().text_range().start(); - text[TextRange::up_to((quote_range - range_start).start())].matches('#').count().try_into().ok() -} - fn str_to_lit_node(input: &str) -> Option { let input = input.trim(); let source_code = format!("fn f() {{ let _ = {input}; }}"); diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs index 7cd1f1550b98..c93391a9792c 100644 --- a/crates/syntax/src/ast/token_ext.rs +++ b/crates/syntax/src/ast/token_ext.rs @@ -204,6 +204,16 @@ pub trait IsString: AstToken { assert!(TextRange::up_to(contents_range.len()).contains_range(range)); Some(range + contents_range.start()) } + fn raw_delimiter_count(&self) -> Option { + let text = self.text(); + let quote_range = self.text_range_between_quotes()?; + let range_start = self.syntax().text_range().start(); + text[TextRange::up_to((quote_range - range_start).start())] + .matches('#') + .count() + .try_into() + .ok() + } } impl IsString for ast::String { From 027f263ef5a61324fea120234fdc04c57dafb995 Mon Sep 17 00:00:00 2001 From: Victor Song Date: Tue, 30 Jan 2024 01:20:27 -0600 Subject: [PATCH 047/127] Note `FIXME` for suffixes --- crates/proc-macro-srv/src/server/rust_analyzer_span.rs | 1 + crates/proc-macro-srv/src/server/token_id.rs | 1 + 2 files changed, 2 insertions(+) diff --git a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs index 4a2ad40ad995..f52a4875b03a 100644 --- a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs +++ b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs @@ -76,6 +76,7 @@ impl server::FreeFunctions for RaSpanServer { let kind = literal_to_external(literal.kind()).ok_or(Err(()))?; + // FIXME: handle more than just int and float suffixes let 
suffix = match literal.kind() { ast::LiteralKind::FloatNumber(num) | ast::LiteralKind::IntNumber(num) => num.suffix(), _ => None, diff --git a/crates/proc-macro-srv/src/server/token_id.rs b/crates/proc-macro-srv/src/server/token_id.rs index 4dbda7e53c26..d5f735925574 100644 --- a/crates/proc-macro-srv/src/server/token_id.rs +++ b/crates/proc-macro-srv/src/server/token_id.rs @@ -67,6 +67,7 @@ impl server::FreeFunctions for TokenIdServer { let kind = literal_to_external(literal.kind()).ok_or(Err(()))?; + // FIXME: handle more than just int and float suffixes let suffix = match literal.kind() { ast::LiteralKind::FloatNumber(num) | ast::LiteralKind::IntNumber(num) => num.suffix(), _ => None, From 965b14d17a80c2e454f3616af47082056a18d0da Mon Sep 17 00:00:00 2001 From: Victor Song Date: Tue, 30 Jan 2024 01:40:56 -0600 Subject: [PATCH 048/127] Fix compilation errors --- crates/proc-macro-srv/src/server.rs | 4 ++-- crates/proc-macro-srv/src/server/rust_analyzer_span.rs | 8 ++++---- crates/proc-macro-srv/src/server/token_id.rs | 7 ++++--- 3 files changed, 10 insertions(+), 9 deletions(-) diff --git a/crates/proc-macro-srv/src/server.rs b/crates/proc-macro-srv/src/server.rs index 9961c5910a3e..af50e1b70db0 100644 --- a/crates/proc-macro-srv/src/server.rs +++ b/crates/proc-macro-srv/src/server.rs @@ -55,8 +55,8 @@ fn spacing_to_external(spacing: Spacing) -> proc_macro::Spacing { } } -fn literal_to_external(literal: ast::LiteralKind) -> Option { - Some(match lit.kind() { +fn literal_to_external(literal_kind: ast::LiteralKind) -> Option { + Some(match literal_kind { ast::LiteralKind::String(data) => { if data.is_raw() { bridge::LitKind::StrRaw(data.raw_delimiter_count()?) diff --git a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs index f52a4875b03a..dce4c60b2f92 100644 --- a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs +++ b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs @@ -4,7 +4,6 @@ //! 
It is an unfortunate result of how the proc-macro API works that we need to look into the //! concrete representation of the spans, and as such, RustRover cannot make use of this unless they //! change their representation to be compatible with rust-analyzer's. -use core::num; use std::{ collections::{HashMap, HashSet}, iter, @@ -72,13 +71,14 @@ impl server::FreeFunctions for RaSpanServer { &mut self, s: &str, ) -> Result, ()> { - let literal = str_to_lit_node(s).ok_or(Err(()))?; + let literal = str_to_lit_node(s).ok_or(())?; - let kind = literal_to_external(literal.kind()).ok_or(Err(()))?; + let kind = literal_to_external(literal.kind()).ok_or(())?; // FIXME: handle more than just int and float suffixes let suffix = match literal.kind() { - ast::LiteralKind::FloatNumber(num) | ast::LiteralKind::IntNumber(num) => num.suffix(), + ast::LiteralKind::FloatNumber(num) => num.suffix(), + ast::LiteralKind::IntNumber(num) => num.suffix(), _ => None, } .map(|suffix| Symbol::intern(self.interner, suffix)); diff --git a/crates/proc-macro-srv/src/server/token_id.rs b/crates/proc-macro-srv/src/server/token_id.rs index d5f735925574..b2375228743a 100644 --- a/crates/proc-macro-srv/src/server/token_id.rs +++ b/crates/proc-macro-srv/src/server/token_id.rs @@ -63,13 +63,14 @@ impl server::FreeFunctions for TokenIdServer { &mut self, s: &str, ) -> Result, ()> { - let literal = str_to_lit_node(s).ok_or(Err(()))?; + let literal = str_to_lit_node(s).ok_or(())?; - let kind = literal_to_external(literal.kind()).ok_or(Err(()))?; + let kind = literal_to_external(literal.kind()).ok_or(())?; // FIXME: handle more than just int and float suffixes let suffix = match literal.kind() { - ast::LiteralKind::FloatNumber(num) | ast::LiteralKind::IntNumber(num) => num.suffix(), + ast::LiteralKind::FloatNumber(num) => num.suffix(), + ast::LiteralKind::IntNumber(num) => num.suffix(), _ => None, } .map(|suffix| Symbol::intern(self.interner, suffix)); From cdb8a88ea3a4dfb2837e66e6344cff00783d84b7 Mon Sep 
17 00:00:00 2001 From: Victor Song Date: Tue, 30 Jan 2024 01:57:21 -0600 Subject: [PATCH 049/127] Fix more compilation errors --- crates/proc-macro-srv/src/server.rs | 4 ++-- crates/proc-macro-srv/src/server/rust_analyzer_span.rs | 6 +++--- crates/proc-macro-srv/src/server/token_id.rs | 6 +++--- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/crates/proc-macro-srv/src/server.rs b/crates/proc-macro-srv/src/server.rs index af50e1b70db0..ed9d770505f2 100644 --- a/crates/proc-macro-srv/src/server.rs +++ b/crates/proc-macro-srv/src/server.rs @@ -78,8 +78,8 @@ fn literal_to_external(literal_kind: ast::LiteralKind) -> Option bridge::LitKind::Integer, - ast::LiteralKind::FloatNumber(num) => bridge::LitKind::Float, + ast::LiteralKind::IntNumber(_) => bridge::LitKind::Integer, + ast::LiteralKind::FloatNumber(_) => bridge::LitKind::Float, ast::LiteralKind::Char(_) => bridge::LitKind::Char, ast::LiteralKind::Byte(_) => bridge::LitKind::Byte, ast::LiteralKind::Bool(_) => unreachable!(), diff --git a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs index dce4c60b2f92..1b883c87ea39 100644 --- a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs +++ b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs @@ -77,11 +77,11 @@ impl server::FreeFunctions for RaSpanServer { // FIXME: handle more than just int and float suffixes let suffix = match literal.kind() { - ast::LiteralKind::FloatNumber(num) => num.suffix(), - ast::LiteralKind::IntNumber(num) => num.suffix(), + ast::LiteralKind::FloatNumber(num) => num.suffix().map(ToString::to_string), + ast::LiteralKind::IntNumber(num) => num.suffix().map(ToString::to_string), _ => None, } - .map(|suffix| Symbol::intern(self.interner, suffix)); + .map(|suffix| Symbol::intern(self.interner, &suffix)); Ok(bridge::Literal { kind, diff --git a/crates/proc-macro-srv/src/server/token_id.rs b/crates/proc-macro-srv/src/server/token_id.rs index b2375228743a..7d2781d89f6c 
100644 --- a/crates/proc-macro-srv/src/server/token_id.rs +++ b/crates/proc-macro-srv/src/server/token_id.rs @@ -69,11 +69,11 @@ impl server::FreeFunctions for TokenIdServer { // FIXME: handle more than just int and float suffixes let suffix = match literal.kind() { - ast::LiteralKind::FloatNumber(num) => num.suffix(), - ast::LiteralKind::IntNumber(num) => num.suffix(), + ast::LiteralKind::FloatNumber(num) => num.suffix().map(ToString::to_string), + ast::LiteralKind::IntNumber(num) => num.suffix().map(ToString::to_string), _ => None, } - .map(|suffix| Symbol::intern(self.interner, suffix)); + .map(|suffix| Symbol::intern(self.interner, &suffix)); Ok(bridge::Literal { kind, From 1918f9b9e0fe2e5b090491596a8d41b5ed2ed3bd Mon Sep 17 00:00:00 2001 From: Victor Song Date: Tue, 30 Jan 2024 03:23:39 -0600 Subject: [PATCH 050/127] Address PR comments --- crates/proc-macro-srv/src/server.rs | 67 +++++++------------ .../src/server/rust_analyzer_span.rs | 25 +++++-- crates/proc-macro-srv/src/server/token_id.rs | 25 +++++-- crates/syntax/src/lib.rs | 19 ++++++ 4 files changed, 78 insertions(+), 58 deletions(-) diff --git a/crates/proc-macro-srv/src/server.rs b/crates/proc-macro-srv/src/server.rs index ed9d770505f2..bb49dc14f96a 100644 --- a/crates/proc-macro-srv/src/server.rs +++ b/crates/proc-macro-srv/src/server.rs @@ -17,7 +17,7 @@ pub mod rust_analyzer_span; mod symbol; pub mod token_id; pub use symbol::*; -use syntax::ast::{self, HasModuleItem, IsString}; +use syntax::ast::{self, IsString}; use tt::Spacing; fn delim_to_internal(d: proc_macro::Delimiter, span: bridge::DelimSpan) -> tt::Delimiter { @@ -56,50 +56,29 @@ fn spacing_to_external(spacing: Spacing) -> proc_macro::Spacing { } fn literal_to_external(literal_kind: ast::LiteralKind) -> Option { - Some(match literal_kind { - ast::LiteralKind::String(data) => { - if data.is_raw() { - bridge::LitKind::StrRaw(data.raw_delimiter_count()?) 
- } else { - bridge::LitKind::Str - } - } - ast::LiteralKind::ByteString(data) => { - if data.is_raw() { - bridge::LitKind::ByteStrRaw(data.raw_delimiter_count()?) - } else { - bridge::LitKind::ByteStr - } - } - ast::LiteralKind::CString(data) => { - if data.is_raw() { - bridge::LitKind::CStrRaw(data.raw_delimiter_count()?) - } else { - bridge::LitKind::CStr - } - } - ast::LiteralKind::IntNumber(_) => bridge::LitKind::Integer, - ast::LiteralKind::FloatNumber(_) => bridge::LitKind::Float, - ast::LiteralKind::Char(_) => bridge::LitKind::Char, - ast::LiteralKind::Byte(_) => bridge::LitKind::Byte, - ast::LiteralKind::Bool(_) => unreachable!(), - }) -} + match literal_kind { + ast::LiteralKind::String(data) => Some(if data.is_raw() { + bridge::LitKind::StrRaw(data.raw_delimiter_count()?) + } else { + bridge::LitKind::Str + }), -fn str_to_lit_node(input: &str) -> Option { - let input = input.trim(); - let source_code = format!("fn f() {{ let _ = {input}; }}"); - - let parse = ast::SourceFile::parse(&source_code); - let file = parse.tree(); - - let ast::Item::Fn(func) = file.items().next()? else { return None }; - let ast::Stmt::LetStmt(stmt) = func.body()?.stmt_list()?.statements().next()? else { - return None; - }; - let ast::Expr::Literal(lit) = stmt.initializer()? else { return None }; - - Some(lit) + ast::LiteralKind::ByteString(data) => Some(if data.is_raw() { + bridge::LitKind::ByteStrRaw(data.raw_delimiter_count()?) + } else { + bridge::LitKind::ByteStr + }), + ast::LiteralKind::CString(data) => Some(if data.is_raw() { + bridge::LitKind::CStrRaw(data.raw_delimiter_count()?) 
+ } else { + bridge::LitKind::CStr + }), + ast::LiteralKind::IntNumber(_) => Some(bridge::LitKind::Integer), + ast::LiteralKind::FloatNumber(_) => Some(bridge::LitKind::Float), + ast::LiteralKind::Char(_) => Some(bridge::LitKind::Char), + ast::LiteralKind::Byte(_) => Some(bridge::LitKind::Byte), + ast::LiteralKind::Bool(_) => None, + } } struct LiteralFormatter(bridge::Literal); diff --git a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs index 1b883c87ea39..7313e99bb121 100644 --- a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs +++ b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs @@ -13,11 +13,11 @@ use std::{ use ::tt::{TextRange, TextSize}; use proc_macro::bridge::{self, server}; use span::{Span, FIXUP_ERASED_FILE_AST_ID_MARKER}; -use syntax::ast; +use syntax::ast::{self, IsString}; use crate::server::{ - delim_to_external, delim_to_internal, literal_to_external, str_to_lit_node, - token_stream::TokenStreamBuilder, LiteralFormatter, Symbol, SymbolInternerRef, SYMBOL_INTERNER, + delim_to_external, delim_to_internal, literal_to_external, token_stream::TokenStreamBuilder, + LiteralFormatter, Symbol, SymbolInternerRef, SYMBOL_INTERNER, }; mod tt { pub use ::tt::*; @@ -71,7 +71,8 @@ impl server::FreeFunctions for RaSpanServer { &mut self, s: &str, ) -> Result, ()> { - let literal = str_to_lit_node(s).ok_or(())?; + let literal = ast::Literal::parse(s); + let literal = literal.tree(); let kind = literal_to_external(literal.kind()).ok_or(())?; @@ -80,12 +81,22 @@ impl server::FreeFunctions for RaSpanServer { ast::LiteralKind::FloatNumber(num) => num.suffix().map(ToString::to_string), ast::LiteralKind::IntNumber(num) => num.suffix().map(ToString::to_string), _ => None, - } - .map(|suffix| Symbol::intern(self.interner, &suffix)); + }; + + let text = match literal.kind() { + ast::LiteralKind::String(data) => data.text_without_quotes().to_string(), + ast::LiteralKind::ByteString(data) => 
data.text_without_quotes().to_string(), + ast::LiteralKind::CString(data) => data.text_without_quotes().to_string(), + _ => s.to_string(), + }; + let text = if let Some(ref suffix) = suffix { text.strip_suffix(suffix) } else { None } + .unwrap_or(&text); + + let suffix = suffix.map(|suffix| Symbol::intern(self.interner, &suffix)); Ok(bridge::Literal { kind, - symbol: Symbol::intern(self.interner, s), + symbol: Symbol::intern(self.interner, text), suffix, span: self.call_site, }) diff --git a/crates/proc-macro-srv/src/server/token_id.rs b/crates/proc-macro-srv/src/server/token_id.rs index 7d2781d89f6c..d16e9d25105b 100644 --- a/crates/proc-macro-srv/src/server/token_id.rs +++ b/crates/proc-macro-srv/src/server/token_id.rs @@ -6,11 +6,11 @@ use std::{ }; use proc_macro::bridge::{self, server}; -use syntax::ast; +use syntax::ast::{self, IsString}; use crate::server::{ - delim_to_external, delim_to_internal, literal_to_external, str_to_lit_node, - token_stream::TokenStreamBuilder, LiteralFormatter, Symbol, SymbolInternerRef, SYMBOL_INTERNER, + delim_to_external, delim_to_internal, literal_to_external, token_stream::TokenStreamBuilder, + LiteralFormatter, Symbol, SymbolInternerRef, SYMBOL_INTERNER, }; mod tt { pub use proc_macro_api::msg::TokenId; @@ -63,7 +63,8 @@ impl server::FreeFunctions for TokenIdServer { &mut self, s: &str, ) -> Result, ()> { - let literal = str_to_lit_node(s).ok_or(())?; + let literal = ast::Literal::parse(s); + let literal = literal.tree(); let kind = literal_to_external(literal.kind()).ok_or(())?; @@ -72,12 +73,22 @@ impl server::FreeFunctions for TokenIdServer { ast::LiteralKind::FloatNumber(num) => num.suffix().map(ToString::to_string), ast::LiteralKind::IntNumber(num) => num.suffix().map(ToString::to_string), _ => None, - } - .map(|suffix| Symbol::intern(self.interner, &suffix)); + }; + + let text = match literal.kind() { + ast::LiteralKind::String(data) => data.text_without_quotes().to_string(), + ast::LiteralKind::ByteString(data) => 
data.text_without_quotes().to_string(), + ast::LiteralKind::CString(data) => data.text_without_quotes().to_string(), + _ => s.to_string(), + }; + let text = if let Some(ref suffix) = suffix { text.strip_suffix(suffix) } else { None } + .unwrap_or(&text); + + let suffix = suffix.map(|suffix| Symbol::intern(self.interner, &suffix)); Ok(bridge::Literal { kind, - symbol: Symbol::intern(self.interner, s), + symbol: Symbol::intern(self.interner, text), suffix, span: self.call_site, }) diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs index 960889b74211..2390cacda28a 100644 --- a/crates/syntax/src/lib.rs +++ b/crates/syntax/src/lib.rs @@ -187,6 +187,25 @@ impl SourceFile { } } +impl ast::Literal { + pub fn parse(text: &str) -> Parse { + let lexed = parser::LexedStr::new(text); + let parser_input = lexed.to_input(); + let parser_output = parser::TopEntryPoint::Expr.parse(&parser_input); + let (green, mut errors, _) = parsing::build_tree(lexed, parser_output); + let root = SyntaxNode::new_root(green.clone()); + + errors.extend(validation::validate(&root)); + + assert_eq!(root.kind(), SyntaxKind::LITERAL); + Parse { + green, + errors: if errors.is_empty() { None } else { Some(errors.into()) }, + _ty: PhantomData, + } + } +} + impl ast::TokenTree { pub fn reparse_as_comma_separated_expr(self) -> Parse { let tokens = self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token); From 4923b8a74bdffb0a8d1ce3760d00da8959a711c2 Mon Sep 17 00:00:00 2001 From: Victor Song Date: Mon, 12 Feb 2024 23:57:42 -0600 Subject: [PATCH 051/127] Return `Option>` from `ast::Literal::parse` --- .../src/server/rust_analyzer_span.rs | 2 +- crates/proc-macro-srv/src/server/token_id.rs | 2 +- crates/syntax/src/lib.rs | 15 +++++++++------ 3 files changed, 11 insertions(+), 8 deletions(-) diff --git a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs index 7313e99bb121..cf6e816d599a 100644 --- 
a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs +++ b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs @@ -71,7 +71,7 @@ impl server::FreeFunctions for RaSpanServer { &mut self, s: &str, ) -> Result, ()> { - let literal = ast::Literal::parse(s); + let literal = ast::Literal::parse(s).ok_or(())?; let literal = literal.tree(); let kind = literal_to_external(literal.kind()).ok_or(())?; diff --git a/crates/proc-macro-srv/src/server/token_id.rs b/crates/proc-macro-srv/src/server/token_id.rs index d16e9d25105b..70e577f576fe 100644 --- a/crates/proc-macro-srv/src/server/token_id.rs +++ b/crates/proc-macro-srv/src/server/token_id.rs @@ -63,7 +63,7 @@ impl server::FreeFunctions for TokenIdServer { &mut self, s: &str, ) -> Result, ()> { - let literal = ast::Literal::parse(s); + let literal = ast::Literal::parse(s).ok_or(())?; let literal = literal.tree(); let kind = literal_to_external(literal.kind()).ok_or(())?; diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs index 2390cacda28a..61d9c65e4ff8 100644 --- a/crates/syntax/src/lib.rs +++ b/crates/syntax/src/lib.rs @@ -188,7 +188,7 @@ impl SourceFile { } impl ast::Literal { - pub fn parse(text: &str) -> Parse { + pub fn parse(text: &str) -> Option> { let lexed = parser::LexedStr::new(text); let parser_input = lexed.to_input(); let parser_output = parser::TopEntryPoint::Expr.parse(&parser_input); @@ -197,11 +197,14 @@ impl ast::Literal { errors.extend(validation::validate(&root)); - assert_eq!(root.kind(), SyntaxKind::LITERAL); - Parse { - green, - errors: if errors.is_empty() { None } else { Some(errors.into()) }, - _ty: PhantomData, + if root.kind() == SyntaxKind::LITERAL { + Some(Parse { + green, + errors: if errors.is_empty() { None } else { Some(errors.into()) }, + _ty: PhantomData, + }) + } else { + None } } } From 2c761048d4eb0cae8cf125278b1678b5270ce24f Mon Sep 17 00:00:00 2001 From: Mohammad Mustakim Ali Date: Tue, 13 Feb 2024 10:11:17 +0000 Subject: [PATCH 052/127] fix: clippy --- 
crates/ide-completion/src/render/function.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ide-completion/src/render/function.rs b/crates/ide-completion/src/render/function.rs index dc92a127de1b..cf9fe1ab3072 100644 --- a/crates/ide-completion/src/render/function.rs +++ b/crates/ide-completion/src/render/function.rs @@ -177,7 +177,7 @@ fn compute_return_type_match( self_type: hir::Type, ret_type: &hir::Type, ) -> CompletionRelevanceReturnType { - if match_types(ctx.completion, &self_type, &ret_type).is_some() { + if match_types(ctx.completion, &self_type, ret_type).is_some() { // fn([..]) -> Self CompletionRelevanceReturnType::DirectConstructor } else if ret_type From ed57008510b35338a9d1eff0fbadf11be3fc32c2 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 13 Feb 2024 12:33:51 +0100 Subject: [PATCH 053/127] fix: Validate literals in proc-macro-srv FreeFunctions::literal_from_str --- Cargo.lock | 2 +- crates/proc-macro-srv/Cargo.toml | 2 +- crates/proc-macro-srv/src/lib.rs | 5 ++ crates/proc-macro-srv/src/server.rs | 27 -------- .../src/server/rust_analyzer_span.rs | 66 +++++++++++++------ crates/proc-macro-srv/src/server/token_id.rs | 66 +++++++++++++------ crates/proc-macro-srv/src/tests/mod.rs | 18 +++-- crates/syntax/src/ast/token_ext.rs | 10 --- crates/syntax/src/lib.rs | 22 ------- 9 files changed, 110 insertions(+), 108 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0fdb366c1f9d..7b29d7bb798d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1329,9 +1329,9 @@ dependencies = [ "paths", "proc-macro-api", "proc-macro-test", + "ra-ap-rustc_lexer", "span", "stdx", - "syntax", "tt", ] diff --git a/crates/proc-macro-srv/Cargo.toml b/crates/proc-macro-srv/Cargo.toml index d0cdc51c3c22..bd7a31654584 100644 --- a/crates/proc-macro-srv/Cargo.toml +++ b/crates/proc-macro-srv/Cargo.toml @@ -29,7 +29,7 @@ paths.workspace = true base-db.workspace = true span.workspace = true proc-macro-api.workspace = true -syntax.workspace = true 
+ra-ap-rustc_lexer.workspace = true [dev-dependencies] expect-test = "1.4.0" diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index 460a96c07f36..831632c64c0a 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -20,6 +20,11 @@ extern crate proc_macro; #[cfg(feature = "in-rust-tree")] extern crate rustc_driver as _; +#[cfg(not(feature = "in-rust-tree"))] +extern crate ra_ap_rustc_lexer as rustc_lexer; +#[cfg(feature = "in-rust-tree")] +extern crate rustc_lexer; + mod dylib; mod proc_macros; mod server; diff --git a/crates/proc-macro-srv/src/server.rs b/crates/proc-macro-srv/src/server.rs index bb49dc14f96a..ff8fd295d884 100644 --- a/crates/proc-macro-srv/src/server.rs +++ b/crates/proc-macro-srv/src/server.rs @@ -17,7 +17,6 @@ pub mod rust_analyzer_span; mod symbol; pub mod token_id; pub use symbol::*; -use syntax::ast::{self, IsString}; use tt::Spacing; fn delim_to_internal(d: proc_macro::Delimiter, span: bridge::DelimSpan) -> tt::Delimiter { @@ -55,32 +54,6 @@ fn spacing_to_external(spacing: Spacing) -> proc_macro::Spacing { } } -fn literal_to_external(literal_kind: ast::LiteralKind) -> Option { - match literal_kind { - ast::LiteralKind::String(data) => Some(if data.is_raw() { - bridge::LitKind::StrRaw(data.raw_delimiter_count()?) - } else { - bridge::LitKind::Str - }), - - ast::LiteralKind::ByteString(data) => Some(if data.is_raw() { - bridge::LitKind::ByteStrRaw(data.raw_delimiter_count()?) - } else { - bridge::LitKind::ByteStr - }), - ast::LiteralKind::CString(data) => Some(if data.is_raw() { - bridge::LitKind::CStrRaw(data.raw_delimiter_count()?) 
- } else { - bridge::LitKind::CStr - }), - ast::LiteralKind::IntNumber(_) => Some(bridge::LitKind::Integer), - ast::LiteralKind::FloatNumber(_) => Some(bridge::LitKind::Float), - ast::LiteralKind::Char(_) => Some(bridge::LitKind::Char), - ast::LiteralKind::Byte(_) => Some(bridge::LitKind::Byte), - ast::LiteralKind::Bool(_) => None, - } -} - struct LiteralFormatter(bridge::Literal); impl LiteralFormatter { diff --git a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs index cf6e816d599a..17159ffbb887 100644 --- a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs +++ b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs @@ -13,11 +13,10 @@ use std::{ use ::tt::{TextRange, TextSize}; use proc_macro::bridge::{self, server}; use span::{Span, FIXUP_ERASED_FILE_AST_ID_MARKER}; -use syntax::ast::{self, IsString}; use crate::server::{ - delim_to_external, delim_to_internal, literal_to_external, token_stream::TokenStreamBuilder, - LiteralFormatter, Symbol, SymbolInternerRef, SYMBOL_INTERNER, + delim_to_external, delim_to_internal, token_stream::TokenStreamBuilder, LiteralFormatter, + Symbol, SymbolInternerRef, SYMBOL_INTERNER, }; mod tt { pub use ::tt::*; @@ -71,32 +70,57 @@ impl server::FreeFunctions for RaSpanServer { &mut self, s: &str, ) -> Result, ()> { - let literal = ast::Literal::parse(s).ok_or(())?; - let literal = literal.tree(); + use proc_macro::bridge::LitKind; + use rustc_lexer::{LiteralKind, Token, TokenKind}; - let kind = literal_to_external(literal.kind()).ok_or(())?; + let mut tokens = rustc_lexer::tokenize(s); + let minus_or_lit = tokens.next().unwrap_or(Token { kind: TokenKind::Eof, len: 0 }); - // FIXME: handle more than just int and float suffixes - let suffix = match literal.kind() { - ast::LiteralKind::FloatNumber(num) => num.suffix().map(ToString::to_string), - ast::LiteralKind::IntNumber(num) => num.suffix().map(ToString::to_string), - _ => None, + let lit = if minus_or_lit.kind 
== TokenKind::Minus { + let lit = tokens.next().ok_or(())?; + if !matches!( + lit.kind, + TokenKind::Literal { + kind: LiteralKind::Int { .. } | LiteralKind::Float { .. }, + .. + } + ) { + return Err(()); + } + lit + } else { + minus_or_lit }; - let text = match literal.kind() { - ast::LiteralKind::String(data) => data.text_without_quotes().to_string(), - ast::LiteralKind::ByteString(data) => data.text_without_quotes().to_string(), - ast::LiteralKind::CString(data) => data.text_without_quotes().to_string(), - _ => s.to_string(), - }; - let text = if let Some(ref suffix) = suffix { text.strip_suffix(suffix) } else { None } - .unwrap_or(&text); + if tokens.next().is_some() { + return Err(()); + } - let suffix = suffix.map(|suffix| Symbol::intern(self.interner, &suffix)); + let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) }; + let kind = match kind { + LiteralKind::Int { .. } => LitKind::Integer, + LiteralKind::Float { .. } => LitKind::Float, + LiteralKind::Char { .. } => LitKind::Char, + LiteralKind::Byte { .. } => LitKind::Byte, + LiteralKind::Str { .. } => LitKind::Str, + LiteralKind::ByteStr { .. } => LitKind::ByteStr, + LiteralKind::CStr { .. 
} => LitKind::CStr, + LiteralKind::RawStr { n_hashes } => LitKind::StrRaw(n_hashes.unwrap_or_default()), + LiteralKind::RawByteStr { n_hashes } => { + LitKind::ByteStrRaw(n_hashes.unwrap_or_default()) + } + LiteralKind::RawCStr { n_hashes } => LitKind::CStrRaw(n_hashes.unwrap_or_default()), + }; + + let (lit, suffix) = s.split_at(suffix_start as usize); + let suffix = match suffix { + "" | "_" => None, + suffix => Some(Symbol::intern(self.interner, suffix)), + }; Ok(bridge::Literal { kind, - symbol: Symbol::intern(self.interner, text), + symbol: Symbol::intern(self.interner, lit), suffix, span: self.call_site, }) diff --git a/crates/proc-macro-srv/src/server/token_id.rs b/crates/proc-macro-srv/src/server/token_id.rs index 70e577f576fe..eddd6b1e6b9d 100644 --- a/crates/proc-macro-srv/src/server/token_id.rs +++ b/crates/proc-macro-srv/src/server/token_id.rs @@ -6,11 +6,10 @@ use std::{ }; use proc_macro::bridge::{self, server}; -use syntax::ast::{self, IsString}; use crate::server::{ - delim_to_external, delim_to_internal, literal_to_external, token_stream::TokenStreamBuilder, - LiteralFormatter, Symbol, SymbolInternerRef, SYMBOL_INTERNER, + delim_to_external, delim_to_internal, token_stream::TokenStreamBuilder, LiteralFormatter, + Symbol, SymbolInternerRef, SYMBOL_INTERNER, }; mod tt { pub use proc_macro_api::msg::TokenId; @@ -63,32 +62,57 @@ impl server::FreeFunctions for TokenIdServer { &mut self, s: &str, ) -> Result, ()> { - let literal = ast::Literal::parse(s).ok_or(())?; - let literal = literal.tree(); + use proc_macro::bridge::LitKind; + use rustc_lexer::{LiteralKind, Token, TokenKind}; - let kind = literal_to_external(literal.kind()).ok_or(())?; + let mut tokens = rustc_lexer::tokenize(s); + let minus_or_lit = tokens.next().unwrap_or(Token { kind: TokenKind::Eof, len: 0 }); - // FIXME: handle more than just int and float suffixes - let suffix = match literal.kind() { - ast::LiteralKind::FloatNumber(num) => num.suffix().map(ToString::to_string), - 
ast::LiteralKind::IntNumber(num) => num.suffix().map(ToString::to_string), - _ => None, + let lit = if minus_or_lit.kind == TokenKind::Minus { + let lit = tokens.next().ok_or(())?; + if !matches!( + lit.kind, + TokenKind::Literal { + kind: LiteralKind::Int { .. } | LiteralKind::Float { .. }, + .. + } + ) { + return Err(()); + } + lit + } else { + minus_or_lit }; - let text = match literal.kind() { - ast::LiteralKind::String(data) => data.text_without_quotes().to_string(), - ast::LiteralKind::ByteString(data) => data.text_without_quotes().to_string(), - ast::LiteralKind::CString(data) => data.text_without_quotes().to_string(), - _ => s.to_string(), - }; - let text = if let Some(ref suffix) = suffix { text.strip_suffix(suffix) } else { None } - .unwrap_or(&text); + if tokens.next().is_some() { + return Err(()); + } - let suffix = suffix.map(|suffix| Symbol::intern(self.interner, &suffix)); + let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) }; + let kind = match kind { + LiteralKind::Int { .. } => LitKind::Integer, + LiteralKind::Float { .. } => LitKind::Float, + LiteralKind::Char { .. } => LitKind::Char, + LiteralKind::Byte { .. } => LitKind::Byte, + LiteralKind::Str { .. } => LitKind::Str, + LiteralKind::ByteStr { .. } => LitKind::ByteStr, + LiteralKind::CStr { .. 
} => LitKind::CStr, + LiteralKind::RawStr { n_hashes } => LitKind::StrRaw(n_hashes.unwrap_or_default()), + LiteralKind::RawByteStr { n_hashes } => { + LitKind::ByteStrRaw(n_hashes.unwrap_or_default()) + } + LiteralKind::RawCStr { n_hashes } => LitKind::CStrRaw(n_hashes.unwrap_or_default()), + }; + + let (lit, suffix) = s.split_at(suffix_start as usize); + let suffix = match suffix { + "" | "_" => None, + suffix => Some(Symbol::intern(self.interner, suffix)), + }; Ok(bridge::Literal { kind, - symbol: Symbol::intern(self.interner, text), + symbol: Symbol::intern(self.interner, lit), suffix, span: self.call_site, }) diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs index 87d832cc76fa..e5bfe5ee92cd 100644 --- a/crates/proc-macro-srv/src/tests/mod.rs +++ b/crates/proc-macro-srv/src/tests/mod.rs @@ -169,8 +169,8 @@ fn test_fn_like_mk_idents() { fn test_fn_like_macro_clone_literals() { assert_expand( "fn_like_clone_tokens", - r#"1u16, 2_u32, -4i64, 3.14f32, "hello bridge""#, - expect![[r#" + r###"1u16, 2_u32, -4i64, 3.14f32, "hello bridge", "suffixed"suffix, r##"raw"##"###, + expect![[r###" SUBTREE $$ 1 1 LITERAL 1u16 1 PUNCH , [alone] 1 @@ -181,8 +181,12 @@ fn test_fn_like_macro_clone_literals() { PUNCH , [alone] 1 LITERAL 3.14f32 1 PUNCH , [alone] 1 - LITERAL "hello bridge" 1"#]], - expect![[r#" + LITERAL ""hello bridge"" 1 + PUNCH , [alone] 1 + LITERAL ""suffixed""suffix 1 + PUNCH , [alone] 1 + LITERAL r##"r##"raw"##"## 1"###]], + expect![[r###" SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } LITERAL 1u16 SpanData { range: 0..4, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } PUNCH , [alone] SpanData { range: 4..5, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } @@ -193,7 +197,11 @@ fn test_fn_like_macro_clone_literals() { PUNCH , [alone] SpanData { range: 18..19, 
anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } LITERAL 3.14f32 SpanData { range: 20..27, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } PUNCH , [alone] SpanData { range: 27..28, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } - LITERAL "hello bridge" SpanData { range: 29..43, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]], + LITERAL ""hello bridge"" SpanData { range: 29..43, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 43..44, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL ""suffixed""suffix SpanData { range: 45..61, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 61..62, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL r##"r##"raw"##"## SpanData { range: 63..73, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"###]], ); } diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs index c93391a9792c..7cd1f1550b98 100644 --- a/crates/syntax/src/ast/token_ext.rs +++ b/crates/syntax/src/ast/token_ext.rs @@ -204,16 +204,6 @@ pub trait IsString: AstToken { assert!(TextRange::up_to(contents_range.len()).contains_range(range)); Some(range + contents_range.start()) } - fn raw_delimiter_count(&self) -> Option { - let text = self.text(); - let quote_range = self.text_range_between_quotes()?; - let range_start = self.syntax().text_range().start(); - text[TextRange::up_to((quote_range - range_start).start())] - .matches('#') - .count() - .try_into() - .ok() - } } impl IsString for ast::String { diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs index f562da150372..b755de86d32c 100644 --- a/crates/syntax/src/lib.rs +++ b/crates/syntax/src/lib.rs @@ -182,28 +182,6 @@ impl SourceFile { } } -impl ast::Literal { - pub fn parse(text: &str) -> Option> { - let lexed = parser::LexedStr::new(text); - let parser_input = 
lexed.to_input(); - let parser_output = parser::TopEntryPoint::Expr.parse(&parser_input); - let (green, mut errors, _) = parsing::build_tree(lexed, parser_output); - let root = SyntaxNode::new_root(green.clone()); - - errors.extend(validation::validate(&root)); - - if root.kind() == SyntaxKind::LITERAL { - Some(Parse { - green, - errors: if errors.is_empty() { None } else { Some(errors.into()) }, - _ty: PhantomData, - }) - } else { - None - } - } -} - impl ast::TokenTree { pub fn reparse_as_comma_separated_expr(self) -> Parse { let tokens = self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token); From 9897662bf70f3cbf79291ff8a880abce021ec2e2 Mon Sep 17 00:00:00 2001 From: dfireBird Date: Tue, 13 Feb 2024 18:22:32 +0530 Subject: [PATCH 054/127] add completions to show only traits with qualified path prefix --- crates/ide-completion/src/completions/type.rs | 14 +++++++++-- crates/ide-completion/src/context.rs | 1 + crates/ide-completion/src/tests/type_pos.rs | 24 ++++++++++++++++++- 3 files changed, 36 insertions(+), 3 deletions(-) diff --git a/crates/ide-completion/src/completions/type.rs b/crates/ide-completion/src/completions/type.rs index eca17bb7c7f2..e4678089462a 100644 --- a/crates/ide-completion/src/completions/type.rs +++ b/crates/ide-completion/src/completions/type.rs @@ -31,6 +31,11 @@ pub(crate) fn complete_type_path( ScopeDef::ImplSelfType(_) => location.complete_self_type(), // Don't suggest attribute macros and derives. 
ScopeDef::ModuleDef(Macro(mac)) => mac.is_fn_like(ctx.db), + ScopeDef::ModuleDef(Trait(_) | Module(_)) + if matches!(location, TypeLocation::ImplTrait) => + { + true + } // Type things are fine ScopeDef::ModuleDef( BuiltinType(_) | Adt(_) | Module(_) | Trait(_) | TraitAlias(_) | TypeAlias(_), @@ -187,8 +192,13 @@ pub(crate) fn complete_type_path( TypeLocation::ImplTrait => { acc.add_nameref_keywords_with_colon(ctx); ctx.process_all_names(&mut |name, def, doc_aliases| { - let is_trait = matches!(def, ScopeDef::ModuleDef(hir::ModuleDef::Trait(_))); - if is_trait { + let is_trait_or_module = matches!( + def, + ScopeDef::ModuleDef( + hir::ModuleDef::Module(_) | hir::ModuleDef::Trait(_) + ) + ); + if is_trait_or_module { acc.add_path_resolution(ctx, path_ctx, name, def, doc_aliases); } }); diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs index 2a0004f60b82..314f84f5df16 100644 --- a/crates/ide-completion/src/context.rs +++ b/crates/ide-completion/src/context.rs @@ -202,6 +202,7 @@ impl TypeLocation { } TypeLocation::AssocConstEq => false, TypeLocation::AssocTypeEq => true, + TypeLocation::ImplTrait => false, _ => true, } } diff --git a/crates/ide-completion/src/tests/type_pos.rs b/crates/ide-completion/src/tests/type_pos.rs index 656592ebfd62..db4ac9381ced 100644 --- a/crates/ide-completion/src/tests/type_pos.rs +++ b/crates/ide-completion/src/tests/type_pos.rs @@ -998,8 +998,10 @@ trait Foo {} struct Bar; -impl $0 for Bar { }"#, +impl $0 for Bar { } +"#, expect![[r#" + md module tt Foo tt Trait kw crate:: @@ -1007,3 +1009,23 @@ impl $0 for Bar { }"#, "#]], ); } + +#[test] +fn complete_traits_with_path_on_impl_trait_block() { + check( + r#" +mod outer { + pub trait Foo {} + pub struct Bar; + pub mod inner { + } +} + +impl outer::$0 for Bar { } +"#, + expect![[r#" + md inner + tt Foo + "#]], + ); +} From 0e47befaf3cdf30fcf851fd34d4d6fdb4d274382 Mon Sep 17 00:00:00 2001 From: dfireBird Date: Tue, 13 Feb 2024 19:31:04 +0530 Subject: 
[PATCH 055/127] fix flyimport showing other types in `impl` trait statement --- .../src/completions/flyimport.rs | 2 ++ crates/ide-completion/src/tests/flyimport.rs | 19 +++++++++++++++++++ 2 files changed, 21 insertions(+) diff --git a/crates/ide-completion/src/completions/flyimport.rs b/crates/ide-completion/src/completions/flyimport.rs index b9f91d34b2c2..1c6cbf713bc8 100644 --- a/crates/ide-completion/src/completions/flyimport.rs +++ b/crates/ide-completion/src/completions/flyimport.rs @@ -238,6 +238,8 @@ fn import_on_the_fly( (PathKind::Type { location }, ItemInNs::Types(ty)) => { if matches!(location, TypeLocation::TypeBound) { matches!(ty, ModuleDef::Trait(_)) + } else if matches!(location, TypeLocation::ImplTrait) { + matches!(ty, ModuleDef::Trait(_)) || matches!(ty, ModuleDef::Module(_)) } else { true } diff --git a/crates/ide-completion/src/tests/flyimport.rs b/crates/ide-completion/src/tests/flyimport.rs index eaa1bebc03c7..fff193ba4c9b 100644 --- a/crates/ide-completion/src/tests/flyimport.rs +++ b/crates/ide-completion/src/tests/flyimport.rs @@ -1397,3 +1397,22 @@ pub use bridge2::server2::Span2; "#]], ); } + +#[test] +fn flyimport_only_traits_in_impl_trait_block() { + check( + r#" +//- /main.rs crate:main deps:dep +pub struct Bar; + +impl Foo$0 for Bar { } +//- /lib.rs crate:dep +pub trait FooTrait; + +pub struct FooStruct; +"#, + expect![[r#" + tt FooTrait (use dep::FooTrait) + "#]], + ); +} From 7f661782cdd09dc9b94230e979b5f26b751803d9 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 13 Feb 2024 17:26:14 +0100 Subject: [PATCH 056/127] Simplify --- crates/ide-completion/src/completions/flyimport.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ide-completion/src/completions/flyimport.rs b/crates/ide-completion/src/completions/flyimport.rs index 1c6cbf713bc8..3bc329ecd748 100644 --- a/crates/ide-completion/src/completions/flyimport.rs +++ b/crates/ide-completion/src/completions/flyimport.rs @@ -239,7 +239,7 @@ fn 
import_on_the_fly( if matches!(location, TypeLocation::TypeBound) { matches!(ty, ModuleDef::Trait(_)) } else if matches!(location, TypeLocation::ImplTrait) { - matches!(ty, ModuleDef::Trait(_)) || matches!(ty, ModuleDef::Module(_)) + matches!(ty, ModuleDef::Trait(_) | ModuleDef::Module(_)) } else { true } From ccccc299c850a250e28fd3469e75e1ec24e9c672 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 13 Feb 2024 18:23:28 +0100 Subject: [PATCH 057/127] fix: Recover from `=` in record constructor field assignment --- crates/parser/src/grammar/expressions.rs | 29 +++++++++---- ...cord_literal_before_ellipsis_recovery.rast | 36 ++++++++-------- ...0032_record_literal_field_eq_recovery.rast | 41 +++++++++++++++++++ .../0032_record_literal_field_eq_recovery.rs | 3 ++ 4 files changed, 82 insertions(+), 27 deletions(-) create mode 100644 crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rast create mode 100644 crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rs diff --git a/crates/parser/src/grammar/expressions.rs b/crates/parser/src/grammar/expressions.rs index f40c515fa079..052300f6d5ad 100644 --- a/crates/parser/src/grammar/expressions.rs +++ b/crates/parser/src/grammar/expressions.rs @@ -678,27 +678,38 @@ pub(crate) fn record_expr_field_list(p: &mut Parser<'_>) { attributes::outer_attrs(p); match p.current() { - IDENT | INT_NUMBER => { + IDENT | INT_NUMBER if p.nth_at(1, T![::]) => { // test_err record_literal_missing_ellipsis_recovery // fn main() { // S { S::default() } // } - if p.nth_at(1, T![::]) { - m.abandon(p); - p.expect(T![..]); - expr(p); - } else { + m.abandon(p); + p.expect(T![..]); + expr(p); + } + IDENT | INT_NUMBER => { + if p.nth_at(1, T![..]) { // test_err record_literal_before_ellipsis_recovery // fn main() { // S { field ..S::default() } // } - if p.nth_at(1, T![:]) || p.nth_at(1, T![..]) { + name_ref_or_index(p); + p.error("expected colon"); + } else { + // test_err 
record_literal_field_eq_recovery + // fn main() { + // S { field = foo } + // } + if p.nth_at(1, T![:]) { name_ref_or_index(p); - p.expect(T![:]); + p.bump(T![:]); + } else if p.nth_at(1, T![=]) { + name_ref_or_index(p); + p.err_and_bump("expected colon"); } expr(p); - m.complete(p, RECORD_EXPR_FIELD); } + m.complete(p, RECORD_EXPR_FIELD); } T![.] if p.at(T![..]) => { m.abandon(p); diff --git a/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast b/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast index f511960040d5..c59ea2604b9a 100644 --- a/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast +++ b/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast @@ -24,26 +24,26 @@ SOURCE_FILE RECORD_EXPR_FIELD NAME_REF IDENT "field" - WHITESPACE " " - RANGE_EXPR - DOT2 ".." - CALL_EXPR - PATH_EXPR - PATH - PATH - PATH_SEGMENT - NAME_REF - IDENT "S" - COLON2 "::" - PATH_SEGMENT - NAME_REF - IDENT "default" - ARG_LIST - L_PAREN "(" - R_PAREN ")" + WHITESPACE " " + DOT2 ".." 
+ CALL_EXPR + PATH_EXPR + PATH + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + COLON2 "::" + PATH_SEGMENT + NAME_REF + IDENT "default" + ARG_LIST + L_PAREN "(" + R_PAREN ")" WHITESPACE " " R_CURLY "}" WHITESPACE "\n" R_CURLY "}" WHITESPACE "\n" -error 25: expected COLON +error 25: expected colon +error 25: expected COMMA diff --git a/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rast b/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rast new file mode 100644 index 000000000000..4ba6f9117f9b --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rast @@ -0,0 +1,41 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "main" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + RECORD_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + WHITESPACE " " + RECORD_EXPR_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + RECORD_EXPR_FIELD + NAME_REF + IDENT "field" + WHITESPACE " " + ERROR + EQ "=" + WHITESPACE " " + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "foo" + WHITESPACE " " + R_CURLY "}" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" +error 26: expected colon diff --git a/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rs b/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rs new file mode 100644 index 000000000000..1eb1aa9b9264 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rs @@ -0,0 +1,3 @@ +fn main() { + S { field = foo } +} From bf115a6064379d1d1e8250c62e0e66d0cbe506cb Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 13 Feb 2024 18:25:40 +0100 Subject: [PATCH 058/127] fix: Recover from `=` in record pattern field --- crates/parser/src/grammar/expressions.rs | 4 +- crates/parser/src/grammar/patterns.rs | 9 ++++ ...cord_literal_before_ellipsis_recovery.rast | 
2 +- ...0032_record_literal_field_eq_recovery.rast | 2 +- .../0033_record_pat_field_eq_recovery.rast | 43 +++++++++++++++++++ .../err/0033_record_pat_field_eq_recovery.rs | 3 ++ 6 files changed, 59 insertions(+), 4 deletions(-) create mode 100644 crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rast create mode 100644 crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rs diff --git a/crates/parser/src/grammar/expressions.rs b/crates/parser/src/grammar/expressions.rs index 052300f6d5ad..6b660180f823 100644 --- a/crates/parser/src/grammar/expressions.rs +++ b/crates/parser/src/grammar/expressions.rs @@ -694,7 +694,7 @@ pub(crate) fn record_expr_field_list(p: &mut Parser<'_>) { // S { field ..S::default() } // } name_ref_or_index(p); - p.error("expected colon"); + p.error("expected `:`"); } else { // test_err record_literal_field_eq_recovery // fn main() { @@ -705,7 +705,7 @@ pub(crate) fn record_expr_field_list(p: &mut Parser<'_>) { p.bump(T![:]); } else if p.nth_at(1, T![=]) { name_ref_or_index(p); - p.err_and_bump("expected colon"); + p.err_and_bump("expected `:`"); } expr(p); } diff --git a/crates/parser/src/grammar/patterns.rs b/crates/parser/src/grammar/patterns.rs index 39ded41bb241..503674233792 100644 --- a/crates/parser/src/grammar/patterns.rs +++ b/crates/parser/src/grammar/patterns.rs @@ -323,6 +323,15 @@ fn record_pat_field(p: &mut Parser<'_>) { p.bump(T![:]); pattern(p); } + // test_err record_pat_field_eq_recovery + // fn main() { + // let S { field = foo }; + // } + IDENT | INT_NUMBER if p.nth(1) == T![=] => { + name_ref_or_index(p); + p.err_and_bump("expected `:`"); + pattern(p); + } T![box] => { // FIXME: not all box patterns should be allowed box_pat(p); diff --git a/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast b/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast index c59ea2604b9a..741b7845e7f1 100644 --- 
a/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast +++ b/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast @@ -45,5 +45,5 @@ SOURCE_FILE WHITESPACE "\n" R_CURLY "}" WHITESPACE "\n" -error 25: expected colon +error 25: expected `:` error 25: expected COMMA diff --git a/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rast b/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rast index 4ba6f9117f9b..ad4deeb0b67c 100644 --- a/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rast +++ b/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rast @@ -38,4 +38,4 @@ SOURCE_FILE WHITESPACE "\n" R_CURLY "}" WHITESPACE "\n" -error 26: expected colon +error 26: expected `:` diff --git a/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rast b/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rast new file mode 100644 index 000000000000..6940a84b6830 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rast @@ -0,0 +1,43 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "main" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + LET_STMT + LET_KW "let" + WHITESPACE " " + RECORD_PAT + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + WHITESPACE " " + RECORD_PAT_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + RECORD_PAT_FIELD + NAME_REF + IDENT "field" + WHITESPACE " " + ERROR + EQ "=" + WHITESPACE " " + IDENT_PAT + NAME + IDENT "foo" + WHITESPACE " " + R_CURLY "}" + SEMICOLON ";" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" +error 30: expected `:` diff --git a/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rs 
b/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rs new file mode 100644 index 000000000000..c4949d6e12e7 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rs @@ -0,0 +1,3 @@ +fn main() { + let S { field = foo }; +} From a981db53fa52534a989a4a30494fb33eb697bf8c Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 13 Feb 2024 19:42:03 +0100 Subject: [PATCH 059/127] fix: Pass .cargo/config.toml env vars to proc-macro server --- crates/load-cargo/src/lib.rs | 4 +-- crates/proc-macro-api/src/lib.rs | 8 +++-- crates/proc-macro-api/src/process.rs | 25 ++++++++++++---- crates/project-model/src/tests.rs | 3 ++ crates/project-model/src/workspace.rs | 43 +++++++++++++++++++++++++-- crates/rust-analyzer/src/reload.rs | 13 +++++++- 6 files changed, 83 insertions(+), 13 deletions(-) diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs index a52cc19650f4..6958a4be17ba 100644 --- a/crates/load-cargo/src/lib.rs +++ b/crates/load-cargo/src/lib.rs @@ -67,9 +67,9 @@ pub fn load_workspace( let proc_macro_server = match &load_config.with_proc_macro_server { ProcMacroServerChoice::Sysroot => ws .find_sysroot_proc_macro_srv() - .and_then(|it| ProcMacroServer::spawn(it).map_err(Into::into)), + .and_then(|it| ProcMacroServer::spawn(it, extra_env).map_err(Into::into)), ProcMacroServerChoice::Explicit(path) => { - ProcMacroServer::spawn(path.clone()).map_err(Into::into) + ProcMacroServer::spawn(path.clone(), extra_env).map_err(Into::into) } ProcMacroServerChoice::None => Err(anyhow::format_err!("proc macro server disabled")), }; diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index 1dadfc40ac43..6b16711a8d87 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -13,6 +13,7 @@ mod version; use indexmap::IndexSet; use paths::AbsPathBuf; +use rustc_hash::FxHashMap; use span::Span; use std::{ fmt, io, @@ -107,8 +108,11 @@ pub struct 
MacroPanic { impl ProcMacroServer { /// Spawns an external process as the proc macro server and returns a client connected to it. - pub fn spawn(process_path: AbsPathBuf) -> io::Result { - let process = ProcMacroProcessSrv::run(process_path)?; + pub fn spawn( + process_path: AbsPathBuf, + env: &FxHashMap, + ) -> io::Result { + let process = ProcMacroProcessSrv::run(process_path, env)?; Ok(ProcMacroServer { process: Arc::new(Mutex::new(process)) }) } diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs index 96f97bf5e205..12eafcea442d 100644 --- a/crates/proc-macro-api/src/process.rs +++ b/crates/proc-macro-api/src/process.rs @@ -7,6 +7,7 @@ use std::{ }; use paths::{AbsPath, AbsPathBuf}; +use rustc_hash::FxHashMap; use stdx::JodChild; use crate::{ @@ -26,9 +27,12 @@ pub(crate) struct ProcMacroProcessSrv { } impl ProcMacroProcessSrv { - pub(crate) fn run(process_path: AbsPathBuf) -> io::Result { + pub(crate) fn run( + process_path: AbsPathBuf, + env: &FxHashMap, + ) -> io::Result { let create_srv = |null_stderr| { - let mut process = Process::run(process_path.clone(), null_stderr)?; + let mut process = Process::run(process_path.clone(), env, null_stderr)?; let (stdin, stdout) = process.stdio().expect("couldn't access child stdio"); io::Result::Ok(ProcMacroProcessSrv { @@ -147,8 +151,12 @@ struct Process { } impl Process { - fn run(path: AbsPathBuf, null_stderr: bool) -> io::Result { - let child = JodChild(mk_child(&path, null_stderr)?); + fn run( + path: AbsPathBuf, + env: &FxHashMap, + null_stderr: bool, + ) -> io::Result { + let child = JodChild(mk_child(&path, env, null_stderr)?); Ok(Process { child }) } @@ -161,9 +169,14 @@ impl Process { } } -fn mk_child(path: &AbsPath, null_stderr: bool) -> io::Result { +fn mk_child( + path: &AbsPath, + env: &FxHashMap, + null_stderr: bool, +) -> io::Result { let mut cmd = Command::new(path.as_os_str()); - cmd.env("RUST_ANALYZER_INTERNALS_DO_NOT_USE", "this is unstable") + cmd.envs(env) + 
.env("RUST_ANALYZER_INTERNALS_DO_NOT_USE", "this is unstable") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(if null_stderr { Stdio::null() } else { Stdio::inherit() }); diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs index 75d48004b64a..b50750c61492 100644 --- a/crates/project-model/src/tests.rs +++ b/crates/project-model/src/tests.rs @@ -34,6 +34,7 @@ fn load_cargo_with_overrides( cfg_overrides, toolchain: None, target_layout: Err("target_data_layout not loaded".into()), + cargo_config_extra_env: Default::default(), }; to_crate_graph(project_workspace) } @@ -53,6 +54,7 @@ fn load_cargo_with_fake_sysroot( cfg_overrides: Default::default(), toolchain: None, target_layout: Err("target_data_layout not loaded".into()), + cargo_config_extra_env: Default::default(), }; project_workspace.to_crate_graph( &mut { @@ -332,6 +334,7 @@ fn smoke_test_real_sysroot_cargo() { cfg_overrides: Default::default(), toolchain: None, target_layout: Err("target_data_layout not loaded".into()), + cargo_config_extra_env: Default::default(), }; project_workspace.to_crate_graph( &mut { diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index 800939252834..cacd506d1b21 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -73,6 +73,7 @@ pub enum ProjectWorkspace { cfg_overrides: CfgOverrides, toolchain: Option, target_layout: Result, + cargo_config_extra_env: FxHashMap, }, /// Project workspace was manually specified using a `rust-project.json` file. 
Json { @@ -115,7 +116,8 @@ impl fmt::Debug for ProjectWorkspace { rustc_cfg, cfg_overrides, toolchain, - target_layout: data_layout, + target_layout, + cargo_config_extra_env, } => f .debug_struct("Cargo") .field("root", &cargo.workspace_root().file_name()) @@ -128,7 +130,8 @@ impl fmt::Debug for ProjectWorkspace { .field("n_rustc_cfg", &rustc_cfg.len()) .field("n_cfg_overrides", &cfg_overrides.len()) .field("toolchain", &toolchain) - .field("data_layout", &data_layout) + .field("data_layout", &target_layout) + .field("cargo_config_extra_env", &cargo_config_extra_env) .finish(), ProjectWorkspace::Json { project, @@ -320,6 +323,8 @@ impl ProjectWorkspace { })?; let cargo = CargoWorkspace::new(meta); + let cargo_config_extra_env = + cargo_config_env(cargo_toml, &config.extra_env, sysroot_ref); ProjectWorkspace::Cargo { cargo, build_scripts: WorkspaceBuildScripts::default(), @@ -329,6 +334,7 @@ impl ProjectWorkspace { cfg_overrides, toolchain, target_layout: data_layout.map_err(|it| it.to_string()), + cargo_config_extra_env, } } }; @@ -589,6 +595,7 @@ impl ProjectWorkspace { build_scripts, toolchain: _, target_layout: _, + cargo_config_extra_env: _, } => { cargo .packages() @@ -700,6 +707,7 @@ impl ProjectWorkspace { build_scripts, toolchain, target_layout, + cargo_config_extra_env: _, } => cargo_to_crate_graph( load, rustc.as_ref().map(|a| a.as_ref()).ok(), @@ -742,6 +750,7 @@ impl ProjectWorkspace { rustc_cfg, cfg_overrides, toolchain, + cargo_config_extra_env, build_scripts: _, target_layout: _, }, @@ -752,6 +761,7 @@ impl ProjectWorkspace { rustc_cfg: o_rustc_cfg, cfg_overrides: o_cfg_overrides, toolchain: o_toolchain, + cargo_config_extra_env: o_cargo_config_extra_env, build_scripts: _, target_layout: _, }, @@ -762,6 +772,7 @@ impl ProjectWorkspace { && cfg_overrides == o_cfg_overrides && toolchain == o_toolchain && sysroot == o_sysroot + && cargo_config_extra_env == o_cargo_config_extra_env } ( Self::Json { project, sysroot, rustc_cfg, toolchain, target_layout: 
_ }, @@ -1598,3 +1609,31 @@ fn create_cfg_options(rustc_cfg: Vec) -> CfgOptions { cfg_options.insert_atom("debug_assertions".into()); cfg_options } + +fn cargo_config_env( + cargo_toml: &ManifestPath, + extra_env: &FxHashMap, + sysroot: Option<&Sysroot>, +) -> FxHashMap { + let Ok(program) = Sysroot::discover_tool(sysroot, toolchain::Tool::Cargo) else { + return Default::default(); + }; + let mut cargo_config = Command::new(program); + cargo_config.envs(extra_env); + cargo_config + .current_dir(cargo_toml.parent()) + .args(["-Z", "unstable-options", "config", "get", "env"]) + .env("RUSTC_BOOTSTRAP", "1"); + // if successful we receive `env.key.value = "value" per entry + tracing::debug!("Discovering cargo config env by {:?}", cargo_config); + utf8_stdout(cargo_config).map(parse_output_cargo_config_env).unwrap_or_default() +} + +fn parse_output_cargo_config_env(stdout: String) -> FxHashMap { + stdout + .lines() + .filter_map(|l| l.strip_prefix("env.")) + .filter_map(|l| l.split_once(".value = ")) + .map(|(key, value)| (key.to_owned(), value.trim_matches('"').to_owned())) + .collect() +} diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 3c2ba2f115aa..93b6a53908f2 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -468,8 +468,19 @@ impl GlobalState { None => ws.find_sysroot_proc_macro_srv()?, }; + let env = match ws { + ProjectWorkspace::Cargo { cargo_config_extra_env, .. 
} => { + cargo_config_extra_env + .iter() + .chain(self.config.extra_env()) + .map(|(a, b)| (a.clone(), b.clone())) + .collect() + } + _ => Default::default(), + }; tracing::info!("Using proc-macro server at {path}"); - ProcMacroServer::spawn(path.clone()).map_err(|err| { + + ProcMacroServer::spawn(path.clone(), &env).map_err(|err| { tracing::error!( "Failed to run proc-macro server from path {path}, error: {err:?}", ); From 931f563a8920ce329dee285f96ed9ea2335beb74 Mon Sep 17 00:00:00 2001 From: Wilfred Hughes Date: Tue, 13 Feb 2024 14:29:20 -0800 Subject: [PATCH 060/127] Update JsonProject to include optional fields These were documented in #15014 in the manual, but this definition wasn't updated to match. --- editors/code/src/rust_project.ts | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/editors/code/src/rust_project.ts b/editors/code/src/rust_project.ts index bf65ad43ba59..c983874fc009 100644 --- a/editors/code/src/rust_project.ts +++ b/editors/code/src/rust_project.ts @@ -1,7 +1,26 @@ export interface JsonProject { + /// Path to the sysroot directory. + /// + /// The sysroot is where rustc looks for the + /// crates that are built-in to rust, such as + /// std. + /// + /// https://doc.rust-lang.org/rustc/command-line-arguments.html#--sysroot-override-the-system-root + /// + /// To see the current value of sysroot, you + /// can query rustc: + /// + /// ``` + /// $ rustc --print sysroot + /// /Users/yourname/.rustup/toolchains/stable-x86_64-apple-darwin + /// ``` + sysroot?: string; /// Path to the directory with *source code* of /// sysroot crates. /// + /// By default, this is `lib/rustlib/src/rust/library` + /// relative to the sysroot. 
+ /// /// It should point to the directory where std, /// core, and friends can be found: /// From c73865518816aff7309b9336939961b73508b2ac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Wed, 14 Feb 2024 08:45:11 +0200 Subject: [PATCH 061/127] Set channel override when querying the sysroot metadata --- crates/project-model/src/sysroot.rs | 10 +++++++++- xtask/src/metrics.rs | 2 -- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs index b0a8f0d4a416..24c2edd2c4fc 100644 --- a/crates/project-model/src/sysroot.rs +++ b/crates/project-model/src/sysroot.rs @@ -254,10 +254,18 @@ impl Sysroot { .ok()?; let current_dir = AbsPathBuf::try_from(&*format!("{sysroot_src_dir}/sysroot")).ok()?; + + let mut cargo_config = CargoConfig::default(); + // the sysroot uses `public-dependency`, so we make cargo think it's a nightly + cargo_config.extra_env.insert( + "__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS".to_owned(), + "nightly".to_owned(), + ); + let res = CargoWorkspace::fetch_metadata( &sysroot_cargo_toml, ¤t_dir, - &CargoConfig::default(), + &cargo_config, None, &|_| (), ) diff --git a/xtask/src/metrics.rs b/xtask/src/metrics.rs index 9bd3a661c24d..2efafa10a828 100644 --- a/xtask/src/metrics.rs +++ b/xtask/src/metrics.rs @@ -117,8 +117,6 @@ impl Metrics { sh, "./target/release/rust-analyzer -q analysis-stats {path} --query-sysroot-metadata" ) - // the sysroot uses `public-dependency`, so we make cargo think it's a nightly - .env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly") .read()?; for (metric, value, unit) in parse_metrics(&output) { self.report(&format!("analysis-stats/{name}/{metric}"), value, unit.into()); From c125e8ca899b0ed182bb37a321bf202a40eaa3ed Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 14 Feb 2024 11:26:11 +0100 Subject: [PATCH 062/127] Add github ci problem matcher for clippy --- .github/rust.json | 33 
+++++++++++++++++++++++++++++++++ .github/workflows/ci.yaml | 4 ++++ 2 files changed, 37 insertions(+) create mode 100644 .github/rust.json diff --git a/.github/rust.json b/.github/rust.json new file mode 100644 index 000000000000..ddaa1b0824b9 --- /dev/null +++ b/.github/rust.json @@ -0,0 +1,33 @@ +{ + "problemMatcher": [ + { + "owner": "rustfmt", + "severity": "warning", + "pattern": [ + { + "regexp": "^(Diff in (.+)) at line (\\d+):$", + "message": 1, + "file": 2, + "line": 3 + } + ] + }, + { + "owner": "clippy", + "pattern": [ + { + "regexp": "^(?:\\x1b\\[[\\d;]+m)*(warning|warn|error)(?:\\x1b\\[[\\d;]+m)*(\\[(.*)\\])?(?:\\x1b\\[[\\d;]+m)*:(?:\\x1b\\[[\\d;]+m)* ([^\\x1b]*)(?:\\x1b\\[[\\d;]+m)*$", + "severity": 1, + "message": 4, + "code": 3 + }, + { + "regexp": "^(?:\\x1b\\[[\\d;]+m)*\\s*(?:\\x1b\\[[\\d;]+m)*\\s*--> (?:\\x1b\\[[\\d;]+m)*(.*):(\\d*):(\\d*)(?:\\x1b\\[[\\d;]+m)*$", + "file": 1, + "line": 2, + "column": 3 + } + ] + } + ] +} diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 964be478fa3a..4f3e1b7df0d4 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -66,6 +66,10 @@ jobs: rustup component add --toolchain ${{ env.RUST_CHANNEL }} rustfmt rust-src rustup default ${{ env.RUST_CHANNEL }} + - name: Install Rust Problem Matcher + if: matrix.os == 'ubuntu-latest' + run: echo "::add-matcher::.github/rust.json" + - name: Cache Dependencies uses: Swatinem/rust-cache@988c164c3d0e93c4dbab36aaf5bbeb77425b2894 with: From f481181a14456f666fb701651905378003d319c6 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 14 Feb 2024 11:34:43 +0100 Subject: [PATCH 063/127] Run rustfmt directly on CI --- .github/workflows/ci.yaml | 6 +++++- crates/rust-analyzer/tests/slow-tests/tidy.rs | 21 ------------------- 2 files changed, 5 insertions(+), 22 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 4f3e1b7df0d4..d751c3ee0da6 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml 
@@ -65,7 +65,7 @@ jobs: rustup update --no-self-update ${{ env.RUST_CHANNEL }} rustup component add --toolchain ${{ env.RUST_CHANNEL }} rustfmt rust-src rustup default ${{ env.RUST_CHANNEL }} - + # https://github.com/actions-rust-lang/setup-rust-toolchain/blob/main/rust.json - name: Install Rust Problem Matcher if: matrix.os == 'ubuntu-latest' run: echo "::add-matcher::.github/rust.json" @@ -111,6 +111,10 @@ jobs: if: matrix.os == 'windows-latest' run: cargo clippy --all-targets -- -D clippy::disallowed_macros -D clippy::dbg_macro -D clippy::todo -D clippy::print_stdout -D clippy::print_stderr + - name: rustfmt + if: matrix.os == 'ubuntu-latest' + run: cargo fmt -- --check + # Weird targets to catch non-portable code rust-cross: if: github.repository == 'rust-lang/rust-analyzer' diff --git a/crates/rust-analyzer/tests/slow-tests/tidy.rs b/crates/rust-analyzer/tests/slow-tests/tidy.rs index 3e38fc3ebcd7..78da4487d4c9 100644 --- a/crates/rust-analyzer/tests/slow-tests/tidy.rs +++ b/crates/rust-analyzer/tests/slow-tests/tidy.rs @@ -9,27 +9,6 @@ use xshell::Shell; #[cfg(not(feature = "in-rust-tree"))] use xshell::cmd; -#[cfg(not(feature = "in-rust-tree"))] -#[test] -fn check_code_formatting() { - let sh = &Shell::new().unwrap(); - sh.change_dir(sourcegen::project_root()); - - let out = cmd!(sh, "rustup run stable rustfmt --version").read().unwrap(); - if !out.contains("stable") { - panic!( - "Failed to run rustfmt from toolchain 'stable'. 
\ - Please run `rustup component add rustfmt --toolchain stable` to install it.", - ) - } - - let res = cmd!(sh, "rustup run stable cargo fmt -- --check").run(); - if res.is_err() { - let _ = cmd!(sh, "rustup run stable cargo fmt").run(); - } - res.unwrap() -} - #[test] fn check_lsp_extensions_docs() { let sh = &Shell::new().unwrap(); From 465ddef7cc095317ef2f9e3c238573eb23f8c911 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 14 Feb 2024 15:13:45 +0100 Subject: [PATCH 064/127] fix: Set RUSTUP_TOOLCHAIN and invoke the proxies instead of directly invoking sysroot binaries --- crates/flycheck/src/lib.rs | 23 ++++++++--- crates/project-model/src/build_scripts.rs | 12 +++--- crates/project-model/src/cargo_workspace.rs | 10 +++-- crates/project-model/src/rustc_cfg.rs | 13 +++---- crates/project-model/src/sysroot.rs | 24 +++--------- .../project-model/src/target_data_layout.rs | 8 ++-- crates/project-model/src/workspace.rs | 30 +++++++------- crates/rust-analyzer/src/handlers/request.rs | 2 +- crates/rust-analyzer/src/reload.rs | 39 ++++++++++--------- 9 files changed, 82 insertions(+), 79 deletions(-) diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index 71882b6f0dcf..ee39a2790bc3 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -23,6 +23,7 @@ pub use cargo_metadata::diagnostic::{ Applicability, Diagnostic, DiagnosticCode, DiagnosticLevel, DiagnosticSpan, DiagnosticSpanMacroExpansion, }; +use toolchain::Tool; #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] pub enum InvocationStrategy { @@ -89,10 +90,10 @@ impl FlycheckHandle { id: usize, sender: Box, config: FlycheckConfig, - cargo: PathBuf, + sysroot_root: Option, workspace_root: AbsPathBuf, ) -> FlycheckHandle { - let actor = FlycheckActor::new(id, sender, config, cargo, workspace_root); + let actor = FlycheckActor::new(id, sender, config, sysroot_root, workspace_root); let (sender, receiver) = unbounded::(); let thread = 
stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker) .name("Flycheck".to_owned()) @@ -174,7 +175,7 @@ struct FlycheckActor { /// Either the workspace root of the workspace we are flychecking, /// or the project root of the project. root: AbsPathBuf, - cargo: PathBuf, + sysroot_root: Option, /// CargoHandle exists to wrap around the communication needed to be able to /// run `cargo check` without blocking. Currently the Rust standard library /// doesn't provide a way to read sub-process output without blocking, so we @@ -195,11 +196,18 @@ impl FlycheckActor { id: usize, sender: Box, config: FlycheckConfig, - cargo: PathBuf, + sysroot_root: Option, workspace_root: AbsPathBuf, ) -> FlycheckActor { tracing::info!(%id, ?workspace_root, "Spawning flycheck"); - FlycheckActor { id, sender, config, cargo, root: workspace_root, command_handle: None } + FlycheckActor { + id, + sender, + config, + sysroot_root, + root: workspace_root, + command_handle: None, + } } fn report_progress(&self, progress: Progress) { @@ -334,7 +342,10 @@ impl FlycheckActor { ansi_color_output, target_dir, } => { - let mut cmd = Command::new(&self.cargo); + let mut cmd = Command::new(Tool::Cargo.path()); + if let Some(sysroot_root) = &self.sysroot_root { + cmd.env("RUSTUP_TOOLCHAIN", AsRef::::as_ref(sysroot_root)); + } cmd.arg(command); cmd.current_dir(&self.root); diff --git a/crates/project-model/src/build_scripts.rs b/crates/project-model/src/build_scripts.rs index 51b9c8f5769f..98060e876ac1 100644 --- a/crates/project-model/src/build_scripts.rs +++ b/crates/project-model/src/build_scripts.rs @@ -71,10 +71,8 @@ impl WorkspaceBuildScripts { cmd } _ => { - let mut cmd = Command::new( - Sysroot::discover_tool(sysroot, Tool::Cargo) - .map_err(|e| io::Error::new(io::ErrorKind::NotFound, e))?, - ); + let mut cmd = Command::new(Tool::Cargo.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]); 
cmd.args(&config.extra_args); @@ -431,7 +429,8 @@ impl WorkspaceBuildScripts { } let res = (|| { let target_libdir = (|| { - let mut cargo_config = Command::new(Sysroot::discover_tool(sysroot, Tool::Cargo)?); + let mut cargo_config = Command::new(Tool::Cargo.path()); + Sysroot::set_rustup_toolchain_env(&mut cargo_config, sysroot); cargo_config.envs(extra_env); cargo_config .current_dir(current_dir) @@ -440,7 +439,8 @@ impl WorkspaceBuildScripts { if let Ok(it) = utf8_stdout(cargo_config) { return Ok(it); } - let mut cmd = Command::new(Sysroot::discover_tool(sysroot, Tool::Rustc)?); + let mut cmd = Command::new(Tool::Rustc.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); cmd.envs(extra_env); cmd.args(["--print", "target-libdir"]); utf8_stdout(cmd) diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs index dae03afde22c..8136dc221f2e 100644 --- a/crates/project-model/src/cargo_workspace.rs +++ b/crates/project-model/src/cargo_workspace.rs @@ -243,7 +243,7 @@ impl CargoWorkspace { let targets = find_list_of_build_targets(config, cargo_toml, sysroot); let mut meta = MetadataCommand::new(); - meta.cargo_path(Sysroot::discover_tool(sysroot, Tool::Cargo)?); + meta.cargo_path(Tool::Cargo.path()); meta.manifest_path(cargo_toml.to_path_buf()); match &config.features { CargoFeatures::All => { @@ -291,6 +291,7 @@ impl CargoWorkspace { (|| -> Result { let mut command = meta.cargo_command(); + Sysroot::set_rustup_toolchain_env(&mut command, sysroot); command.envs(&config.extra_env); let output = command.output()?; if !output.status.success() { @@ -500,7 +501,8 @@ fn rustc_discover_host_triple( extra_env: &FxHashMap, sysroot: Option<&Sysroot>, ) -> Option { - let mut rustc = Command::new(Sysroot::discover_tool(sysroot, Tool::Rustc).ok()?); + let mut rustc = Command::new(Tool::Rustc.path()); + Sysroot::set_rustup_toolchain_env(&mut rustc, sysroot); rustc.envs(extra_env); 
rustc.current_dir(cargo_toml.parent()).arg("-vV"); tracing::debug!("Discovering host platform by {:?}", rustc); @@ -528,8 +530,8 @@ fn cargo_config_build_target( extra_env: &FxHashMap, sysroot: Option<&Sysroot>, ) -> Vec { - let Ok(program) = Sysroot::discover_tool(sysroot, Tool::Cargo) else { return vec![] }; - let mut cargo_config = Command::new(program); + let mut cargo_config = Command::new(Tool::Cargo.path()); + Sysroot::set_rustup_toolchain_env(&mut cargo_config, sysroot); cargo_config.envs(extra_env); cargo_config .current_dir(cargo_toml.parent()) diff --git a/crates/project-model/src/rustc_cfg.rs b/crates/project-model/src/rustc_cfg.rs index 1d07e4a8a656..1ad6e7255bf1 100644 --- a/crates/project-model/src/rustc_cfg.rs +++ b/crates/project-model/src/rustc_cfg.rs @@ -68,11 +68,11 @@ fn get_rust_cfgs( config: RustcCfgConfig<'_>, ) -> anyhow::Result { let sysroot = match config { - RustcCfgConfig::Cargo(sysroot, cargo_oml) => { - let cargo = Sysroot::discover_tool(sysroot, toolchain::Tool::Cargo)?; - let mut cmd = Command::new(cargo); + RustcCfgConfig::Cargo(sysroot, cargo_toml) => { + let mut cmd = Command::new(toolchain::Tool::Cargo.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); cmd.envs(extra_env); - cmd.current_dir(cargo_oml.parent()) + cmd.current_dir(cargo_toml.parent()) .args(["rustc", "-Z", "unstable-options", "--print", "cfg"]) .env("RUSTC_BOOTSTRAP", "1"); if let Some(target) = target { @@ -90,9 +90,8 @@ fn get_rust_cfgs( RustcCfgConfig::Rustc(sysroot) => sysroot, }; - let rustc = Sysroot::discover_tool(sysroot, toolchain::Tool::Rustc)?; - tracing::debug!(?rustc, "using explicit rustc from sysroot"); - let mut cmd = Command::new(rustc); + let mut cmd = Command::new(toolchain::Tool::Rustc.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); cmd.envs(extra_env); cmd.args(["--print", "cfg", "-O"]); if let Some(target) = target { diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs index 
24c2edd2c4fc..07cfaba2d2ca 100644 --- a/crates/project-model/src/sysroot.rs +++ b/crates/project-model/src/sysroot.rs @@ -6,13 +6,13 @@ use std::{env, fs, iter, ops, path::PathBuf, process::Command, sync::Arc}; -use anyhow::{format_err, Context, Result}; +use anyhow::{format_err, Result}; use base_db::CrateName; use itertools::Itertools; use la_arena::{Arena, Idx}; use paths::{AbsPath, AbsPathBuf}; use rustc_hash::FxHashMap; -use toolchain::{probe_for_binary, Tool}; +use toolchain::probe_for_binary; use crate::{utf8_stdout, CargoConfig, CargoWorkspace, ManifestPath}; @@ -193,23 +193,9 @@ impl Sysroot { Ok(Sysroot::load(sysroot_dir, Some(sysroot_src_dir), metadata)) } - pub fn discover_binary(&self, binary: &str) -> anyhow::Result { - toolchain::probe_for_binary(self.root.join("bin").join(binary).into()) - .ok_or_else(|| anyhow::anyhow!("no rustc binary found in {}", self.root.join("bin"))) - .and_then(|rustc| { - fs::metadata(&rustc).map(|_| AbsPathBuf::assert(rustc)).with_context(|| { - format!( - "failed to discover rustc in sysroot: {:?}", - AsRef::::as_ref(&self.root) - ) - }) - }) - } - - pub fn discover_tool(sysroot: Option<&Self>, tool: Tool) -> anyhow::Result { - match sysroot { - Some(sysroot) => sysroot.discover_binary(tool.name()).map(Into::into), - None => Ok(tool.path()), + pub fn set_rustup_toolchain_env(cmd: &mut Command, sysroot: Option<&Self>) { + if let Some(sysroot) = sysroot { + cmd.env("RUSTUP_TOOLCHAIN", AsRef::::as_ref(&sysroot.root)); } } diff --git a/crates/project-model/src/target_data_layout.rs b/crates/project-model/src/target_data_layout.rs index 4c5200cee81b..af635dda5782 100644 --- a/crates/project-model/src/target_data_layout.rs +++ b/crates/project-model/src/target_data_layout.rs @@ -28,8 +28,8 @@ pub fn get( }; let sysroot = match config { RustcDataLayoutConfig::Cargo(sysroot, cargo_toml) => { - let cargo = Sysroot::discover_tool(sysroot, toolchain::Tool::Cargo)?; - let mut cmd = Command::new(cargo); + let mut cmd = 
Command::new(toolchain::Tool::Cargo.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); cmd.envs(extra_env); cmd.current_dir(cargo_toml.parent()) .args(["rustc", "--", "-Z", "unstable-options", "--print", "target-spec-json"]) @@ -48,8 +48,8 @@ pub fn get( RustcDataLayoutConfig::Rustc(sysroot) => sysroot, }; - let rustc = Sysroot::discover_tool(sysroot, toolchain::Tool::Rustc)?; - let mut cmd = Command::new(rustc); + let mut cmd = Command::new(toolchain::Tool::Rustc.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); cmd.envs(extra_env) .args(["-Z", "unstable-options", "--print", "target-spec-json"]) .env("RUSTC_BOOTSTRAP", "1"); diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index cacd506d1b21..6a53fd00aeb5 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -2,9 +2,7 @@ //! metadata` or `rust-project.json`) into representation stored in the salsa //! database -- `CrateGraph`. 
-use std::{ - collections::VecDeque, fmt, fs, iter, path::PathBuf, process::Command, str::FromStr, sync, -}; +use std::{collections::VecDeque, fmt, fs, iter, process::Command, str::FromStr, sync}; use anyhow::{format_err, Context}; use base_db::{ @@ -16,6 +14,7 @@ use paths::{AbsPath, AbsPathBuf}; use rustc_hash::{FxHashMap, FxHashSet}; use semver::Version; use stdx::always; +use toolchain::Tool; use triomphe::Arc; use crate::{ @@ -163,12 +162,14 @@ impl fmt::Debug for ProjectWorkspace { fn get_toolchain_version( current_dir: &AbsPath, - cmd_path: Result, + sysroot: Option<&Sysroot>, + tool: Tool, extra_env: &FxHashMap, prefix: &str, ) -> Result, anyhow::Error> { let cargo_version = utf8_stdout({ - let mut cmd = Command::new(cmd_path?); + let mut cmd = Command::new(tool.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); cmd.envs(extra_env); cmd.arg("--version").current_dir(current_dir); cmd @@ -289,7 +290,8 @@ impl ProjectWorkspace { let toolchain = get_toolchain_version( cargo_toml.parent(), - Sysroot::discover_tool(sysroot_ref, toolchain::Tool::Cargo), + sysroot_ref, + toolchain::Tool::Cargo, &config.extra_env, "cargo ", )?; @@ -370,9 +372,13 @@ impl ProjectWorkspace { let sysroot_ref = sysroot.as_ref().ok(); let cfg_config = RustcCfgConfig::Rustc(sysroot_ref); let data_layout_config = RustcDataLayoutConfig::Rustc(sysroot_ref); - let rustc = Sysroot::discover_tool(sysroot_ref, toolchain::Tool::Rustc).map(Into::into); - let toolchain = match get_toolchain_version(project_json.path(), rustc, extra_env, "rustc ") - { + let toolchain = match get_toolchain_version( + project_json.path(), + sysroot_ref, + toolchain::Tool::Rustc, + extra_env, + "rustc ", + ) { Ok(it) => it, Err(e) => { tracing::error!("{e}"); @@ -1615,10 +1621,8 @@ fn cargo_config_env( extra_env: &FxHashMap, sysroot: Option<&Sysroot>, ) -> FxHashMap { - let Ok(program) = Sysroot::discover_tool(sysroot, toolchain::Tool::Cargo) else { - return Default::default(); - }; - let mut cargo_config 
= Command::new(program); + let mut cargo_config = Command::new(Tool::Cargo.path()); + Sysroot::set_rustup_toolchain_env(&mut cargo_config, sysroot); cargo_config.envs(extra_env); cargo_config .current_dir(cargo_toml.parent()) diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs index b175dbc895ce..1eb840afe5ed 100644 --- a/crates/rust-analyzer/src/handlers/request.rs +++ b/crates/rust-analyzer/src/handlers/request.rs @@ -1937,7 +1937,7 @@ fn run_rustfmt( let mut command = match snap.config.rustfmt() { RustfmtConfig::Rustfmt { extra_args, enable_range_formatting } => { - // FIXME: This should use the sysroot's rustfmt if its loaded + // FIXME: Set RUSTUP_TOOLCHAIN let mut cmd = process::Command::new(toolchain::rustfmt()); cmd.envs(snap.config.extra_env()); cmd.args(extra_args); diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 93b6a53908f2..f223bb40c4a2 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -24,7 +24,7 @@ use ide_db::{ use itertools::Itertools; use load_cargo::{load_proc_macro, ProjectFolders}; use proc_macro_api::ProcMacroServer; -use project_model::{ProjectWorkspace, Sysroot, WorkspaceBuildScripts}; +use project_model::{ProjectWorkspace, WorkspaceBuildScripts}; use rustc_hash::FxHashSet; use stdx::{format_to, thread::ThreadIntent}; use triomphe::Arc; @@ -468,16 +468,20 @@ impl GlobalState { None => ws.find_sysroot_proc_macro_srv()?, }; - let env = match ws { - ProjectWorkspace::Cargo { cargo_config_extra_env, .. } => { - cargo_config_extra_env - .iter() - .chain(self.config.extra_env()) - .map(|(a, b)| (a.clone(), b.clone())) - .collect() - } - _ => Default::default(), - }; + let env = + match ws { + ProjectWorkspace::Cargo { cargo_config_extra_env, sysroot, .. 
} => { + cargo_config_extra_env + .iter() + .chain(self.config.extra_env()) + .map(|(a, b)| (a.clone(), b.clone())) + .chain(sysroot.as_ref().map(|it| { + ("RUSTUP_TOOLCHAIN".to_owned(), it.root().to_string()) + })) + .collect() + } + _ => Default::default(), + }; tracing::info!("Using proc-macro server at {path}"); ProcMacroServer::spawn(path.clone(), &env).map_err(|err| { @@ -620,7 +624,7 @@ impl GlobalState { 0, Box::new(move |msg| sender.send(msg).unwrap()), config, - toolchain::cargo(), + None, self.config.root_path().clone(), )], flycheck::InvocationStrategy::PerWorkspace => { @@ -631,7 +635,7 @@ impl GlobalState { ProjectWorkspace::Cargo { cargo, sysroot, .. } => Some(( id, cargo.workspace_root(), - Sysroot::discover_tool(sysroot.as_ref().ok(), toolchain::Tool::Cargo), + sysroot.as_ref().ok().map(|sysroot| sysroot.root().to_owned()), )), ProjectWorkspace::Json { project, sysroot, .. } => { // Enable flychecks for json projects if a custom flycheck command was supplied @@ -640,23 +644,20 @@ impl GlobalState { FlycheckConfig::CustomCommand { .. } => Some(( id, project.path(), - Sysroot::discover_tool( - sysroot.as_ref().ok(), - toolchain::Tool::Cargo, - ), + sysroot.as_ref().ok().map(|sysroot| sysroot.root().to_owned()), )), _ => None, } } ProjectWorkspace::DetachedFiles { .. 
} => None, }) - .map(|(id, root, cargo)| { + .map(|(id, root, sysroot_root)| { let sender = sender.clone(); FlycheckHandle::spawn( id, Box::new(move |msg| sender.send(msg).unwrap()), config.clone(), - cargo.unwrap_or_else(|_| toolchain::cargo()), + sysroot_root, root.to_path_buf(), ) }) From 1e6cef94dfbc359e7b1e7d392de387c2c539b965 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 4 Jan 2024 10:19:25 +0100 Subject: [PATCH 065/127] fix: Fix build scripts not being rebuilt in some occasions --- crates/hir-def/src/nameres/collector.rs | 8 ++-- crates/hir-expand/src/lib.rs | 4 +- crates/hir-expand/src/proc_macro.rs | 8 +++- crates/ide/src/parent_module.rs | 2 +- crates/project-model/src/build_scripts.rs | 8 +++- crates/project-model/src/cargo_workspace.rs | 12 +++--- crates/project-model/src/workspace.rs | 36 ++++++++-------- crates/rust-analyzer/src/cargo_target_spec.rs | 2 +- crates/rust-analyzer/src/config.rs | 2 +- crates/rust-analyzer/src/global_state.rs | 41 ++++++++++++------- .../src/handlers/notification.rs | 12 +++--- crates/rust-analyzer/src/handlers/request.rs | 4 +- crates/rust-analyzer/src/main_loop.rs | 38 ++++++++++------- crates/rust-analyzer/src/reload.rs | 17 ++++++-- docs/user/generated_config.adoc | 2 +- editors/code/package.json | 2 +- 16 files changed, 121 insertions(+), 77 deletions(-) diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 8f64df280c18..88838f58fe78 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -101,9 +101,9 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI if it.disabled { CustomProcMacroExpander::disabled() } else { - CustomProcMacroExpander::new(hir_expand::proc_macro::ProcMacroId( - idx as u32, - )) + CustomProcMacroExpander::new( + hir_expand::proc_macro::ProcMacroId::new(idx as u32), + ) }, ) }) @@ -2354,7 +2354,7 @@ impl ModCollector<'_, '_> { 
resolved_res.resolved_def.take_macros().map(|it| db.macro_def(it)) }, ) { - // FIXME: if there were errors, this mightve been in the eager expansion from an + // FIXME: if there were errors, this might've been in the eager expansion from an // unresolved macro, so we need to push this into late macro resolution. see fixme above if res.err.is_none() { // Legacy macros need to be expanded immediately, so that any macros they produce diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 3a7febc2eb89..020ca75d80cb 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -227,8 +227,8 @@ pub enum MacroCallKind { }, Attr { ast_id: AstId, - // FIXME: This is being interned, subtrees can vary quickly differ just slightly causing - // leakage problems here + // FIXME: This shouldn't be here, we can derive this from `invoc_attr_index` + // but we need to fix the `cfg_attr` handling first. attr_args: Option>, /// Syntactical index of the invoking `#[attribute]`. 
/// diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs index 7c4de2f1b873..ca6fc0afe2d7 100644 --- a/crates/hir-expand/src/proc_macro.rs +++ b/crates/hir-expand/src/proc_macro.rs @@ -12,7 +12,13 @@ use syntax::SmolStr; use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult}; #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct ProcMacroId(pub u32); +pub struct ProcMacroId(u32); + +impl ProcMacroId { + pub fn new(u32: u32) -> Self { + ProcMacroId(u32) + } +} #[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] pub enum ProcMacroKind { diff --git a/crates/ide/src/parent_module.rs b/crates/ide/src/parent_module.rs index 413dbf9c5dfc..f67aea2d5b9c 100644 --- a/crates/ide/src/parent_module.rs +++ b/crates/ide/src/parent_module.rs @@ -54,7 +54,7 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec Vec { db.relevant_crates(file_id) .iter() diff --git a/crates/project-model/src/build_scripts.rs b/crates/project-model/src/build_scripts.rs index 51b9c8f5769f..c710629dc889 100644 --- a/crates/project-model/src/build_scripts.rs +++ b/crates/project-model/src/build_scripts.rs @@ -24,7 +24,7 @@ use toolchain::Tool; use crate::{ cfg_flag::CfgFlag, utf8_stdout, CargoConfig, CargoFeatures, CargoWorkspace, InvocationLocation, - InvocationStrategy, Package, Sysroot, + InvocationStrategy, Package, Sysroot, TargetKind, }; #[derive(Debug, Default, Clone, PartialEq, Eq)] @@ -467,7 +467,11 @@ impl WorkspaceBuildScripts { .collect(); for p in rustc.packages() { let package = &rustc[p]; - if package.targets.iter().any(|&it| rustc[it].is_proc_macro) { + if package + .targets + .iter() + .any(|&it| matches!(rustc[it].kind, TargetKind::Lib { is_proc_macro: true })) + { if let Some((_, path)) = proc_macro_dylibs .iter() .find(|(name, _)| *name.trim_start_matches("lib") == package.name) diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs index 
dae03afde22c..946a742294e0 100644 --- a/crates/project-model/src/cargo_workspace.rs +++ b/crates/project-model/src/cargo_workspace.rs @@ -189,8 +189,6 @@ pub struct TargetData { pub root: AbsPathBuf, /// Kind of target pub kind: TargetKind, - /// Is this target a proc-macro - pub is_proc_macro: bool, /// Required features of the target without which it won't build pub required_features: Vec, } @@ -199,7 +197,10 @@ pub struct TargetData { pub enum TargetKind { Bin, /// Any kind of Cargo lib crate-type (dylib, rlib, proc-macro, ...). - Lib, + Lib { + /// Is this target a proc-macro + is_proc_macro: bool, + }, Example, Test, Bench, @@ -216,8 +217,8 @@ impl TargetKind { "bench" => TargetKind::Bench, "example" => TargetKind::Example, "custom-build" => TargetKind::BuildScript, - "proc-macro" => TargetKind::Lib, - _ if kind.contains("lib") => TargetKind::Lib, + "proc-macro" => TargetKind::Lib { is_proc_macro: true }, + _ if kind.contains("lib") => TargetKind::Lib { is_proc_macro: false }, _ => continue, }; } @@ -370,7 +371,6 @@ impl CargoWorkspace { name, root: AbsPathBuf::assert(src_path.into()), kind: TargetKind::new(&kind), - is_proc_macro: *kind == ["proc-macro"], required_features, }); pkg_data.targets.push(tgt); diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index cacd506d1b21..42fa1e74fd4d 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -619,7 +619,7 @@ impl ProjectWorkspace { let extra_targets = cargo[pkg] .targets .iter() - .filter(|&&tgt| cargo[tgt].kind == TargetKind::Lib) + .filter(|&&tgt| matches!(cargo[tgt].kind, TargetKind::Lib { .. 
})) .filter_map(|&tgt| cargo[tgt].root.parent()) .map(|tgt| tgt.normalize().to_path_buf()) .filter(|path| !path.starts_with(&pkg_root)); @@ -985,7 +985,7 @@ fn cargo_to_crate_graph( let mut lib_tgt = None; for &tgt in cargo[pkg].targets.iter() { - if cargo[tgt].kind != TargetKind::Lib && !cargo[pkg].is_member { + if !matches!(cargo[tgt].kind, TargetKind::Lib { .. }) && !cargo[pkg].is_member { // For non-workspace-members, Cargo does not resolve dev-dependencies, so we don't // add any targets except the library target, since those will not work correctly if // they use dev-dependencies. @@ -993,7 +993,7 @@ fn cargo_to_crate_graph( // https://github.com/rust-lang/rust-analyzer/issues/11300 continue; } - let &TargetData { ref name, kind, is_proc_macro, ref root, .. } = &cargo[tgt]; + let &TargetData { ref name, kind, ref root, .. } = &cargo[tgt]; let Some(file_id) = load(root) else { continue }; @@ -1005,19 +1005,24 @@ fn cargo_to_crate_graph( cfg_options.clone(), file_id, name, - is_proc_macro, + kind, target_layout.clone(), false, toolchain.cloned(), ); - if kind == TargetKind::Lib { + if let TargetKind::Lib { .. } = kind { lib_tgt = Some((crate_id, name.clone())); pkg_to_lib_crate.insert(pkg, crate_id); } // Even crates that don't set proc-macro = true are allowed to depend on proc_macro // (just none of the APIs work when called outside of a proc macro). 
if let Some(proc_macro) = libproc_macro { - add_proc_macro_dep(crate_graph, crate_id, proc_macro, is_proc_macro); + add_proc_macro_dep( + crate_graph, + crate_id, + proc_macro, + matches!(kind, TargetKind::Lib { is_proc_macro: true }), + ); } pkg_crates.entry(pkg).or_insert_with(Vec::new).push((crate_id, kind)); @@ -1215,9 +1220,9 @@ fn handle_rustc_crates( }; for &tgt in rustc_workspace[pkg].targets.iter() { - if rustc_workspace[tgt].kind != TargetKind::Lib { + let kind @ TargetKind::Lib { is_proc_macro } = rustc_workspace[tgt].kind else { continue; - } + }; if let Some(file_id) = load(&rustc_workspace[tgt].root) { let crate_id = add_target_crate_root( crate_graph, @@ -1227,7 +1232,7 @@ fn handle_rustc_crates( cfg_options.clone(), file_id, &rustc_workspace[tgt].name, - rustc_workspace[tgt].is_proc_macro, + kind, target_layout.clone(), true, toolchain.cloned(), @@ -1236,12 +1241,7 @@ fn handle_rustc_crates( // Add dependencies on core / std / alloc for this crate public_deps.add_to_crate_graph(crate_graph, crate_id); if let Some(proc_macro) = libproc_macro { - add_proc_macro_dep( - crate_graph, - crate_id, - proc_macro, - rustc_workspace[tgt].is_proc_macro, - ); + add_proc_macro_dep(crate_graph, crate_id, proc_macro, is_proc_macro); } rustc_pkg_crates.entry(pkg).or_insert_with(Vec::new).push(crate_id); } @@ -1303,7 +1303,7 @@ fn add_target_crate_root( cfg_options: CfgOptions, file_id: FileId, cargo_name: &str, - is_proc_macro: bool, + kind: TargetKind, target_layout: TargetLayoutLoadResult, rustc_crate: bool, toolchain: Option, @@ -1353,7 +1353,7 @@ fn add_target_crate_root( cfg_options, potential_cfg_options, env, - is_proc_macro, + matches!(kind, TargetKind::Lib { is_proc_macro: true }), if rustc_crate { CrateOrigin::Rustc { name: pkg.name.clone() } } else if pkg.is_member { @@ -1364,7 +1364,7 @@ fn add_target_crate_root( target_layout, toolchain, ); - if is_proc_macro { + if let TargetKind::Lib { is_proc_macro: true } = kind { let proc_macro = match 
build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) { Some(it) => it.cloned().map(|path| Ok((Some(cargo_name.to_owned()), path))), None => Some(Err("crate has not yet been built".to_owned())), diff --git a/crates/rust-analyzer/src/cargo_target_spec.rs b/crates/rust-analyzer/src/cargo_target_spec.rs index 0190ca3cab85..9a9357a53989 100644 --- a/crates/rust-analyzer/src/cargo_target_spec.rs +++ b/crates/rust-analyzer/src/cargo_target_spec.rs @@ -174,7 +174,7 @@ impl CargoTargetSpec { buf.push("--example".to_owned()); buf.push(self.target); } - TargetKind::Lib => { + TargetKind::Lib { is_proc_macro: _ } => { buf.push("--lib".to_owned()); } TargetKind::Other | TargetKind::BuildScript => (), diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 1c26871a1c2b..9bb972e24b2e 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -112,7 +112,7 @@ config_data! { cargo_buildScripts_overrideCommand: Option> = "null", /// Rerun proc-macros building/build-scripts running when proc-macro /// or build-script sources change and are saved. - cargo_buildScripts_rebuildOnSave: bool = "false", + cargo_buildScripts_rebuildOnSave: bool = "true", /// Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to /// avoid checking unnecessary things. 
cargo_buildScripts_useRustcWrapper: bool = "true", diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index da4422a60a8a..293807a383ba 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -9,7 +9,7 @@ use crossbeam_channel::{unbounded, Receiver, Sender}; use flycheck::FlycheckHandle; use hir::Change; use ide::{Analysis, AnalysisHost, Cancellable, FileId}; -use ide_db::base_db::{CrateId, FileLoader, ProcMacroPaths, SourceDatabase}; +use ide_db::base_db::{CrateId, ProcMacroPaths}; use load_cargo::SourceRootConfig; use lsp_types::{SemanticTokens, Url}; use nohash_hasher::IntMap; @@ -74,8 +74,8 @@ pub(crate) struct GlobalState { pub(crate) last_reported_status: Option, // proc macros - pub(crate) proc_macro_changed: bool, pub(crate) proc_macro_clients: Arc<[anyhow::Result]>, + pub(crate) build_deps_changed: bool, // Flycheck pub(crate) flycheck: Arc<[FlycheckHandle]>, @@ -203,9 +203,10 @@ impl GlobalState { source_root_config: SourceRootConfig::default(), config_errors: Default::default(), - proc_macro_changed: false, proc_macro_clients: Arc::from_iter([]), + build_deps_changed: false, + flycheck: Arc::from_iter([]), flycheck_sender, flycheck_receiver, @@ -300,12 +301,19 @@ impl GlobalState { if let Some(path) = vfs_path.as_path() { let path = path.to_path_buf(); if reload::should_refresh_for_change(&path, file.kind()) { - workspace_structure_change = Some((path.clone(), false)); + workspace_structure_change = Some(( + path.clone(), + false, + AsRef::::as_ref(&path).ends_with("build.rs"), + )); } if file.is_created_or_deleted() { has_structure_changes = true; - workspace_structure_change = - Some((path, self.crate_graph_file_dependencies.contains(vfs_path))); + workspace_structure_change = Some(( + path, + self.crate_graph_file_dependencies.contains(vfs_path), + false, + )); } else if path.extension() == Some("rs".as_ref()) { modified_rust_files.push(file.file_id); } @@ 
-346,23 +354,28 @@ impl GlobalState { }; self.analysis_host.apply_change(change); + { - let raw_database = self.analysis_host.raw_database(); + if !matches!(&workspace_structure_change, Some((.., true))) { + _ = self + .deferred_task_queue + .sender + .send(crate::main_loop::QueuedTask::CheckProcMacroSources(modified_rust_files)); + } // FIXME: ideally we should only trigger a workspace fetch for non-library changes // but something's going wrong with the source root business when we add a new local // crate see https://github.com/rust-lang/rust-analyzer/issues/13029 - if let Some((path, force_crate_graph_reload)) = workspace_structure_change { + if let Some((path, force_crate_graph_reload, build_scripts_touched)) = + workspace_structure_change + { self.fetch_workspaces_queue.request_op( format!("workspace vfs file change: {path}"), force_crate_graph_reload, ); + if build_scripts_touched { + self.fetch_build_data_queue.request_op(format!("build.rs changed: {path}"), ()); + } } - self.proc_macro_changed = modified_rust_files.into_iter().any(|file_id| { - let crates = raw_database.relevant_crates(file_id); - let crate_graph = raw_database.crate_graph(); - - crates.iter().any(|&krate| crate_graph[krate].is_proc_macro) - }); } true diff --git a/crates/rust-analyzer/src/handlers/notification.rs b/crates/rust-analyzer/src/handlers/notification.rs index 09ca4392f46e..b13c709dbfe6 100644 --- a/crates/rust-analyzer/src/handlers/notification.rs +++ b/crates/rust-analyzer/src/handlers/notification.rs @@ -145,11 +145,11 @@ pub(crate) fn handle_did_save_text_document( state: &mut GlobalState, params: DidSaveTextDocumentParams, ) -> anyhow::Result<()> { - if state.config.script_rebuild_on_save() && state.proc_macro_changed { - // reset the flag - state.proc_macro_changed = false; - // rebuild the proc macros - state.fetch_build_data_queue.request_op("ScriptRebuildOnSave".to_owned(), ()); + if state.config.script_rebuild_on_save() && state.build_deps_changed { + 
state.build_deps_changed = false; + state + .fetch_build_data_queue + .request_op("build_deps_changed - save notification".to_owned(), ()); } if let Ok(vfs_path) = from_proto::vfs_path(¶ms.text_document.uri) { @@ -158,7 +158,7 @@ pub(crate) fn handle_did_save_text_document( if reload::should_refresh_for_change(abs_path, ChangeKind::Modify) { state .fetch_workspaces_queue - .request_op(format!("DidSaveTextDocument {abs_path}"), false); + .request_op(format!("workspace vfs file change saved {abs_path}"), false); } } diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs index b175dbc895ce..7a3933e899be 100644 --- a/crates/rust-analyzer/src/handlers/request.rs +++ b/crates/rust-analyzer/src/handlers/request.rs @@ -52,7 +52,7 @@ use crate::{ pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow::Result<()> { state.proc_macro_clients = Arc::from_iter([]); - state.proc_macro_changed = false; + state.build_deps_changed = false; state.fetch_workspaces_queue.request_op("reload workspace request".to_owned(), false); Ok(()) @@ -60,7 +60,7 @@ pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow: pub(crate) fn handle_proc_macros_rebuild(state: &mut GlobalState, _: ()) -> anyhow::Result<()> { state.proc_macro_clients = Arc::from_iter([]); - state.proc_macro_changed = false; + state.build_deps_changed = false; state.fetch_build_data_queue.request_op("rebuild proc macros request".to_owned(), ()); Ok(()) diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 5c3d34cc5432..72f6d0fde5fe 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -8,11 +8,10 @@ use std::{ use always_assert::always; use crossbeam_channel::{select, Receiver}; -use ide_db::base_db::{SourceDatabaseExt, VfsPath}; +use ide_db::base_db::{SourceDatabase, SourceDatabaseExt, VfsPath}; use lsp_server::{Connection, Notification, 
Request}; use lsp_types::notification::Notification as _; use stdx::thread::ThreadIntent; -use triomphe::Arc; use vfs::FileId; use crate::{ @@ -76,6 +75,7 @@ impl fmt::Display for Event { #[derive(Debug)] pub(crate) enum QueuedTask { CheckIfIndexed(lsp_types::Url), + CheckProcMacroSources(Vec), } #[derive(Debug)] @@ -88,6 +88,7 @@ pub(crate) enum Task { FetchWorkspace(ProjectWorkspaceProgress), FetchBuildData(BuildDataProgress), LoadProcMacros(ProcMacroProgress), + BuildDepsHaveChanged, } #[derive(Debug)] @@ -357,9 +358,7 @@ impl GlobalState { } // Refresh inlay hints if the client supports it. - if (self.send_hint_refresh_query || self.proc_macro_changed) - && self.config.inlay_hints_refresh() - { + if self.send_hint_refresh_query && self.config.inlay_hints_refresh() { self.send_request::((), |_, _| ()); self.send_hint_refresh_query = false; } @@ -554,16 +553,7 @@ impl GlobalState { if let Err(e) = self.fetch_workspace_error() { tracing::error!("FetchWorkspaceError:\n{e}"); } - - let old = Arc::clone(&self.workspaces); self.switch_workspaces("fetched workspace".to_owned()); - let workspaces_updated = !Arc::ptr_eq(&old, &self.workspaces); - - if self.config.run_build_scripts() && workspaces_updated { - self.fetch_build_data_queue - .request_op("workspace updated".to_owned(), ()); - } - (Progress::End, None) } }; @@ -607,6 +597,7 @@ impl GlobalState { self.report_progress("Loading", state, msg, None, None); } } + Task::BuildDepsHaveChanged => self.build_deps_changed = true, } } @@ -685,6 +676,25 @@ impl GlobalState { } }); } + QueuedTask::CheckProcMacroSources(modified_rust_files) => { + let crate_graph = self.analysis_host.raw_database().crate_graph(); + let snap = self.snapshot(); + self.task_pool.handle.spawn_with_sender(stdx::thread::ThreadIntent::Worker, { + move |sender| { + if modified_rust_files.into_iter().any(|file_id| { + // FIXME: Check whether these files could be build script related + match snap.analysis.crates_for(file_id) { + Ok(crates) => { + 
crates.iter().any(|&krate| crate_graph[krate].is_proc_macro) + } + _ => false, + } + }) { + sender.send(Task::BuildDepsHaveChanged).unwrap(); + } + } + }); + } } } diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 93b6a53908f2..7c724b81d026 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -83,7 +83,7 @@ impl GlobalState { } if self.config.linked_or_discovered_projects() != old_config.linked_or_discovered_projects() { - self.fetch_workspaces_queue.request_op("linked projects changed".to_owned(), false) + self.fetch_workspaces_queue.request_op("discovered projects changed".to_owned(), false) } else if self.config.flycheck() != old_config.flycheck() { self.reload_flycheck(); } @@ -106,9 +106,11 @@ impl GlobalState { }; let mut message = String::new(); - if self.proc_macro_changed { + if self.build_deps_changed { status.health = lsp_ext::Health::Warning; - message.push_str("Proc-macros have changed and need to be rebuilt.\n\n"); + message.push_str( + "Proc-macros and/or build scripts have changed and need to be rebuilt.\n\n", + ); } if self.fetch_build_data_error().is_err() { status.health = lsp_ext::Health::Warning; @@ -408,6 +410,10 @@ impl GlobalState { if *force_reload_crate_graph { self.recreate_crate_graph(cause); } + if self.build_deps_changed && self.config.run_build_scripts() { + self.build_deps_changed = false; + self.fetch_build_data_queue.request_op("build_deps_changed".to_owned(), ()); + } // Current build scripts do not match the version of the active // workspace, so there's nothing for us to update. return; @@ -419,6 +425,11 @@ impl GlobalState { // we don't care about build-script results, they are stale. // FIXME: can we abort the build scripts here? 
self.workspaces = Arc::new(workspaces); + + if self.config.run_build_scripts() { + self.build_deps_changed = false; + self.fetch_build_data_queue.request_op("workspace updated".to_owned(), ()); + } } if let FilesWatcher::Client = self.config.files().watcher { diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index db61b5baf8f1..9503b9c3ac15 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -71,7 +71,7 @@ cargo check --quiet --workspace --message-format=json --all-targets ``` . -- -[[rust-analyzer.cargo.buildScripts.rebuildOnSave]]rust-analyzer.cargo.buildScripts.rebuildOnSave (default: `false`):: +[[rust-analyzer.cargo.buildScripts.rebuildOnSave]]rust-analyzer.cargo.buildScripts.rebuildOnSave (default: `true`):: + -- Rerun proc-macros building/build-scripts running when proc-macro diff --git a/editors/code/package.json b/editors/code/package.json index ff2c4b31cb04..8b33d05cc8ec 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -590,7 +590,7 @@ }, "rust-analyzer.cargo.buildScripts.rebuildOnSave": { "markdownDescription": "Rerun proc-macros building/build-scripts running when proc-macro\nor build-script sources change and are saved.", - "default": false, + "default": true, "type": "boolean" }, "rust-analyzer.cargo.buildScripts.useRustcWrapper": { From e1461399578d20f34f6f984d7a88133acd24b089 Mon Sep 17 00:00:00 2001 From: Maybe Waffle Date: Wed, 7 Jun 2023 16:30:31 +0000 Subject: [PATCH 066/127] Add support for `become` expr/tail calls --- crates/hir-def/src/body/lower.rs | 5 +++ crates/hir-def/src/body/pretty.rs | 5 +++ crates/hir-def/src/hir.rs | 4 +++ crates/hir-ty/src/infer/closure.rs | 3 ++ crates/hir-ty/src/infer/expr.rs | 22 ++++++++++++ crates/hir-ty/src/infer/mutability.rs | 3 ++ crates/hir-ty/src/mir/lower.rs | 1 + crates/ide-db/src/syntax_helpers/node_ext.rs | 1 + crates/parser/src/grammar/expressions/atom.rs | 14 ++++++++ crates/parser/src/syntax_kind/generated.rs | 
6 +++- .../parser/inline/ok/0209_become_expr.rast | 31 +++++++++++++++++ .../parser/inline/ok/0209_become_expr.rs | 3 ++ crates/syntax/rust.ungram | 4 +++ crates/syntax/src/ast/generated/nodes.rs | 34 +++++++++++++++++++ crates/syntax/src/ast/prec.rs | 8 +++-- crates/syntax/src/tests/ast_src.rs | 6 ++-- 16 files changed, 144 insertions(+), 6 deletions(-) create mode 100644 crates/parser/test_data/parser/inline/ok/0209_become_expr.rast create mode 100644 crates/parser/test_data/parser/inline/ok/0209_become_expr.rs diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index 8a589a6cf1b5..11b87f1952fb 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -416,6 +416,11 @@ impl ExprCollector<'_> { let expr = e.expr().map(|e| self.collect_expr(e)); self.alloc_expr(Expr::Return { expr }, syntax_ptr) } + ast::Expr::BecomeExpr(e) => { + let expr = + e.expr().map(|e| self.collect_expr(e)).unwrap_or_else(|| self.missing_expr()); + self.alloc_expr(Expr::Become { expr }, syntax_ptr) + } ast::Expr::YieldExpr(e) => { self.is_lowering_coroutine = true; let expr = e.expr().map(|e| self.collect_expr(e)); diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs index 4afb40865170..773b7c575e42 100644 --- a/crates/hir-def/src/body/pretty.rs +++ b/crates/hir-def/src/body/pretty.rs @@ -261,6 +261,11 @@ impl Printer<'_> { self.print_expr(*expr); } } + Expr::Become { expr } => { + w!(self, "become"); + self.whitespace(); + self.print_expr(*expr); + } Expr::Yield { expr } => { w!(self, "yield"); if let Some(expr) = expr { diff --git a/crates/hir-def/src/hir.rs b/crates/hir-def/src/hir.rs index ac44d379415c..f008ae761a56 100644 --- a/crates/hir-def/src/hir.rs +++ b/crates/hir-def/src/hir.rs @@ -216,6 +216,9 @@ pub enum Expr { Return { expr: Option, }, + Become { + expr: ExprId, + }, Yield { expr: Option, }, @@ -410,6 +413,7 @@ impl Expr { f(expr); } } + Expr::Become { expr } => f(*expr), Expr::RecordLit 
{ fields, spread, .. } => { for field in fields.iter() { f(field.expr); diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs index c3746f787067..83ad6f54d6ce 100644 --- a/crates/hir-ty/src/infer/closure.rs +++ b/crates/hir-ty/src/infer/closure.rs @@ -531,6 +531,9 @@ impl InferenceContext<'_> { self.consume_expr(expr); } } + &Expr::Become { expr } => { + self.consume_expr(expr); + } Expr::RecordLit { fields, spread, .. } => { if let &Some(expr) = spread { self.consume_expr(expr); diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 8b8e97b0081c..30c676a240c1 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -502,6 +502,7 @@ impl InferenceContext<'_> { self.result.standard_types.never.clone() } &Expr::Return { expr } => self.infer_expr_return(tgt_expr, expr), + &Expr::Become { expr } => self.infer_expr_become(expr), Expr::Yield { expr } => { if let Some((resume_ty, yield_ty)) = self.resume_yield_tys.clone() { if let Some(expr) = expr { @@ -1084,6 +1085,27 @@ impl InferenceContext<'_> { self.result.standard_types.never.clone() } + fn infer_expr_become(&mut self, expr: ExprId) -> Ty { + match &self.return_coercion { + Some(return_coercion) => { + let ret_ty = return_coercion.expected_ty(); + + let call_expr_ty = + self.infer_expr_inner(expr, &Expectation::HasType(ret_ty.clone())); + + // NB: this should *not* coerce. + // tail calls don't support any coercions except lifetimes ones (like `&'static u8 -> &'a u8`). 
+ self.unify(&call_expr_ty, &ret_ty); + } + None => { + // FIXME: diagnose `become` outside of functions + self.infer_expr_no_expect(expr); + } + } + + self.result.standard_types.never.clone() + } + fn infer_expr_box(&mut self, inner_expr: ExprId, expected: &Expectation) -> Ty { if let Some(box_id) = self.resolve_boxed_box() { let table = &mut self.table; diff --git a/crates/hir-ty/src/infer/mutability.rs b/crates/hir-ty/src/infer/mutability.rs index 663ea8532318..c9fc5bccea79 100644 --- a/crates/hir-ty/src/infer/mutability.rs +++ b/crates/hir-ty/src/infer/mutability.rs @@ -93,6 +93,9 @@ impl InferenceContext<'_> { self.infer_mut_expr(expr, Mutability::Not); } } + Expr::Become { expr } => { + self.infer_mut_expr(*expr, Mutability::Not); + } Expr::RecordLit { path: _, fields, spread, ellipsis: _, is_assignee_expr: _ } => { self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread)) } diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index 0ba8a17103e0..dad1ed10ce58 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -775,6 +775,7 @@ impl<'ctx> MirLowerCtx<'ctx> { self.set_terminator(current, TerminatorKind::Return, expr_id.into()); Ok(None) } + Expr::Become { .. } => not_supported!("tail-calls"), Expr::Yield { .. 
} => not_supported!("yield"), Expr::RecordLit { fields, path, spread, ellipsis: _, is_assignee_expr: _ } => { let spread_place = match spread { diff --git a/crates/ide-db/src/syntax_helpers/node_ext.rs b/crates/ide-db/src/syntax_helpers/node_ext.rs index e4e735cecd89..4f706e26af2b 100644 --- a/crates/ide-db/src/syntax_helpers/node_ext.rs +++ b/crates/ide-db/src/syntax_helpers/node_ext.rs @@ -329,6 +329,7 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) { | ast::Expr::RecordExpr(_) | ast::Expr::RefExpr(_) | ast::Expr::ReturnExpr(_) + | ast::Expr::BecomeExpr(_) | ast::Expr::TryExpr(_) | ast::Expr::TupleExpr(_) | ast::Expr::LetExpr(_) diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs index 4197f248e0a9..48600641ad05 100644 --- a/crates/parser/src/grammar/expressions/atom.rs +++ b/crates/parser/src/grammar/expressions/atom.rs @@ -58,6 +58,7 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet = T![match], T![move], T![return], + T![become], T![static], T![try], T![unsafe], @@ -102,6 +103,7 @@ pub(super) fn atom_expr( T![try] => try_block_expr(p, None), T![match] => match_expr(p), T![return] => return_expr(p), + T![become] => become_expr(p), T![yield] => yield_expr(p), T![do] if p.nth_at_contextual_kw(1, T![yeet]) => yeet_expr(p), T![continue] => continue_expr(p), @@ -621,6 +623,18 @@ fn return_expr(p: &mut Parser<'_>) -> CompletedMarker { m.complete(p, RETURN_EXPR) } +// test become_expr +// fn foo() { +// become foo(); +// } +fn become_expr(p: &mut Parser<'_>) -> CompletedMarker { + assert!(p.at(T![become])); + let m = p.start(); + p.bump(T![become]); + expr(p); + m.complete(p, BECOME_EXPR) +} + // test yield_expr // fn foo() { // yield; diff --git a/crates/parser/src/syntax_kind/generated.rs b/crates/parser/src/syntax_kind/generated.rs index 4b589037672f..6ecfdc9f4664 100644 --- a/crates/parser/src/syntax_kind/generated.rs +++ b/crates/parser/src/syntax_kind/generated.rs @@ -90,6 +90,7 @@ 
pub enum SyntaxKind { PUB_KW, REF_KW, RETURN_KW, + BECOME_KW, SELF_KW, SELF_TYPE_KW, STATIC_KW, @@ -195,6 +196,7 @@ pub enum SyntaxKind { BLOCK_EXPR, STMT_LIST, RETURN_EXPR, + BECOME_EXPR, YIELD_EXPR, YEET_EXPR, LET_EXPR, @@ -307,6 +309,7 @@ impl SyntaxKind { | PUB_KW | REF_KW | RETURN_KW + | BECOME_KW | SELF_KW | SELF_TYPE_KW | STATIC_KW @@ -425,6 +428,7 @@ impl SyntaxKind { "pub" => PUB_KW, "ref" => REF_KW, "return" => RETURN_KW, + "become" => BECOME_KW, "self" => SELF_KW, "Self" => SELF_TYPE_KW, "static" => STATIC_KW, @@ -496,4 +500,4 @@ impl SyntaxKind { } } #[macro_export] -macro_rules ! T { [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] 
=> { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] => { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: 
FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [existential] => { $ crate :: SyntaxKind :: EXISTENTIAL_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: 
LIFETIME_IDENT } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; } +macro_rules ! T { [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] 
=> { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ 
crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [become] => { $ crate :: SyntaxKind :: BECOME_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [existential] => { $ crate :: SyntaxKind :: EXISTENTIAL_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; } diff --git a/crates/parser/test_data/parser/inline/ok/0209_become_expr.rast b/crates/parser/test_data/parser/inline/ok/0209_become_expr.rast 
new file mode 100644 index 000000000000..c544cf4e5e3e --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0209_become_expr.rast @@ -0,0 +1,31 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + EXPR_STMT + BECOME_EXPR + BECOME_KW "become" + WHITESPACE " " + CALL_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "foo" + ARG_LIST + L_PAREN "(" + R_PAREN ")" + SEMICOLON ";" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0209_become_expr.rs b/crates/parser/test_data/parser/inline/ok/0209_become_expr.rs new file mode 100644 index 000000000000..918a83ca6e83 --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0209_become_expr.rs @@ -0,0 +1,3 @@ +fn foo() { + become foo(); +} diff --git a/crates/syntax/rust.ungram b/crates/syntax/rust.ungram index c3010d090c6b..36d677e151f6 100644 --- a/crates/syntax/rust.ungram +++ b/crates/syntax/rust.ungram @@ -367,6 +367,7 @@ Expr = | RecordExpr | RefExpr | ReturnExpr +| BecomeExpr | TryExpr | TupleExpr | WhileExpr @@ -528,6 +529,9 @@ MatchGuard = ReturnExpr = Attr* 'return' Expr? +BecomeExpr = + Attr* 'become' Expr + YieldExpr = Attr* 'yield' Expr? 
diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs index 6c86e591044a..affcdc94d078 100644 --- a/crates/syntax/src/ast/generated/nodes.rs +++ b/crates/syntax/src/ast/generated/nodes.rs @@ -1095,6 +1095,16 @@ impl ReturnExpr { pub fn expr(&self) -> Option { support::child(&self.syntax) } } +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct BecomeExpr { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasAttrs for BecomeExpr {} +impl BecomeExpr { + pub fn become_token(&self) -> Option { support::token(&self.syntax, T![become]) } + pub fn expr(&self) -> Option { support::child(&self.syntax) } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TryExpr { pub(crate) syntax: SyntaxNode, @@ -1633,6 +1643,7 @@ pub enum Expr { RecordExpr(RecordExpr), RefExpr(RefExpr), ReturnExpr(ReturnExpr), + BecomeExpr(BecomeExpr), TryExpr(TryExpr), TupleExpr(TupleExpr), WhileExpr(WhileExpr), @@ -2792,6 +2803,17 @@ impl AstNode for ReturnExpr { } fn syntax(&self) -> &SyntaxNode { &self.syntax } } +impl AstNode for BecomeExpr { + fn can_cast(kind: SyntaxKind) -> bool { kind == BECOME_EXPR } + fn cast(syntax: SyntaxNode) -> Option { + if Self::can_cast(syntax.kind()) { + Some(Self { syntax }) + } else { + None + } + } + fn syntax(&self) -> &SyntaxNode { &self.syntax } +} impl AstNode for TryExpr { fn can_cast(kind: SyntaxKind) -> bool { kind == TRY_EXPR } fn cast(syntax: SyntaxNode) -> Option { @@ -3540,6 +3562,9 @@ impl From for Expr { impl From for Expr { fn from(node: ReturnExpr) -> Expr { Expr::ReturnExpr(node) } } +impl From for Expr { + fn from(node: BecomeExpr) -> Expr { Expr::BecomeExpr(node) } +} impl From for Expr { fn from(node: TryExpr) -> Expr { Expr::TryExpr(node) } } @@ -3593,6 +3618,7 @@ impl AstNode for Expr { | RECORD_EXPR | REF_EXPR | RETURN_EXPR + | BECOME_EXPR | TRY_EXPR | TUPLE_EXPR | WHILE_EXPR @@ -3632,6 +3658,7 @@ impl AstNode for Expr { RECORD_EXPR => Expr::RecordExpr(RecordExpr { syntax }), REF_EXPR => 
Expr::RefExpr(RefExpr { syntax }), RETURN_EXPR => Expr::ReturnExpr(ReturnExpr { syntax }), + BECOME_EXPR => Expr::BecomeExpr(BecomeExpr { syntax }), TRY_EXPR => Expr::TryExpr(TryExpr { syntax }), TUPLE_EXPR => Expr::TupleExpr(TupleExpr { syntax }), WHILE_EXPR => Expr::WhileExpr(WhileExpr { syntax }), @@ -3673,6 +3700,7 @@ impl AstNode for Expr { Expr::RecordExpr(it) => &it.syntax, Expr::RefExpr(it) => &it.syntax, Expr::ReturnExpr(it) => &it.syntax, + Expr::BecomeExpr(it) => &it.syntax, Expr::TryExpr(it) => &it.syntax, Expr::TupleExpr(it) => &it.syntax, Expr::WhileExpr(it) => &it.syntax, @@ -4150,6 +4178,7 @@ impl AstNode for AnyHasAttrs { | RANGE_EXPR | REF_EXPR | RETURN_EXPR + | BECOME_EXPR | TRY_EXPR | TUPLE_EXPR | WHILE_EXPR @@ -4851,6 +4880,11 @@ impl std::fmt::Display for ReturnExpr { std::fmt::Display::fmt(self.syntax(), f) } } +impl std::fmt::Display for BecomeExpr { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Display::fmt(self.syntax(), f) + } +} impl std::fmt::Display for TryExpr { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) diff --git a/crates/syntax/src/ast/prec.rs b/crates/syntax/src/ast/prec.rs index 9ddf5a0a9804..9131cd2f1799 100644 --- a/crates/syntax/src/ast/prec.rs +++ b/crates/syntax/src/ast/prec.rs @@ -130,8 +130,8 @@ impl Expr { // ContinueExpr(_) => (0, 0), - ClosureExpr(_) | ReturnExpr(_) | YieldExpr(_) | YeetExpr(_) | BreakExpr(_) - | OffsetOfExpr(_) | FormatArgsExpr(_) | AsmExpr(_) => (0, 1), + ClosureExpr(_) | ReturnExpr(_) | BecomeExpr(_) | YieldExpr(_) | YeetExpr(_) + | BreakExpr(_) | OffsetOfExpr(_) | FormatArgsExpr(_) | AsmExpr(_) => (0, 1), RangeExpr(_) => (5, 5), @@ -288,6 +288,7 @@ impl Expr { PrefixExpr(e) => e.op_token(), RefExpr(e) => e.amp_token(), ReturnExpr(e) => e.return_token(), + BecomeExpr(e) => e.become_token(), TryExpr(e) => e.question_mark_token(), YieldExpr(e) => e.yield_token(), YeetExpr(e) => e.do_token(), @@ 
-316,7 +317,8 @@ impl Expr { // For BinExpr and RangeExpr this is technically wrong -- the child can be on the left... BinExpr(_) | RangeExpr(_) | BreakExpr(_) | ContinueExpr(_) | PrefixExpr(_) - | RefExpr(_) | ReturnExpr(_) | YieldExpr(_) | YeetExpr(_) | LetExpr(_) => self + | RefExpr(_) | ReturnExpr(_) | BecomeExpr(_) | YieldExpr(_) | YeetExpr(_) + | LetExpr(_) => self .syntax() .parent() .and_then(Expr::cast) diff --git a/crates/syntax/src/tests/ast_src.rs b/crates/syntax/src/tests/ast_src.rs index 341bda892ba1..8221c577892d 100644 --- a/crates/syntax/src/tests/ast_src.rs +++ b/crates/syntax/src/tests/ast_src.rs @@ -67,8 +67,9 @@ pub(crate) const KINDS_SRC: KindsSrc<'_> = KindsSrc { keywords: &[ "as", "async", "await", "box", "break", "const", "continue", "crate", "do", "dyn", "else", "enum", "extern", "false", "fn", "for", "if", "impl", "in", "let", "loop", "macro", - "match", "mod", "move", "mut", "pub", "ref", "return", "self", "Self", "static", "struct", - "super", "trait", "true", "try", "type", "unsafe", "use", "where", "while", "yield", + "match", "mod", "move", "mut", "pub", "ref", "return", "become", "self", "Self", "static", + "struct", "super", "trait", "true", "try", "type", "unsafe", "use", "where", "while", + "yield", ], contextual_keywords: &[ "auto", @@ -154,6 +155,7 @@ pub(crate) const KINDS_SRC: KindsSrc<'_> = KindsSrc { "BLOCK_EXPR", "STMT_LIST", "RETURN_EXPR", + "BECOME_EXPR", "YIELD_EXPR", "YEET_EXPR", "LET_EXPR", From 851ef63576dc4d598e5690afe1ee4e066cda306a Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 14 Feb 2024 16:26:03 +0100 Subject: [PATCH 067/127] use core::intrinsics::simd --- crates/std_float/src/lib.rs | 38 +++++++++---------------------------- 1 file changed, 9 insertions(+), 29 deletions(-) diff --git a/crates/std_float/src/lib.rs b/crates/std_float/src/lib.rs index 1fef17242ca8..4c547777fdeb 100644 --- a/crates/std_float/src/lib.rs +++ b/crates/std_float/src/lib.rs @@ -1,7 +1,7 @@ #![cfg_attr(feature = "as_crate", 
no_std)] // We are std! #![cfg_attr( feature = "as_crate", - feature(platform_intrinsics), + feature(core_intrinsics), feature(portable_simd), allow(internal_features) )] @@ -10,6 +10,8 @@ use core::simd; #[cfg(feature = "as_crate")] use core_simd::simd; +use core::intrinsics::simd as intrinsics; + use simd::{LaneCount, Simd, SupportedLaneCount}; #[cfg(feature = "as_crate")] @@ -22,28 +24,6 @@ use experimental as sealed; use crate::sealed::Sealed; -// "platform intrinsics" are essentially "codegen intrinsics" -// each of these may be scalarized and lowered to a libm call -extern "platform-intrinsic" { - // ceil - fn simd_ceil(x: T) -> T; - - // floor - fn simd_floor(x: T) -> T; - - // round - fn simd_round(x: T) -> T; - - // trunc - fn simd_trunc(x: T) -> T; - - // fsqrt - fn simd_fsqrt(x: T) -> T; - - // fma - fn simd_fma(x: T, y: T, z: T) -> T; -} - /// This trait provides a possibly-temporary implementation of float functions /// that may, in the absence of hardware support, canonicalize to calling an /// operating system's `math.h` dynamically-loaded library (also known as a @@ -74,7 +54,7 @@ pub trait StdFloat: Sealed + Sized { #[inline] #[must_use = "method returns a new vector and does not mutate the original value"] fn mul_add(self, a: Self, b: Self) -> Self { - unsafe { simd_fma(self, a, b) } + unsafe { intrinsics::simd_fma(self, a, b) } } /// Produces a vector where every lane has the square root value @@ -82,35 +62,35 @@ pub trait StdFloat: Sealed + Sized { #[inline] #[must_use = "method returns a new vector and does not mutate the original value"] fn sqrt(self) -> Self { - unsafe { simd_fsqrt(self) } + unsafe { intrinsics::simd_fsqrt(self) } } /// Returns the smallest integer greater than or equal to each lane. 
#[must_use = "method returns a new vector and does not mutate the original value"] #[inline] fn ceil(self) -> Self { - unsafe { simd_ceil(self) } + unsafe { intrinsics::simd_ceil(self) } } /// Returns the largest integer value less than or equal to each lane. #[must_use = "method returns a new vector and does not mutate the original value"] #[inline] fn floor(self) -> Self { - unsafe { simd_floor(self) } + unsafe { intrinsics::simd_floor(self) } } /// Rounds to the nearest integer value. Ties round toward zero. #[must_use = "method returns a new vector and does not mutate the original value"] #[inline] fn round(self) -> Self { - unsafe { simd_round(self) } + unsafe { intrinsics::simd_round(self) } } /// Returns the floating point's integer value, with its fractional part removed. #[must_use = "method returns a new vector and does not mutate the original value"] #[inline] fn trunc(self) -> Self { - unsafe { simd_trunc(self) } + unsafe { intrinsics::simd_trunc(self) } } /// Returns the floating point's fractional value, with its integer part removed. From dda641c62c8e4db52eabc509112dfe18cc757a75 Mon Sep 17 00:00:00 2001 From: Wilfred Hughes Date: Tue, 13 Feb 2024 12:48:07 -0800 Subject: [PATCH 068/127] Set documentation field in SCIP from doc comment Previously, the documentation field was the same as the text shown to users when they hover over that symbol. The documentation should really just be the doc comment, and as of #16179 the signature is already stored in the signatureDocumentation field. --- crates/ide/src/static_index.rs | 23 ++++++++++++++++++--- crates/rust-analyzer/src/cli/scip.rs | 31 +++++++++++++++++++++------- 2 files changed, 44 insertions(+), 10 deletions(-) diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index dee5afbf8d9e..5feaf21aa979 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -1,14 +1,16 @@ //! This module provides `StaticIndex` which is used for powering //! 
read-only code browsers and emitting LSIF -use hir::{db::HirDatabase, Crate, HirFileIdExt, Module}; +use hir::{db::HirDatabase, Crate, HirFileIdExt, Module, Semantics}; use ide_db::{ base_db::{FileId, FileRange, SourceDatabaseExt}, defs::Definition, + documentation::Documentation, + famous_defs::FamousDefs, helpers::get_definition, FxHashMap, FxHashSet, RootDatabase, }; -use syntax::{AstNode, SyntaxKind::*, TextRange, T}; +use syntax::{AstNode, SyntaxKind::*, SyntaxNode, TextRange, T}; use crate::inlay_hints::InlayFieldsToResolve; use crate::navigation_target::UpmappingResult; @@ -22,7 +24,7 @@ use crate::{ /// A static representation of fully analyzed source code. /// -/// The intended use-case is powering read-only code browsers and emitting LSIF +/// The intended use-case is powering read-only code browsers and emitting LSIF/SCIP. #[derive(Debug)] pub struct StaticIndex<'a> { pub files: Vec, @@ -40,6 +42,7 @@ pub struct ReferenceData { #[derive(Debug)] pub struct TokenStaticData { + pub documentation: Option, pub hover: Option, pub definition: Option, pub references: Vec, @@ -103,6 +106,19 @@ fn all_modules(db: &dyn HirDatabase) -> Vec { modules } +fn documentation_for_definition( + sema: &Semantics<'_, RootDatabase>, + def: Definition, + scope_node: &SyntaxNode, +) -> Option { + let famous_defs = match &def { + Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(scope_node)?.krate())), + _ => None, + }; + + def.docs(sema.db, famous_defs.as_ref()) +} + impl StaticIndex<'_> { fn add_file(&mut self, file_id: FileId) { let current_crate = crates_for(self.db, file_id).pop().map(Into::into); @@ -169,6 +185,7 @@ impl StaticIndex<'_> { *it } else { let it = self.tokens.insert(TokenStaticData { + documentation: documentation_for_definition(&sema, def, &node), hover: hover_for_definition(&sema, file_id, def, &node, &hover_config), definition: def.try_to_nav(self.db).map(UpmappingResult::call_site).map(|it| { FileRange { file_id: it.file_id, range: 
it.focus_or_full_range() } diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs index f4aec288348f..2d56830c87f3 100644 --- a/crates/rust-analyzer/src/cli/scip.rs +++ b/crates/rust-analyzer/src/cli/scip.rs @@ -135,12 +135,11 @@ impl flags::Scip { } if symbols_emitted.insert(id) { - let documentation = token - .hover - .as_ref() - .map(|hover| hover.markup.as_str()) - .filter(|it| !it.is_empty()) - .map(|it| vec![it.to_owned()]); + let documentation = match &token.documentation { + Some(doc) => vec![doc.as_str().to_owned()], + None => vec![], + }; + let position_encoding = scip_types::PositionEncoding::UTF8CodeUnitOffsetFromLineStart.into(); let signature_documentation = @@ -153,7 +152,7 @@ impl flags::Scip { }); let symbol_info = scip_types::SymbolInformation { symbol: symbol.clone(), - documentation: documentation.unwrap_or_default(), + documentation, relationships: Vec::new(), special_fields: Default::default(), kind: symbol_kind(token.kind).into(), @@ -599,4 +598,22 @@ pub mod example_mod { "rust-analyzer cargo main . MyTypeAlias#", ); } + + #[test] + fn documentation_matches_doc_comment() { + let s = "/// foo\nfn bar() {}"; + + let mut host = AnalysisHost::default(); + let change_fixture = ChangeFixture::parse(s); + host.raw_database_mut().apply_change(change_fixture.change); + + let analysis = host.analysis(); + let si = StaticIndex::compute(&analysis); + + let file = si.files.first().unwrap(); + let (_, token_id) = file.tokens.first().unwrap(); + let token = si.tokens.get(*token_id).unwrap(); + + assert_eq!(token.documentation.as_ref().map(|d| d.as_str()), Some("foo")); + } } From 0d6024c021e1764e97deec42f8ef57924f9d1e4f Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Wed, 14 Feb 2024 20:27:24 -0500 Subject: [PATCH 069/127] Add tests for snippet range adjustment Uses actual source for tests to easily confirm the results. 
--- crates/rust-analyzer/src/lsp/to_proto.rs | 455 ++++++++++++++++++++++- 1 file changed, 451 insertions(+), 4 deletions(-) diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs index bc4666c1221b..866e16db071d 100644 --- a/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/crates/rust-analyzer/src/lsp/to_proto.rs @@ -1659,15 +1659,43 @@ fn bar(_: usize) {} assert!(!docs.contains("use crate::bar")); } + #[track_caller] fn check_rendered_snippets(edit: TextEdit, snippets: SnippetEdit, expect: Expect) { - let text = r#"/* place to put all ranges in */"#; + check_rendered_snippets_in_source( + r"/* place to put all ranges in */", + edit, + snippets, + expect, + ); + } + + #[track_caller] + fn check_rendered_snippets_in_source( + ra_fixture: &str, + edit: TextEdit, + snippets: SnippetEdit, + expect: Expect, + ) { + let source = stdx::trim_indent(ra_fixture); let line_index = LineIndex { - index: Arc::new(ide::LineIndex::new(text)), + index: Arc::new(ide::LineIndex::new(&source)), endings: LineEndings::Unix, encoding: PositionEncoding::Utf8, }; let res = merge_text_and_snippet_edits(&line_index, edit, snippets); + + // Ensure that none of the ranges overlap + { + let mut sorted = res.clone(); + sorted.sort_by_key(|edit| (edit.range.start, edit.range.end)); + let disjoint_ranges = sorted + .iter() + .zip(sorted.iter().skip(1)) + .all(|(l, r)| l.range.end <= r.range.start || l == r); + assert!(disjoint_ranges, "ranges overlap for {res:#?}"); + } + expect.assert_debug_eq(&res); } @@ -1812,7 +1840,8 @@ fn bar(_: usize) {} let mut edit = TextEdit::builder(); edit.insert(0.into(), "abc".to_owned()); let edit = edit.finish(); - let snippets = SnippetEdit::new(vec![Snippet::Tabstop(7.into())]); + // Note: tabstops are positioned in the source where all text edits have been applied + let snippets = SnippetEdit::new(vec![Snippet::Tabstop(10.into())]); check_rendered_snippets( edit, @@ -1929,8 +1958,9 @@ fn bar(_: usize) {} 
edit.insert(0.into(), "abc".to_owned()); edit.insert(7.into(), "abc".to_owned()); let edit = edit.finish(); + // Note: tabstops are positioned in the source where all text edits have been applied let snippets = - SnippetEdit::new(vec![Snippet::Tabstop(4.into()), Snippet::Tabstop(4.into())]); + SnippetEdit::new(vec![Snippet::Tabstop(7.into()), Snippet::Tabstop(7.into())]); check_rendered_snippets( edit, @@ -2134,6 +2164,423 @@ fn bar(_: usize) {} ); } + #[test] + fn snippet_rendering_tabstop_adjust_offset_deleted() { + // negative offset from inserting a smaller range + let mut edit = TextEdit::builder(); + edit.replace(TextRange::new(47.into(), 56.into()), "let".to_owned()); + edit.replace( + TextRange::new(57.into(), 89.into()), + "disabled = false;\n ProcMacro {\n disabled,\n }".to_owned(), + ); + let edit = edit.finish(); + let snippets = SnippetEdit::new(vec![Snippet::Tabstop(51.into())]); + + check_rendered_snippets_in_source( + r" +fn expander_to_proc_macro() -> ProcMacro { + ProcMacro { + disabled: false, + } +} + +struct ProcMacro { + disabled: bool, +}", + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 13, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 14, + }, + end: Position { + line: 3, + character: 5, + }, + }, + new_text: "$0disabled = false;\n ProcMacro {\n disabled,\n }", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + ] +"#]], + ); + } + + #[test] + fn snippet_rendering_tabstop_adjust_offset_added() { + // positive offset from inserting a larger range + let mut edit = TextEdit::builder(); + edit.replace(TextRange::new(39.into(), 40.into()), "let".to_owned()); + edit.replace( + TextRange::new(41.into(), 73.into()), + "disabled = false;\n ProcMacro {\n disabled,\n }".to_owned(), + ); + let 
edit = edit.finish(); + let snippets = SnippetEdit::new(vec![Snippet::Tabstop(43.into())]); + + check_rendered_snippets_in_source( + r" +fn expander_to_proc_macro() -> P { + P { + disabled: false, + } +} + +struct P { + disabled: bool, +}", + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 5, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 3, + character: 5, + }, + }, + new_text: "$0disabled = false;\n ProcMacro {\n disabled,\n }", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + ] +"#]], + ); + } + + #[test] + fn snippet_rendering_placeholder_adjust_offset_deleted() { + // negative offset from inserting a smaller range + let mut edit = TextEdit::builder(); + edit.replace(TextRange::new(47.into(), 56.into()), "let".to_owned()); + edit.replace( + TextRange::new(57.into(), 89.into()), + "disabled = false;\n ProcMacro {\n disabled,\n }".to_owned(), + ); + let edit = edit.finish(); + let snippets = + SnippetEdit::new(vec![Snippet::Placeholder(TextRange::new(51.into(), 59.into()))]); + + check_rendered_snippets_in_source( + r" +fn expander_to_proc_macro() -> ProcMacro { + ProcMacro { + disabled: false, + } +} + +struct ProcMacro { + disabled: bool, +}", + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 13, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 14, + }, + end: Position { + line: 3, + character: 5, + }, + }, + new_text: "${0:disabled} = false;\n ProcMacro {\n disabled,\n }", + insert_text_format: Some( + Snippet, + ), + annotation_id: 
None, + }, + ] + "#]], + ); + } + + #[test] + fn snippet_rendering_placeholder_adjust_offset_added() { + // positive offset from inserting a larger range + let mut edit = TextEdit::builder(); + edit.replace(TextRange::new(39.into(), 40.into()), "let".to_owned()); + edit.replace( + TextRange::new(41.into(), 73.into()), + "disabled = false;\n ProcMacro {\n disabled,\n }".to_owned(), + ); + let edit = edit.finish(); + let snippets = + SnippetEdit::new(vec![Snippet::Placeholder(TextRange::new(43.into(), 51.into()))]); + + check_rendered_snippets_in_source( + r" +fn expander_to_proc_macro() -> P { + P { + disabled: false, + } +} + +struct P { + disabled: bool, +}", + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 5, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 3, + character: 5, + }, + }, + new_text: "${0:disabled} = false;\n ProcMacro {\n disabled,\n }", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + ] + "#]], + ); + } + + #[test] + fn snippet_rendering_tabstop_adjust_offset_between_text_edits() { + // inserting between edits, tabstop should be at (1, 14) + let mut edit = TextEdit::builder(); + edit.replace(TextRange::new(47.into(), 56.into()), "let".to_owned()); + edit.replace( + TextRange::new(58.into(), 90.into()), + "disabled = false;\n ProcMacro {\n disabled,\n }".to_owned(), + ); + let edit = edit.finish(); + let snippets = SnippetEdit::new(vec![Snippet::Tabstop(51.into())]); + + // add an extra space between `ProcMacro` and `{` to insert the tabstop at + check_rendered_snippets_in_source( + r" +fn expander_to_proc_macro() -> ProcMacro { + ProcMacro { + disabled: false, + } +} + +struct ProcMacro { + disabled: bool, +}", + edit, + snippets, + expect![[r#" + [ + 
SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 13, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 14, + }, + end: Position { + line: 1, + character: 14, + }, + }, + new_text: "$0", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 15, + }, + end: Position { + line: 3, + character: 5, + }, + }, + new_text: "disabled = false;\n ProcMacro {\n disabled,\n }", + insert_text_format: None, + annotation_id: None, + }, + ] +"#]], + ); + } + + #[test] + fn snippet_rendering_tabstop_adjust_offset_after_text_edits() { + // inserting after edits, tabstop should be before the closing curly of the fn + let mut edit = TextEdit::builder(); + edit.replace(TextRange::new(47.into(), 56.into()), "let".to_owned()); + edit.replace( + TextRange::new(57.into(), 89.into()), + "disabled = false;\n ProcMacro {\n disabled,\n }".to_owned(), + ); + let edit = edit.finish(); + let snippets = SnippetEdit::new(vec![Snippet::Tabstop(109.into())]); + + check_rendered_snippets_in_source( + r" +fn expander_to_proc_macro() -> ProcMacro { + ProcMacro { + disabled: false, + } +} + +struct ProcMacro { + disabled: bool, +}", + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 13, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 14, + }, + end: Position { + line: 3, + character: 5, + }, + }, + new_text: "disabled = false;\n ProcMacro {\n disabled,\n }", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 4, + 
character: 0, + }, + end: Position { + line: 4, + character: 0, + }, + }, + new_text: "$0", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + ] +"#]], + ); + } + // `Url` is not able to parse windows paths on unix machines. #[test] #[cfg(target_os = "windows")] From 7cf4a8a3bfd7072d54fa2d558e9fd0a15a99883c Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Wed, 14 Feb 2024 21:35:17 -0500 Subject: [PATCH 070/127] fix: Place snippets correctly in multi-edit assists --- crates/rust-analyzer/src/lsp/to_proto.rs | 38 ++++++++++++++++++++---- 1 file changed, 33 insertions(+), 5 deletions(-) diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs index 866e16db071d..60281202f84d 100644 --- a/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/crates/rust-analyzer/src/lsp/to_proto.rs @@ -930,6 +930,16 @@ fn merge_text_and_snippet_edits( let mut edits: Vec = vec![]; let mut snippets = snippet_edit.into_edit_ranges().into_iter().peekable(); let text_edits = edit.into_iter(); + // offset to go from the final source location to the original source location + let mut source_text_offset = 0i32; + + let offset_range = |range: TextRange, offset: i32| -> TextRange { + // map the snippet range from the target location into the original source location + let start = u32::from(range.start()).checked_add_signed(offset).unwrap_or(0); + let end = u32::from(range.end()).checked_add_signed(offset).unwrap_or(0); + + TextRange::new(start.into(), end.into()) + }; for current_indel in text_edits { let new_range = { @@ -938,10 +948,17 @@ fn merge_text_and_snippet_edits( TextRange::at(current_indel.delete.start(), insert_len) }; + // figure out how much this Indel will shift future ranges from the initial source + let offset_adjustment = + u32::from(current_indel.delete.len()) as i32 - u32::from(new_range.len()) as i32; + // insert any snippets before the text edit - for (snippet_index, snippet_range) in - snippets.take_while_ref(|(_, 
range)| range.end() < new_range.start()) - { + for (snippet_index, snippet_range) in snippets.peeking_take_while(|(_, range)| { + offset_range(*range, source_text_offset).end() < new_range.start() + }) { + // adjust the snippet range into the corresponding initial source location + let snippet_range = offset_range(snippet_range, source_text_offset); + let snippet_range = if !stdx::always!( snippet_range.is_empty(), "placeholder range {:?} is before current text edit range {:?}", @@ -965,11 +982,16 @@ fn merge_text_and_snippet_edits( }) } - if snippets.peek().is_some_and(|(_, range)| new_range.intersect(*range).is_some()) { + if snippets.peek().is_some_and(|(_, range)| { + new_range.intersect(offset_range(*range, source_text_offset)).is_some() + }) { // at least one snippet edit intersects this text edit, // so gather all of the edits that intersect this text edit let mut all_snippets = snippets - .take_while_ref(|(_, range)| new_range.intersect(*range).is_some()) + .peeking_take_while(|(_, range)| { + new_range.intersect(offset_range(*range, source_text_offset)).is_some() + }) + .map(|(tabstop, range)| (tabstop, offset_range(range, source_text_offset))) .collect_vec(); // ensure all of the ranges are wholly contained inside of the new range @@ -1010,10 +1032,16 @@ fn merge_text_and_snippet_edits( // since it wasn't consumed, it's available for the next pass edits.push(snippet_text_edit(line_index, false, current_indel)); } + + // update the final source -> initial source mapping offset + source_text_offset += offset_adjustment; } // insert any remaining tabstops edits.extend(snippets.map(|(snippet_index, snippet_range)| { + // adjust the snippet range into the corresponding initial source location + let snippet_range = offset_range(snippet_range, source_text_offset); + let snippet_range = if !stdx::always!( snippet_range.is_empty(), "found placeholder snippet {:?} without a text edit", From 1443d49b7273f52a4a3ed2151a6cfd0fd1227e00 Mon Sep 17 00:00:00 2001 From: 
Lukas Wirth Date: Thu, 15 Feb 2024 17:21:18 +0100 Subject: [PATCH 071/127] Remove salsa doc compile tests --- crates/rust-analyzer/src/bin/main.rs | 28 ++--- crates/rust-analyzer/src/bin/rustc_wrapper.rs | 19 +-- crates/salsa/src/doctest.rs | 115 ------------------ crates/salsa/src/lib.rs | 1 - 4 files changed, 23 insertions(+), 140 deletions(-) delete mode 100644 crates/salsa/src/doctest.rs diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs index 269dd3cfffe9..07e04a836617 100644 --- a/crates/rust-analyzer/src/bin/main.rs +++ b/crates/rust-analyzer/src/bin/main.rs @@ -11,7 +11,7 @@ extern crate rustc_driver as _; mod rustc_wrapper; -use std::{env, fs, path::PathBuf, process, sync::Arc}; +use std::{env, fs, path::PathBuf, process::ExitCode, sync::Arc}; use anyhow::Context; use lsp_server::Connection; @@ -27,21 +27,15 @@ static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc; #[global_allocator] static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; -fn main() -> anyhow::Result<()> { +fn main() -> anyhow::Result { if std::env::var("RA_RUSTC_WRAPPER").is_ok() { - let mut args = std::env::args_os(); - let _me = args.next().unwrap(); - let rustc = args.next().unwrap(); - let code = match rustc_wrapper::run_rustc_skipping_cargo_checking(rustc, args.collect()) { - Ok(rustc_wrapper::ExitCode(code)) => code.unwrap_or(102), - Err(err) => { - eprintln!("{err}"); - 101 - } - }; - process::exit(code); + rustc_wrapper::main().map_err(Into::into) + } else { + actual_main() } +} +fn actual_main() -> anyhow::Result { let flags = flags::RustAnalyzer::from_env_or_exit(); #[cfg(debug_assertions)] @@ -58,14 +52,14 @@ fn main() -> anyhow::Result<()> { let verbosity = flags.verbosity(); match flags.subcommand { - flags::RustAnalyzerCmd::LspServer(cmd) => { + flags::RustAnalyzerCmd::LspServer(cmd) => 'lsp_server: { if cmd.print_config_schema { println!("{:#}", Config::json_schema()); - return Ok(()); + break 'lsp_server; } if cmd.version { 
println!("rust-analyzer {}", rust_analyzer::version()); - return Ok(()); + break 'lsp_server; } // rust-analyzer’s “main thread” is actually @@ -90,7 +84,7 @@ fn main() -> anyhow::Result<()> { flags::RustAnalyzerCmd::RunTests(cmd) => cmd.run()?, flags::RustAnalyzerCmd::RustcTests(cmd) => cmd.run()?, } - Ok(()) + Ok(ExitCode::SUCCESS) } fn setup_logging(log_file_flag: Option) -> anyhow::Result<()> { diff --git a/crates/rust-analyzer/src/bin/rustc_wrapper.rs b/crates/rust-analyzer/src/bin/rustc_wrapper.rs index 38e9c7dd7e11..684b3f52afc8 100644 --- a/crates/rust-analyzer/src/bin/rustc_wrapper.rs +++ b/crates/rust-analyzer/src/bin/rustc_wrapper.rs @@ -7,13 +7,17 @@ use std::{ ffi::OsString, io, - process::{Command, Stdio}, + process::{Command, ExitCode, Stdio}, }; -/// ExitCode/ExitStatus are impossible to create :(. -pub(crate) struct ExitCode(pub(crate) Option); +pub(crate) fn main() -> io::Result { + let mut args = std::env::args_os(); + let _me = args.next().unwrap(); + let rustc = args.next().unwrap(); + run_rustc_skipping_cargo_checking(rustc, args.collect()) +} -pub(crate) fn run_rustc_skipping_cargo_checking( +fn run_rustc_skipping_cargo_checking( rustc_executable: OsString, args: Vec, ) -> io::Result { @@ -35,9 +39,10 @@ pub(crate) fn run_rustc_skipping_cargo_checking( arg.starts_with("--emit=") && arg.contains("metadata") && !arg.contains("link") }); if not_invoked_by_build_script && is_cargo_check { - return Ok(ExitCode(Some(0))); + Ok(ExitCode::from(0)) + } else { + run_rustc(rustc_executable, args) } - run_rustc(rustc_executable, args) } fn run_rustc(rustc_executable: OsString, args: Vec) -> io::Result { @@ -47,5 +52,5 @@ fn run_rustc(rustc_executable: OsString, args: Vec) -> io::Result Rc; -/// fn no_send_sync_key(&self, key: Rc) -> bool; -/// } -/// -/// fn no_send_sync_value(_db: &dyn NoSendSyncDatabase, key: bool) -> Rc { -/// Rc::new(key) -/// } -/// -/// fn no_send_sync_key(_db: &dyn NoSendSyncDatabase, key: Rc) -> bool { -/// *key -/// } -/// -/// 
#[salsa::database(NoSendSyncStorage)] -/// #[derive(Default)] -/// struct DatabaseImpl { -/// storage: salsa::Storage, -/// } -/// -/// impl salsa::Database for DatabaseImpl { -/// } -/// -/// fn is_send(_: T) { } -/// -/// fn assert_send() { -/// is_send(DatabaseImpl::default()); -/// } -/// ``` -fn test_key_not_send_db_not_send() {} - -/// Test that a database with a key/value that is not `Sync` will not -/// be `Send`. -/// -/// ```compile_fail,E0277 -/// use std::rc::Rc; -/// use std::cell::Cell; -/// -/// #[salsa::query_group(NoSendSyncStorage)] -/// trait NoSendSyncDatabase: salsa::Database { -/// fn no_send_sync_value(&self, key: bool) -> Cell; -/// fn no_send_sync_key(&self, key: Cell) -> bool; -/// } -/// -/// fn no_send_sync_value(_db: &dyn NoSendSyncDatabase, key: bool) -> Cell { -/// Cell::new(key) -/// } -/// -/// fn no_send_sync_key(_db: &dyn NoSendSyncDatabase, key: Cell) -> bool { -/// *key -/// } -/// -/// #[salsa::database(NoSendSyncStorage)] -/// #[derive(Default)] -/// struct DatabaseImpl { -/// runtime: salsa::Storage, -/// } -/// -/// impl salsa::Database for DatabaseImpl { -/// } -/// -/// fn is_send(_: T) { } -/// -/// fn assert_send() { -/// is_send(DatabaseImpl::default()); -/// } -/// ``` -fn test_key_not_sync_db_not_send() {} - -/// Test that a database with a key/value that is not `Sync` will -/// not be `Sync`. 
-/// -/// ```compile_fail,E0277 -/// use std::cell::Cell; -/// use std::rc::Rc; -/// -/// #[salsa::query_group(NoSendSyncStorage)] -/// trait NoSendSyncDatabase: salsa::Database { -/// fn no_send_sync_value(&self, key: bool) -> Cell; -/// fn no_send_sync_key(&self, key: Cell) -> bool; -/// } -/// -/// fn no_send_sync_value(_db: &dyn NoSendSyncDatabase, key: bool) -> Cell { -/// Cell::new(key) -/// } -/// -/// fn no_send_sync_key(_db: &dyn NoSendSyncDatabase, key: Cell) -> bool { -/// *key -/// } -/// -/// #[salsa::database(NoSendSyncStorage)] -/// #[derive(Default)] -/// struct DatabaseImpl { -/// runtime: salsa::Storage, -/// } -/// -/// impl salsa::Database for DatabaseImpl { -/// } -/// -/// fn is_sync(_: T) { } -/// -/// fn assert_send() { -/// is_sync(DatabaseImpl::default()); -/// } -/// ``` -fn test_key_not_sync_db_not_sync() {} diff --git a/crates/salsa/src/lib.rs b/crates/salsa/src/lib.rs index 2d58beafb2a0..668dcfd925d8 100644 --- a/crates/salsa/src/lib.rs +++ b/crates/salsa/src/lib.rs @@ -11,7 +11,6 @@ //! from previous invocations as appropriate. 
mod derived; -mod doctest; mod durability; mod hash; mod input; From 9ae0f924dd0fa873cc59ffb75723504c8eb28e96 Mon Sep 17 00:00:00 2001 From: davidsemakula Date: Thu, 15 Feb 2024 20:00:40 +0300 Subject: [PATCH 072/127] fix "needless return" for trailing item declarations --- crates/hir-def/src/body/lower.rs | 2 +- crates/hir-def/src/body/pretty.rs | 1 + crates/hir-def/src/body/scope.rs | 1 + crates/hir-def/src/hir.rs | 4 ++++ crates/hir-ty/src/infer/closure.rs | 1 + crates/hir-ty/src/infer/expr.rs | 1 + crates/hir-ty/src/infer/mutability.rs | 1 + crates/hir-ty/src/mir/lower.rs | 1 + .../src/handlers/remove_trailing_return.rs | 12 ++++++++++++ 9 files changed, 23 insertions(+), 1 deletion(-) diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index 11b87f1952fb..5dc5fedd2307 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -1113,7 +1113,7 @@ impl ExprCollector<'_> { statements.push(Statement::Expr { expr, has_semi }); } } - ast::Stmt::Item(_item) => (), + ast::Stmt::Item(_item) => statements.push(Statement::Item), } } diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs index 773b7c575e42..7007dea638ef 100644 --- a/crates/hir-def/src/body/pretty.rs +++ b/crates/hir-def/src/body/pretty.rs @@ -628,6 +628,7 @@ impl Printer<'_> { } wln!(self); } + Statement::Item => (), } } diff --git a/crates/hir-def/src/body/scope.rs b/crates/hir-def/src/body/scope.rs index ab623250d407..69b82ae871a4 100644 --- a/crates/hir-def/src/body/scope.rs +++ b/crates/hir-def/src/body/scope.rs @@ -197,6 +197,7 @@ fn compute_block_scopes( Statement::Expr { expr, .. 
} => { compute_expr_scopes(*expr, body, scopes, scope); } + Statement::Item => (), } } if let Some(expr) = tail { diff --git a/crates/hir-def/src/hir.rs b/crates/hir-def/src/hir.rs index f008ae761a56..bbe62f27b9a0 100644 --- a/crates/hir-def/src/hir.rs +++ b/crates/hir-def/src/hir.rs @@ -352,6 +352,9 @@ pub enum Statement { expr: ExprId, has_semi: bool, }, + // At the moment, we only use this to figure out if a return expression + // is really the last statement of a block. See #16566 + Item, } impl Expr { @@ -385,6 +388,7 @@ impl Expr { } } Statement::Expr { expr: expression, .. } => f(*expression), + Statement::Item => (), } } if let &Some(expr) = tail { diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs index 83ad6f54d6ce..22a70f951ea7 100644 --- a/crates/hir-ty/src/infer/closure.rs +++ b/crates/hir-ty/src/infer/closure.rs @@ -485,6 +485,7 @@ impl InferenceContext<'_> { Statement::Expr { expr, has_semi: _ } => { self.consume_expr(*expr); } + Statement::Item => (), } } if let Some(tail) = tail { diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 30c676a240c1..428ed6748c6c 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -1389,6 +1389,7 @@ impl InferenceContext<'_> { ); } } + Statement::Item => (), } } diff --git a/crates/hir-ty/src/infer/mutability.rs b/crates/hir-ty/src/infer/mutability.rs index c9fc5bccea79..00e5eac229fb 100644 --- a/crates/hir-ty/src/infer/mutability.rs +++ b/crates/hir-ty/src/infer/mutability.rs @@ -65,6 +65,7 @@ impl InferenceContext<'_> { Statement::Expr { expr, has_semi: _ } => { self.infer_mut_expr(*expr, Mutability::Not); } + Statement::Item => (), } } if let Some(tail) = tail { diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index dad1ed10ce58..b038900cdacb 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -1781,6 +1781,7 @@ impl<'ctx> MirLowerCtx<'ctx> { 
self.push_fake_read(c, p, expr.into()); current = scope2.pop_and_drop(self, c, expr.into()); } + hir_def::hir::Statement::Item => (), } } if let Some(tail) = tail { diff --git a/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs b/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs index a0d5d742d362..b7667dc318f0 100644 --- a/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs +++ b/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs @@ -182,6 +182,18 @@ fn foo() -> u8 { ); } + #[test] + fn no_diagnostic_if_not_last_statement2() { + check_diagnostics( + r#" +fn foo() -> u8 { + return 2; + fn bar() {} +} +"#, + ); + } + #[test] fn replace_with_expr() { check_fix( From fafb16b0521876bedc14a9ee62f32cfe3015ecee Mon Sep 17 00:00:00 2001 From: Michel Lind Date: Thu, 15 Feb 2024 18:01:36 -0600 Subject: [PATCH 073/127] lsp-server: add license files MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The `lsp-server` crate is currently published without license files, which is needed when packaging in Linux distributions. Symlink the files from the repository root so they are kept in sync. 
Test showing the files get picked up by `cargo package`: ``` michel in rust-analyzer/lib/lsp-server on  add-lsp-server-license [+] is 📦 v0.7.6 via 🐍 v3.12.1 (.venv311) via 🦀 v1.76.0 ⬢ [fedora:39] ❯ cargo package --allow-dirty --no-verify Updating crates.io index Packaging lsp-server v0.7.6 (/home/michel/src/github/rust-lang/rust-analyzer/lib/lsp-server) Updating crates.io index Packaged 12 files, 59.6KiB (16.3KiB compressed) michel in rust-analyzer/lib/lsp-server on  add-lsp-server-license [+] is 📦 v0.7.6 via 🐍 v3.12.1 (.venv311) via 🦀 v1.76.0 ⬢ [fedora:39] ❯ tar tf ../../target/package/lsp-server-0.7.6.crate | grep LICENSE lsp-server-0.7.6/LICENSE-APACHE lsp-server-0.7.6/LICENSE-MIT ``` Signed-off-by: Michel Lind --- lib/lsp-server/LICENSE-APACHE | 1 + lib/lsp-server/LICENSE-MIT | 1 + 2 files changed, 2 insertions(+) create mode 120000 lib/lsp-server/LICENSE-APACHE create mode 120000 lib/lsp-server/LICENSE-MIT diff --git a/lib/lsp-server/LICENSE-APACHE b/lib/lsp-server/LICENSE-APACHE new file mode 120000 index 000000000000..1cd601d0a3af --- /dev/null +++ b/lib/lsp-server/LICENSE-APACHE @@ -0,0 +1 @@ +../../LICENSE-APACHE \ No newline at end of file diff --git a/lib/lsp-server/LICENSE-MIT b/lib/lsp-server/LICENSE-MIT new file mode 120000 index 000000000000..b2cfbdc7b0b4 --- /dev/null +++ b/lib/lsp-server/LICENSE-MIT @@ -0,0 +1 @@ +../../LICENSE-MIT \ No newline at end of file From 581d457e13d321c1f462f5e9fc830e9709b17348 Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Thu, 15 Feb 2024 20:24:31 -0500 Subject: [PATCH 074/127] Add `add_placeholder_snippet_group` Used for allowing newly generated syntax constructs to be renamed without having to go through a separate rename step. 
--- crates/ide-db/src/source_change.rs | 48 +++++++++++++++++++++++------- 1 file changed, 37 insertions(+), 11 deletions(-) diff --git a/crates/ide-db/src/source_change.rs b/crates/ide-db/src/source_change.rs index 73be6a4071e4..f09401f70afe 100644 --- a/crates/ide-db/src/source_change.rs +++ b/crates/ide-db/src/source_change.rs @@ -138,7 +138,7 @@ impl SnippetEdit { .into_iter() .zip(1..) .with_position() - .map(|pos| { + .flat_map(|pos| { let (snippet, index) = match pos { (itertools::Position::First, it) | (itertools::Position::Middle, it) => it, // last/only snippet gets index 0 @@ -146,11 +146,13 @@ impl SnippetEdit { | (itertools::Position::Only, (snippet, _)) => (snippet, 0), }; - let range = match snippet { - Snippet::Tabstop(pos) => TextRange::empty(pos), - Snippet::Placeholder(range) => range, - }; - (index, range) + match snippet { + Snippet::Tabstop(pos) => vec![(index, TextRange::empty(pos))], + Snippet::Placeholder(range) => vec![(index, range)], + Snippet::PlaceholderGroup(ranges) => { + ranges.into_iter().map(|range| (index, range)).collect() + } + } }) .collect_vec(); @@ -248,7 +250,7 @@ impl SourceChangeBuilder { fn commit(&mut self) { let snippet_edit = self.snippet_builder.take().map(|builder| { SnippetEdit::new( - builder.places.into_iter().map(PlaceSnippet::finalize_position).collect_vec(), + builder.places.into_iter().flat_map(PlaceSnippet::finalize_position).collect(), ) }); @@ -356,6 +358,17 @@ impl SourceChangeBuilder { self.add_snippet(PlaceSnippet::Over(node.syntax().clone().into())) } + /// Adds a snippet to move the cursor selected over `nodes` + /// + /// This allows for renaming newly generated items without having to go + /// through a separate rename step. 
+ pub fn add_placeholder_snippet_group(&mut self, _cap: SnippetCap, nodes: Vec) { + assert!(nodes.iter().all(|node| node.parent().is_some())); + self.add_snippet(PlaceSnippet::OverGroup( + nodes.into_iter().map(|node| node.into()).collect(), + )) + } + fn add_snippet(&mut self, snippet: PlaceSnippet) { let snippet_builder = self.snippet_builder.get_or_insert(SnippetBuilder { places: vec![] }); snippet_builder.places.push(snippet); @@ -400,6 +413,13 @@ pub enum Snippet { Tabstop(TextSize), /// A placeholder snippet (e.g. `${0:placeholder}`). Placeholder(TextRange), + /// A group of placeholder snippets, e.g. + /// + /// ```no_run + /// let ${0:new_var} = 4; + /// fun(1, 2, 3, ${0:new_var}); + /// ``` + PlaceholderGroup(Vec), } enum PlaceSnippet { @@ -409,14 +429,20 @@ enum PlaceSnippet { After(SyntaxElement), /// Place a placeholder snippet in place of the element Over(SyntaxElement), + /// Place a group of placeholder snippets which are linked together + /// in place of the elements + OverGroup(Vec), } impl PlaceSnippet { - fn finalize_position(self) -> Snippet { + fn finalize_position(self) -> Vec { match self { - PlaceSnippet::Before(it) => Snippet::Tabstop(it.text_range().start()), - PlaceSnippet::After(it) => Snippet::Tabstop(it.text_range().end()), - PlaceSnippet::Over(it) => Snippet::Placeholder(it.text_range()), + PlaceSnippet::Before(it) => vec![Snippet::Tabstop(it.text_range().start())], + PlaceSnippet::After(it) => vec![Snippet::Tabstop(it.text_range().end())], + PlaceSnippet::Over(it) => vec![Snippet::Placeholder(it.text_range())], + PlaceSnippet::OverGroup(it) => { + vec![Snippet::PlaceholderGroup(it.into_iter().map(|it| it.text_range()).collect())] + } } } } From 115646d7d5e8c2b75acc3d8b693490509001abaa Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Thu, 15 Feb 2024 20:40:14 -0500 Subject: [PATCH 075/127] Align `set_visibility` with the rest of the `set_` edit-in-place methods --- .../src/handlers/fix_visibility.rs | 4 +-- 
crates/syntax/src/ast/edit_in_place.rs | 28 +++++++++++-------- 2 files changed, 18 insertions(+), 14 deletions(-) diff --git a/crates/ide-assists/src/handlers/fix_visibility.rs b/crates/ide-assists/src/handlers/fix_visibility.rs index 204e796fa2c0..589591a6777e 100644 --- a/crates/ide-assists/src/handlers/fix_visibility.rs +++ b/crates/ide-assists/src/handlers/fix_visibility.rs @@ -79,7 +79,7 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>) edit.edit_file(target_file); let vis_owner = edit.make_mut(vis_owner); - vis_owner.set_visibility(missing_visibility.clone_for_update()); + vis_owner.set_visibility(Some(missing_visibility.clone_for_update())); if let Some((cap, vis)) = ctx.config.snippet_cap.zip(vis_owner.visibility()) { edit.add_tabstop_before(cap, vis); @@ -131,7 +131,7 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_> edit.edit_file(target_file); let vis_owner = edit.make_mut(vis_owner); - vis_owner.set_visibility(missing_visibility.clone_for_update()); + vis_owner.set_visibility(Some(missing_visibility.clone_for_update())); if let Some((cap, vis)) = ctx.config.snippet_cap.zip(vis_owner.visibility()) { edit.add_tabstop_before(cap, vis); diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs index bc9c54d0b73e..41d33c457ce7 100644 --- a/crates/syntax/src/ast/edit_in_place.rs +++ b/crates/syntax/src/ast/edit_in_place.rs @@ -1007,20 +1007,24 @@ impl ast::IdentPat { } pub trait HasVisibilityEdit: ast::HasVisibility { - fn set_visibility(&self, visibility: ast::Visibility) { - match self.visibility() { - Some(current_visibility) => { - ted::replace(current_visibility.syntax(), visibility.syntax()) - } - None => { - let vis_before = self - .syntax() - .children_with_tokens() - .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR)) - .unwrap_or_else(|| self.syntax().first_child_or_token().unwrap()); + fn set_visibility(&self, visibility: Option) { + if 
let Some(visibility) = visibility { + match self.visibility() { + Some(current_visibility) => { + ted::replace(current_visibility.syntax(), visibility.syntax()) + } + None => { + let vis_before = self + .syntax() + .children_with_tokens() + .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR)) + .unwrap_or_else(|| self.syntax().first_child_or_token().unwrap()); - ted::insert(ted::Position::before(vis_before), visibility.syntax()); + ted::insert(ted::Position::before(vis_before), visibility.syntax()); + } } + } else if let Some(visibility) = self.visibility() { + ted::remove(visibility.syntax()); } } } From eb6d6ba17c96d09bcb4def21a2a3c9c91ef91181 Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Thu, 15 Feb 2024 21:09:45 -0500 Subject: [PATCH 076/127] Migrate `generate_trait_from_impl` to mutable ast --- .../src/handlers/generate_trait_from_impl.rs | 106 ++++++++---------- crates/ide-assists/src/tests/generated.rs | 4 +- crates/syntax/src/ast/make.rs | 2 +- 3 files changed, 50 insertions(+), 62 deletions(-) diff --git a/crates/ide-assists/src/handlers/generate_trait_from_impl.rs b/crates/ide-assists/src/handlers/generate_trait_from_impl.rs index 24094de22c8d..5f7350bc2812 100644 --- a/crates/ide-assists/src/handlers/generate_trait_from_impl.rs +++ b/crates/ide-assists/src/handlers/generate_trait_from_impl.rs @@ -1,8 +1,13 @@ use crate::assist_context::{AssistContext, Assists}; use ide_db::assists::AssistId; use syntax::{ - ast::{self, edit::IndentLevel, make, HasGenericParams, HasVisibility}, - ted, AstNode, SyntaxKind, + ast::{ + self, + edit_in_place::{HasVisibilityEdit, Indent}, + make, HasGenericParams, HasName, + }, + ted::{self, Position}, + AstNode, SyntaxKind, T, }; // NOTES : @@ -44,7 +49,7 @@ use syntax::{ // }; // } // -// trait ${0:TraitName} { +// trait ${0:NewTrait} { // // Used as an associated constant. // const CONST_ASSOC: usize = N * 4; // @@ -53,7 +58,7 @@ use syntax::{ // const_maker! 
{i32, 7} // } // -// impl ${0:TraitName} for Foo { +// impl ${0:NewTrait} for Foo { // // Used as an associated constant. // const CONST_ASSOC: usize = N * 4; // @@ -94,8 +99,10 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ "Generate trait from impl", impl_ast.syntax().text_range(), |builder| { + let impl_ast = builder.make_mut(impl_ast); let trait_items = assoc_items.clone_for_update(); - let impl_items = assoc_items.clone_for_update(); + let impl_items = builder.make_mut(assoc_items); + let impl_name = builder.make_mut(impl_name); trait_items.assoc_items().for_each(|item| { strip_body(&item); @@ -112,46 +119,42 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ impl_ast.generic_param_list(), impl_ast.where_clause(), trait_items, - ); + ) + .clone_for_update(); + + let trait_name = trait_ast.name().expect("new trait should have a name"); + let trait_name_ref = make::name_ref(&trait_name.to_string()).clone_for_update(); // Change `impl Foo` to `impl NewTrait for Foo` - let arg_list = if let Some(genpars) = impl_ast.generic_param_list() { - genpars.to_generic_args().to_string() - } else { - "".to_owned() - }; + let mut elements = vec![ + trait_name_ref.syntax().clone().into(), + make::tokens::single_space().into(), + make::token(T![for]).into(), + ]; - if let Some(snippet_cap) = ctx.config.snippet_cap { - builder.replace_snippet( - snippet_cap, - impl_name.syntax().text_range(), - format!("${{0:TraitName}}{} for {}", arg_list, impl_name), - ); - - // Insert trait before TraitImpl - builder.insert_snippet( - snippet_cap, - impl_ast.syntax().text_range().start(), - format!( - "{}\n\n{}", - trait_ast.to_string().replace("NewTrait", "${0:TraitName}"), - IndentLevel::from_node(impl_ast.syntax()) - ), - ); - } else { - builder.replace( - impl_name.syntax().text_range(), - format!("NewTrait{} for {}", arg_list, impl_name), - ); - - // Insert trait before TraitImpl - builder.insert( - 
impl_ast.syntax().text_range().start(), - format!("{}\n\n{}", trait_ast, IndentLevel::from_node(impl_ast.syntax())), - ); + if let Some(params) = impl_ast.generic_param_list() { + let gen_args = ¶ms.to_generic_args().clone_for_update(); + elements.insert(1, gen_args.syntax().clone().into()); } - builder.replace(assoc_items.syntax().text_range(), impl_items.to_string()); + ted::insert_all(Position::before(impl_name.syntax()), elements); + + // Insert trait before TraitImpl + ted::insert_all_raw( + Position::before(impl_ast.syntax()), + vec![ + trait_ast.syntax().clone().into(), + make::tokens::whitespace(&format!("\n\n{}", impl_ast.indent_level())).into(), + ], + ); + + // Link the trait name & trait ref names together as a placeholder snippet group + if let Some(cap) = ctx.config.snippet_cap { + builder.add_placeholder_snippet_group( + cap, + vec![trait_name.syntax().clone(), trait_name_ref.syntax().clone()], + ); + } }, ); @@ -160,23 +163,8 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ /// `E0449` Trait items always share the visibility of their trait fn remove_items_visibility(item: &ast::AssocItem) { - match item { - ast::AssocItem::Const(c) => { - if let Some(vis) = c.visibility() { - ted::remove(vis.syntax()); - } - } - ast::AssocItem::Fn(f) => { - if let Some(vis) = f.visibility() { - ted::remove(vis.syntax()); - } - } - ast::AssocItem::TypeAlias(t) => { - if let Some(vis) = t.visibility() { - ted::remove(vis.syntax()); - } - } - _ => (), + if let Some(has_vis) = ast::AnyHasVisibility::cast(item.syntax().clone()) { + has_vis.set_visibility(None); } } @@ -404,12 +392,12 @@ impl F$0oo { r#" struct Foo([i32; N]); -trait ${0:TraitName} { +trait ${0:NewTrait} { // Used as an associated constant. const CONST: usize = N * 4; } -impl ${0:TraitName} for Foo { +impl ${0:NewTrait} for Foo { // Used as an associated constant. 
const CONST: usize = N * 4; } diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs index 8ad735d0ae80..268ba3225b66 100644 --- a/crates/ide-assists/src/tests/generated.rs +++ b/crates/ide-assists/src/tests/generated.rs @@ -1665,7 +1665,7 @@ macro_rules! const_maker { }; } -trait ${0:TraitName} { +trait ${0:NewTrait} { // Used as an associated constant. const CONST_ASSOC: usize = N * 4; @@ -1674,7 +1674,7 @@ trait ${0:TraitName} { const_maker! {i32, 7} } -impl ${0:TraitName} for Foo { +impl ${0:NewTrait} for Foo { // Used as an associated constant. const CONST_ASSOC: usize = N * 4; diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs index 120d801c8d17..02246fc3291d 100644 --- a/crates/syntax/src/ast/make.rs +++ b/crates/syntax/src/ast/make.rs @@ -1147,7 +1147,7 @@ pub mod tokens { pub(super) static SOURCE_FILE: Lazy> = Lazy::new(|| { SourceFile::parse( - "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let a @ [] })\n;\n\n", + "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let a @ [] })\n;\n\nimpl A for B where: {}", ) }); From 4af075dcda72ff09e6782674d11f3c48d312d7d7 Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Thu, 15 Feb 2024 21:19:37 -0500 Subject: [PATCH 077/127] Remove `SourceChangeBuilder::{insert,remove}_snippet` All assists have been migrated to use the structured snippet versions of these methods. 
--- crates/ide-db/src/source_change.rs | 20 -------------------- 1 file changed, 20 deletions(-) diff --git a/crates/ide-db/src/source_change.rs b/crates/ide-db/src/source_change.rs index f09401f70afe..f59d8d08c892 100644 --- a/crates/ide-db/src/source_change.rs +++ b/crates/ide-db/src/source_change.rs @@ -289,30 +289,10 @@ impl SourceChangeBuilder { pub fn insert(&mut self, offset: TextSize, text: impl Into) { self.edit.insert(offset, text.into()) } - /// Append specified `snippet` at the given `offset` - pub fn insert_snippet( - &mut self, - _cap: SnippetCap, - offset: TextSize, - snippet: impl Into, - ) { - self.source_change.is_snippet = true; - self.insert(offset, snippet); - } /// Replaces specified `range` of text with a given string. pub fn replace(&mut self, range: TextRange, replace_with: impl Into) { self.edit.replace(range, replace_with.into()) } - /// Replaces specified `range` of text with a given `snippet`. - pub fn replace_snippet( - &mut self, - _cap: SnippetCap, - range: TextRange, - snippet: impl Into, - ) { - self.source_change.is_snippet = true; - self.replace(range, snippet); - } pub fn replace_ast(&mut self, old: N, new: N) { algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit) } From 9b3fcf9ad44faa6362be348ef233bbc81d128308 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Esteban=20K=C3=BCber?= Date: Wed, 14 Feb 2024 17:48:40 +0000 Subject: [PATCH 078/127] Detect when method call on argument could be removed to fulfill failed trait bound When encountering ```rust struct Foo; struct Bar; impl From for Foo { fn from(_: Bar) -> Self { Foo } } fn qux(_: impl From) {} fn main() { qux(Bar.into()); } ``` Suggest removing `.into()`: ``` error[E0283]: type annotations needed --> f100.rs:8:13 | 8 | qux(Bar.into()); | --- ^^^^ | | | required by a bound introduced by this call | = note: cannot satisfy `_: From` note: required by a bound in `qux` --> f100.rs:6:16 | 6 | fn qux(_: impl From) {} | ^^^^^^^^^ required by this bound in `qux` help: try 
using a fully qualified path to specify the expected types | 8 | qux(>::into(Bar)); | +++++++++++++++++++++++ ~ help: consider removing this method call, as the receiver has type `Bar` and `Bar: From` can be fulfilled | 8 - qux(Bar.into()); 8 + qux(Bar); | ``` Fix #71252 --- .../src/traits/error_reporting/suggestions.rs | 20 +++++++++++++- tests/ui/async-await/issue-72442.stderr | 5 ++++ tests/ui/error-should-say-copy-not-pod.stderr | 5 ++++ .../suggestions/issue-84973-blacklist.stderr | 5 ++++ .../argument-with-unnecessary-method-call.rs | 11 ++++++++ ...gument-with-unnecessary-method-call.stderr | 27 +++++++++++++++++++ 6 files changed, 72 insertions(+), 1 deletion(-) create mode 100644 tests/ui/trait-bounds/argument-with-unnecessary-method-call.rs create mode 100644 tests/ui/trait-bounds/argument-with-unnecessary-method-call.stderr diff --git a/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs b/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs index 335e6ff28226..00672af52ead 100644 --- a/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs +++ b/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs @@ -3961,6 +3961,24 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> { if let Node::Expr(expr) = tcx.hir_node(arg_hir_id) && let Some(typeck_results) = &self.typeck_results { + if let hir::Expr { kind: hir::ExprKind::MethodCall(_, rcvr, _, _), .. 
} = expr + && let Some(ty) = typeck_results.node_type_opt(rcvr.hir_id) + && let Some(failed_pred) = failed_pred.to_opt_poly_trait_pred() + && let pred = failed_pred.map_bound(|pred| pred.with_self_ty(tcx, ty)) + && self.predicate_must_hold_modulo_regions(&Obligation::misc( + tcx, expr.span, body_id, param_env, pred, + )) + { + err.span_suggestion_verbose( + expr.span.with_lo(rcvr.span.hi()), + format!( + "consider removing this method call, as the receiver has type `{ty}` and \ + `{pred}` trivially holds", + ), + "", + Applicability::MaybeIncorrect, + ); + } if let hir::Expr { kind: hir::ExprKind::Block(block, _), .. } = expr { let inner_expr = expr.peel_blocks(); let ty = typeck_results @@ -4096,7 +4114,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> { } } - if let Node::Expr(expr) = tcx.hir_node(call_hir_id) { + if let Node::Expr(expr) = call_node { if let hir::ExprKind::Call(hir::Expr { span, .. }, _) | hir::ExprKind::MethodCall( hir::PathSegment { ident: Ident { span, .. }, .. 
}, diff --git a/tests/ui/async-await/issue-72442.stderr b/tests/ui/async-await/issue-72442.stderr index 313f6079c7c1..e68f7a299908 100644 --- a/tests/ui/async-await/issue-72442.stderr +++ b/tests/ui/async-await/issue-72442.stderr @@ -8,6 +8,11 @@ LL | let mut f = File::open(path.to_str())?; | note: required by a bound in `File::open` --> $SRC_DIR/std/src/fs.rs:LL:COL +help: consider removing this method call, as the receiver has type `&Path` and `&Path: AsRef` trivially holds + | +LL - let mut f = File::open(path.to_str())?; +LL + let mut f = File::open(path)?; + | error: aborting due to 1 previous error diff --git a/tests/ui/error-should-say-copy-not-pod.stderr b/tests/ui/error-should-say-copy-not-pod.stderr index 658584e2ff4b..6aa129fa29b5 100644 --- a/tests/ui/error-should-say-copy-not-pod.stderr +++ b/tests/ui/error-should-say-copy-not-pod.stderr @@ -11,6 +11,11 @@ note: required by a bound in `check_bound` | LL | fn check_bound(_: T) {} | ^^^^ required by this bound in `check_bound` +help: consider removing this method call, as the receiver has type `&'static str` and `&'static str: Copy` trivially holds + | +LL - check_bound("nocopy".to_string()); +LL + check_bound("nocopy"); + | error: aborting due to 1 previous error diff --git a/tests/ui/suggestions/issue-84973-blacklist.stderr b/tests/ui/suggestions/issue-84973-blacklist.stderr index 8e980997089e..333382283285 100644 --- a/tests/ui/suggestions/issue-84973-blacklist.stderr +++ b/tests/ui/suggestions/issue-84973-blacklist.stderr @@ -11,6 +11,11 @@ note: required by a bound in `f_copy` | LL | fn f_copy(t: T) {} | ^^^^ required by this bound in `f_copy` +help: consider removing this method call, as the receiver has type `&'static str` and `&'static str: Copy` trivially holds + | +LL - f_copy("".to_string()); +LL + f_copy(""); + | error[E0277]: the trait bound `S: Clone` is not satisfied --> $DIR/issue-84973-blacklist.rs:16:13 diff --git a/tests/ui/trait-bounds/argument-with-unnecessary-method-call.rs 
b/tests/ui/trait-bounds/argument-with-unnecessary-method-call.rs new file mode 100644 index 000000000000..d8fd1d44a985 --- /dev/null +++ b/tests/ui/trait-bounds/argument-with-unnecessary-method-call.rs @@ -0,0 +1,11 @@ +struct Foo; +struct Bar; +impl From for Foo { + fn from(_: Bar) -> Self { Foo } +} +fn qux(_: impl From) {} +fn main() { + qux(Bar.into()); //~ ERROR type annotations needed + //~| HELP try using a fully qualified path to specify the expected types + //~| HELP consider removing this method call, as the receiver has type `Bar` and `Bar: From` trivially holds +} diff --git a/tests/ui/trait-bounds/argument-with-unnecessary-method-call.stderr b/tests/ui/trait-bounds/argument-with-unnecessary-method-call.stderr new file mode 100644 index 000000000000..49230c98a12c --- /dev/null +++ b/tests/ui/trait-bounds/argument-with-unnecessary-method-call.stderr @@ -0,0 +1,27 @@ +error[E0283]: type annotations needed + --> $DIR/argument-with-unnecessary-method-call.rs:8:13 + | +LL | qux(Bar.into()); + | --- ^^^^ + | | + | required by a bound introduced by this call + | + = note: cannot satisfy `_: From` +note: required by a bound in `qux` + --> $DIR/argument-with-unnecessary-method-call.rs:6:16 + | +LL | fn qux(_: impl From) {} + | ^^^^^^^^^ required by this bound in `qux` +help: try using a fully qualified path to specify the expected types + | +LL | qux(>::into(Bar)); + | +++++++++++++++++++++++ ~ +help: consider removing this method call, as the receiver has type `Bar` and `Bar: From` trivially holds + | +LL - qux(Bar.into()); +LL + qux(Bar); + | + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0283`. 
From e9efb568f6d8b81d672ab0cbae0bb7b08c7ec2eb Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Thu, 15 Feb 2024 23:16:27 -0500 Subject: [PATCH 079/127] Add dos line ending test --- crates/rust-analyzer/src/lsp/to_proto.rs | 40 +++++++++++++++++++++++- 1 file changed, 39 insertions(+), 1 deletion(-) diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs index 60281202f84d..9cce55c66f84 100644 --- a/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/crates/rust-analyzer/src/lsp/to_proto.rs @@ -1705,9 +1705,10 @@ fn bar(_: usize) {} expect: Expect, ) { let source = stdx::trim_indent(ra_fixture); + let endings = if source.contains('\r') { LineEndings::Dos } else { LineEndings::Unix }; let line_index = LineIndex { index: Arc::new(ide::LineIndex::new(&source)), - endings: LineEndings::Unix, + endings, encoding: PositionEncoding::Utf8, }; @@ -2609,6 +2610,43 @@ struct ProcMacro { ); } + #[test] + fn snippet_rendering_handle_dos_line_endings() { + // unix -> dos conversion should be handled after placing snippets + let mut edit = TextEdit::builder(); + edit.insert(6.into(), "\n\n->".to_owned()); + + let edit = edit.finish(); + let snippets = SnippetEdit::new(vec![Snippet::Tabstop(10.into())]); + + check_rendered_snippets_in_source( + "yeah\r\n<-tabstop here", + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 0, + }, + end: Position { + line: 1, + character: 0, + }, + }, + new_text: "\r\n\r\n->$0", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + ] + "#]], + ) + } + // `Url` is not able to parse windows paths on unix machines. 
#[test] #[cfg(target_os = "windows")] From e4a3cc34d517fe34b5c14295d2a13b66f339f408 Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Thu, 15 Feb 2024 23:29:17 -0500 Subject: [PATCH 080/127] Add better snippet bits test --- crates/rust-analyzer/src/lsp/to_proto.rs | 90 +++++++++++++++--------- 1 file changed, 55 insertions(+), 35 deletions(-) diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs index 9cce55c66f84..1fef52cc8230 100644 --- a/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/crates/rust-analyzer/src/lsp/to_proto.rs @@ -2145,51 +2145,71 @@ fn bar(_: usize) {} fn snippet_rendering_escape_snippet_bits() { // only needed for snippet formats let mut edit = TextEdit::builder(); - edit.insert(0.into(), r"abc\def$".to_owned()); - edit.insert(8.into(), r"ghi\jkl$".to_owned()); + edit.insert(0.into(), r"$ab{}$c\def".to_owned()); + edit.insert(8.into(), r"ghi\jk<-check_insert_here$".to_owned()); + edit.insert(10.into(), r"a\\b\\c{}$".to_owned()); let edit = edit.finish(); - let snippets = - SnippetEdit::new(vec![Snippet::Placeholder(TextRange::new(0.into(), 3.into()))]); + let snippets = SnippetEdit::new(vec![ + Snippet::Placeholder(TextRange::new(1.into(), 9.into())), + Snippet::Tabstop(25.into()), + ]); check_rendered_snippets( edit, snippets, expect![[r#" - [ - SnippetTextEdit { - range: Range { - start: Position { - line: 0, - character: 0, - }, - end: Position { - line: 0, - character: 0, + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 0, + }, }, + new_text: "\\$${1:ab{}\\$c\\\\d}ef", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, }, - new_text: "${0:abc}\\\\def\\$", - insert_text_format: Some( - Snippet, - ), - annotation_id: None, - }, - SnippetTextEdit { - range: Range { - start: Position { - line: 0, - character: 8, - }, - end: Position { - line: 0, - character: 8, + SnippetTextEdit { + range: Range { + 
start: Position { + line: 0, + character: 8, + }, + end: Position { + line: 0, + character: 8, + }, }, + new_text: "ghi\\\\jk$0<-check_insert_here\\$", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, }, - new_text: "ghi\\jkl$", - insert_text_format: None, - annotation_id: None, - }, - ] - "#]], + SnippetTextEdit { + range: Range { + start: Position { + line: 0, + character: 10, + }, + end: Position { + line: 0, + character: 10, + }, + }, + new_text: "a\\\\b\\\\c{}$", + insert_text_format: None, + annotation_id: None, + }, + ] + "#]], ); } From 1aeec93412d4e06fed78d72ae824be9d107187e8 Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Thu, 15 Feb 2024 23:37:54 -0500 Subject: [PATCH 081/127] Only use `snippet_text_edit` to make snippet `SnippetTextEdit`s The eventual LSP representation looks like it will diverge from RA's representation of `SnippetTextEdit`s, so this'll make it easier to transition to the LSP representation later. --- crates/rust-analyzer/src/lsp/to_proto.rs | 51 ++++++++++-------------- 1 file changed, 21 insertions(+), 30 deletions(-) diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs index 1fef52cc8230..3d3bb8e83daa 100644 --- a/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/crates/rust-analyzer/src/lsp/to_proto.rs @@ -971,15 +971,11 @@ fn merge_text_and_snippet_edits( snippet_range }; - let range = range(line_index, snippet_range); - let new_text = format!("${snippet_index}"); - - edits.push(SnippetTextEdit { - range, - new_text, - insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET), - annotation_id: None, - }) + edits.push(snippet_text_edit( + line_index, + true, + Indel { insert: format!("${snippet_index}"), delete: snippet_range }, + )) } if snippets.peek().is_some_and(|(_, range)| { @@ -1002,11 +998,11 @@ fn merge_text_and_snippet_edits( ) }); - let mut text_edit = text_edit(line_index, current_indel); + let mut new_text = current_indel.insert; // escape out snippet 
text - stdx::replace(&mut text_edit.new_text, '\\', r"\\"); - stdx::replace(&mut text_edit.new_text, '$', r"\$"); + stdx::replace(&mut new_text, '\\', r"\\"); + stdx::replace(&mut new_text, '$', r"\$"); // ...and apply! for (index, range) in all_snippets.iter().rev() { @@ -1014,19 +1010,18 @@ fn merge_text_and_snippet_edits( let end = (range.end() - new_range.start()).into(); if range.is_empty() { - text_edit.new_text.insert_str(start, &format!("${index}")); + new_text.insert_str(start, &format!("${index}")); } else { - text_edit.new_text.insert(end, '}'); - text_edit.new_text.insert_str(start, &format!("${{{index}:")); + new_text.insert(end, '}'); + new_text.insert_str(start, &format!("${{{index}:")); } } - edits.push(SnippetTextEdit { - range: text_edit.range, - new_text: text_edit.new_text, - insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET), - annotation_id: None, - }) + edits.push(snippet_text_edit( + line_index, + true, + Indel { insert: new_text, delete: current_indel.delete }, + )) } else { // snippet edit was beyond the current one // since it wasn't consumed, it's available for the next pass @@ -1052,15 +1047,11 @@ fn merge_text_and_snippet_edits( snippet_range }; - let range = range(line_index, snippet_range); - let new_text = format!("${snippet_index}"); - - SnippetTextEdit { - range, - new_text, - insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET), - annotation_id: None, - } + snippet_text_edit( + line_index, + true, + Indel { insert: format!("${snippet_index}"), delete: snippet_range }, + ) })); edits From 1d8ed3408e16a8fa9fc591b94ce3295d3a10ca41 Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Fri, 16 Feb 2024 00:14:39 -0500 Subject: [PATCH 082/127] Escape snippet bits in-between placing snippets Done so that we don't shift the range that we insert the snippet at. 
--- crates/rust-analyzer/src/lsp/to_proto.rs | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs index 3d3bb8e83daa..6d4bae5206b8 100644 --- a/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/crates/rust-analyzer/src/lsp/to_proto.rs @@ -1000,23 +1000,35 @@ fn merge_text_and_snippet_edits( let mut new_text = current_indel.insert; - // escape out snippet text - stdx::replace(&mut new_text, '\\', r"\\"); - stdx::replace(&mut new_text, '$', r"\$"); + // find which snippet bits need to be escaped + let escape_places = + new_text.rmatch_indices(['\\', '$']).map(|(insert, _)| insert).collect_vec(); + let mut escape_places = escape_places.into_iter().peekable(); + let mut escape_prior_bits = |new_text: &mut String, up_to: usize| { + for before in escape_places.peeking_take_while(|insert| *insert >= up_to) { + new_text.insert(before, '\\'); + } + }; - // ...and apply! + // insert snippets, and escaping any needed bits along the way for (index, range) in all_snippets.iter().rev() { - let start = (range.start() - new_range.start()).into(); - let end = (range.end() - new_range.start()).into(); + let text_range = range - new_range.start(); + let (start, end) = (text_range.start().into(), text_range.end().into()); if range.is_empty() { + escape_prior_bits(&mut new_text, start); new_text.insert_str(start, &format!("${index}")); } else { + escape_prior_bits(&mut new_text, end); new_text.insert(end, '}'); + escape_prior_bits(&mut new_text, start); new_text.insert_str(start, &format!("${{{index}:")); } } + // escape any remaining bits + escape_prior_bits(&mut new_text, 0); + edits.push(snippet_text_edit( line_index, true, From e8457bb78b85ec4bcf0f28a3a18e4ed70cd3ccb9 Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Fri, 16 Feb 2024 00:18:00 -0500 Subject: [PATCH 083/127] Escape `{` and `}` as well These are used in placeholder snippets, which may occur elsewhere 
in the insert text. --- crates/rust-analyzer/src/lsp/to_proto.rs | 152 ++++++++++++----------- 1 file changed, 77 insertions(+), 75 deletions(-) diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs index 6d4bae5206b8..727007bba083 100644 --- a/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/crates/rust-analyzer/src/lsp/to_proto.rs @@ -1001,8 +1001,10 @@ fn merge_text_and_snippet_edits( let mut new_text = current_indel.insert; // find which snippet bits need to be escaped - let escape_places = - new_text.rmatch_indices(['\\', '$']).map(|(insert, _)| insert).collect_vec(); + let escape_places = new_text + .rmatch_indices(['\\', '$', '{', '}']) + .map(|(insert, _)| insert) + .collect_vec(); let mut escape_places = escape_places.into_iter().peekable(); let mut escape_prior_bits = |new_text: &mut String, up_to: usize| { for before in escape_places.peeking_take_while(|insert| *insert >= up_to) { @@ -2173,7 +2175,7 @@ fn bar(_: usize) {} character: 0, }, }, - new_text: "\\$${1:ab{}\\$c\\\\d}ef", + new_text: "\\$${1:ab\\{\\}\\$c\\\\d}ef", insert_text_format: Some( Snippet, ), @@ -2242,41 +2244,41 @@ struct ProcMacro { edit, snippets, expect![[r#" - [ - SnippetTextEdit { - range: Range { - start: Position { - line: 1, - character: 4, - }, - end: Position { - line: 1, - character: 13, - }, - }, - new_text: "let", - insert_text_format: None, - annotation_id: None, - }, - SnippetTextEdit { - range: Range { - start: Position { - line: 1, - character: 14, - }, - end: Position { - line: 3, - character: 5, - }, - }, - new_text: "$0disabled = false;\n ProcMacro {\n disabled,\n }", - insert_text_format: Some( - Snippet, - ), - annotation_id: None, - }, - ] -"#]], + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 13, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + 
line: 1, + character: 14, + }, + end: Position { + line: 3, + character: 5, + }, + }, + new_text: "$0disabled = false;\n ProcMacro \\{\n disabled,\n \\}", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + ] + "#]], ); } @@ -2306,41 +2308,41 @@ struct P { edit, snippets, expect![[r#" - [ - SnippetTextEdit { - range: Range { - start: Position { - line: 1, - character: 4, - }, - end: Position { - line: 1, - character: 5, - }, - }, - new_text: "let", - insert_text_format: None, - annotation_id: None, - }, - SnippetTextEdit { - range: Range { - start: Position { - line: 1, - character: 6, - }, - end: Position { - line: 3, - character: 5, - }, - }, - new_text: "$0disabled = false;\n ProcMacro {\n disabled,\n }", - insert_text_format: Some( - Snippet, - ), - annotation_id: None, - }, - ] -"#]], + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 5, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 3, + character: 5, + }, + }, + new_text: "$0disabled = false;\n ProcMacro \\{\n disabled,\n \\}", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + ] + "#]], ); } @@ -2398,7 +2400,7 @@ struct ProcMacro { character: 5, }, }, - new_text: "${0:disabled} = false;\n ProcMacro {\n disabled,\n }", + new_text: "${0:disabled} = false;\n ProcMacro \\{\n disabled,\n \\}", insert_text_format: Some( Snippet, ), @@ -2463,7 +2465,7 @@ struct P { character: 5, }, }, - new_text: "${0:disabled} = false;\n ProcMacro {\n disabled,\n }", + new_text: "${0:disabled} = false;\n ProcMacro \\{\n disabled,\n \\}", insert_text_format: Some( Snippet, ), From c00c9ee95965953c8f79b9ce71f766b06eff1a33 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 16 Feb 2024 10:54:54 +0100 Subject: [PATCH 084/127] fix: Respect textual length 
of paths in find-path --- crates/hir-def/src/find_path.rs | 63 +++++++++++++++++++++++++------ crates/hir-def/src/hir.rs | 1 + crates/hir-expand/src/mod_path.rs | 15 ++++++++ 3 files changed, 68 insertions(+), 11 deletions(-) diff --git a/crates/hir-def/src/find_path.rs b/crates/hir-def/src/find_path.rs index 2e137f67b4c2..26247ba5b507 100644 --- a/crates/hir-def/src/find_path.rs +++ b/crates/hir-def/src/find_path.rs @@ -447,18 +447,25 @@ fn select_best_path( } const STD_CRATES: [Name; 3] = [known::std, known::core, known::alloc]; - let choose = |new_path: (ModPath, _), old_path: (ModPath, _)| { - let new_has_prelude = new_path.0.segments().iter().any(|seg| seg == &known::prelude); - let old_has_prelude = old_path.0.segments().iter().any(|seg| seg == &known::prelude); + let choose = |new: (ModPath, _), old: (ModPath, _)| { + let (new_path, _) = &new; + let (old_path, _) = &old; + let new_has_prelude = new_path.segments().iter().any(|seg| seg == &known::prelude); + let old_has_prelude = old_path.segments().iter().any(|seg| seg == &known::prelude); match (new_has_prelude, old_has_prelude, prefer_prelude) { - (true, false, true) | (false, true, false) => new_path, - (true, false, false) | (false, true, true) => old_path, - // no prelude difference in the paths, so pick the smaller one + (true, false, true) | (false, true, false) => new, + (true, false, false) | (false, true, true) => old, + // no prelude difference in the paths, so pick the shorter one (true, true, _) | (false, false, _) => { - if new_path.0.len() < old_path.0.len() { - new_path + let new_path_is_shorter = new_path + .len() + .cmp(&old_path.len()) + .then_with(|| new_path.textual_len().cmp(&old_path.textual_len())) + .is_lt(); + if new_path_is_shorter { + new } else { - old_path + old } } } @@ -469,8 +476,8 @@ fn select_best_path( let rank = match prefer_no_std { false => |name: &Name| match name { name if name == &known::core => 0, - name if name == &known::alloc => 0, - name if name == &known::std 
=> 1, + name if name == &known::alloc => 1, + name if name == &known::std => 2, _ => unreachable!(), }, true => |name: &Name| match name { @@ -1539,4 +1546,38 @@ pub mod foo { "krate::prelude::Foo", ); } + + #[test] + fn respect_segment_length() { + check_found_path( + r#" +//- /main.rs crate:main deps:petgraph +$0 +//- /petgraph.rs crate:petgraph +pub mod graph { + pub use crate::graph_impl::{ + NodeIndex + }; +} + +mod graph_impl { + pub struct NodeIndex(Ix); +} + +pub mod stable_graph { + #[doc(no_inline)] + pub use crate::graph::{NodeIndex}; +} + +pub mod prelude { + #[doc(no_inline)] + pub use crate::graph::{NodeIndex}; +} +"#, + "petgraph::graph::NodeIndex", + "petgraph::graph::NodeIndex", + "petgraph::graph::NodeIndex", + "petgraph::graph::NodeIndex", + ); + } } diff --git a/crates/hir-def/src/hir.rs b/crates/hir-def/src/hir.rs index bbe62f27b9a0..34b2910b4f5e 100644 --- a/crates/hir-def/src/hir.rs +++ b/crates/hir-def/src/hir.rs @@ -182,6 +182,7 @@ pub enum Expr { tail: Option, }, Const(ConstBlockId), + // FIXME: Fold this into Block with an unsafe flag? 
Unsafe { id: Option, statements: Box<[Statement]>, diff --git a/crates/hir-expand/src/mod_path.rs b/crates/hir-expand/src/mod_path.rs index b64c3549e421..136b0935be27 100644 --- a/crates/hir-expand/src/mod_path.rs +++ b/crates/hir-expand/src/mod_path.rs @@ -94,6 +94,21 @@ impl ModPath { } } + pub fn textual_len(&self) -> usize { + let base = match self.kind { + PathKind::Plain => 0, + PathKind::Super(0) => "self".len(), + PathKind::Super(i) => "super".len() * i as usize, + PathKind::Crate => "crate".len(), + PathKind::Abs => 0, + PathKind::DollarCrate(_) => "$crate".len(), + }; + self.segments() + .iter() + .map(|segment| segment.as_str().map_or(0, str::len)) + .fold(base, core::ops::Add::add) + } + pub fn is_ident(&self) -> bool { self.as_ident().is_some() } From fd652ceb73539ebee1fc9ea3166b35c227167b25 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 16 Feb 2024 12:16:43 +0100 Subject: [PATCH 085/127] fix: Don't show type mismatches for `{unknown}` to non-`{unknown}` mismatches --- crates/hir-ty/src/infer.rs | 131 ++++++++++++++++++++++++- crates/hir-ty/src/tests/diagnostics.rs | 42 ++++++++ crates/hir-ty/src/tests/simple.rs | 3 - 3 files changed, 168 insertions(+), 8 deletions(-) diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index b0ae437ee3c1..1977f00517cd 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -26,7 +26,7 @@ use std::{convert::identity, ops::Index}; use chalk_ir::{ cast::Cast, fold::TypeFoldable, interner::HasInterner, DebruijnIndex, Mutability, Safety, - Scalar, TyKind, TypeFlags, + Scalar, TyKind, TypeFlags, Variance, }; use either::Either; use hir_def::{ @@ -58,8 +58,9 @@ use crate::{ static_lifetime, to_assoc_type_id, traits::FnTrait, utils::{InTypeConstIdMetadata, UnevaluatedConstEvaluatorFolder}, - AliasEq, AliasTy, ClosureId, DomainGoal, GenericArg, Goal, ImplTraitId, InEnvironment, - Interner, ProjectionTy, RpitId, Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, + AliasEq, 
AliasTy, Binders, ClosureId, Const, DomainGoal, GenericArg, Goal, ImplTraitId, + InEnvironment, Interner, Lifetime, ProjectionTy, RpitId, Substitution, TraitEnvironment, + TraitRef, Ty, TyBuilder, TyExt, }; // This lint has a false positive here. See the link below for details. @@ -688,10 +689,17 @@ impl<'a> InferenceContext<'a> { for ty in type_of_for_iterator.values_mut() { *ty = table.resolve_completely(ty.clone()); } - for mismatch in type_mismatches.values_mut() { + type_mismatches.retain(|_, mismatch| { mismatch.expected = table.resolve_completely(mismatch.expected.clone()); mismatch.actual = table.resolve_completely(mismatch.actual.clone()); - } + chalk_ir::zip::Zip::zip_with( + &mut UnknownMismatch(self.db), + Variance::Invariant, + &mismatch.expected, + &mismatch.actual, + ) + .is_ok() + }); diagnostics.retain_mut(|diagnostic| { use InferenceDiagnostic::*; match diagnostic { @@ -1502,3 +1510,116 @@ impl std::ops::BitOrAssign for Diverges { *self = *self | other; } } +/// A zipper that checks for unequal `{unknown}` occurrences in the two types. Used to filter out +/// mismatch diagnostics that only differ in `{unknown}`. These mismatches are usually not helpful. +/// As the cause is usually an underlying name resolution problem. 
+struct UnknownMismatch<'db>(&'db dyn HirDatabase); +impl chalk_ir::zip::Zipper for UnknownMismatch<'_> { + fn zip_tys(&mut self, variance: Variance, a: &Ty, b: &Ty) -> chalk_ir::Fallible<()> { + let zip_substs = |this: &mut Self, + variances, + sub_a: &Substitution, + sub_b: &Substitution| { + this.zip_substs(variance, variances, sub_a.as_slice(Interner), sub_b.as_slice(Interner)) + }; + match (a.kind(Interner), b.kind(Interner)) { + (TyKind::Adt(id_a, sub_a), TyKind::Adt(id_b, sub_b)) if id_a == id_b => zip_substs( + self, + Some(self.unification_database().adt_variance(*id_a)), + sub_a, + sub_b, + )?, + ( + TyKind::AssociatedType(assoc_ty_a, sub_a), + TyKind::AssociatedType(assoc_ty_b, sub_b), + ) if assoc_ty_a == assoc_ty_b => zip_substs(self, None, sub_a, sub_b)?, + (TyKind::Tuple(arity_a, sub_a), TyKind::Tuple(arity_b, sub_b)) + if arity_a == arity_b => + { + zip_substs(self, None, sub_a, sub_b)? + } + (TyKind::OpaqueType(opaque_ty_a, sub_a), TyKind::OpaqueType(opaque_ty_b, sub_b)) + if opaque_ty_a == opaque_ty_b => + { + zip_substs(self, None, sub_a, sub_b)? + } + (TyKind::Slice(ty_a), TyKind::Slice(ty_b)) => self.zip_tys(variance, ty_a, ty_b)?, + (TyKind::FnDef(fn_def_a, sub_a), TyKind::FnDef(fn_def_b, sub_b)) + if fn_def_a == fn_def_b => + { + zip_substs( + self, + Some(self.unification_database().fn_def_variance(*fn_def_a)), + sub_a, + sub_b, + )? + } + (TyKind::Ref(mutability_a, _, ty_a), TyKind::Ref(mutability_b, _, ty_b)) + if mutability_a == mutability_b => + { + self.zip_tys(variance, ty_a, ty_b)? + } + (TyKind::Raw(mutability_a, ty_a), TyKind::Raw(mutability_b, ty_b)) + if mutability_a == mutability_b => + { + self.zip_tys(variance, ty_a, ty_b)? + } + (TyKind::Array(ty_a, const_a), TyKind::Array(ty_b, const_b)) if const_a == const_b => { + self.zip_tys(variance, ty_a, ty_b)? + } + (TyKind::Closure(id_a, sub_a), TyKind::Closure(id_b, sub_b)) if id_a == id_b => { + zip_substs(self, None, sub_a, sub_b)? 
+ } + (TyKind::Coroutine(coroutine_a, sub_a), TyKind::Coroutine(coroutine_b, sub_b)) + if coroutine_a == coroutine_b => + { + zip_substs(self, None, sub_a, sub_b)? + } + ( + TyKind::CoroutineWitness(coroutine_a, sub_a), + TyKind::CoroutineWitness(coroutine_b, sub_b), + ) if coroutine_a == coroutine_b => zip_substs(self, None, sub_a, sub_b)?, + (TyKind::Function(fn_ptr_a), TyKind::Function(fn_ptr_b)) + if fn_ptr_a.sig == fn_ptr_b.sig && fn_ptr_a.num_binders == fn_ptr_b.num_binders => + { + zip_substs(self, None, &fn_ptr_a.substitution.0, &fn_ptr_b.substitution.0)? + } + (TyKind::Error, TyKind::Error) => (), + (TyKind::Error, _) | (_, TyKind::Error) => return Err(chalk_ir::NoSolution), + _ => (), + } + + Ok(()) + } + + fn zip_lifetimes(&mut self, _: Variance, _: &Lifetime, _: &Lifetime) -> chalk_ir::Fallible<()> { + Ok(()) + } + + fn zip_consts(&mut self, _: Variance, _: &Const, _: &Const) -> chalk_ir::Fallible<()> { + Ok(()) + } + + fn zip_binders( + &mut self, + variance: Variance, + a: &Binders, + b: &Binders, + ) -> chalk_ir::Fallible<()> + where + T: Clone + + HasInterner + + chalk_ir::zip::Zip + + TypeFoldable, + { + chalk_ir::zip::Zip::zip_with(self, variance, a.skip_binders(), b.skip_binders()) + } + + fn interner(&self) -> Interner { + Interner + } + + fn unification_database(&self) -> &dyn chalk_ir::UnificationDatabase { + &self.0 + } +} diff --git a/crates/hir-ty/src/tests/diagnostics.rs b/crates/hir-ty/src/tests/diagnostics.rs index 1876be303ad4..80f92eaf4355 100644 --- a/crates/hir-ty/src/tests/diagnostics.rs +++ b/crates/hir-ty/src/tests/diagnostics.rs @@ -1,3 +1,5 @@ +use crate::tests::check_no_mismatches; + use super::check; #[test] @@ -94,3 +96,43 @@ fn test(x: bool) { "#, ); } + +#[test] +fn no_mismatches_on_atpit() { + check_no_mismatches( + r#" +//- minicore: option, sized +#![feature(impl_trait_in_assoc_type)] + +trait WrappedAssoc { + type Assoc; + fn do_thing(&self) -> Option; +} + +struct Foo; +impl WrappedAssoc for Foo { + type Assoc = impl 
Sized; + + fn do_thing(&self) -> Option { + Some(()) + } +} +"#, + ); + check_no_mismatches( + r#" +//- minicore: option, sized +#![feature(impl_trait_in_assoc_type)] + +trait Trait { + type Assoc; + const DEFINE: Option; +} + +impl Trait for () { + type Assoc = impl Sized; + const DEFINE: Option = Option::Some(()); +} +"#, + ); +} diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs index 847478228260..6c7dbe1db6ff 100644 --- a/crates/hir-ty/src/tests/simple.rs +++ b/crates/hir-ty/src/tests/simple.rs @@ -3376,11 +3376,8 @@ fn main() { [x,] = &[1,]; //^^^^expected &[i32; 1], got [{unknown}; _] - // FIXME we only want the outermost error, but this matches the current - // behavior of slice patterns let x; [(x,),] = &[(1,),]; - // ^^^^expected {unknown}, got ({unknown},) //^^^^^^^expected &[(i32,); 1], got [{unknown}; _] let x; From ed425f8700ba9917126c652e44473fae1d4513c7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Fri, 16 Feb 2024 14:23:10 +0200 Subject: [PATCH 086/127] Bump actions/checkout to v4 --- .github/workflows/autopublish.yaml | 2 +- .github/workflows/ci.yaml | 8 ++++---- .github/workflows/fuzz.yml | 2 +- .github/workflows/metrics.yaml | 6 +++--- .github/workflows/publish-libs.yaml | 2 +- .github/workflows/release.yaml | 8 ++++---- .github/workflows/rustdoc.yaml | 2 +- 7 files changed, 15 insertions(+), 15 deletions(-) diff --git a/.github/workflows/autopublish.yaml b/.github/workflows/autopublish.yaml index 9a5015005b3d..4b97637088c3 100644 --- a/.github/workflows/autopublish.yaml +++ b/.github/workflows/autopublish.yaml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index d751c3ee0da6..6ea9b6e253b7 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -27,7 +27,7 @@ jobs: typescript: ${{ 
steps.filter.outputs.typescript }} proc_macros: ${{ steps.filter.outputs.proc_macros }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: dorny/paths-filter@1441771bbfdd59dcd748680ee64ebd8faab1a242 id: filter with: @@ -56,7 +56,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha }} @@ -129,7 +129,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install Rust toolchain run: | @@ -161,7 +161,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 if: needs.changes.outputs.typescript == 'true' - name: Install Nodejs diff --git a/.github/workflows/fuzz.yml b/.github/workflows/fuzz.yml index 5af8aa1f77aa..f88c7f95d5c9 100644 --- a/.github/workflows/fuzz.yml +++ b/.github/workflows/fuzz.yml @@ -27,7 +27,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha }} fetch-depth: 1 diff --git a/.github/workflows/metrics.yaml b/.github/workflows/metrics.yaml index e6a9917a0bf3..ed775ae2d35b 100644 --- a/.github/workflows/metrics.yaml +++ b/.github/workflows/metrics.yaml @@ -36,7 +36,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Restore cargo cache uses: actions/cache@v3 @@ -73,7 +73,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Restore cargo cache uses: actions/cache@v3 @@ -106,7 +106,7 @@ jobs: needs: [build_metrics, other_metrics] steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Download build metrics uses: actions/download-artifact@v3 diff --git a/.github/workflows/publish-libs.yaml b/.github/workflows/publish-libs.yaml index 6d026c9ad910..862373ec1cce 100644 --- 
a/.github/workflows/publish-libs.yaml +++ b/.github/workflows/publish-libs.yaml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 9077a9ac21eb..f22e40181ba1 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -59,7 +59,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: ${{ env.FETCH_DEPTH }} @@ -78,7 +78,7 @@ jobs: rustup component add rust-src - name: Install Node.js - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 with: node-version: 16 @@ -154,7 +154,7 @@ jobs: run: apk add --no-cache git clang lld musl-dev nodejs npm - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: ${{ env.FETCH_DEPTH }} @@ -199,7 +199,7 @@ jobs: - run: 'echo "TAG: $TAG"' - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: ${{ env.FETCH_DEPTH }} diff --git a/.github/workflows/rustdoc.yaml b/.github/workflows/rustdoc.yaml index 05f3e254e5f5..12a1a791fda2 100644 --- a/.github/workflows/rustdoc.yaml +++ b/.github/workflows/rustdoc.yaml @@ -17,7 +17,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install Rust toolchain run: rustup update --no-self-update stable From b1404d387ae4d9d6735c59ea74b75ea1d526bf98 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 16 Feb 2024 14:48:25 +0100 Subject: [PATCH 087/127] fix: Split toolchain and datalayout out of CrateData --- crates/base-db/src/input.rs | 65 +++------ crates/base-db/src/lib.rs | 14 ++ crates/hir-expand/src/change.rs | 34 ++++- crates/hir-expand/src/declarative.rs | 6 +- crates/hir-ty/src/layout/target.rs | 8 +- crates/hir-ty/src/layout/tests.rs | 4 +- 
crates/ide-completion/src/context.rs | 2 +- crates/ide/src/doc_links.rs | 2 +- crates/ide/src/lib.rs | 4 +- crates/ide/src/shuffle_crate_graph.rs | 2 - crates/ide/src/status.rs | 8 -- crates/load-cargo/src/lib.rs | 15 +- crates/project-model/src/tests.rs | 11 +- crates/project-model/src/workspace.rs | 129 +++++------------- .../cargo_hello_world_project_model.txt | 20 --- ...project_model_with_selective_overrides.txt | 20 --- ..._project_model_with_wildcard_overrides.txt | 20 --- ...rust_project_hello_world_project_model.txt | 44 ------ crates/rust-analyzer/src/reload.rs | 80 ++++++++--- .../rust-analyzer/tests/slow-tests/support.rs | 9 +- crates/test-fixture/src/lib.rs | 56 +++----- crates/test-utils/src/fixture.rs | 38 ++++-- 22 files changed, 246 insertions(+), 345 deletions(-) diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index 1db8c2f29d30..3a3effae036f 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -11,7 +11,6 @@ use std::{fmt, mem, ops, str::FromStr}; use cfg::CfgOptions; use la_arena::{Arena, Idx, RawIdx}; use rustc_hash::{FxHashMap, FxHashSet}; -use semver::Version; use syntax::SmolStr; use triomphe::Arc; use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath}; @@ -292,16 +291,11 @@ pub struct CrateData { pub dependencies: Vec, pub origin: CrateOrigin, pub is_proc_macro: bool, - // FIXME: These things should not be per crate! These are more per workspace crate graph level - // things. This info does need to be somewhat present though as to prevent deduplication from - // happening across different workspaces with different layouts. - pub target_layout: TargetLayoutLoadResult, - pub toolchain: Option, } impl CrateData { /// Check if [`other`] is almost equal to [`self`] ignoring `CrateOrigin` value. 
- pub fn eq_ignoring_origin_and_deps(&self, other: &CrateData, ignore_dev_deps: bool) -> bool { + fn eq_ignoring_origin_and_deps(&self, other: &CrateData, ignore_dev_deps: bool) -> bool { // This method has some obscure bits. These are mostly there to be compliant with // some patches. References to the patches are given. if self.root_file_id != other.root_file_id { @@ -353,10 +347,6 @@ impl CrateData { slf_deps.eq(other_deps) } - - pub fn channel(&self) -> Option { - self.toolchain.as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre)) - } } #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] @@ -439,8 +429,6 @@ impl CrateGraph { env: Env, is_proc_macro: bool, origin: CrateOrigin, - target_layout: Result, Arc>, - toolchain: Option, ) -> CrateId { let data = CrateData { root_file_id, @@ -452,9 +440,7 @@ impl CrateGraph { env, dependencies: Vec::new(), origin, - target_layout, is_proc_macro, - toolchain, }; self.arena.alloc(data) } @@ -524,6 +510,10 @@ impl CrateGraph { self.arena.is_empty() } + pub fn len(&self) -> usize { + self.arena.len() + } + pub fn iter(&self) -> impl Iterator + '_ { self.arena.iter().map(|(idx, _)| idx) } @@ -624,13 +614,16 @@ impl CrateGraph { /// /// This will deduplicate the crates of the graph where possible. /// Note that for deduplication to fully work, `self`'s crate dependencies must be sorted by crate id. - /// If the crate dependencies were sorted, the resulting graph from this `extend` call will also have the crate dependencies sorted. + /// If the crate dependencies were sorted, the resulting graph from this `extend` call will also + /// have the crate dependencies sorted. + /// + /// Returns a mapping from `other`'s crate ids to the new crate ids in `self`. 
pub fn extend( &mut self, mut other: CrateGraph, proc_macros: &mut ProcMacroPaths, - on_finished: impl FnOnce(&FxHashMap), - ) { + may_merge: impl Fn((CrateId, &CrateData), (CrateId, &CrateData)) -> bool, + ) -> FxHashMap { let topo = other.crates_in_topological_order(); let mut id_map: FxHashMap = FxHashMap::default(); for topo in topo { @@ -639,6 +632,10 @@ impl CrateGraph { crate_data.dependencies.iter_mut().for_each(|dep| dep.crate_id = id_map[&dep.crate_id]); crate_data.dependencies.sort_by_key(|dep| dep.crate_id); let res = self.arena.iter().find_map(|(id, data)| { + if !may_merge((id, &data), (topo, &crate_data)) { + return None; + } + match (&data.origin, &crate_data.origin) { (a, b) if a == b => { if data.eq_ignoring_origin_and_deps(crate_data, false) { @@ -663,8 +660,7 @@ impl CrateGraph { None }); - if let Some((res, should_update_lib_to_local)) = res { - id_map.insert(topo, res); + let new_id = if let Some((res, should_update_lib_to_local)) = res { if should_update_lib_to_local { assert!(self.arena[res].origin.is_lib()); assert!(crate_data.origin.is_local()); @@ -673,16 +669,17 @@ impl CrateGraph { // Move local's dev dependencies into the newly-local-formerly-lib crate. 
self.arena[res].dependencies = crate_data.dependencies.clone(); } + res } else { - let id = self.arena.alloc(crate_data.clone()); - id_map.insert(topo, id); - } + self.arena.alloc(crate_data.clone()) + }; + id_map.insert(topo, new_id); } *proc_macros = mem::take(proc_macros).into_iter().map(|(id, macros)| (id_map[&id], macros)).collect(); - on_finished(&id_map); + id_map } fn find_path( @@ -889,8 +886,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); let crate2 = graph.add_crate_root( FileId::from_raw(2u32), @@ -902,8 +897,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); let crate3 = graph.add_crate_root( FileId::from_raw(3u32), @@ -915,8 +908,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); assert!(graph .add_dep( @@ -951,8 +942,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); let crate2 = graph.add_crate_root( FileId::from_raw(2u32), @@ -964,8 +953,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); assert!(graph .add_dep( @@ -994,8 +981,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); let crate2 = graph.add_crate_root( FileId::from_raw(2u32), @@ -1007,8 +992,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); let crate3 = graph.add_crate_root( FileId::from_raw(3u32), @@ -1020,8 +1003,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); assert!(graph .add_dep( @@ -1050,8 +1031,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); let crate2 = graph.add_crate_root( FileId::from_raw(2u32), 
@@ -1063,8 +1042,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); assert!(graph .add_dep( diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs index d7fc9d4c95cd..cb2e6cdaa28d 100644 --- a/crates/base-db/src/lib.rs +++ b/crates/base-db/src/lib.rs @@ -62,6 +62,20 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug { /// The crate graph. #[salsa::input] fn crate_graph(&self) -> Arc; + + // FIXME: Consider removing this, making HirDatabase::target_data_layout an input query + #[salsa::input] + fn data_layout(&self, krate: CrateId) -> TargetLayoutLoadResult; + + #[salsa::input] + fn toolchain(&self, krate: CrateId) -> Option; + + #[salsa::transparent] + fn toolchain_channel(&self, krate: CrateId) -> Option; +} + +fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option { + db.toolchain(krate).as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre)) } fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse { diff --git a/crates/hir-expand/src/change.rs b/crates/hir-expand/src/change.rs index 67b7df198e93..c6611438e64d 100644 --- a/crates/hir-expand/src/change.rs +++ b/crates/hir-expand/src/change.rs @@ -1,6 +1,10 @@ //! Defines a unit of change that can applied to the database to get the next //! state. Changes are transactional. 
-use base_db::{salsa::Durability, CrateGraph, FileChange, SourceDatabaseExt, SourceRoot}; +use base_db::{ + salsa::Durability, CrateGraph, CrateId, FileChange, SourceDatabaseExt, SourceRoot, + TargetLayoutLoadResult, Version, +}; +use la_arena::RawIdx; use span::FileId; use triomphe::Arc; @@ -10,6 +14,8 @@ use crate::{db::ExpandDatabase, proc_macro::ProcMacros}; pub struct Change { pub source_change: FileChange, pub proc_macros: Option, + pub toolchains: Option>>, + pub target_data_layouts: Option>, } impl Change { @@ -22,6 +28,24 @@ impl Change { if let Some(proc_macros) = self.proc_macros { db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH); } + if let Some(target_data_layouts) = self.target_data_layouts { + for (id, val) in target_data_layouts.into_iter().enumerate() { + db.set_data_layout_with_durability( + CrateId::from_raw(RawIdx::from(id as u32)), + val, + Durability::HIGH, + ); + } + } + if let Some(toolchains) = self.toolchains { + for (id, val) in toolchains.into_iter().enumerate() { + db.set_toolchain_with_durability( + CrateId::from_raw(RawIdx::from(id as u32)), + val, + Durability::HIGH, + ); + } + } } pub fn change_file(&mut self, file_id: FileId, new_text: Option>) { @@ -36,6 +60,14 @@ impl Change { self.proc_macros = Some(proc_macros); } + pub fn set_toolchains(&mut self, toolchains: Vec>) { + self.toolchains = Some(toolchains); + } + + pub fn set_target_data_layouts(&mut self, target_data_layouts: Vec) { + self.target_data_layouts = Some(target_data_layouts); + } + pub fn set_roots(&mut self, roots: Vec) { self.source_change.set_roots(roots) } diff --git a/crates/hir-expand/src/declarative.rs b/crates/hir-expand/src/declarative.rs index 345e5cee266b..6874336cd2d0 100644 --- a/crates/hir-expand/src/declarative.rs +++ b/crates/hir-expand/src/declarative.rs @@ -31,7 +31,7 @@ impl DeclarativeMacroExpander { call_id: MacroCallId, ) -> ExpandResult { let loc = db.lookup_intern_macro_call(call_id); - let toolchain = 
&db.crate_graph()[loc.def.krate].toolchain; + let toolchain = db.toolchain(loc.def.krate); let new_meta_vars = toolchain.as_ref().map_or(false, |version| { REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( &base_db::Version { @@ -67,7 +67,7 @@ impl DeclarativeMacroExpander { krate: CrateId, call_site: Span, ) -> ExpandResult { - let toolchain = &db.crate_graph()[krate].toolchain; + let toolchain = db.toolchain(krate); let new_meta_vars = toolchain.as_ref().map_or(false, |version| { REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( &base_db::Version { @@ -119,7 +119,7 @@ impl DeclarativeMacroExpander { _ => None, } }; - let toolchain = crate_data.toolchain.as_ref(); + let toolchain = db.toolchain(def_crate); let new_meta_vars = toolchain.as_ref().map_or(false, |version| { REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( &base_db::Version { diff --git a/crates/hir-ty/src/layout/target.rs b/crates/hir-ty/src/layout/target.rs index 5bfe7bf010f1..9b1424548c2a 100644 --- a/crates/hir-ty/src/layout/target.rs +++ b/crates/hir-ty/src/layout/target.rs @@ -11,10 +11,8 @@ pub fn target_data_layout_query( db: &dyn HirDatabase, krate: CrateId, ) -> Result, Arc> { - let crate_graph = db.crate_graph(); - let res = crate_graph[krate].target_layout.as_deref(); - match res { - Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(it) { + match db.data_layout(krate) { + Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(&it) { Ok(it) => Ok(Arc::new(it)), Err(e) => { Err(match e { @@ -44,6 +42,6 @@ pub fn target_data_layout_query( }.into()) } }, - Err(e) => Err(Arc::from(&**e)), + Err(e) => Err(e), } } diff --git a/crates/hir-ty/src/layout/tests.rs b/crates/hir-ty/src/layout/tests.rs index ff53f1448610..6c1eccb75e63 100644 --- a/crates/hir-ty/src/layout/tests.rs +++ b/crates/hir-ty/src/layout/tests.rs @@ -27,7 +27,7 @@ fn current_machine_data_layout() -> String { fn 
eval_goal(ra_fixture: &str, minicore: &str) -> Result, LayoutError> { let target_data_layout = current_machine_data_layout(); let ra_fixture = format!( - "{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\n{ra_fixture}", + "//- target_data_layout: {target_data_layout}\n{minicore}//- /main.rs crate:test\n{ra_fixture}", ); let (db, file_ids) = TestDB::with_many_files(&ra_fixture); @@ -76,7 +76,7 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result, LayoutErro fn eval_expr(ra_fixture: &str, minicore: &str) -> Result, LayoutError> { let target_data_layout = current_machine_data_layout(); let ra_fixture = format!( - "{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\nfn main(){{let goal = {{{ra_fixture}}};}}", + "//- target_data_layout: {target_data_layout}\n{minicore}//- /main.rs crate:test\nfn main(){{let goal = {{{ra_fixture}}};}}", ); let (db, file_id) = TestDB::with_single_file(&ra_fixture); diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs index 314f84f5df16..aa22155feffe 100644 --- a/crates/ide-completion/src/context.rs +++ b/crates/ide-completion/src/context.rs @@ -717,7 +717,7 @@ impl<'a> CompletionContext<'a> { let krate = scope.krate(); let module = scope.module(); - let toolchain = db.crate_graph()[krate.into()].channel(); + let toolchain = db.toolchain_channel(krate.into()); // `toolchain == None` means we're in some detached files. Since we have no information on // the toolchain being used, let's just allow unstable items to be listed. 
let is_nightly = matches!(toolchain, Some(base_db::ReleaseChannel::Nightly) | None); diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs index dbe6a5507cc3..18821bd78bfa 100644 --- a/crates/ide/src/doc_links.rs +++ b/crates/ide/src/doc_links.rs @@ -501,7 +501,7 @@ fn get_doc_base_urls( let Some(krate) = def.krate(db) else { return Default::default() }; let Some(display_name) = krate.display_name(db) else { return Default::default() }; let crate_data = &db.crate_graph()[krate.into()]; - let channel = crate_data.channel().unwrap_or(ReleaseChannel::Nightly).as_str(); + let channel = db.toolchain_channel(krate.into()).unwrap_or(ReleaseChannel::Nightly).as_str(); let (web_base, local_base) = match &crate_data.origin { // std and co do not specify `html_root_url` any longer so we gotta handwrite this ourself. diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index 609cfac52755..1409d17cfeac 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -253,11 +253,11 @@ impl Analysis { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("Analysis::from_single_file has no target layout".into()), - None, ); change.change_file(file_id, Some(Arc::from(text))); change.set_crate_graph(crate_graph); + change.set_target_data_layouts(vec![Err("fixture has no layout".into())]); + change.set_toolchains(vec![None]); host.apply_change(change); (host.analysis(), file_id) } diff --git a/crates/ide/src/shuffle_crate_graph.rs b/crates/ide/src/shuffle_crate_graph.rs index bf6ad47a4952..453d1836e16e 100644 --- a/crates/ide/src/shuffle_crate_graph.rs +++ b/crates/ide/src/shuffle_crate_graph.rs @@ -39,8 +39,6 @@ pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) { data.env.clone(), data.is_proc_macro, data.origin.clone(), - data.target_layout.clone(), - data.toolchain.clone(), ); new_proc_macros.insert(new_id, proc_macros[&old_id].clone()); map.insert(old_id, new_id); diff --git a/crates/ide/src/status.rs 
b/crates/ide/src/status.rs index 3321a0513b6f..c3d85e38936d 100644 --- a/crates/ide/src/status.rs +++ b/crates/ide/src/status.rs @@ -72,8 +72,6 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option) -> String { dependencies, origin, is_proc_macro, - target_layout, - toolchain, } = &crate_graph[crate_id]; format_to!( buf, @@ -91,12 +89,6 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option) -> String { format_to!(buf, " Env: {:?}\n", env); format_to!(buf, " Origin: {:?}\n", origin); format_to!(buf, " Is a proc macro crate: {}\n", is_proc_macro); - format_to!(buf, " Workspace Target Layout: {:?}\n", target_layout); - format_to!( - buf, - " Workspace Toolchain: {}\n", - toolchain.as_ref().map_or_else(|| "n/a".into(), |v| v.to_string()) - ); let deps = dependencies .iter() .map(|dep| format!("{}={}", dep.name, dep.crate_id.into_raw())) diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs index 6958a4be17ba..8c5592da63ec 100644 --- a/crates/load-cargo/src/lib.rs +++ b/crates/load-cargo/src/lib.rs @@ -2,7 +2,7 @@ //! for incorporating changes. // Note, don't remove any public api from this. This API is consumed by external tools // to run rust-analyzer as a library. 
-use std::{collections::hash_map::Entry, mem, path::Path, sync}; +use std::{collections::hash_map::Entry, iter, mem, path::Path, sync}; use crossbeam_channel::{unbounded, Receiver}; use hir_expand::proc_macro::{ @@ -106,7 +106,7 @@ pub fn load_workspace( .collect() }; - let project_folders = ProjectFolders::new(&[ws], &[]); + let project_folders = ProjectFolders::new(std::slice::from_ref(&ws), &[]); loader.set_config(vfs::loader::Config { load: project_folders.load, watch: vec![], @@ -114,6 +114,7 @@ pub fn load_workspace( }); let host = load_crate_graph( + &ws, crate_graph, proc_macros, project_folders.source_root_config, @@ -301,6 +302,7 @@ pub fn load_proc_macro( } fn load_crate_graph( + ws: &ProjectWorkspace, crate_graph: CrateGraph, proc_macros: ProcMacros, source_root_config: SourceRootConfig, @@ -339,8 +341,17 @@ fn load_crate_graph( let source_roots = source_root_config.partition(vfs); analysis_change.set_roots(source_roots); + let num_crates = crate_graph.len(); analysis_change.set_crate_graph(crate_graph); analysis_change.set_proc_macros(proc_macros); + if let ProjectWorkspace::Cargo { toolchain, target_layout, .. } + | ProjectWorkspace::Json { toolchain, target_layout, .. 
} = ws + { + analysis_change.set_target_data_layouts( + iter::repeat(target_layout.clone()).take(num_crates).collect(), + ); + analysis_change.set_toolchains(iter::repeat(toolchain.clone()).take(num_crates).collect()); + } host.apply_change(analysis_change); host diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs index b50750c61492..017b055d729a 100644 --- a/crates/project-model/src/tests.rs +++ b/crates/project-model/src/tests.rs @@ -9,6 +9,7 @@ use expect_test::{expect_file, ExpectFile}; use paths::{AbsPath, AbsPathBuf}; use rustc_hash::FxHashMap; use serde::de::DeserializeOwned; +use triomphe::Arc; use crate::{ CargoWorkspace, CfgOverrides, ProjectJson, ProjectJsonData, ProjectWorkspace, Sysroot, @@ -76,7 +77,7 @@ fn load_rust_project(file: &str) -> (CrateGraph, ProcMacroPaths) { sysroot, rustc_cfg: Vec::new(), toolchain: None, - target_layout: Err("test has no data layout".to_owned()), + target_layout: Err(Arc::from("test has no data layout")), }; to_crate_graph(project_workspace) } @@ -237,7 +238,7 @@ fn crate_graph_dedup_identical() { let (d_crate_graph, mut d_proc_macros) = (crate_graph.clone(), proc_macros.clone()); - crate_graph.extend(d_crate_graph.clone(), &mut d_proc_macros, |_| ()); + crate_graph.extend(d_crate_graph.clone(), &mut d_proc_macros, |_, _| true); assert!(crate_graph.iter().eq(d_crate_graph.iter())); assert_eq!(proc_macros, d_proc_macros); } @@ -253,7 +254,7 @@ fn crate_graph_dedup() { load_cargo_with_fake_sysroot(path_map, "regex-metadata.json"); assert_eq!(regex_crate_graph.iter().count(), 60); - crate_graph.extend(regex_crate_graph, &mut regex_proc_macros, |_| ()); + crate_graph.extend(regex_crate_graph, &mut regex_proc_macros, |_, _| true); assert_eq!(crate_graph.iter().count(), 118); } @@ -266,7 +267,7 @@ fn test_deduplicate_origin_dev() { let (crate_graph_1, mut _proc_macros_2) = load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_B.json"); - crate_graph.extend(crate_graph_1, &mut 
_proc_macros_2, |_| ()); + crate_graph.extend(crate_graph_1, &mut _proc_macros_2, |_, _| true); let mut crates_named_p2 = vec![]; for id in crate_graph.iter() { @@ -292,7 +293,7 @@ fn test_deduplicate_origin_dev_rev() { let (crate_graph_1, mut _proc_macros_2) = load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_A.json"); - crate_graph.extend(crate_graph_1, &mut _proc_macros_2, |_| ()); + crate_graph.extend(crate_graph_1, &mut _proc_macros_2, |_, _| true); let mut crates_named_p2 = vec![]; for id in crate_graph.iter() { diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index 8e709d7f0756..9d3d01688060 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -71,7 +71,7 @@ pub enum ProjectWorkspace { rustc_cfg: Vec, cfg_overrides: CfgOverrides, toolchain: Option, - target_layout: Result, + target_layout: TargetLayoutLoadResult, cargo_config_extra_env: FxHashMap, }, /// Project workspace was manually specified using a `rust-project.json` file. @@ -82,7 +82,7 @@ pub enum ProjectWorkspace { /// `rustc --print cfg`. rustc_cfg: Vec, toolchain: Option, - target_layout: Result, + target_layout: TargetLayoutLoadResult, }, // FIXME: The primary limitation of this approach is that the set of detached files needs to be fixed at the beginning. // That's not the end user experience we should strive for. 
@@ -335,7 +335,9 @@ impl ProjectWorkspace { rustc_cfg, cfg_overrides, toolchain, - target_layout: data_layout.map_err(|it| it.to_string()), + target_layout: data_layout + .map(Arc::from) + .map_err(|it| Arc::from(it.to_string())), cargo_config_extra_env, } } @@ -393,7 +395,7 @@ impl ProjectWorkspace { sysroot, rustc_cfg, toolchain, - target_layout: data_layout.map_err(|it| it.to_string()), + target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())), } } @@ -690,20 +692,19 @@ impl ProjectWorkspace { let _p = tracing::span!(tracing::Level::INFO, "ProjectWorkspace::to_crate_graph").entered(); let (mut crate_graph, proc_macros) = match self { - ProjectWorkspace::Json { project, sysroot, rustc_cfg, toolchain, target_layout } => { - project_json_to_crate_graph( - rustc_cfg.clone(), - load, - project, - sysroot.as_ref().ok(), - extra_env, - match target_layout.as_ref() { - Ok(it) => Ok(Arc::from(it.as_str())), - Err(it) => Err(Arc::from(it.as_str())), - }, - toolchain.clone(), - ) - } + ProjectWorkspace::Json { + project, + sysroot, + rustc_cfg, + toolchain: _, + target_layout: _, + } => project_json_to_crate_graph( + rustc_cfg.clone(), + load, + project, + sysroot.as_ref().ok(), + extra_env, + ), ProjectWorkspace::Cargo { cargo, sysroot, @@ -711,8 +712,8 @@ impl ProjectWorkspace { rustc_cfg, cfg_overrides, build_scripts, - toolchain, - target_layout, + toolchain: _, + target_layout: _, cargo_config_extra_env: _, } => cargo_to_crate_graph( load, @@ -722,20 +723,9 @@ impl ProjectWorkspace { rustc_cfg.clone(), cfg_overrides, build_scripts, - match target_layout.as_ref() { - Ok(it) => Ok(Arc::from(it.as_str())), - Err(it) => Err(Arc::from(it.as_str())), - }, - toolchain.as_ref(), ), ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => { - detached_files_to_crate_graph( - rustc_cfg.clone(), - load, - files, - sysroot.as_ref().ok(), - Err("detached file projects have no target layout set".into()), - ) + 
detached_files_to_crate_graph(rustc_cfg.clone(), load, files, sysroot.as_ref().ok()) } }; if crate_graph.patch_cfg_if() { @@ -818,21 +808,12 @@ fn project_json_to_crate_graph( project: &ProjectJson, sysroot: Option<&Sysroot>, extra_env: &FxHashMap, - target_layout: TargetLayoutLoadResult, - toolchain: Option, ) -> (CrateGraph, ProcMacroPaths) { let mut res = (CrateGraph::default(), ProcMacroPaths::default()); let (crate_graph, proc_macros) = &mut res; - let sysroot_deps = sysroot.as_ref().map(|sysroot| { - sysroot_to_crate_graph( - crate_graph, - sysroot, - rustc_cfg.clone(), - target_layout.clone(), - load, - toolchain.as_ref(), - ) - }); + let sysroot_deps = sysroot + .as_ref() + .map(|sysroot| sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load)); let r_a_cfg_flag = CfgFlag::Atom("rust_analyzer".to_owned()); let mut cfg_cache: FxHashMap<&str, Vec> = FxHashMap::default(); @@ -887,8 +868,6 @@ fn project_json_to_crate_graph( } else { CrateOrigin::Local { repo: None, name: None } }, - target_layout.clone(), - toolchain.clone(), ); if *is_proc_macro { if let Some(path) = proc_macro_dylib_path.clone() { @@ -931,22 +910,13 @@ fn cargo_to_crate_graph( rustc_cfg: Vec, override_cfg: &CfgOverrides, build_scripts: &WorkspaceBuildScripts, - target_layout: TargetLayoutLoadResult, - toolchain: Option<&Version>, ) -> (CrateGraph, ProcMacroPaths) { let _p = tracing::span!(tracing::Level::INFO, "cargo_to_crate_graph").entered(); let mut res = (CrateGraph::default(), ProcMacroPaths::default()); let crate_graph = &mut res.0; let proc_macros = &mut res.1; let (public_deps, libproc_macro) = match sysroot { - Some(sysroot) => sysroot_to_crate_graph( - crate_graph, - sysroot, - rustc_cfg.clone(), - target_layout.clone(), - load, - toolchain, - ), + Some(sysroot) => sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load), None => (SysrootPublicDeps::default(), None), }; @@ -1012,9 +982,7 @@ fn cargo_to_crate_graph( file_id, name, kind, - 
target_layout.clone(), false, - toolchain.cloned(), ); if let TargetKind::Lib { .. } = kind { lib_tgt = Some((crate_id, name.clone())); @@ -1106,8 +1074,6 @@ fn cargo_to_crate_graph( } else { rustc_build_scripts }, - target_layout, - toolchain, ); } } @@ -1119,19 +1085,11 @@ fn detached_files_to_crate_graph( load: &mut dyn FnMut(&AbsPath) -> Option, detached_files: &[AbsPathBuf], sysroot: Option<&Sysroot>, - target_layout: TargetLayoutLoadResult, ) -> (CrateGraph, ProcMacroPaths) { let _p = tracing::span!(tracing::Level::INFO, "detached_files_to_crate_graph").entered(); let mut crate_graph = CrateGraph::default(); let (public_deps, _libproc_macro) = match sysroot { - Some(sysroot) => sysroot_to_crate_graph( - &mut crate_graph, - sysroot, - rustc_cfg.clone(), - target_layout.clone(), - load, - None, - ), + Some(sysroot) => sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load), None => (SysrootPublicDeps::default(), None), }; @@ -1163,8 +1121,6 @@ fn detached_files_to_crate_graph( repo: None, name: display_name.map(|n| n.canonical_name().to_owned()), }, - target_layout.clone(), - None, ); public_deps.add_to_crate_graph(&mut crate_graph, detached_file_crate); @@ -1185,8 +1141,6 @@ fn handle_rustc_crates( cfg_options: &CfgOptions, override_cfg: &CfgOverrides, build_scripts: &WorkspaceBuildScripts, - target_layout: TargetLayoutLoadResult, - toolchain: Option<&Version>, ) { let mut rustc_pkg_crates = FxHashMap::default(); // The root package of the rustc-dev component is rustc_driver, so we match that @@ -1239,9 +1193,7 @@ fn handle_rustc_crates( file_id, &rustc_workspace[tgt].name, kind, - target_layout.clone(), true, - toolchain.cloned(), ); pkg_to_lib_crate.insert(pkg, crate_id); // Add dependencies on core / std / alloc for this crate @@ -1310,9 +1262,7 @@ fn add_target_crate_root( file_id: FileId, cargo_name: &str, kind: TargetKind, - target_layout: TargetLayoutLoadResult, rustc_crate: bool, - toolchain: Option, ) -> CrateId { let edition = 
pkg.edition; let potential_cfg_options = if pkg.features.is_empty() { @@ -1367,8 +1317,6 @@ fn add_target_crate_root( } else { CrateOrigin::Library { repo: pkg.repository.clone(), name: pkg.name.clone() } }, - target_layout, - toolchain, ); if let TargetKind::Lib { is_proc_macro: true } = kind { let proc_macro = match build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) { @@ -1408,9 +1356,7 @@ fn sysroot_to_crate_graph( crate_graph: &mut CrateGraph, sysroot: &Sysroot, rustc_cfg: Vec, - target_layout: TargetLayoutLoadResult, load: &mut dyn FnMut(&AbsPath) -> Option, - toolchain: Option<&Version>, ) -> (SysrootPublicDeps, Option) { let _p = tracing::span!(tracing::Level::INFO, "sysroot_to_crate_graph").entered(); match sysroot.mode() { @@ -1423,8 +1369,6 @@ fn sysroot_to_crate_graph( rustc_cfg, &CfgOverrides::default(), &WorkspaceBuildScripts::default(), - target_layout, - toolchain, ); let mut pub_deps = vec![]; @@ -1467,17 +1411,16 @@ fn sysroot_to_crate_graph( // Remove all crates except the ones we are interested in to keep the sysroot graph small. let removed_mapping = cg.remove_crates_except(&marker_set); + let mapping = crate_graph.extend(cg, &mut pm, |_, _| true); - crate_graph.extend(cg, &mut pm, |mapping| { - // Map the id through the removal mapping first, then through the crate graph extension mapping. - pub_deps.iter_mut().for_each(|(_, cid, _)| { - *cid = mapping[&removed_mapping[cid.into_raw().into_u32() as usize].unwrap()] - }); - if let Some(libproc_macro) = &mut libproc_macro { - *libproc_macro = mapping - [&removed_mapping[libproc_macro.into_raw().into_u32() as usize].unwrap()]; - } + // Map the id through the removal mapping first, then through the crate graph extension mapping. 
+ pub_deps.iter_mut().for_each(|(_, cid, _)| { + *cid = mapping[&removed_mapping[cid.into_raw().into_u32() as usize].unwrap()] }); + if let Some(libproc_macro) = &mut libproc_macro { + *libproc_macro = mapping + [&removed_mapping[libproc_macro.into_raw().into_u32() as usize].unwrap()]; + } (SysrootPublicDeps { deps: pub_deps }, libproc_macro) } @@ -1501,8 +1444,6 @@ fn sysroot_to_crate_graph( env, false, CrateOrigin::Lang(LangCrateOrigin::from(&*stitched[krate].name)), - target_layout.clone(), - toolchain.cloned(), ); Some((krate, crate_id)) }) diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model.txt index d8d9e559e5c1..4f7169a10eba 100644 --- a/crates/project-model/test_data/output/cargo_hello_world_project_model.txt +++ b/crates/project-model/test_data/output/cargo_hello_world_project_model.txt @@ -59,10 +59,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 1: CrateData { root_file_id: FileId( @@ -132,10 +128,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 2: CrateData { root_file_id: FileId( @@ -205,10 +197,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 3: CrateData { root_file_id: FileId( @@ -278,10 +266,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 4: CrateData { root_file_id: FileId( @@ -347,9 +331,5 @@ name: "libc", }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, } \ No newline at end of file diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt index 
d8d9e559e5c1..4f7169a10eba 100644 --- a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt +++ b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt @@ -59,10 +59,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 1: CrateData { root_file_id: FileId( @@ -132,10 +128,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 2: CrateData { root_file_id: FileId( @@ -205,10 +197,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 3: CrateData { root_file_id: FileId( @@ -278,10 +266,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 4: CrateData { root_file_id: FileId( @@ -347,9 +331,5 @@ name: "libc", }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, } \ No newline at end of file diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt index e0ba5ed498fa..5c9a2f019c5e 100644 --- a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt +++ b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt @@ -58,10 +58,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 1: CrateData { root_file_id: FileId( @@ -130,10 +126,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 2: CrateData { root_file_id: FileId( @@ -202,10 +194,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - 
), - toolchain: None, }, 3: CrateData { root_file_id: FileId( @@ -274,10 +262,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 4: CrateData { root_file_id: FileId( @@ -343,9 +327,5 @@ name: "libc", }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, } \ No newline at end of file diff --git a/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt b/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt index 0489c4213b45..b6043f7e2d26 100644 --- a/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt +++ b/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt @@ -36,10 +36,6 @@ Alloc, ), is_proc_macro: false, - target_layout: Err( - "test has no data layout", - ), - toolchain: None, }, 1: CrateData { root_file_id: FileId( @@ -69,10 +65,6 @@ Core, ), is_proc_macro: false, - target_layout: Err( - "test has no data layout", - ), - toolchain: None, }, 2: CrateData { root_file_id: FileId( @@ -102,10 +94,6 @@ Other, ), is_proc_macro: false, - target_layout: Err( - "test has no data layout", - ), - toolchain: None, }, 3: CrateData { root_file_id: FileId( @@ -135,10 +123,6 @@ Other, ), is_proc_macro: false, - target_layout: Err( - "test has no data layout", - ), - toolchain: None, }, 4: CrateData { root_file_id: FileId( @@ -185,10 +169,6 @@ ProcMacro, ), is_proc_macro: false, - target_layout: Err( - "test has no data layout", - ), - toolchain: None, }, 5: CrateData { root_file_id: FileId( @@ -218,10 +198,6 @@ Other, ), is_proc_macro: false, - target_layout: Err( - "test has no data layout", - ), - toolchain: None, }, 6: CrateData { root_file_id: FileId( @@ -316,10 +292,6 @@ Std, ), is_proc_macro: false, - target_layout: Err( - "test has no data layout", - ), - toolchain: None, }, 7: CrateData { root_file_id: FileId( @@ -349,10 +321,6 @@ Other, ), 
is_proc_macro: false, - target_layout: Err( - "test has no data layout", - ), - toolchain: None, }, 8: CrateData { root_file_id: FileId( @@ -382,10 +350,6 @@ Test, ), is_proc_macro: false, - target_layout: Err( - "test has no data layout", - ), - toolchain: None, }, 9: CrateData { root_file_id: FileId( @@ -415,10 +379,6 @@ Other, ), is_proc_macro: false, - target_layout: Err( - "test has no data layout", - ), - toolchain: None, }, 10: CrateData { root_file_id: FileId( @@ -492,9 +452,5 @@ ), }, is_proc_macro: false, - target_layout: Err( - "test has no data layout", - ), - toolchain: None, }, } \ No newline at end of file diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 98eb547c1562..cfdb5c1861c3 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -524,8 +524,8 @@ impl GlobalState { } fn recreate_crate_graph(&mut self, cause: String) { - // Create crate graph from all the workspaces - let (crate_graph, proc_macro_paths, crate_graph_file_dependencies) = { + { + // Create crate graph from all the workspaces let vfs = &mut self.vfs.write().0; let loader = &mut self.loader; // crate graph construction relies on these paths, record them so when one of them gets @@ -548,31 +548,71 @@ impl GlobalState { }; let mut crate_graph = CrateGraph::default(); - let mut proc_macros = Vec::default(); + let mut proc_macro_paths = Vec::default(); + let mut layouts = Vec::default(); + let mut toolchains = Vec::default(); + let e = Err(Arc::from("missing layout")); for ws in &**self.workspaces { let (other, mut crate_proc_macros) = ws.to_crate_graph(&mut load, self.config.extra_env()); - crate_graph.extend(other, &mut crate_proc_macros, |_| {}); - proc_macros.push(crate_proc_macros); + let num_layouts = layouts.len(); + let num_toolchains = toolchains.len(); + let (toolchain, layout) = match ws { + ProjectWorkspace::Cargo { toolchain, target_layout, .. 
} + | ProjectWorkspace::Json { toolchain, target_layout, .. } => { + (toolchain.clone(), target_layout.clone()) + } + ProjectWorkspace::DetachedFiles { .. } => { + (None, Err("detached files have no layout".into())) + } + }; + + let mapping = crate_graph.extend( + other, + &mut crate_proc_macros, + |(cg_id, _cg_data), (_o_id, _o_data)| { + // if the newly created crate graph's layout is equal to the crate of the merged graph, then + // we can merge the crates. + layouts[cg_id.into_raw().into_u32() as usize] == layout + && toolchains[cg_id.into_raw().into_u32() as usize] == toolchain + }, + ); + // Populate the side tables for the newly merged crates + mapping.values().for_each(|val| { + let idx = val.into_raw().into_u32() as usize; + // we only need to consider crates that were not merged and remapped, as the + // ones that were remapped already have the correct layout and toolchain + if idx >= num_layouts { + if layouts.len() <= idx { + layouts.resize(idx + 1, e.clone()); + } + layouts[idx] = layout.clone(); + } + if idx >= num_toolchains { + if toolchains.len() <= idx { + toolchains.resize(idx + 1, None); + } + toolchains[idx] = toolchain.clone(); + } + }); + proc_macro_paths.push(crate_proc_macros); } - (crate_graph, proc_macros, crate_graph_file_dependencies) - }; - let mut change = Change::new(); - if self.config.expand_proc_macros() { - change.set_proc_macros( - crate_graph - .iter() - .map(|id| (id, Err("Proc-macros have not been built yet".to_owned()))) - .collect(), - ); - self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths); + let mut change = Change::new(); + if self.config.expand_proc_macros() { + change.set_proc_macros( + crate_graph + .iter() + .map(|id| (id, Err("Proc-macros have not been built yet".to_owned()))) + .collect(), + ); + self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths); + } + change.set_crate_graph(crate_graph); + self.analysis_host.apply_change(change); + self.crate_graph_file_dependencies = 
crate_graph_file_dependencies; } - change.set_crate_graph(crate_graph); - self.analysis_host.apply_change(change); - self.crate_graph_file_dependencies = crate_graph_file_dependencies; self.process_changes(); - self.reload_flycheck(); } diff --git a/crates/rust-analyzer/tests/slow-tests/support.rs b/crates/rust-analyzer/tests/slow-tests/support.rs index d02cb45b8e35..392a71702070 100644 --- a/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/crates/rust-analyzer/tests/slow-tests/support.rs @@ -101,8 +101,13 @@ impl Project<'_> { }; }); - let FixtureWithProjectMeta { fixture, mini_core, proc_macro_names, toolchain } = - FixtureWithProjectMeta::parse(self.fixture); + let FixtureWithProjectMeta { + fixture, + mini_core, + proc_macro_names, + toolchain, + target_data_layout: _, + } = FixtureWithProjectMeta::parse(self.fixture); assert!(proc_macro_names.is_empty()); assert!(mini_core.is_none()); assert!(toolchain.is_none()); diff --git a/crates/test-fixture/src/lib.rs b/crates/test-fixture/src/lib.rs index f966013b9780..abf5a6f3bebd 100644 --- a/crates/test-fixture/src/lib.rs +++ b/crates/test-fixture/src/lib.rs @@ -1,5 +1,5 @@ //! A set of high-level utility fixture methods to use in tests. 
-use std::{mem, ops::Not, str::FromStr, sync}; +use std::{iter, mem, ops::Not, str::FromStr, sync}; use base_db::{ CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, DependencyKind, @@ -118,8 +118,14 @@ impl ChangeFixture { ra_fixture: &str, mut proc_macro_defs: Vec<(String, ProcMacro)>, ) -> ChangeFixture { - let FixtureWithProjectMeta { fixture, mini_core, proc_macro_names, toolchain } = - FixtureWithProjectMeta::parse(ra_fixture); + let FixtureWithProjectMeta { + fixture, + mini_core, + proc_macro_names, + toolchain, + target_data_layout, + } = FixtureWithProjectMeta::parse(ra_fixture); + let target_data_layout = Ok(target_data_layout.into()); let toolchain = Some({ let channel = toolchain.as_deref().unwrap_or("stable"); Version::parse(&format!("1.76.0-{channel}")).unwrap() @@ -131,7 +137,6 @@ impl ChangeFixture { let mut crates = FxHashMap::default(); let mut crate_deps = Vec::new(); let mut default_crate_root: Option = None; - let mut default_target_data_layout: Option = None; let mut default_cfg = CfgOptions::default(); let mut default_env = Env::new_for_test_fixture(); @@ -187,11 +192,6 @@ impl ChangeFixture { meta.env, false, origin, - meta.target_data_layout - .as_deref() - .map(From::from) - .ok_or_else(|| "target_data_layout unset".into()), - toolchain.clone(), ); let prev = crates.insert(crate_name.clone(), crate_id); assert!(prev.is_none(), "multiple crates with same name: {}", crate_name); @@ -205,7 +205,6 @@ impl ChangeFixture { default_crate_root = Some(file_id); default_cfg.extend(meta.cfg.into_iter()); default_env.extend(meta.env.iter().map(|(x, y)| (x.to_owned(), y.to_owned()))); - default_target_data_layout = meta.target_data_layout; } source_change.change_file(file_id, Some(text.into())); @@ -228,10 +227,6 @@ impl ChangeFixture { default_env, false, CrateOrigin::Local { repo: None, name: None }, - default_target_data_layout - .map(|it| it.into()) - .ok_or_else(|| "target_data_layout unset".into()), - toolchain.clone(), 
); } else { for (from, to, prelude) in crate_deps { @@ -250,10 +245,6 @@ impl ChangeFixture { .unwrap(); } } - let target_layout = crate_graph.iter().next().map_or_else( - || Err("target_data_layout unset".into()), - |it| crate_graph[it].target_layout.clone(), - ); if let Some(mini_core) = mini_core { let core_file = file_id; @@ -277,8 +268,6 @@ impl ChangeFixture { Env::new_for_test_fixture(), false, CrateOrigin::Lang(LangCrateOrigin::Core), - target_layout.clone(), - toolchain.clone(), ); for krate in all_crates { @@ -322,8 +311,6 @@ impl ChangeFixture { Env::new_for_test_fixture(), true, CrateOrigin::Local { repo: None, name: None }, - target_layout, - toolchain, ); proc_macros.insert(proc_macros_crate, Ok(proc_macro)); @@ -346,17 +333,20 @@ impl ChangeFixture { SourceRootKind::Library => SourceRoot::new_library(mem::take(&mut file_set)), }; roots.push(root); - source_change.set_roots(roots); - source_change.set_crate_graph(crate_graph); - ChangeFixture { - file_position, - files, - change: Change { - source_change, - proc_macros: proc_macros.is_empty().not().then_some(proc_macros), - }, - } + let mut change = Change { + source_change, + proc_macros: proc_macros.is_empty().not().then_some(proc_macros), + toolchains: Some(iter::repeat(toolchain).take(crate_graph.len()).collect()), + target_data_layouts: Some( + iter::repeat(target_data_layout).take(crate_graph.len()).collect(), + ), + }; + + change.source_change.set_roots(roots); + change.source_change.set_crate_graph(crate_graph); + + ChangeFixture { file_position, files, change } } } @@ -475,7 +465,6 @@ struct FileMeta { edition: Edition, env: Env, introduce_new_source_root: Option, - target_data_layout: Option, } impl FileMeta { @@ -507,7 +496,6 @@ impl FileMeta { edition: f.edition.map_or(Edition::CURRENT, |v| Edition::from_str(&v).unwrap()), env: f.env.into_iter().collect(), introduce_new_source_root, - target_data_layout: f.target_data_layout, } } } diff --git a/crates/test-utils/src/fixture.rs 
b/crates/test-utils/src/fixture.rs index 595281336d58..7e34c3618995 100644 --- a/crates/test-utils/src/fixture.rs +++ b/crates/test-utils/src/fixture.rs @@ -126,11 +126,6 @@ pub struct Fixture { /// /// Syntax: `library` pub library: bool, - /// Specifies LLVM data layout to be used. - /// - /// You probably don't want to manually specify this. See LLVM manual for the - /// syntax, if you must: https://llvm.org/docs/LangRef.html#data-layout - pub target_data_layout: Option, /// Actual file contents. All meta comments are stripped. pub text: String, } @@ -145,6 +140,11 @@ pub struct FixtureWithProjectMeta { pub mini_core: Option, pub proc_macro_names: Vec, pub toolchain: Option, + /// Specifies LLVM data layout to be used. + /// + /// You probably don't want to manually specify this. See LLVM manual for the + /// syntax, if you must: https://llvm.org/docs/LangRef.html#data-layout + pub target_data_layout: String, } impl FixtureWithProjectMeta { @@ -172,6 +172,8 @@ impl FixtureWithProjectMeta { let fixture = trim_indent(ra_fixture); let mut fixture = fixture.as_str(); let mut toolchain = None; + let mut target_data_layout = + "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128".to_owned(); let mut mini_core = None; let mut res: Vec = Vec::new(); let mut proc_macro_names = vec![]; @@ -182,6 +184,12 @@ impl FixtureWithProjectMeta { fixture = remain; } + if let Some(meta) = fixture.strip_prefix("//- target_data_layout:") { + let (meta, remain) = meta.split_once('\n').unwrap(); + target_data_layout = meta.trim().to_owned(); + fixture = remain; + } + if let Some(meta) = fixture.strip_prefix("//- proc_macros:") { let (meta, remain) = meta.split_once('\n').unwrap(); proc_macro_names = meta.split(',').map(|it| it.trim().to_owned()).collect(); @@ -225,7 +233,7 @@ impl FixtureWithProjectMeta { } } - Self { fixture: res, mini_core, proc_macro_names, toolchain } + Self { fixture: res, mini_core, proc_macro_names, toolchain, target_data_layout } } //- /lib.rs 
crate:foo deps:bar,baz cfg:foo=a,bar=b env:OUTDIR=path/to,OTHER=foo @@ -245,9 +253,6 @@ impl FixtureWithProjectMeta { let mut env = FxHashMap::default(); let mut introduce_new_source_root = None; let mut library = false; - let mut target_data_layout = Some( - "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128".to_owned(), - ); for component in components { if component == "library" { library = true; @@ -284,7 +289,6 @@ impl FixtureWithProjectMeta { } } "new_source_root" => introduce_new_source_root = Some(value.to_owned()), - "target_data_layout" => target_data_layout = Some(value.to_owned()), _ => panic!("bad component: {component:?}"), } } @@ -307,7 +311,6 @@ impl FixtureWithProjectMeta { env, introduce_new_source_root, library, - target_data_layout, } } } @@ -476,16 +479,21 @@ fn parse_fixture_checks_further_indented_metadata() { #[test] fn parse_fixture_gets_full_meta() { - let FixtureWithProjectMeta { fixture: parsed, mini_core, proc_macro_names, toolchain } = - FixtureWithProjectMeta::parse( - r#" + let FixtureWithProjectMeta { + fixture: parsed, + mini_core, + proc_macro_names, + toolchain, + target_data_layout: _, + } = FixtureWithProjectMeta::parse( + r#" //- toolchain: nightly //- proc_macros: identity //- minicore: coerce_unsized //- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b,atom env:OUTDIR=path/to,OTHER=foo mod m; "#, - ); + ); assert_eq!(toolchain, Some("nightly".to_owned())); assert_eq!(proc_macro_names, vec!["identity".to_owned()]); assert_eq!(mini_core.unwrap().activated_flags, vec!["coerce_unsized".to_owned()]); From 0ccb3b873118bd617ceca06ee3864aa6266b7e5e Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 16 Feb 2024 15:47:25 +0100 Subject: [PATCH 088/127] Move dedup-dev-deps tests into rust-analyzer crate --- crates/base-db/src/input.rs | 53 ++---- crates/project-model/src/tests.rs | 56 +------ crates/project-model/src/workspace.rs | 2 +- crates/rust-analyzer/src/lib.rs | 4 +- crates/rust-analyzer/src/reload.rs | 152 
++++++++++++------ crates/rust-analyzer/tests/crate_graph.rs | 118 ++++++++++++++ .../deduplication_crate_graph_A.json | 0 .../deduplication_crate_graph_B.json | 0 8 files changed, 233 insertions(+), 152 deletions(-) create mode 100644 crates/rust-analyzer/tests/crate_graph.rs rename crates/{project-model => rust-analyzer/tests}/test_data/deduplication_crate_graph_A.json (100%) rename crates/{project-model => rust-analyzer/tests}/test_data/deduplication_crate_graph_B.json (100%) diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index 3a3effae036f..f0a4b9bf8557 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -295,7 +295,7 @@ pub struct CrateData { impl CrateData { /// Check if [`other`] is almost equal to [`self`] ignoring `CrateOrigin` value. - fn eq_ignoring_origin_and_deps(&self, other: &CrateData, ignore_dev_deps: bool) -> bool { + pub fn eq_ignoring_origin_and_deps(&self, other: &CrateData, ignore_dev_deps: bool) -> bool { // This method has some obscure bits. These are mostly there to be compliant with // some patches. References to the patches are given. 
if self.root_file_id != other.root_file_id { @@ -622,8 +622,9 @@ impl CrateGraph { &mut self, mut other: CrateGraph, proc_macros: &mut ProcMacroPaths, - may_merge: impl Fn((CrateId, &CrateData), (CrateId, &CrateData)) -> bool, + merge: impl Fn((CrateId, &mut CrateData), (CrateId, &CrateData)) -> bool, ) -> FxHashMap { + let m = self.len(); let topo = other.crates_in_topological_order(); let mut id_map: FxHashMap = FxHashMap::default(); for topo in topo { @@ -631,48 +632,14 @@ impl CrateGraph { crate_data.dependencies.iter_mut().for_each(|dep| dep.crate_id = id_map[&dep.crate_id]); crate_data.dependencies.sort_by_key(|dep| dep.crate_id); - let res = self.arena.iter().find_map(|(id, data)| { - if !may_merge((id, &data), (topo, &crate_data)) { - return None; - } + let res = self + .arena + .iter_mut() + .take(m) + .find_map(|(id, data)| merge((id, data), (topo, &crate_data)).then_some(id)); - match (&data.origin, &crate_data.origin) { - (a, b) if a == b => { - if data.eq_ignoring_origin_and_deps(crate_data, false) { - return Some((id, false)); - } - } - (a @ CrateOrigin::Local { .. }, CrateOrigin::Library { .. }) - | (a @ CrateOrigin::Library { .. }, CrateOrigin::Local { .. }) => { - // If the origins differ, check if the two crates are equal without - // considering the dev dependencies, if they are, they most likely are in - // different loaded workspaces which may cause issues. We keep the local - // version and discard the library one as the local version may have - // dev-dependencies that we want to keep resolving. See #15656 for more - // information. 
- if data.eq_ignoring_origin_and_deps(crate_data, true) { - return Some((id, !a.is_local())); - } - } - (_, _) => return None, - } - - None - }); - - let new_id = if let Some((res, should_update_lib_to_local)) = res { - if should_update_lib_to_local { - assert!(self.arena[res].origin.is_lib()); - assert!(crate_data.origin.is_local()); - self.arena[res].origin = crate_data.origin.clone(); - - // Move local's dev dependencies into the newly-local-formerly-lib crate. - self.arena[res].dependencies = crate_data.dependencies.clone(); - } - res - } else { - self.arena.alloc(crate_data.clone()) - }; + let new_id = + if let Some(res) = res { res } else { self.arena.alloc(crate_data.clone()) }; id_map.insert(topo, new_id); } diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs index 017b055d729a..b9b1b701f6d4 100644 --- a/crates/project-model/src/tests.rs +++ b/crates/project-model/src/tests.rs @@ -238,7 +238,7 @@ fn crate_graph_dedup_identical() { let (d_crate_graph, mut d_proc_macros) = (crate_graph.clone(), proc_macros.clone()); - crate_graph.extend(d_crate_graph.clone(), &mut d_proc_macros, |_, _| true); + crate_graph.extend(d_crate_graph.clone(), &mut d_proc_macros, |(_, a), (_, b)| a == b); assert!(crate_graph.iter().eq(d_crate_graph.iter())); assert_eq!(proc_macros, d_proc_macros); } @@ -254,62 +254,10 @@ fn crate_graph_dedup() { load_cargo_with_fake_sysroot(path_map, "regex-metadata.json"); assert_eq!(regex_crate_graph.iter().count(), 60); - crate_graph.extend(regex_crate_graph, &mut regex_proc_macros, |_, _| true); + crate_graph.extend(regex_crate_graph, &mut regex_proc_macros, |(_, a), (_, b)| a == b); assert_eq!(crate_graph.iter().count(), 118); } -#[test] -fn test_deduplicate_origin_dev() { - let path_map = &mut Default::default(); - let (mut crate_graph, _proc_macros) = - load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_A.json"); - crate_graph.sort_deps(); - let (crate_graph_1, mut _proc_macros_2) = - 
load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_B.json"); - - crate_graph.extend(crate_graph_1, &mut _proc_macros_2, |_, _| true); - - let mut crates_named_p2 = vec![]; - for id in crate_graph.iter() { - let krate = &crate_graph[id]; - if let Some(name) = krate.display_name.as_ref() { - if name.to_string() == "p2" { - crates_named_p2.push(krate); - } - } - } - - assert!(crates_named_p2.len() == 1); - let p2 = crates_named_p2[0]; - assert!(p2.origin.is_local()); -} - -#[test] -fn test_deduplicate_origin_dev_rev() { - let path_map = &mut Default::default(); - let (mut crate_graph, _proc_macros) = - load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_B.json"); - crate_graph.sort_deps(); - let (crate_graph_1, mut _proc_macros_2) = - load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_A.json"); - - crate_graph.extend(crate_graph_1, &mut _proc_macros_2, |_, _| true); - - let mut crates_named_p2 = vec![]; - for id in crate_graph.iter() { - let krate = &crate_graph[id]; - if let Some(name) = krate.display_name.as_ref() { - if name.to_string() == "p2" { - crates_named_p2.push(krate); - } - } - } - - assert!(crates_named_p2.len() == 1); - let p2 = crates_named_p2[0]; - assert!(p2.origin.is_local()); -} - #[test] fn smoke_test_real_sysroot_cargo() { if std::env::var("SYSROOT_CARGO_METADATA").is_err() { diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index 9d3d01688060..fcebca80b3e2 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -1411,7 +1411,7 @@ fn sysroot_to_crate_graph( // Remove all crates except the ones we are interested in to keep the sysroot graph small. 
let removed_mapping = cg.remove_crates_except(&marker_set); - let mapping = crate_graph.extend(cg, &mut pm, |_, _| true); + let mapping = crate_graph.extend(cg, &mut pm, |(_, a), (_, b)| a == b); // Map the id through the removal mapping first, then through the crate graph extension mapping. pub_deps.iter_mut().for_each(|(_, cid, _)| { diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs index b1809f58ae70..473ca991ad9b 100644 --- a/crates/rust-analyzer/src/lib.rs +++ b/crates/rust-analyzer/src/lib.rs @@ -47,7 +47,9 @@ mod integrated_benchmarks; use serde::de::DeserializeOwned; -pub use crate::{caps::server_capabilities, main_loop::main_loop, version::version}; +pub use crate::{ + caps::server_capabilities, main_loop::main_loop, reload::ws_to_crate_graph, version::version, +}; pub fn from_json( what: &'static str, diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index cfdb5c1861c3..8a9ee1777283 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -17,8 +17,9 @@ use std::{iter, mem}; use flycheck::{FlycheckConfig, FlycheckHandle}; use hir::{db::DefDatabase, Change, ProcMacros}; +use ide::CrateId; use ide_db::{ - base_db::{salsa::Durability, CrateGraph, ProcMacroPaths}, + base_db::{salsa::Durability, CrateGraph, CrateOrigin, ProcMacroPaths, Version}, FxHashMap, }; use itertools::Itertools; @@ -28,7 +29,7 @@ use project_model::{ProjectWorkspace, WorkspaceBuildScripts}; use rustc_hash::FxHashSet; use stdx::{format_to, thread::ThreadIntent}; use triomphe::Arc; -use vfs::{AbsPath, ChangeKind}; +use vfs::{AbsPath, AbsPathBuf, ChangeKind}; use crate::{ config::{Config, FilesWatcher, LinkedProject}, @@ -532,7 +533,7 @@ impl GlobalState { // deleted or created we trigger a reconstruction of the crate graph let mut crate_graph_file_dependencies = FxHashSet::default(); - let mut load = |path: &AbsPath| { + let load = |path: &AbsPath| { let _p = 
tracing::span!(tracing::Level::DEBUG, "switch_workspaces::load").entered(); let vfs_path = vfs::VfsPath::from(path.to_path_buf()); crate_graph_file_dependencies.insert(vfs_path.clone()); @@ -547,56 +548,8 @@ impl GlobalState { } }; - let mut crate_graph = CrateGraph::default(); - let mut proc_macro_paths = Vec::default(); - let mut layouts = Vec::default(); - let mut toolchains = Vec::default(); - let e = Err(Arc::from("missing layout")); - for ws in &**self.workspaces { - let (other, mut crate_proc_macros) = - ws.to_crate_graph(&mut load, self.config.extra_env()); - let num_layouts = layouts.len(); - let num_toolchains = toolchains.len(); - let (toolchain, layout) = match ws { - ProjectWorkspace::Cargo { toolchain, target_layout, .. } - | ProjectWorkspace::Json { toolchain, target_layout, .. } => { - (toolchain.clone(), target_layout.clone()) - } - ProjectWorkspace::DetachedFiles { .. } => { - (None, Err("detached files have no layout".into())) - } - }; - - let mapping = crate_graph.extend( - other, - &mut crate_proc_macros, - |(cg_id, _cg_data), (_o_id, _o_data)| { - // if the newly created crate graph's layout is equal to the crate of the merged graph, then - // we can merge the crates. 
- layouts[cg_id.into_raw().into_u32() as usize] == layout - && toolchains[cg_id.into_raw().into_u32() as usize] == toolchain - }, - ); - // Populate the side tables for the newly merged crates - mapping.values().for_each(|val| { - let idx = val.into_raw().into_u32() as usize; - // we only need to consider crates that were not merged and remapped, as the - // ones that were remapped already have the correct layout and toolchain - if idx >= num_layouts { - if layouts.len() <= idx { - layouts.resize(idx + 1, e.clone()); - } - layouts[idx] = layout.clone(); - } - if idx >= num_toolchains { - if toolchains.len() <= idx { - toolchains.resize(idx + 1, None); - } - toolchains[idx] = toolchain.clone(); - } - }); - proc_macro_paths.push(crate_proc_macros); - } + let (crate_graph, proc_macro_paths, layouts, toolchains) = + ws_to_crate_graph(&self.workspaces, self.config.extra_env(), load); let mut change = Change::new(); if self.config.expand_proc_macros() { @@ -609,6 +562,8 @@ impl GlobalState { self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths); } change.set_crate_graph(crate_graph); + change.set_target_data_layouts(layouts); + change.set_toolchains(toolchains); self.analysis_host.apply_change(change); self.crate_graph_file_dependencies = crate_graph_file_dependencies; } @@ -719,6 +674,97 @@ impl GlobalState { } } +// FIXME: Move this into load-cargo? 
+pub fn ws_to_crate_graph( + workspaces: &[ProjectWorkspace], + extra_env: &FxHashMap, + mut load: impl FnMut(&AbsPath) -> Option, +) -> ( + CrateGraph, + Vec, AbsPathBuf), String>>>, + Vec, Arc>>, + Vec>, +) { + let mut crate_graph = CrateGraph::default(); + let mut proc_macro_paths = Vec::default(); + let mut layouts = Vec::default(); + let mut toolchains = Vec::default(); + let e = Err(Arc::from("missing layout")); + for ws in workspaces { + let (other, mut crate_proc_macros) = ws.to_crate_graph(&mut load, extra_env); + let num_layouts = layouts.len(); + let num_toolchains = toolchains.len(); + let (toolchain, layout) = match ws { + ProjectWorkspace::Cargo { toolchain, target_layout, .. } + | ProjectWorkspace::Json { toolchain, target_layout, .. } => { + (toolchain.clone(), target_layout.clone()) + } + ProjectWorkspace::DetachedFiles { .. } => { + (None, Err("detached files have no layout".into())) + } + }; + + let mapping = crate_graph.extend( + other, + &mut crate_proc_macros, + |(cg_id, _cg_data), (_o_id, _o_data)| { + // if the newly created crate graph's layout is equal to the crate of the merged graph, then + // we can merge the crates. + let id = cg_id.into_raw().into_u32() as usize; + if layouts[id] == layout && toolchains[id] == toolchain { + let (res, update) = match (&_cg_data.origin, &_o_data.origin) { + (a, b) + if a == b && _cg_data.eq_ignoring_origin_and_deps(_o_data, false) => + { + (true, false) + } + (a @ CrateOrigin::Local { .. }, CrateOrigin::Library { .. }) + | (a @ CrateOrigin::Library { .. }, CrateOrigin::Local { .. }) + if _cg_data.eq_ignoring_origin_and_deps(_o_data, true) => + { + // If the origins differ, check if the two crates are equal without + // considering the dev dependencies, if they are, they most likely are in + // different loaded workspaces which may cause issues. We keep the local + // version and discard the library one as the local version may have + // dev-dependencies that we want to keep resolving. 
See #15656 for more + // information. + (true, !a.is_local()) + } + (_, _) => (false, false), + }; + if res && update { + _cg_data.origin = _o_data.origin.clone(); + _cg_data.dependencies = _o_data.dependencies.clone(); + } + res + } else { + false + } + }, + ); + // Populate the side tables for the newly merged crates + mapping.values().for_each(|val| { + let idx = val.into_raw().into_u32() as usize; + // we only need to consider crates that were not merged and remapped, as the + // ones that were remapped already have the correct layout and toolchain + if idx >= num_layouts { + if layouts.len() <= idx { + layouts.resize(idx + 1, e.clone()); + } + layouts[idx] = layout.clone(); + } + if idx >= num_toolchains { + if toolchains.len() <= idx { + toolchains.resize(idx + 1, None); + } + toolchains[idx] = toolchain.clone(); + } + }); + proc_macro_paths.push(crate_proc_macros); + } + (crate_graph, proc_macro_paths, layouts, toolchains) +} + pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool { const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"]; const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"]; diff --git a/crates/rust-analyzer/tests/crate_graph.rs b/crates/rust-analyzer/tests/crate_graph.rs new file mode 100644 index 000000000000..efd42fadf7e9 --- /dev/null +++ b/crates/rust-analyzer/tests/crate_graph.rs @@ -0,0 +1,118 @@ +use std::path::PathBuf; + +use project_model::{CargoWorkspace, ProjectWorkspace, Sysroot, WorkspaceBuildScripts}; +use rust_analyzer::ws_to_crate_graph; +use rustc_hash::FxHashMap; +use serde::de::DeserializeOwned; +use vfs::{AbsPathBuf, FileId}; + +fn load_cargo_with_fake_sysroot(file: &str) -> ProjectWorkspace { + let meta = get_test_json_file(file); + let cargo_workspace = CargoWorkspace::new(meta); + ProjectWorkspace::Cargo { + cargo: cargo_workspace, + build_scripts: WorkspaceBuildScripts::default(), + sysroot: Ok(get_fake_sysroot()), + rustc: 
Err(None), + rustc_cfg: Vec::new(), + cfg_overrides: Default::default(), + toolchain: None, + target_layout: Err("target_data_layout not loaded".into()), + cargo_config_extra_env: Default::default(), + } +} + +fn get_test_json_file(file: &str) -> T { + let base = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + let file = base.join("tests/test_data").join(file); + let data = std::fs::read_to_string(file).unwrap(); + let mut json = data.parse::().unwrap(); + fixup_paths(&mut json); + return serde_json::from_value(json).unwrap(); + + fn fixup_paths(val: &mut serde_json::Value) { + match val { + serde_json::Value::String(s) => replace_root(s, true), + serde_json::Value::Array(vals) => vals.iter_mut().for_each(fixup_paths), + serde_json::Value::Object(kvals) => kvals.values_mut().for_each(fixup_paths), + serde_json::Value::Null | serde_json::Value::Bool(_) | serde_json::Value::Number(_) => { + } + } + } +} + +fn replace_root(s: &mut String, direction: bool) { + if direction { + let root = if cfg!(windows) { r#"C:\\ROOT\"# } else { "/ROOT/" }; + *s = s.replace("$ROOT$", root) + } else { + let root = if cfg!(windows) { r#"C:\\\\ROOT\\"# } else { "/ROOT/" }; + *s = s.replace(root, "$ROOT$") + } +} + +fn get_fake_sysroot_path() -> PathBuf { + let base = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + base.join("../project-model/test_data/fake-sysroot") +} + +fn get_fake_sysroot() -> Sysroot { + let sysroot_path = get_fake_sysroot_path(); + // there's no `libexec/` directory with a `proc-macro-srv` binary in that + // fake sysroot, so we give them both the same path: + let sysroot_dir = AbsPathBuf::assert(sysroot_path); + let sysroot_src_dir = sysroot_dir.clone(); + Sysroot::load(sysroot_dir, Some(Ok(sysroot_src_dir)), false) +} + +#[test] +fn test_deduplicate_origin_dev() { + let path_map = &mut FxHashMap::default(); + let ws = load_cargo_with_fake_sysroot("deduplication_crate_graph_A.json"); + let ws2 = load_cargo_with_fake_sysroot("deduplication_crate_graph_B.json"); + + let 
(crate_graph, ..) = ws_to_crate_graph(&[ws, ws2], &Default::default(), |path| { + let len = path_map.len(); + Some(*path_map.entry(path.to_path_buf()).or_insert(FileId::from_raw(len as u32))) + }); + + let mut crates_named_p2 = vec![]; + for id in crate_graph.iter() { + let krate = &crate_graph[id]; + if let Some(name) = krate.display_name.as_ref() { + if name.to_string() == "p2" { + crates_named_p2.push(krate); + } + } + } + + assert!(crates_named_p2.len() == 1); + let p2 = crates_named_p2[0]; + assert!(p2.origin.is_local()); +} + +#[test] +fn test_deduplicate_origin_dev_rev() { + let path_map = &mut FxHashMap::default(); + let ws = load_cargo_with_fake_sysroot("deduplication_crate_graph_B.json"); + let ws2 = load_cargo_with_fake_sysroot("deduplication_crate_graph_A.json"); + + let (crate_graph, ..) = ws_to_crate_graph(&[ws, ws2], &Default::default(), |path| { + let len = path_map.len(); + Some(*path_map.entry(path.to_path_buf()).or_insert(FileId::from_raw(len as u32))) + }); + + let mut crates_named_p2 = vec![]; + for id in crate_graph.iter() { + let krate = &crate_graph[id]; + if let Some(name) = krate.display_name.as_ref() { + if name.to_string() == "p2" { + crates_named_p2.push(krate); + } + } + } + + assert!(crates_named_p2.len() == 1); + let p2 = crates_named_p2[0]; + assert!(p2.origin.is_local()); +} diff --git a/crates/project-model/test_data/deduplication_crate_graph_A.json b/crates/rust-analyzer/tests/test_data/deduplication_crate_graph_A.json similarity index 100% rename from crates/project-model/test_data/deduplication_crate_graph_A.json rename to crates/rust-analyzer/tests/test_data/deduplication_crate_graph_A.json diff --git a/crates/project-model/test_data/deduplication_crate_graph_B.json b/crates/rust-analyzer/tests/test_data/deduplication_crate_graph_B.json similarity index 100% rename from crates/project-model/test_data/deduplication_crate_graph_B.json rename to crates/rust-analyzer/tests/test_data/deduplication_crate_graph_B.json From 
c8fd8a33d221739327393fde85a72f8adf9de99e Mon Sep 17 00:00:00 2001 From: LucasFA <23667494+LucasFA@users.noreply.github.com> Date: Fri, 16 Feb 2024 12:28:16 +0100 Subject: [PATCH 089/127] Update GitHub actions dependencies Use newer versions of actions, Node 16 -> 18 Fixes several warnings in the actions tab regarding usage of Node 16 --- .github/workflows/ci.yaml | 4 ++-- .github/workflows/metrics.yaml | 10 +++++----- .github/workflows/release.yaml | 6 +++--- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 6ea9b6e253b7..62fbd57abc16 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -165,9 +165,9 @@ jobs: if: needs.changes.outputs.typescript == 'true' - name: Install Nodejs - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 with: - node-version: 16 + node-version: 18 if: needs.changes.outputs.typescript == 'true' - name: Install xvfb diff --git a/.github/workflows/metrics.yaml b/.github/workflows/metrics.yaml index ed775ae2d35b..be9f504e5996 100644 --- a/.github/workflows/metrics.yaml +++ b/.github/workflows/metrics.yaml @@ -21,7 +21,7 @@ jobs: rustup component add rustfmt rust-src rustup default stable - name: Cache cargo - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.cargo/bin/ @@ -39,7 +39,7 @@ jobs: uses: actions/checkout@v4 - name: Restore cargo cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.cargo/bin/ @@ -52,7 +52,7 @@ jobs: run: cargo xtask metrics build - name: Cache target - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: target/ key: ${{ runner.os }}-target-${{ github.sha }} @@ -76,7 +76,7 @@ jobs: uses: actions/checkout@v4 - name: Restore cargo cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.cargo/bin/ @@ -86,7 +86,7 @@ jobs: key: ${{ runner.os }}-cargo-${{ github.sha }} - name: Restore target cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: 
target/ key: ${{ runner.os }}-target-${{ github.sha }} diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index f22e40181ba1..adb1c8505161 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -80,7 +80,7 @@ jobs: - name: Install Node.js uses: actions/setup-node@v4 with: - node-version: 16 + node-version: 18 - name: Update apt repositories if: matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'arm-unknown-linux-gnueabihf' @@ -188,9 +188,9 @@ jobs: needs: ["dist", "dist-x86_64-unknown-linux-musl"] steps: - name: Install Nodejs - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 with: - node-version: 16 + node-version: 18 - run: echo "TAG=$(date --iso -u)" >> $GITHUB_ENV if: github.ref == 'refs/heads/release' From ead369117a061c6a8afd308878cd98d1b8f4b2d3 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 16 Feb 2024 16:28:17 +0100 Subject: [PATCH 090/127] CrateOrigin::Local means local to the project workspace, not cargo workspace --- crates/base-db/src/input.rs | 131 +++--------------- crates/ide-db/src/rename.rs | 5 +- .../src/handlers/incorrect_case.rs | 2 +- crates/ide/src/lib.rs | 3 +- crates/ide/src/rename.rs | 41 ++---- crates/project-model/src/project_json.rs | 3 +- crates/project-model/src/workspace.rs | 99 +++++-------- .../cargo_hello_world_project_model.txt | 7 - ...project_model_with_selective_overrides.txt | 7 - ..._project_model_with_wildcard_overrides.txt | 7 - ...rust_project_hello_world_project_model.txt | 16 --- crates/rust-analyzer/src/config.rs | 7 - crates/rust-analyzer/src/handlers/request.rs | 6 +- crates/rust-analyzer/src/reload.rs | 34 +---- crates/test-fixture/src/lib.rs | 27 +--- docs/user/generated_config.adoc | 5 - editors/code/package.json | 5 - 17 files changed, 73 insertions(+), 332 deletions(-) diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index f0a4b9bf8557..a817cd0c3ac2 100644 --- a/crates/base-db/src/input.rs +++ 
b/crates/base-db/src/input.rs @@ -293,62 +293,6 @@ pub struct CrateData { pub is_proc_macro: bool, } -impl CrateData { - /// Check if [`other`] is almost equal to [`self`] ignoring `CrateOrigin` value. - pub fn eq_ignoring_origin_and_deps(&self, other: &CrateData, ignore_dev_deps: bool) -> bool { - // This method has some obscure bits. These are mostly there to be compliant with - // some patches. References to the patches are given. - if self.root_file_id != other.root_file_id { - return false; - } - - if self.display_name != other.display_name { - return false; - } - - if self.is_proc_macro != other.is_proc_macro { - return false; - } - - if self.edition != other.edition { - return false; - } - - if self.version != other.version { - return false; - } - - let mut opts = self.cfg_options.difference(&other.cfg_options); - if let Some(it) = opts.next() { - // Don't care if rust_analyzer CfgAtom is the only cfg in the difference set of self's and other's cfgs. - // https://github.com/rust-lang/rust-analyzer/blob/0840038f02daec6ba3238f05d8caa037d28701a0/crates/project-model/src/workspace.rs#L894 - if it.to_string() != "rust_analyzer" { - return false; - } - - if opts.next().is_some() { - return false; - } - } - - if self.env != other.env { - return false; - } - - let slf_deps = self.dependencies.iter(); - let other_deps = other.dependencies.iter(); - - if ignore_dev_deps { - return slf_deps - .clone() - .filter(|it| it.kind != DependencyKind::Dev) - .eq(other_deps.clone().filter(|it| it.kind != DependencyKind::Dev)); - } - - slf_deps.eq(other_deps) - } -} - #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum Edition { Edition2015, @@ -389,32 +333,22 @@ pub enum DependencyKind { pub struct Dependency { pub crate_id: CrateId, pub name: CrateName, - kind: DependencyKind, prelude: bool, } impl Dependency { - pub fn new(name: CrateName, crate_id: CrateId, kind: DependencyKind) -> Self { - Self { name, crate_id, prelude: true, kind } + pub fn 
new(name: CrateName, crate_id: CrateId) -> Self { + Self { name, crate_id, prelude: true } } - pub fn with_prelude( - name: CrateName, - crate_id: CrateId, - prelude: bool, - kind: DependencyKind, - ) -> Self { - Self { name, crate_id, prelude, kind } + pub fn with_prelude(name: CrateName, crate_id: CrateId, prelude: bool) -> Self { + Self { name, crate_id, prelude } } /// Whether this dependency is to be added to the depending crate's extern prelude. pub fn is_prelude(&self) -> bool { self.prelude } - - pub fn kind(&self) -> DependencyKind { - self.kind - } } impl CrateGraph { @@ -684,11 +618,9 @@ impl CrateGraph { match (cfg_if, std) { (Some(cfg_if), Some(std)) => { self.arena[cfg_if].dependencies.clear(); - self.arena[std].dependencies.push(Dependency::new( - CrateName::new("cfg_if").unwrap(), - cfg_if, - DependencyKind::Normal, - )); + self.arena[std] + .dependencies + .push(Dependency::new(CrateName::new("cfg_if").unwrap(), cfg_if)); true } _ => false, @@ -836,7 +768,7 @@ impl fmt::Display for CyclicDependenciesError { #[cfg(test)] mod tests { - use crate::{CrateOrigin, DependencyKind}; + use crate::CrateOrigin; use super::{CrateGraph, CrateName, Dependency, Edition::Edition2018, Env, FileId}; @@ -877,22 +809,13 @@ mod tests { CrateOrigin::Local { repo: None, name: None }, ); assert!(graph - .add_dep( - crate1, - Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal) - ) + .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,)) .is_ok()); assert!(graph - .add_dep( - crate2, - Dependency::new(CrateName::new("crate3").unwrap(), crate3, DependencyKind::Normal) - ) + .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3,)) .is_ok()); assert!(graph - .add_dep( - crate3, - Dependency::new(CrateName::new("crate1").unwrap(), crate1, DependencyKind::Normal) - ) + .add_dep(crate3, Dependency::new(CrateName::new("crate1").unwrap(), crate1,)) .is_err()); } @@ -922,16 +845,10 @@ mod tests { 
CrateOrigin::Local { repo: None, name: None }, ); assert!(graph - .add_dep( - crate1, - Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal) - ) + .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,)) .is_ok()); assert!(graph - .add_dep( - crate2, - Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal) - ) + .add_dep(crate2, Dependency::new(CrateName::new("crate2").unwrap(), crate2,)) .is_err()); } @@ -972,16 +889,10 @@ mod tests { CrateOrigin::Local { repo: None, name: None }, ); assert!(graph - .add_dep( - crate1, - Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal) - ) + .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,)) .is_ok()); assert!(graph - .add_dep( - crate2, - Dependency::new(CrateName::new("crate3").unwrap(), crate3, DependencyKind::Normal) - ) + .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3,)) .is_ok()); } @@ -1013,20 +924,12 @@ mod tests { assert!(graph .add_dep( crate1, - Dependency::new( - CrateName::normalize_dashes("crate-name-with-dashes"), - crate2, - DependencyKind::Normal - ) + Dependency::new(CrateName::normalize_dashes("crate-name-with-dashes"), crate2,) ) .is_ok()); assert_eq!( graph[crate1].dependencies, - vec![Dependency::new( - CrateName::new("crate_name_with_dashes").unwrap(), - crate2, - DependencyKind::Normal - )] + vec![Dependency::new(CrateName::new("crate_name_with_dashes").unwrap(), crate2,)] ); } } diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs index 032b8338ab85..6a7042988a9c 100644 --- a/crates/ide-db/src/rename.rs +++ b/crates/ide-db/src/rename.rs @@ -71,7 +71,6 @@ impl Definition { &self, sema: &Semantics<'_, RootDatabase>, new_name: &str, - rename_external: bool, ) -> Result { // self.krate() returns None if // self is a built-in attr, built-in type or tool module. 
@@ -80,8 +79,8 @@ impl Definition { if let Some(krate) = self.krate(sema.db) { // Can we not rename non-local items? // Then bail if non-local - if !rename_external && !krate.origin(sema.db).is_local() { - bail!("Cannot rename a non-local definition as the config for it is disabled") + if !krate.origin(sema.db).is_local() { + bail!("Cannot rename a non-local definition") } } diff --git a/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/crates/ide-diagnostics/src/handlers/incorrect_case.rs index dd64b93e4548..5e2541795ca1 100644 --- a/crates/ide-diagnostics/src/handlers/incorrect_case.rs +++ b/crates/ide-diagnostics/src/handlers/incorrect_case.rs @@ -43,7 +43,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::IncorrectCase) -> Option Cancellable> { - self.with_db(|db| rename::rename(db, position, new_name, rename_external)) + self.with_db(|db| rename::rename(db, position, new_name)) } pub fn prepare_rename( diff --git a/crates/ide/src/rename.rs b/crates/ide/src/rename.rs index 9fce4bb0f827..f2eedfa43169 100644 --- a/crates/ide/src/rename.rs +++ b/crates/ide/src/rename.rs @@ -84,7 +84,6 @@ pub(crate) fn rename( db: &RootDatabase, position: FilePosition, new_name: &str, - rename_external: bool, ) -> RenameResult { let sema = Semantics::new(db); let source_file = sema.parse(position.file_id); @@ -104,7 +103,7 @@ pub(crate) fn rename( return rename_to_self(&sema, local); } } - def.rename(&sema, new_name, rename_external) + def.rename(&sema, new_name) }) .collect(); @@ -123,9 +122,9 @@ pub(crate) fn will_rename_file( let module = sema.to_module_def(file_id)?; let def = Definition::Module(module); let mut change = if is_raw_identifier(new_name_stem) { - def.rename(&sema, &SmolStr::from_iter(["r#", new_name_stem]), true).ok()? + def.rename(&sema, &SmolStr::from_iter(["r#", new_name_stem])).ok()? } else { - def.rename(&sema, new_name_stem, true).ok()? + def.rename(&sema, new_name_stem).ok()? 
}; change.file_system_edits.clear(); Some(change) @@ -377,16 +376,11 @@ mod tests { use super::{RangeInfo, RenameError}; fn check(new_name: &str, ra_fixture_before: &str, ra_fixture_after: &str) { - check_with_rename_config(new_name, ra_fixture_before, ra_fixture_after, true); + check_with_rename_config(new_name, ra_fixture_before, ra_fixture_after); } #[track_caller] - fn check_with_rename_config( - new_name: &str, - ra_fixture_before: &str, - ra_fixture_after: &str, - rename_external: bool, - ) { + fn check_with_rename_config(new_name: &str, ra_fixture_before: &str, ra_fixture_after: &str) { let ra_fixture_after = &trim_indent(ra_fixture_after); let (analysis, position) = fixture::position(ra_fixture_before); if !ra_fixture_after.starts_with("error: ") { @@ -395,7 +389,7 @@ mod tests { } } let rename_result = analysis - .rename(position, new_name, rename_external) + .rename(position, new_name) .unwrap_or_else(|err| panic!("Rename to '{new_name}' was cancelled: {err}")); match rename_result { Ok(source_change) => { @@ -426,10 +420,8 @@ mod tests { fn check_expect(new_name: &str, ra_fixture: &str, expect: Expect) { let (analysis, position) = fixture::position(ra_fixture); - let source_change = analysis - .rename(position, new_name, true) - .unwrap() - .expect("Expect returned a RenameError"); + let source_change = + analysis.rename(position, new_name).unwrap().expect("Expect returned a RenameError"); expect.assert_eq(&filter_expect(source_change)) } @@ -2636,19 +2628,7 @@ pub struct S; //- /main.rs crate:main deps:lib new_source_root:local use lib::S$0; "#, - "error: Cannot rename a non-local definition as the config for it is disabled", - false, - ); - - check( - "Baz", - r#" -//- /lib.rs crate:lib new_source_root:library -pub struct S; -//- /main.rs crate:main deps:lib new_source_root:local -use lib::S$0; -"#, - "use lib::Baz;\n", + "error: Cannot rename a non-local definition", ); } @@ -2663,8 +2643,7 @@ use core::hash::Hash; #[derive(H$0ash)] struct A; "#, - 
"error: Cannot rename a non-local definition as the config for it is disabled", - false, + "error: Cannot rename a non-local definition", ); } diff --git a/crates/project-model/src/project_json.rs b/crates/project-model/src/project_json.rs index cf3231498f3e..fba0aaa8ce9f 100644 --- a/crates/project-model/src/project_json.rs +++ b/crates/project-model/src/project_json.rs @@ -49,7 +49,7 @@ //! user explores them belongs to that extension (it's totally valid to change //! rust-project.json over time via configuration request!) -use base_db::{CrateDisplayName, CrateId, CrateName, Dependency, DependencyKind, Edition}; +use base_db::{CrateDisplayName, CrateId, CrateName, Dependency, Edition}; use la_arena::RawIdx; use paths::{AbsPath, AbsPathBuf}; use rustc_hash::FxHashMap; @@ -135,7 +135,6 @@ impl ProjectJson { Dependency::new( dep_data.name, CrateId::from_raw(RawIdx::from(dep_data.krate as u32)), - DependencyKind::Normal, ) }) .collect::>(), diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index fcebca80b3e2..b7ae76be8cec 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -6,8 +6,8 @@ use std::{collections::VecDeque, fmt, fs, iter, process::Command, str::FromStr, use anyhow::{format_err, Context}; use base_db::{ - CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, DependencyKind, - Edition, Env, FileId, LangCrateOrigin, ProcMacroPaths, TargetLayoutLoadResult, + CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env, + FileId, LangCrateOrigin, ProcMacroPaths, TargetLayoutLoadResult, }; use cfg::{CfgAtom, CfgDiff, CfgOptions}; use paths::{AbsPath, AbsPathBuf}; @@ -894,7 +894,7 @@ fn project_json_to_crate_graph( for dep in &krate.deps { if let Some(&to) = crates.get(&dep.crate_id) { - add_dep(crate_graph, from, dep.name.clone(), to, dep.kind().to_owned()) + add_dep(crate_graph, from, dep.name.clone(), to) } } } @@ -938,8 +938,6 
@@ fn cargo_to_crate_graph( // Add test cfg for local crates if cargo[pkg].is_local { cfg_options.insert_atom("test".into()); - } - if cargo[pkg].is_member { cfg_options.insert_atom("rust_analyzer".into()); } @@ -973,16 +971,28 @@ fn cargo_to_crate_graph( let Some(file_id) = load(root) else { continue }; + let build_data = build_scripts.get_output(pkg); + let pkg_data = &cargo[pkg]; let crate_id = add_target_crate_root( crate_graph, proc_macros, - &cargo[pkg], - build_scripts.get_output(pkg), + pkg_data, + build_data, cfg_options.clone(), file_id, name, kind, - false, + if pkg_data.is_local { + CrateOrigin::Local { + repo: pkg_data.repository.clone(), + name: Some(pkg_data.name.clone()), + } + } else { + CrateOrigin::Library { + repo: pkg_data.repository.clone(), + name: pkg_data.name.clone(), + } + }, ); if let TargetKind::Lib { .. } = kind { lib_tgt = Some((crate_id, name.clone())); @@ -1016,7 +1026,7 @@ fn cargo_to_crate_graph( // cargo metadata does not do any normalization, // so we do it ourselves currently let name = CrateName::normalize_dashes(&name); - add_dep(crate_graph, from, name, to, DependencyKind::Normal); + add_dep(crate_graph, from, name, to); } } } @@ -1036,17 +1046,7 @@ fn cargo_to_crate_graph( continue; } - add_dep( - crate_graph, - from, - name.clone(), - to, - match dep.kind { - DepKind::Normal => DependencyKind::Normal, - DepKind::Dev => DependencyKind::Dev, - DepKind::Build => DependencyKind::Build, - }, - ) + add_dep(crate_graph, from, name.clone(), to) } } } @@ -1193,7 +1193,7 @@ fn handle_rustc_crates( file_id, &rustc_workspace[tgt].name, kind, - true, + CrateOrigin::Rustc { name: rustc_workspace[pkg].name.clone() }, ); pkg_to_lib_crate.insert(pkg, crate_id); // Add dependencies on core / std / alloc for this crate @@ -1213,17 +1213,7 @@ fn handle_rustc_crates( let name = CrateName::new(&dep.name).unwrap(); if let Some(&to) = pkg_to_lib_crate.get(&dep.pkg) { for &from in rustc_pkg_crates.get(&pkg).into_iter().flatten() { - add_dep( - 
crate_graph, - from, - name.clone(), - to, - match dep.kind { - DepKind::Normal => DependencyKind::Normal, - DepKind::Dev => DependencyKind::Dev, - DepKind::Build => DependencyKind::Build, - }, - ); + add_dep(crate_graph, from, name.clone(), to); } } } @@ -1245,7 +1235,7 @@ fn handle_rustc_crates( // `rust_analyzer` thinks that it should use the one from the `rustc_source` // instead of the one from `crates.io` if !crate_graph[*from].dependencies.iter().any(|d| d.name == name) { - add_dep(crate_graph, *from, name.clone(), to, DependencyKind::Normal); + add_dep(crate_graph, *from, name.clone(), to); } } } @@ -1262,7 +1252,7 @@ fn add_target_crate_root( file_id: FileId, cargo_name: &str, kind: TargetKind, - rustc_crate: bool, + origin: CrateOrigin, ) -> CrateId { let edition = pkg.edition; let potential_cfg_options = if pkg.features.is_empty() { @@ -1310,13 +1300,7 @@ fn add_target_crate_root( potential_cfg_options, env, matches!(kind, TargetKind::Lib { is_proc_macro: true }), - if rustc_crate { - CrateOrigin::Rustc { name: pkg.name.clone() } - } else if pkg.is_member { - CrateOrigin::Local { repo: pkg.repository.clone(), name: Some(pkg.name.clone()) } - } else { - CrateOrigin::Library { repo: pkg.repository.clone(), name: pkg.name.clone() } - }, + origin, ); if let TargetKind::Lib { is_proc_macro: true } = kind { let proc_macro = match build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) { @@ -1340,14 +1324,7 @@ impl SysrootPublicDeps { /// Makes `from` depend on the public sysroot crates. 
fn add_to_crate_graph(&self, crate_graph: &mut CrateGraph, from: CrateId) { for (name, krate, prelude) in &self.deps { - add_dep_with_prelude( - crate_graph, - from, - name.clone(), - *krate, - *prelude, - DependencyKind::Normal, - ); + add_dep_with_prelude(crate_graph, from, name.clone(), *krate, *prelude); } } } @@ -1455,7 +1432,7 @@ fn sysroot_to_crate_graph( if let (Some(&from), Some(&to)) = (sysroot_crates.get(&from), sysroot_crates.get(&to)) { - add_dep(crate_graph, from, name, to, DependencyKind::Normal); + add_dep(crate_graph, from, name, to); } } } @@ -1476,14 +1453,8 @@ fn sysroot_to_crate_graph( } } -fn add_dep( - graph: &mut CrateGraph, - from: CrateId, - name: CrateName, - to: CrateId, - kind: DependencyKind, -) { - add_dep_inner(graph, from, Dependency::new(name, to, kind)) +fn add_dep(graph: &mut CrateGraph, from: CrateId, name: CrateName, to: CrateId) { + add_dep_inner(graph, from, Dependency::new(name, to)) } fn add_dep_with_prelude( @@ -1492,20 +1463,12 @@ fn add_dep_with_prelude( name: CrateName, to: CrateId, prelude: bool, - kind: DependencyKind, ) { - add_dep_inner(graph, from, Dependency::with_prelude(name, to, prelude, kind)) + add_dep_inner(graph, from, Dependency::with_prelude(name, to, prelude)) } fn add_proc_macro_dep(crate_graph: &mut CrateGraph, from: CrateId, to: CrateId, prelude: bool) { - add_dep_with_prelude( - crate_graph, - from, - CrateName::new("proc_macro").unwrap(), - to, - prelude, - DependencyKind::Normal, - ); + add_dep_with_prelude(crate_graph, from, CrateName::new("proc_macro").unwrap(), to, prelude); } fn add_dep_inner(graph: &mut CrateGraph, from: CrateId, dep: Dependency) { diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model.txt index 4f7169a10eba..0ad19ca9f759 100644 --- a/crates/project-model/test_data/output/cargo_hello_world_project_model.txt +++ 
b/crates/project-model/test_data/output/cargo_hello_world_project_model.txt @@ -48,7 +48,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -109,7 +108,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -117,7 +115,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -178,7 +175,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -186,7 +182,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -247,7 +242,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -255,7 +249,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt index 4f7169a10eba..0ad19ca9f759 100644 --- a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt +++ b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt @@ -48,7 +48,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -109,7 +108,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -117,7 +115,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -178,7 +175,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -186,7 +182,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -247,7 +242,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -255,7 +249,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt 
b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt index 5c9a2f019c5e..e2334dca8757 100644 --- a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt +++ b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt @@ -47,7 +47,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -107,7 +106,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -115,7 +113,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -175,7 +172,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -183,7 +179,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -243,7 +238,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -251,7 +245,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], diff --git a/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt b/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt index b6043f7e2d26..ccaba963deda 100644 --- a/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt +++ b/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt @@ -28,7 +28,6 @@ name: CrateName( "core", ), - kind: Normal, prelude: true, }, ], @@ -153,7 +152,6 @@ name: CrateName( "std", ), - kind: Normal, prelude: true, }, Dependency { @@ -161,7 +159,6 @@ name: CrateName( "core", ), - kind: Normal, prelude: true, }, ], @@ -228,7 +225,6 @@ name: CrateName( "alloc", ), - kind: Normal, prelude: true, }, Dependency { @@ -236,7 +232,6 @@ name: CrateName( "panic_unwind", ), - kind: Normal, prelude: true, }, Dependency { @@ -244,7 +239,6 @@ name: CrateName( "panic_abort", ), - kind: Normal, prelude: true, }, Dependency { @@ -252,7 +246,6 @@ name: CrateName( "core", ), - 
kind: Normal, prelude: true, }, Dependency { @@ -260,7 +253,6 @@ name: CrateName( "profiler_builtins", ), - kind: Normal, prelude: true, }, Dependency { @@ -268,7 +260,6 @@ name: CrateName( "unwind", ), - kind: Normal, prelude: true, }, Dependency { @@ -276,7 +267,6 @@ name: CrateName( "std_detect", ), - kind: Normal, prelude: true, }, Dependency { @@ -284,7 +274,6 @@ name: CrateName( "test", ), - kind: Normal, prelude: true, }, ], @@ -409,7 +398,6 @@ name: CrateName( "core", ), - kind: Normal, prelude: true, }, Dependency { @@ -417,7 +405,6 @@ name: CrateName( "alloc", ), - kind: Normal, prelude: true, }, Dependency { @@ -425,7 +412,6 @@ name: CrateName( "std", ), - kind: Normal, prelude: true, }, Dependency { @@ -433,7 +419,6 @@ name: CrateName( "test", ), - kind: Normal, prelude: false, }, Dependency { @@ -441,7 +426,6 @@ name: CrateName( "proc_macro", ), - kind: Normal, prelude: false, }, ], diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 9bb972e24b2e..16e1a2f54490 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -511,9 +511,6 @@ config_data! { /// Exclude tests from find-all-references. references_excludeTests: bool = "false", - /// Allow renaming of items not belonging to the loaded workspaces. - rename_allowExternalItems: bool = "false", - /// Command to be executed instead of 'cargo' for runnables. runnables_command: Option = "null", @@ -1774,10 +1771,6 @@ impl Config { self.data.typing_autoClosingAngleBrackets_enable } - pub fn rename(&self) -> bool { - self.data.rename_allowExternalItems - } - // FIXME: VSCode seems to work wrong sometimes, see https://github.com/microsoft/vscode/issues/193124 // hence, distinguish it for now. 
pub fn is_visual_studio_code(&self) -> bool { diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs index 34edbe5d7421..eb9d4bf0f02d 100644 --- a/crates/rust-analyzer/src/handlers/request.rs +++ b/crates/rust-analyzer/src/handlers/request.rs @@ -1017,10 +1017,8 @@ pub(crate) fn handle_rename( let _p = tracing::span!(tracing::Level::INFO, "handle_rename").entered(); let position = from_proto::file_position(&snap, params.text_document_position)?; - let mut change = snap - .analysis - .rename(position, ¶ms.new_name, snap.config.rename())? - .map_err(to_proto::rename_error)?; + let mut change = + snap.analysis.rename(position, ¶ms.new_name)?.map_err(to_proto::rename_error)?; // this is kind of a hack to prevent double edits from happening when moving files // When a module gets renamed by renaming the mod declaration this causes the file to move diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 8a9ee1777283..5895459d1fcf 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -19,7 +19,7 @@ use flycheck::{FlycheckConfig, FlycheckHandle}; use hir::{db::DefDatabase, Change, ProcMacros}; use ide::CrateId; use ide_db::{ - base_db::{salsa::Durability, CrateGraph, CrateOrigin, ProcMacroPaths, Version}, + base_db::{salsa::Durability, CrateGraph, ProcMacroPaths, Version}, FxHashMap, }; use itertools::Itertools; @@ -707,39 +707,11 @@ pub fn ws_to_crate_graph( let mapping = crate_graph.extend( other, &mut crate_proc_macros, - |(cg_id, _cg_data), (_o_id, _o_data)| { + |(cg_id, cg_data), (_o_id, o_data)| { // if the newly created crate graph's layout is equal to the crate of the merged graph, then // we can merge the crates. 
let id = cg_id.into_raw().into_u32() as usize; - if layouts[id] == layout && toolchains[id] == toolchain { - let (res, update) = match (&_cg_data.origin, &_o_data.origin) { - (a, b) - if a == b && _cg_data.eq_ignoring_origin_and_deps(_o_data, false) => - { - (true, false) - } - (a @ CrateOrigin::Local { .. }, CrateOrigin::Library { .. }) - | (a @ CrateOrigin::Library { .. }, CrateOrigin::Local { .. }) - if _cg_data.eq_ignoring_origin_and_deps(_o_data, true) => - { - // If the origins differ, check if the two crates are equal without - // considering the dev dependencies, if they are, they most likely are in - // different loaded workspaces which may cause issues. We keep the local - // version and discard the library one as the local version may have - // dev-dependencies that we want to keep resolving. See #15656 for more - // information. - (true, !a.is_local()) - } - (_, _) => (false, false), - }; - if res && update { - _cg_data.origin = _o_data.origin.clone(); - _cg_data.dependencies = _o_data.dependencies.clone(); - } - res - } else { - false - } + layouts[id] == layout && toolchains[id] == toolchain && cg_data == o_data }, ); // Populate the side tables for the newly merged crates diff --git a/crates/test-fixture/src/lib.rs b/crates/test-fixture/src/lib.rs index abf5a6f3bebd..e118262b4edd 100644 --- a/crates/test-fixture/src/lib.rs +++ b/crates/test-fixture/src/lib.rs @@ -2,9 +2,8 @@ use std::{iter, mem, ops::Not, str::FromStr, sync}; use base_db::{ - CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, DependencyKind, - Edition, Env, FileChange, FileSet, LangCrateOrigin, SourceDatabaseExt, SourceRoot, Version, - VfsPath, + CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env, + FileChange, FileSet, LangCrateOrigin, SourceDatabaseExt, SourceRoot, Version, VfsPath, }; use cfg::CfgOptions; use hir_expand::{ @@ -235,12 +234,7 @@ impl ChangeFixture { crate_graph .add_dep( from_id, - 
Dependency::with_prelude( - CrateName::new(&to).unwrap(), - to_id, - prelude, - DependencyKind::Normal, - ), + Dependency::with_prelude(CrateName::new(&to).unwrap(), to_id, prelude), ) .unwrap(); } @@ -272,14 +266,7 @@ impl ChangeFixture { for krate in all_crates { crate_graph - .add_dep( - krate, - Dependency::new( - CrateName::new("core").unwrap(), - core_crate, - DependencyKind::Normal, - ), - ) + .add_dep(krate, Dependency::new(CrateName::new("core").unwrap(), core_crate)) .unwrap(); } } @@ -318,11 +305,7 @@ impl ChangeFixture { crate_graph .add_dep( krate, - Dependency::new( - CrateName::new("proc_macros").unwrap(), - proc_macros_crate, - DependencyKind::Normal, - ), + Dependency::new(CrateName::new("proc_macros").unwrap(), proc_macros_crate), ) .unwrap(); } diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index 9503b9c3ac15..da7654b0f644 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -803,11 +803,6 @@ Exclude imports from find-all-references. -- Exclude tests from find-all-references. -- -[[rust-analyzer.rename.allowExternalItems]]rust-analyzer.rename.allowExternalItems (default: `false`):: -+ --- -Allow renaming of items not belonging to the loaded workspaces. 
--- [[rust-analyzer.runnables.command]]rust-analyzer.runnables.command (default: `null`):: + -- diff --git a/editors/code/package.json b/editors/code/package.json index 8b33d05cc8ec..3a1df5a2f901 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -1527,11 +1527,6 @@ "default": false, "type": "boolean" }, - "rust-analyzer.rename.allowExternalItems": { - "markdownDescription": "Allow renaming of items not belonging to the loaded workspaces.", - "default": false, - "type": "boolean" - }, "rust-analyzer.runnables.command": { "markdownDescription": "Command to be executed instead of 'cargo' for runnables.", "default": null, From 3e4deab3d869787e2e9b5020e6061d23735c944c Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Fri, 16 Feb 2024 16:00:04 +0000 Subject: [PATCH 091/127] Add support for const and async trait bounds --- crates/parser/src/grammar/generic_params.rs | 10 +++++ .../inline/ok/0211_async_trait_bound.rast | 43 +++++++++++++++++++ .../inline/ok/0211_async_trait_bound.rs | 1 + .../inline/ok/0212_const_trait_bound.rast | 34 +++++++++++++++ .../inline/ok/0212_const_trait_bound.rs | 1 + 5 files changed, 89 insertions(+) create mode 100644 crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rast create mode 100644 crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rs create mode 100644 crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rast create mode 100644 crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rs diff --git a/crates/parser/src/grammar/generic_params.rs b/crates/parser/src/grammar/generic_params.rs index 3c577aa3cb49..4498daf21a3d 100644 --- a/crates/parser/src/grammar/generic_params.rs +++ b/crates/parser/src/grammar/generic_params.rs @@ -157,6 +157,16 @@ fn type_bound(p: &mut Parser<'_>) -> bool { p.bump_any(); p.expect(T![const]); } + // test const_trait_bound + // const fn foo(_: impl const Trait) {} + T![const] => { + p.bump_any(); + } + // test async_trait_bound + // 
fn async_foo(_: impl async Fn(&i32)) {} + T![async] => { + p.bump_any(); + } _ => (), } if paths::is_use_path_start(p) { diff --git a/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rast b/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rast new file mode 100644 index 000000000000..ebf758286a7c --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rast @@ -0,0 +1,43 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "async_foo" + PARAM_LIST + L_PAREN "(" + PARAM + WILDCARD_PAT + UNDERSCORE "_" + COLON ":" + WHITESPACE " " + IMPL_TRAIT_TYPE + IMPL_KW "impl" + WHITESPACE " " + TYPE_BOUND_LIST + TYPE_BOUND + ASYNC_KW "async" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Fn" + PARAM_LIST + L_PAREN "(" + PARAM + REF_TYPE + AMP "&" + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "i32" + R_PAREN ")" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rs b/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rs new file mode 100644 index 000000000000..04d44175d778 --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rs @@ -0,0 +1 @@ +fn async_foo(_: impl async Fn(&i32)) {} diff --git a/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rast b/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rast new file mode 100644 index 000000000000..646873881bcb --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rast @@ -0,0 +1,34 @@ +SOURCE_FILE + FN + CONST_KW "const" + WHITESPACE " " + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + PARAM_LIST + L_PAREN "(" + PARAM + WILDCARD_PAT + UNDERSCORE "_" + COLON ":" + WHITESPACE " " + IMPL_TRAIT_TYPE + IMPL_KW "impl" + WHITESPACE " " + TYPE_BOUND_LIST + TYPE_BOUND + CONST_KW "const" + WHITESPACE " " + 
PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Trait" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rs b/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rs new file mode 100644 index 000000000000..8eb8f84c91f4 --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rs @@ -0,0 +1 @@ +const fn foo(_: impl const Trait) {} From 36020bb51253fbe098450ddd8112cd939e1ca473 Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Fri, 16 Feb 2024 16:16:34 +0000 Subject: [PATCH 092/127] Update grammar Bounds are CONSTNESS ASYNCNESS POLARITY --- crates/syntax/rust.ungram | 2 +- crates/syntax/src/ast/generated/nodes.rs | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/crates/syntax/rust.ungram b/crates/syntax/rust.ungram index 36d677e151f6..c3d8e97c436c 100644 --- a/crates/syntax/rust.ungram +++ b/crates/syntax/rust.ungram @@ -614,7 +614,7 @@ TypeBoundList = TypeBound = Lifetime -| ('?' | '~' 'const')? Type +| ('~' 'const' | 'const')? 'async'? '?'? 
Type //************************// // Patterns // diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs index affcdc94d078..75971861aa80 100644 --- a/crates/syntax/src/ast/generated/nodes.rs +++ b/crates/syntax/src/ast/generated/nodes.rs @@ -1410,9 +1410,10 @@ pub struct TypeBound { } impl TypeBound { pub fn lifetime(&self) -> Option { support::child(&self.syntax) } - pub fn question_mark_token(&self) -> Option { support::token(&self.syntax, T![?]) } pub fn tilde_token(&self) -> Option { support::token(&self.syntax, T![~]) } pub fn const_token(&self) -> Option { support::token(&self.syntax, T![const]) } + pub fn async_token(&self) -> Option { support::token(&self.syntax, T![async]) } + pub fn question_mark_token(&self) -> Option { support::token(&self.syntax, T![?]) } pub fn ty(&self) -> Option { support::child(&self.syntax) } } From 3a917cdfcb59b952f130443ececdb6cf98a2ba3b Mon Sep 17 00:00:00 2001 From: Tshepang Mbambo Date: Wed, 14 Feb 2024 05:25:57 +0200 Subject: [PATCH 093/127] make "invalid fragment specifier" translatable --- compiler/rustc_expand/messages.ftl | 5 +++++ compiler/rustc_expand/src/errors.rs | 10 ++++++++++ compiler/rustc_expand/src/mbe/quoted.rs | 18 +++++++++--------- 3 files changed, 24 insertions(+), 9 deletions(-) diff --git a/compiler/rustc_expand/messages.ftl b/compiler/rustc_expand/messages.ftl index 3e3b48143004..5a3303327db3 100644 --- a/compiler/rustc_expand/messages.ftl +++ b/compiler/rustc_expand/messages.ftl @@ -61,6 +61,11 @@ expand_invalid_cfg_multiple_predicates = multiple `cfg` predicates are specified expand_invalid_cfg_no_parens = `cfg` is not followed by parentheses expand_invalid_cfg_no_predicate = `cfg` predicate is not specified expand_invalid_cfg_predicate_literal = `cfg` predicate key cannot be a literal + +expand_invalid_fragment_specifier = + invalid fragment specifier `{$fragment}` + .help = {$help} + expand_macro_body_stability = macros cannot have body stability attributes 
.label = invalid body stability attribute diff --git a/compiler/rustc_expand/src/errors.rs b/compiler/rustc_expand/src/errors.rs index 2584ff62e98e..929f34794662 100644 --- a/compiler/rustc_expand/src/errors.rs +++ b/compiler/rustc_expand/src/errors.rs @@ -408,3 +408,13 @@ pub struct DuplicateMatcherBinding { #[label(expand_label2)] pub prev: Span, } + +#[derive(Diagnostic)] +#[diag(expand_invalid_fragment_specifier)] +#[help] +pub struct InvalidFragmentSpecifier { + #[primary_span] + pub span: Span, + pub fragment: Ident, + pub help: String, +} diff --git a/compiler/rustc_expand/src/mbe/quoted.rs b/compiler/rustc_expand/src/mbe/quoted.rs index 4824b67d2778..0fdfa5631389 100644 --- a/compiler/rustc_expand/src/mbe/quoted.rs +++ b/compiler/rustc_expand/src/mbe/quoted.rs @@ -1,3 +1,4 @@ +use crate::errors; use crate::mbe::macro_parser::count_metavar_decls; use crate::mbe::{Delimited, KleeneOp, KleeneToken, MetaVarExpr, SequenceRepetition, TokenTree}; @@ -60,11 +61,11 @@ pub(super) fn parse( Some(&tokenstream::TokenTree::Token(Token { kind: token::Colon, span }, _)) => { match trees.next() { Some(tokenstream::TokenTree::Token(token, _)) => match token.ident() { - Some((frag, _)) => { + Some((fragment, _)) => { let span = token.span.with_lo(start_sp.lo()); let kind = - token::NonterminalKind::from_symbol(frag.name, || { + token::NonterminalKind::from_symbol(fragment.name, || { // FIXME(#85708) - once we properly decode a foreign // crate's `SyntaxContext::root`, then we can replace // this with just `span.edition()`. 
A @@ -81,14 +82,13 @@ pub(super) fn parse( }) .unwrap_or_else( || { - let msg = format!( - "invalid fragment specifier `{}`", - frag.name + sess.dcx().emit_err( + errors::InvalidFragmentSpecifier { + span, + fragment, + help: VALID_FRAGMENT_NAMES_MSG.into(), + }, ); - sess.dcx() - .struct_span_err(span, msg) - .with_help(VALID_FRAGMENT_NAMES_MSG) - .emit(); token::NonterminalKind::Ident }, ); From 5fbb1b2f4dd858f21da7ba56f9e5011b43a0af4b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Le=C3=B3n=20Orell=20Valerian=20Liehr?= Date: Thu, 15 Feb 2024 20:41:20 +0100 Subject: [PATCH 094/127] rustdoc: fix and refactor HTML rendering a bit --- src/librustdoc/html/format.rs | 454 +++++++----------- ...{bounds-in-multiple-parts.rs => bounds.rs} | 12 + .../const-generics/generic_const_exprs.rs | 2 +- 3 files changed, 186 insertions(+), 282 deletions(-) rename tests/rustdoc/{bounds-in-multiple-parts.rs => bounds.rs} (54%) diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs index 4ba1665bdc9f..bb68c84f529a 100644 --- a/src/librustdoc/html/format.rs +++ b/src/librustdoc/html/format.rs @@ -1,6 +1,6 @@ //! HTML formatting module //! -//! This module contains a large number of `fmt::Display` implementations for +//! This module contains a large number of `Display` implementations for //! various types in `rustdoc::clean`. //! //! These implementations all emit HTML. 
As an internal implementation detail, @@ -9,7 +9,7 @@ use std::borrow::Cow; use std::cell::Cell; -use std::fmt::{self, Write}; +use std::fmt::{self, Display, Write}; use std::iter::{self, once}; use rustc_ast as ast; @@ -150,16 +150,16 @@ impl Buffer { } } -pub(crate) fn comma_sep( +pub(crate) fn comma_sep( items: impl Iterator, space_after_comma: bool, -) -> impl fmt::Display { +) -> impl Display { display_fn(move |f| { for (i, item) in items.enumerate() { if i != 0 { write!(f, ",{}", if space_after_comma { " " } else { "" })?; } - fmt::Display::fmt(&item, f)?; + item.fmt(f)?; } Ok(()) }) @@ -168,7 +168,7 @@ pub(crate) fn comma_sep( pub(crate) fn print_generic_bounds<'a, 'tcx: 'a>( bounds: &'a [clean::GenericBound], cx: &'a Context<'tcx>, -) -> impl fmt::Display + 'a + Captures<'tcx> { +) -> impl Display + 'a + Captures<'tcx> { display_fn(move |f| { let mut bounds_dup = FxHashSet::default(); @@ -176,7 +176,7 @@ pub(crate) fn print_generic_bounds<'a, 'tcx: 'a>( if i > 0 { f.write_str(" + ")?; } - fmt::Display::fmt(&bound.print(cx), f)?; + bound.print(cx).fmt(f)?; } Ok(()) }) @@ -186,7 +186,7 @@ impl clean::GenericParamDef { pub(crate) fn print<'a, 'tcx: 'a>( &'a self, cx: &'a Context<'tcx>, - ) -> impl fmt::Display + 'a + Captures<'tcx> { + ) -> impl Display + 'a + Captures<'tcx> { display_fn(move |f| match &self.kind { clean::GenericParamDefKind::Lifetime { outlives } => { write!(f, "{}", self.name)?; @@ -207,35 +207,27 @@ impl clean::GenericParamDef { f.write_str(self.name.as_str())?; if !bounds.is_empty() { - if f.alternate() { - write!(f, ": {:#}", print_generic_bounds(bounds, cx))?; - } else { - write!(f, ": {}", print_generic_bounds(bounds, cx))?; - } + f.write_str(": ")?; + print_generic_bounds(bounds, cx).fmt(f)?; } if let Some(ref ty) = default { - if f.alternate() { - write!(f, " = {:#}", ty.print(cx))?; - } else { - write!(f, " = {}", ty.print(cx))?; - } + f.write_str(" = ")?; + ty.print(cx).fmt(f)?; } Ok(()) } clean::GenericParamDefKind::Const { ty, 
default, .. } => { - if f.alternate() { - write!(f, "const {}: {:#}", self.name, ty.print(cx))?; - } else { - write!(f, "const {}: {}", self.name, ty.print(cx))?; - } + write!(f, "const {}: ", self.name)?; + ty.print(cx).fmt(f)?; if let Some(default) = default { + f.write_str(" = ")?; if f.alternate() { - write!(f, " = {default:#}")?; + write!(f, "{default}")?; } else { - write!(f, " = {default}")?; + write!(f, "{}", Escape(default))?; } } @@ -249,7 +241,7 @@ impl clean::Generics { pub(crate) fn print<'a, 'tcx: 'a>( &'a self, cx: &'a Context<'tcx>, - ) -> impl fmt::Display + 'a + Captures<'tcx> { + ) -> impl Display + 'a + Captures<'tcx> { display_fn(move |f| { let mut real_params = self.params.iter().filter(|p| !p.is_synthetic_param()).peekable(); if real_params.peek().is_none() { @@ -279,63 +271,50 @@ pub(crate) fn print_where_clause<'a, 'tcx: 'a>( cx: &'a Context<'tcx>, indent: usize, ending: Ending, -) -> impl fmt::Display + 'a + Captures<'tcx> { +) -> impl Display + 'a + Captures<'tcx> { display_fn(move |f| { - let mut where_predicates = gens.where_predicates.iter().filter(|pred| { - !matches!(pred, clean::WherePredicate::BoundPredicate { bounds, .. 
} if bounds.is_empty()) - }).map(|pred| { - display_fn(move |f| { - if f.alternate() { - f.write_str(" ")?; - } else { - f.write_str("\n")?; - } + let mut where_predicates = gens + .where_predicates + .iter() + .map(|pred| { + display_fn(move |f| { + if f.alternate() { + f.write_str(" ")?; + } else { + f.write_str("\n")?; + } - match pred { - clean::WherePredicate::BoundPredicate { ty, bounds, bound_params } => { - let ty_cx = ty.print(cx); - let generic_bounds = print_generic_bounds(bounds, cx); - - if bound_params.is_empty() { - if f.alternate() { - write!(f, "{ty_cx:#}: {generic_bounds:#}") - } else { - write!(f, "{ty_cx}: {generic_bounds}") + match pred { + clean::WherePredicate::BoundPredicate { ty, bounds, bound_params } => { + print_higher_ranked_params_with_space(bound_params, cx).fmt(f)?; + ty.print(cx).fmt(f)?; + f.write_str(":")?; + if !bounds.is_empty() { + f.write_str(" ")?; + print_generic_bounds(bounds, cx).fmt(f)?; } - } else { + Ok(()) + } + clean::WherePredicate::RegionPredicate { lifetime, bounds } => { + // We don't need to check `alternate` since we can be certain that neither + // the lifetime nor the bounds contain any characters which need escaping. 
+ write!(f, "{}:", lifetime.print())?; + if !bounds.is_empty() { + write!(f, " {}", print_generic_bounds(bounds, cx))?; + } + Ok(()) + } + clean::WherePredicate::EqPredicate { lhs, rhs } => { if f.alternate() { - write!( - f, - "for<{:#}> {ty_cx:#}: {generic_bounds:#}", - comma_sep(bound_params.iter().map(|lt| lt.print(cx)), true) - ) + write!(f, "{:#} == {:#}", lhs.print(cx), rhs.print(cx)) } else { - write!( - f, - "for<{}> {ty_cx}: {generic_bounds}", - comma_sep(bound_params.iter().map(|lt| lt.print(cx)), true) - ) + write!(f, "{} == {}", lhs.print(cx), rhs.print(cx)) } } } - clean::WherePredicate::RegionPredicate { lifetime, bounds } => { - let mut bounds_display = String::new(); - for bound in bounds.iter().map(|b| b.print(cx)) { - write!(bounds_display, "{bound} + ")?; - } - bounds_display.truncate(bounds_display.len() - " + ".len()); - write!(f, "{}: {bounds_display}", lifetime.print()) - } - clean::WherePredicate::EqPredicate { lhs, rhs } => { - if f.alternate() { - write!(f, "{:#} == {:#}", lhs.print(cx), rhs.print(cx)) - } else { - write!(f, "{} == {}", lhs.print(cx), rhs.print(cx)) - } - } - } + }) }) - }).peekable(); + .peekable(); if where_predicates.peek().is_none() { return Ok(()); @@ -392,13 +371,13 @@ pub(crate) fn print_where_clause<'a, 'tcx: 'a>( } impl clean::Lifetime { - pub(crate) fn print(&self) -> impl fmt::Display + '_ { + pub(crate) fn print(&self) -> impl Display + '_ { self.0.as_str() } } impl clean::Constant { - pub(crate) fn print(&self, tcx: TyCtxt<'_>) -> impl fmt::Display + '_ { + pub(crate) fn print(&self, tcx: TyCtxt<'_>) -> impl Display + '_ { let expr = self.expr(tcx); display_fn( move |f| { @@ -409,31 +388,10 @@ impl clean::Constant { } impl clean::PolyTrait { - fn print<'a, 'tcx: 'a>( - &'a self, - cx: &'a Context<'tcx>, - ) -> impl fmt::Display + 'a + Captures<'tcx> { + fn print<'a, 'tcx: 'a>(&'a self, cx: &'a Context<'tcx>) -> impl Display + 'a + Captures<'tcx> { display_fn(move |f| { - if !self.generic_params.is_empty() { - 
if f.alternate() { - write!( - f, - "for<{:#}> ", - comma_sep(self.generic_params.iter().map(|g| g.print(cx)), true) - )?; - } else { - write!( - f, - "for<{}> ", - comma_sep(self.generic_params.iter().map(|g| g.print(cx)), true) - )?; - } - } - if f.alternate() { - write!(f, "{:#}", self.trait_.print(cx)) - } else { - write!(f, "{}", self.trait_.print(cx)) - } + print_higher_ranked_params_with_space(&self.generic_params, cx).fmt(f)?; + self.trait_.print(cx).fmt(f) }) } } @@ -442,32 +400,25 @@ impl clean::GenericBound { pub(crate) fn print<'a, 'tcx: 'a>( &'a self, cx: &'a Context<'tcx>, - ) -> impl fmt::Display + 'a + Captures<'tcx> { + ) -> impl Display + 'a + Captures<'tcx> { display_fn(move |f| match self { clean::GenericBound::Outlives(lt) => write!(f, "{}", lt.print()), clean::GenericBound::TraitBound(ty, modifier) => { - let modifier_str = match modifier { + f.write_str(match modifier { hir::TraitBoundModifier::None => "", hir::TraitBoundModifier::Maybe => "?", hir::TraitBoundModifier::Negative => "!", // `const` and `~const` trait bounds are experimental; don't render them. 
hir::TraitBoundModifier::Const | hir::TraitBoundModifier::MaybeConst => "", - }; - if f.alternate() { - write!(f, "{modifier_str}{ty:#}", ty = ty.print(cx)) - } else { - write!(f, "{modifier_str}{ty}", ty = ty.print(cx)) - } + })?; + ty.print(cx).fmt(f) } }) } } impl clean::GenericArgs { - fn print<'a, 'tcx: 'a>( - &'a self, - cx: &'a Context<'tcx>, - ) -> impl fmt::Display + 'a + Captures<'tcx> { + fn print<'a, 'tcx: 'a>(&'a self, cx: &'a Context<'tcx>) -> impl Display + 'a + Captures<'tcx> { display_fn(move |f| { match self { clean::GenericArgs::AngleBracketed { args, bindings } => { @@ -515,11 +466,7 @@ impl clean::GenericArgs { f.write_str(", ")?; } comma = true; - if f.alternate() { - write!(f, "{:#}", ty.print(cx))?; - } else { - write!(f, "{}", ty.print(cx))?; - } + ty.print(cx).fmt(f)?; } f.write_str(")")?; if let Some(ref ty) = *output { @@ -973,31 +920,43 @@ fn primitive_link_fragment( None => {} } } - std::fmt::Display::fmt(&name, f)?; + Display::fmt(&name, f)?; if needs_termination { write!(f, "")?; } Ok(()) } -/// Helper to render type parameters fn tybounds<'a, 'tcx: 'a>( bounds: &'a [clean::PolyTrait], lt: &'a Option, cx: &'a Context<'tcx>, -) -> impl fmt::Display + 'a + Captures<'tcx> { +) -> impl Display + 'a + Captures<'tcx> { display_fn(move |f| { for (i, bound) in bounds.iter().enumerate() { if i > 0 { write!(f, " + ")?; } - - fmt::Display::fmt(&bound.print(cx), f)?; + bound.print(cx).fmt(f)?; } - if let Some(lt) = lt { - write!(f, " + ")?; - fmt::Display::fmt(<.print(), f)?; + // We don't need to check `alternate` since we can be certain that + // the lifetime doesn't contain any characters which need escaping. 
+ write!(f, " + {}", lt.print())?; + } + Ok(()) + }) +} + +fn print_higher_ranked_params_with_space<'a, 'tcx: 'a>( + params: &'a [clean::GenericParamDef], + cx: &'a Context<'tcx>, +) -> impl Display + 'a + Captures<'tcx> { + display_fn(move |f| { + if !params.is_empty() { + f.write_str(if f.alternate() { "for<" } else { "for<" })?; + comma_sep(params.iter().map(|lt| lt.print(cx)), true).fmt(f)?; + f.write_str(if f.alternate() { "> " } else { "> " })?; } Ok(()) }) @@ -1007,7 +966,7 @@ pub(crate) fn anchor<'a, 'cx: 'a>( did: DefId, text: Symbol, cx: &'cx Context<'_>, -) -> impl fmt::Display + 'a { +) -> impl Display + 'a { let parts = href(did, cx); display_fn(move |f| { if let Ok((url, short_ty, fqp)) = parts { @@ -1039,7 +998,7 @@ fn fmt_type<'cx>( } clean::DynTrait(ref bounds, ref lt) => { f.write_str("dyn ")?; - fmt::Display::fmt(&tybounds(bounds, lt, cx), f) + tybounds(bounds, lt, cx).fmt(f) } clean::Infer => write!(f, "_"), clean::Primitive(clean::PrimitiveType::Never) => { @@ -1049,80 +1008,62 @@ fn fmt_type<'cx>( primitive_link(f, prim, format_args!("{}", prim.as_sym().as_str()), cx) } clean::BareFunction(ref decl) => { + print_higher_ranked_params_with_space(&decl.generic_params, cx).fmt(f)?; + decl.unsafety.print_with_space().fmt(f)?; + print_abi_with_space(decl.abi).fmt(f)?; if f.alternate() { - write!( - f, - "{:#}{}{:#}fn{:#}", - decl.print_hrtb_with_space(cx), - decl.unsafety.print_with_space(), - print_abi_with_space(decl.abi), - decl.decl.print(cx), - ) + f.write_str("fn")?; } else { - write!( - f, - "{}{}{}", - decl.print_hrtb_with_space(cx), - decl.unsafety.print_with_space(), - print_abi_with_space(decl.abi) - )?; primitive_link(f, PrimitiveType::Fn, format_args!("fn"), cx)?; - write!(f, "{}", decl.decl.print(cx)) } + decl.decl.print(cx).fmt(f) } - clean::Tuple(ref typs) => { - match &typs[..] 
{ - &[] => primitive_link(f, PrimitiveType::Unit, format_args!("()"), cx), - [one] => { - if let clean::Generic(name) = one { - primitive_link(f, PrimitiveType::Tuple, format_args!("({name},)"), cx) - } else { - write!(f, "(")?; - // Carry `f.alternate()` into this display w/o branching manually. - fmt::Display::fmt(&one.print(cx), f)?; - write!(f, ",)") - } + clean::Tuple(ref typs) => match &typs[..] { + &[] => primitive_link(f, PrimitiveType::Unit, format_args!("()"), cx), + [one] => { + if let clean::Generic(name) = one { + primitive_link(f, PrimitiveType::Tuple, format_args!("({name},)"), cx) + } else { + write!(f, "(")?; + one.print(cx).fmt(f)?; + write!(f, ",)") } - many => { - let generic_names: Vec = many - .iter() - .filter_map(|t| match t { - clean::Generic(name) => Some(*name), - _ => None, - }) - .collect(); - let is_generic = generic_names.len() == many.len(); - if is_generic { - primitive_link( - f, - PrimitiveType::Tuple, - format_args!( - "({})", - generic_names.iter().map(|s| s.as_str()).join(", ") - ), - cx, - ) - } else { - write!(f, "(")?; - for (i, item) in many.iter().enumerate() { - if i != 0 { - write!(f, ", ")?; - } - // Carry `f.alternate()` into this display w/o branching manually. 
- fmt::Display::fmt(&item.print(cx), f)?; + } + many => { + let generic_names: Vec = many + .iter() + .filter_map(|t| match t { + clean::Generic(name) => Some(*name), + _ => None, + }) + .collect(); + let is_generic = generic_names.len() == many.len(); + if is_generic { + primitive_link( + f, + PrimitiveType::Tuple, + format_args!("({})", generic_names.iter().map(|s| s.as_str()).join(", ")), + cx, + ) + } else { + write!(f, "(")?; + for (i, item) in many.iter().enumerate() { + if i != 0 { + write!(f, ", ")?; } - write!(f, ")") + item.print(cx).fmt(f)?; } + write!(f, ")") } } - } + }, clean::Slice(ref t) => match **t { clean::Generic(name) => { primitive_link(f, PrimitiveType::Slice, format_args!("[{name}]"), cx) } _ => { write!(f, "[")?; - fmt::Display::fmt(&t.print(cx), f)?; + t.print(cx).fmt(f)?; write!(f, "]") } }, @@ -1135,7 +1076,7 @@ fn fmt_type<'cx>( ), _ => { write!(f, "[")?; - fmt::Display::fmt(&t.print(cx), f)?; + t.print(cx).fmt(f)?; if f.alternate() { write!(f, "; {n}")?; } else { @@ -1175,7 +1116,7 @@ fn fmt_type<'cx>( } } else { primitive_link(f, clean::PrimitiveType::RawPointer, format_args!("*{m} "), cx)?; - fmt::Display::fmt(&t.print(cx), f) + t.print(cx).fmt(f) } } clean::BorrowedRef { lifetime: ref l, mutability, type_: ref ty } => { @@ -1216,11 +1157,8 @@ fn fmt_type<'cx>( Ok(()) } clean::ImplTrait(ref bounds) => { - if f.alternate() { - write!(f, "impl {:#}", print_generic_bounds(bounds, cx)) - } else { - write!(f, "impl {}", print_generic_bounds(bounds, cx)) - } + f.write_str("impl ")?; + print_generic_bounds(bounds, cx).fmt(f) } clean::QPath(box clean::QPathData { ref assoc, @@ -1292,8 +1230,7 @@ fn fmt_type<'cx>( write!(f, "{}", assoc.name) }?; - // Carry `f.alternate()` into this display w/o branching manually. 
- fmt::Display::fmt(&assoc.args.print(cx), f) + assoc.args.print(cx).fmt(f) } } } @@ -1302,7 +1239,7 @@ impl clean::Type { pub(crate) fn print<'b, 'a: 'b, 'tcx: 'a>( &'a self, cx: &'a Context<'tcx>, - ) -> impl fmt::Display + 'b + Captures<'tcx> { + ) -> impl Display + 'b + Captures<'tcx> { display_fn(move |f| fmt_type(self, f, false, cx)) } } @@ -1311,7 +1248,7 @@ impl clean::Path { pub(crate) fn print<'b, 'a: 'b, 'tcx: 'a>( &'a self, cx: &'a Context<'tcx>, - ) -> impl fmt::Display + 'b + Captures<'tcx> { + ) -> impl Display + 'b + Captures<'tcx> { display_fn(move |f| resolved_path(f, self.def_id(), self, false, false, cx)) } } @@ -1321,20 +1258,18 @@ impl clean::Impl { &'a self, use_absolute: bool, cx: &'a Context<'tcx>, - ) -> impl fmt::Display + 'a + Captures<'tcx> { + ) -> impl Display + 'a + Captures<'tcx> { display_fn(move |f| { - if f.alternate() { - write!(f, "impl{:#} ", self.generics.print(cx))?; - } else { - write!(f, "impl{} ", self.generics.print(cx))?; - } + f.write_str("impl")?; + self.generics.print(cx).fmt(f)?; + f.write_str(" ")?; if let Some(ref ty) = self.trait_ { match self.polarity { ty::ImplPolarity::Positive | ty::ImplPolarity::Reservation => {} ty::ImplPolarity::Negative => write!(f, "!")?, } - fmt::Display::fmt(&ty.print(cx), f)?; + ty.print(cx).fmt(f)?; write!(f, " for ")?; } @@ -1359,14 +1294,9 @@ impl clean::Impl { // Hardcoded anchor library/core/src/primitive_docs.rs // Link should match `# Trait implementations` - let hrtb = bare_fn.print_hrtb_with_space(cx); - let unsafety = bare_fn.unsafety.print_with_space(); - let abi = print_abi_with_space(bare_fn.abi); - if f.alternate() { - write!(f, "{hrtb:#}{unsafety}{abi:#}",)?; - } else { - write!(f, "{hrtb}{unsafety}{abi}",)?; - } + print_higher_ranked_params_with_space(&bare_fn.generic_params, cx).fmt(f)?; + bare_fn.unsafety.print_with_space().fmt(f)?; + print_abi_with_space(bare_fn.abi).fmt(f)?; let ellipsis = if bare_fn.decl.c_variadic { ", ..." 
} else { "" }; primitive_link_fragment( f, @@ -1386,8 +1316,7 @@ impl clean::Impl { fmt_type(&self.for_, f, use_absolute, cx)?; } - fmt::Display::fmt(&print_where_clause(&self.generics, cx, 0, Ending::Newline), f)?; - Ok(()) + print_where_clause(&self.generics, cx, 0, Ending::Newline).fmt(f) }) } } @@ -1396,16 +1325,11 @@ impl clean::Arguments { pub(crate) fn print<'a, 'tcx: 'a>( &'a self, cx: &'a Context<'tcx>, - ) -> impl fmt::Display + 'a + Captures<'tcx> { + ) -> impl Display + 'a + Captures<'tcx> { display_fn(move |f| { for (i, input) in self.values.iter().enumerate() { write!(f, "{}: ", input.name)?; - - if f.alternate() { - write!(f, "{:#}", input.type_.print(cx))?; - } else { - write!(f, "{}", input.type_.print(cx))?; - } + input.type_.print(cx).fmt(f)?; if i + 1 < self.values.len() { write!(f, ", ")?; } @@ -1415,25 +1339,6 @@ impl clean::Arguments { } } -impl clean::BareFunctionDecl { - fn print_hrtb_with_space<'a, 'tcx: 'a>( - &'a self, - cx: &'a Context<'tcx>, - ) -> impl fmt::Display + 'a + Captures<'tcx> { - display_fn(move |f| { - if !self.generic_params.is_empty() { - write!( - f, - "for<{}> ", - comma_sep(self.generic_params.iter().map(|g| g.print(cx)), true) - ) - } else { - Ok(()) - } - }) - } -} - // Implements Write but only counts the bytes "written". struct WriteCounter(usize); @@ -1447,7 +1352,7 @@ impl std::fmt::Write for WriteCounter { // Implements Display by emitting the given number of spaces. struct Indent(usize); -impl fmt::Display for Indent { +impl Display for Indent { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { (0..self.0).for_each(|_| { f.write_char(' ').unwrap(); @@ -1460,7 +1365,7 @@ impl clean::FnDecl { pub(crate) fn print<'b, 'a: 'b, 'tcx: 'a>( &'a self, cx: &'a Context<'tcx>, - ) -> impl fmt::Display + 'b + Captures<'tcx> { + ) -> impl Display + 'b + Captures<'tcx> { display_fn(move |f| { let ellipsis = if self.c_variadic { ", ..." 
} else { "" }; if f.alternate() { @@ -1494,7 +1399,7 @@ impl clean::FnDecl { header_len: usize, indent: usize, cx: &'a Context<'tcx>, - ) -> impl fmt::Display + 'a + Captures<'tcx> { + ) -> impl Display + 'a + Captures<'tcx> { display_fn(move |f| { // First, generate the text form of the declaration, with no line wrapping, and count the bytes. let mut counter = WriteCounter(0); @@ -1554,7 +1459,7 @@ impl clean::FnDecl { } clean::SelfExplicit(ref typ) => { write!(f, "self: ")?; - fmt::Display::fmt(&typ.print(cx), f)?; + typ.print(cx).fmt(f)?; } } } else { @@ -1562,7 +1467,7 @@ impl clean::FnDecl { write!(f, "const ")?; } write!(f, "{}: ", input.name)?; - fmt::Display::fmt(&input.type_.print(cx), f)?; + input.type_.print(cx).fmt(f)?; } } @@ -1578,14 +1483,13 @@ impl clean::FnDecl { Some(n) => write!(f, "\n{})", Indent(n))?, }; - fmt::Display::fmt(&self.print_output(cx), f)?; - Ok(()) + self.print_output(cx).fmt(f) } fn print_output<'a, 'tcx: 'a>( &'a self, cx: &'a Context<'tcx>, - ) -> impl fmt::Display + 'a + Captures<'tcx> { + ) -> impl Display + 'a + Captures<'tcx> { display_fn(move |f| match &self.output { clean::Tuple(tys) if tys.is_empty() => Ok(()), ty if f.alternate() => { @@ -1600,7 +1504,7 @@ pub(crate) fn visibility_print_with_space<'a, 'tcx: 'a>( visibility: Option>, item_did: ItemId, cx: &'a Context<'tcx>, -) -> impl fmt::Display + 'a + Captures<'tcx> { +) -> impl Display + 'a + Captures<'tcx> { use std::fmt::Write as _; let to_print: Cow<'static, str> = match visibility { @@ -1648,7 +1552,7 @@ pub(crate) fn visibility_to_src_with_space<'a, 'tcx: 'a>( visibility: Option>, tcx: TyCtxt<'tcx>, item_did: DefId, -) -> impl fmt::Display + 'a + Captures<'tcx> { +) -> impl Display + 'a + Captures<'tcx> { let to_print: Cow<'static, str> = match visibility { None => "".into(), Some(ty::Visibility::Public) => "pub ".into(), @@ -1727,7 +1631,7 @@ impl clean::Import { pub(crate) fn print<'a, 'tcx: 'a>( &'a self, cx: &'a Context<'tcx>, - ) -> impl fmt::Display + 'a + 
Captures<'tcx> { + ) -> impl Display + 'a + Captures<'tcx> { display_fn(move |f| match self.kind { clean::ImportKind::Simple(name) => { if name == self.source.path.last() { @@ -1751,7 +1655,7 @@ impl clean::ImportSource { pub(crate) fn print<'a, 'tcx: 'a>( &'a self, cx: &'a Context<'tcx>, - ) -> impl fmt::Display + 'a + Captures<'tcx> { + ) -> impl Display + 'a + Captures<'tcx> { display_fn(move |f| match self.did { Some(did) => resolved_path(f, did, &self.path, true, false, cx), _ => { @@ -1779,29 +1683,19 @@ impl clean::TypeBinding { pub(crate) fn print<'a, 'tcx: 'a>( &'a self, cx: &'a Context<'tcx>, - ) -> impl fmt::Display + 'a + Captures<'tcx> { + ) -> impl Display + 'a + Captures<'tcx> { display_fn(move |f| { f.write_str(self.assoc.name.as_str())?; - if f.alternate() { - write!(f, "{:#}", self.assoc.args.print(cx))?; - } else { - write!(f, "{}", self.assoc.args.print(cx))?; - } + self.assoc.args.print(cx).fmt(f)?; match self.kind { clean::TypeBindingKind::Equality { ref term } => { - if f.alternate() { - write!(f, " = {:#}", term.print(cx))?; - } else { - write!(f, " = {}", term.print(cx))?; - } + f.write_str(" = ")?; + term.print(cx).fmt(f)?; } clean::TypeBindingKind::Constraint { ref bounds } => { if !bounds.is_empty() { - if f.alternate() { - write!(f, ": {:#}", print_generic_bounds(bounds, cx))?; - } else { - write!(f, ": {}", print_generic_bounds(bounds, cx))?; - } + f.write_str(": ")?; + print_generic_bounds(bounds, cx).fmt(f)?; } } } @@ -1810,7 +1704,7 @@ impl clean::TypeBinding { } } -pub(crate) fn print_abi_with_space(abi: Abi) -> impl fmt::Display { +pub(crate) fn print_abi_with_space(abi: Abi) -> impl Display { display_fn(move |f| { let quot = if f.alternate() { "\"" } else { """ }; match abi { @@ -1828,34 +1722,32 @@ impl clean::GenericArg { pub(crate) fn print<'a, 'tcx: 'a>( &'a self, cx: &'a Context<'tcx>, - ) -> impl fmt::Display + 'a + Captures<'tcx> { + ) -> impl Display + 'a + Captures<'tcx> { display_fn(move |f| match self { - 
clean::GenericArg::Lifetime(lt) => fmt::Display::fmt(<.print(), f), - clean::GenericArg::Type(ty) => fmt::Display::fmt(&ty.print(cx), f), - clean::GenericArg::Const(ct) => fmt::Display::fmt(&ct.print(cx.tcx()), f), - clean::GenericArg::Infer => fmt::Display::fmt("_", f), + clean::GenericArg::Lifetime(lt) => lt.print().fmt(f), + clean::GenericArg::Type(ty) => ty.print(cx).fmt(f), + clean::GenericArg::Const(ct) => ct.print(cx.tcx()).fmt(f), + clean::GenericArg::Infer => Display::fmt("_", f), }) } } -impl clean::types::Term { +impl clean::Term { pub(crate) fn print<'a, 'tcx: 'a>( &'a self, cx: &'a Context<'tcx>, - ) -> impl fmt::Display + 'a + Captures<'tcx> { + ) -> impl Display + 'a + Captures<'tcx> { display_fn(move |f| match self { - clean::types::Term::Type(ty) => fmt::Display::fmt(&ty.print(cx), f), - clean::types::Term::Constant(ct) => fmt::Display::fmt(&ct.print(cx.tcx()), f), + clean::Term::Type(ty) => ty.print(cx).fmt(f), + clean::Term::Constant(ct) => ct.print(cx.tcx()).fmt(f), }) } } -pub(crate) fn display_fn( - f: impl FnOnce(&mut fmt::Formatter<'_>) -> fmt::Result, -) -> impl fmt::Display { +pub(crate) fn display_fn(f: impl FnOnce(&mut fmt::Formatter<'_>) -> fmt::Result) -> impl Display { struct WithFormatter(Cell>); - impl fmt::Display for WithFormatter + impl Display for WithFormatter where F: FnOnce(&mut fmt::Formatter<'_>) -> fmt::Result, { diff --git a/tests/rustdoc/bounds-in-multiple-parts.rs b/tests/rustdoc/bounds.rs similarity index 54% rename from tests/rustdoc/bounds-in-multiple-parts.rs rename to tests/rustdoc/bounds.rs index 279e3c148887..da09e3f2a528 100644 --- a/tests/rustdoc/bounds-in-multiple-parts.rs +++ b/tests/rustdoc/bounds.rs @@ -18,3 +18,15 @@ pub trait T2 { fn f() where Self: Eq, Self: Eq2, T: Eq2; } + +// Checking that we support empty bounds (we used to crash on empty outlives-bounds). +// Note that we don't want to hide them since they have a semantic effect. 
+// For outlives-bounds, they force the lifetime param to be early-bound instead of late-bound. +// For trait bounds, it can affect well-formedness (see `ClauseKind::WellFormed`). +// @has 'foo/fn.empty.html' +// @has - '//pre[@class="rust item-decl"]' "empty<'a, T>()where T:, 'a:," +pub fn empty<'a, T>() + where + T:, + 'a:, +{} diff --git a/tests/rustdoc/const-generics/generic_const_exprs.rs b/tests/rustdoc/const-generics/generic_const_exprs.rs index e23b3006da6c..2d2d31d7231a 100644 --- a/tests/rustdoc/const-generics/generic_const_exprs.rs +++ b/tests/rustdoc/const-generics/generic_const_exprs.rs @@ -3,5 +3,5 @@ #![allow(incomplete_features)] // make sure that `ConstEvaluatable` predicates dont cause rustdoc to ICE #77647 // @has foo/struct.Ice.html '//pre[@class="rust item-decl"]' \ -// 'pub struct Ice;' +// 'pub struct Ice where [(); { _ }]:;' pub struct Ice where [(); N + 1]:; From f5d43a052b9eb464e54af819143467954d814a24 Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Sat, 17 Feb 2024 14:27:05 +0900 Subject: [PATCH 095/127] Fix missing trait impls for type in rustc docs --- src/librustdoc/clean/inline.rs | 19 ++++++++++++------- src/librustdoc/config.rs | 4 ++++ .../inline_cross/auxiliary/issue-76736-1.rs | 6 ++++++ .../inline_cross/auxiliary/issue-76736-2.rs | 5 +++++ tests/rustdoc/inline_cross/issue-76736-1.rs | 15 +++++++++++++++ tests/rustdoc/inline_cross/issue-76736-2.rs | 16 ++++++++++++++++ tests/rustdoc/inline_cross/issue-76736-3.rs | 16 ++++++++++++++++ 7 files changed, 74 insertions(+), 7 deletions(-) create mode 100644 tests/rustdoc/inline_cross/auxiliary/issue-76736-1.rs create mode 100644 tests/rustdoc/inline_cross/auxiliary/issue-76736-2.rs create mode 100644 tests/rustdoc/inline_cross/issue-76736-1.rs create mode 100644 tests/rustdoc/inline_cross/issue-76736-2.rs create mode 100644 tests/rustdoc/inline_cross/issue-76736-3.rs diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs index e2f2c9a5e566..03f62f41a26f 100644 
--- a/src/librustdoc/clean/inline.rs +++ b/src/librustdoc/clean/inline.rs @@ -443,11 +443,13 @@ pub(crate) fn build_impl( return; } - if let Some(stab) = tcx.lookup_stability(did) - && stab.is_unstable() - && stab.feature == sym::rustc_private - { - return; + if !tcx.features().rustc_private && !cx.render_options.force_unstable_if_unmarked { + if let Some(stab) = tcx.lookup_stability(did) + && stab.is_unstable() + && stab.feature == sym::rustc_private + { + return; + } } } @@ -477,8 +479,11 @@ pub(crate) fn build_impl( return; } - if let Some(stab) = tcx.lookup_stability(did) { - if stab.is_unstable() && stab.feature == sym::rustc_private { + if !tcx.features().rustc_private && !cx.render_options.force_unstable_if_unmarked { + if let Some(stab) = tcx.lookup_stability(did) + && stab.is_unstable() + && stab.feature == sym::rustc_private + { return; } } diff --git a/src/librustdoc/config.rs b/src/librustdoc/config.rs index c46047aa0dba..4b1a417b2112 100644 --- a/src/librustdoc/config.rs +++ b/src/librustdoc/config.rs @@ -281,6 +281,8 @@ pub(crate) struct RenderOptions { pub(crate) no_emit_shared: bool, /// If `true`, HTML source code pages won't be generated. 
pub(crate) html_no_source: bool, + /// Whether `-Zforce-unstable-if-unmarked` unstable option is set + pub(crate) force_unstable_if_unmarked: bool, } #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -347,6 +349,7 @@ impl Options { let codegen_options = CodegenOptions::build(early_dcx, matches); let unstable_opts = UnstableOptions::build(early_dcx, matches); + let force_unstable_if_unmarked = unstable_opts.force_unstable_if_unmarked; let dcx = new_dcx(error_format, None, diagnostic_width, &unstable_opts); @@ -760,6 +763,7 @@ impl Options { call_locations, no_emit_shared: false, html_no_source, + force_unstable_if_unmarked, }; Some((options, render_options)) } diff --git a/tests/rustdoc/inline_cross/auxiliary/issue-76736-1.rs b/tests/rustdoc/inline_cross/auxiliary/issue-76736-1.rs new file mode 100644 index 000000000000..4ae9f79a5321 --- /dev/null +++ b/tests/rustdoc/inline_cross/auxiliary/issue-76736-1.rs @@ -0,0 +1,6 @@ +#![feature(staged_api)] +#![unstable(feature = "rustc_private", issue = "none")] + +pub trait MaybeResult {} + +impl MaybeResult for T {} diff --git a/tests/rustdoc/inline_cross/auxiliary/issue-76736-2.rs b/tests/rustdoc/inline_cross/auxiliary/issue-76736-2.rs new file mode 100644 index 000000000000..b5fbac970822 --- /dev/null +++ b/tests/rustdoc/inline_cross/auxiliary/issue-76736-2.rs @@ -0,0 +1,5 @@ +#![feature(rustc_private)] + +extern crate issue_76736_1; + +pub struct Bar; diff --git a/tests/rustdoc/inline_cross/issue-76736-1.rs b/tests/rustdoc/inline_cross/issue-76736-1.rs new file mode 100644 index 000000000000..25feae2c8d68 --- /dev/null +++ b/tests/rustdoc/inline_cross/issue-76736-1.rs @@ -0,0 +1,15 @@ +// aux-build:issue-76736-1.rs +// aux-build:issue-76736-2.rs + +#![crate_name = "foo"] + +extern crate issue_76736_1; +extern crate issue_76736_2; + +// @has foo/struct.Foo.html +// @!has - '//*[@class="impl"]//h3[@class="code-header"]' 'MaybeResult' +pub struct Foo; + +// @has foo/struct.Bar.html +// @!has - 
'//*[@class="impl"]//h3[@class="code-header"]' 'MaybeResult' +pub use issue_76736_2::Bar; diff --git a/tests/rustdoc/inline_cross/issue-76736-2.rs b/tests/rustdoc/inline_cross/issue-76736-2.rs new file mode 100644 index 000000000000..e43c825d6e14 --- /dev/null +++ b/tests/rustdoc/inline_cross/issue-76736-2.rs @@ -0,0 +1,16 @@ +// aux-build:issue-76736-1.rs +// aux-build:issue-76736-2.rs + +#![crate_name = "foo"] +#![feature(rustc_private)] + +extern crate issue_76736_1; +extern crate issue_76736_2; + +// @has foo/struct.Foo.html +// @has - '//*[@class="impl"]//h3[@class="code-header"]' 'MaybeResult' +pub struct Foo; + +// @has foo/struct.Bar.html +// @has - '//*[@class="impl"]//h3[@class="code-header"]' 'MaybeResult' +pub use issue_76736_2::Bar; diff --git a/tests/rustdoc/inline_cross/issue-76736-3.rs b/tests/rustdoc/inline_cross/issue-76736-3.rs new file mode 100644 index 000000000000..9542f3f3557a --- /dev/null +++ b/tests/rustdoc/inline_cross/issue-76736-3.rs @@ -0,0 +1,16 @@ +// compile-flags: -Zforce-unstable-if-unmarked +// aux-build:issue-76736-1.rs +// aux-build:issue-76736-2.rs + +#![crate_name = "foo"] + +extern crate issue_76736_1; +extern crate issue_76736_2; + +// @has foo/struct.Foo.html +// @has - '//*[@class="impl"]//h3[@class="code-header"]' 'MaybeResult' +pub struct Foo; + +// @has foo/struct.Bar.html +// @has - '//*[@class="impl"]//h3[@class="code-header"]' 'MaybeResult' +pub use issue_76736_2::Bar; From f62a695572770330f0853cf9744beb5b3068e6c4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Le=C3=B3n=20Orell=20Valerian=20Liehr?= Date: Thu, 28 Dec 2023 02:54:22 +0100 Subject: [PATCH 096/127] Update comments and variable names --- .../rustc_hir_analysis/src/astconv/bounds.rs | 52 ++++++++++--------- .../rustc_hir_analysis/src/astconv/errors.rs | 21 ++++---- 2 files changed, 39 insertions(+), 34 deletions(-) diff --git a/compiler/rustc_hir_analysis/src/astconv/bounds.rs b/compiler/rustc_hir_analysis/src/astconv/bounds.rs index dab1e2d52534..0c35ec99dae6 
100644 --- a/compiler/rustc_hir_analysis/src/astconv/bounds.rs +++ b/compiler/rustc_hir_analysis/src/astconv/bounds.rs @@ -390,14 +390,12 @@ impl<'tcx> dyn AstConv<'tcx> + '_ { { alias_ty } else { - return Err(self.tcx().dcx().emit_err( - crate::errors::ReturnTypeNotationOnNonRpitit { - span: binding.span, - ty: tcx.liberate_late_bound_regions(assoc_item.def_id, output), - fn_span: tcx.hir().span_if_local(assoc_item.def_id), - note: (), - }, - )); + return Err(tcx.dcx().emit_err(crate::errors::ReturnTypeNotationOnNonRpitit { + span: binding.span, + ty: tcx.liberate_late_bound_regions(assoc_item.def_id, output), + fn_span: tcx.hir().span_if_local(assoc_item.def_id), + note: (), + })); }; // Finally, move the fn return type's bound vars over to account for the early bound @@ -410,7 +408,9 @@ impl<'tcx> dyn AstConv<'tcx> + '_ { let bound_vars = tcx.late_bound_vars(binding.hir_id); ty::Binder::bind_with_vars(instantiation_output, bound_vars) } else { - // Append the generic arguments of the associated type to the `trait_ref`. + // Create the generic arguments for the associated type or constant by joining the + // parent arguments (the arguments of the trait) and the own arguments (the ones of + // the associated item itself) and construct an alias type using them. candidate.map_bound(|trait_ref| { let ident = Ident::new(assoc_item.name, binding.item_name.span); let item_segment = hir::PathSegment { @@ -421,16 +421,18 @@ impl<'tcx> dyn AstConv<'tcx> + '_ { infer_args: false, }; - let args_trait_ref_and_assoc_item = self.create_args_for_associated_item( + let alias_args = self.create_args_for_associated_item( path_span, assoc_item.def_id, &item_segment, trait_ref.args, ); + debug!(?alias_args); - debug!(?args_trait_ref_and_assoc_item); - - ty::AliasTy::new(tcx, assoc_item.def_id, args_trait_ref_and_assoc_item) + // Note that we're indeed also using `AliasTy` (alias *type*) for associated + // *constants* to represent *const projections*. 
Alias *term* would be a more + // appropriate name but alas. + ty::AliasTy::new(tcx, assoc_item.def_id, alias_args) }) }; @@ -442,20 +444,22 @@ impl<'tcx> dyn AstConv<'tcx> + '_ { // // for<'a> ::Item = &'a str // <-- 'a is bad // for<'a> >::Output = &'a str // <-- 'a is ok - if let ConvertedBindingKind::Equality(ty) = binding.kind { - let late_bound_in_trait_ref = + if let ConvertedBindingKind::Equality(term) = binding.kind { + let late_bound_in_projection_ty = tcx.collect_constrained_late_bound_regions(&projection_ty); - let late_bound_in_ty = - tcx.collect_referenced_late_bound_regions(&trait_ref.rebind(ty.node)); - debug!(?late_bound_in_trait_ref); - debug!(?late_bound_in_ty); + let late_bound_in_term = + tcx.collect_referenced_late_bound_regions(&trait_ref.rebind(term.node)); + debug!(?late_bound_in_projection_ty); + debug!(?late_bound_in_term); + // NOTE(associated_const_equality): This error should be impossible to trigger + // with associated const equality bounds. // FIXME: point at the type params that don't have appropriate lifetimes: // struct S1 Fn(&i32, &i32) -> &'a i32>(F); // ---- ---- ^^^^^^^ self.validate_late_bound_regions( - late_bound_in_trait_ref, - late_bound_in_ty, + late_bound_in_projection_ty, + late_bound_in_term, |br_name| { struct_span_code_err!( tcx.dcx(), @@ -473,9 +477,9 @@ impl<'tcx> dyn AstConv<'tcx> + '_ { match binding.kind { ConvertedBindingKind::Equality(..) 
if let ty::AssocKind::Fn = assoc_kind => { - return Err(self.tcx().dcx().emit_err( - crate::errors::ReturnTypeNotationEqualityBound { span: binding.span }, - )); + return Err(tcx.dcx().emit_err(crate::errors::ReturnTypeNotationEqualityBound { + span: binding.span, + })); } ConvertedBindingKind::Equality(term) => { // "Desugar" a constraint like `T: Iterator` this to diff --git a/compiler/rustc_hir_analysis/src/astconv/errors.rs b/compiler/rustc_hir_analysis/src/astconv/errors.rs index ea8d364bba6c..f04114229bf9 100644 --- a/compiler/rustc_hir_analysis/src/astconv/errors.rs +++ b/compiler/rustc_hir_analysis/src/astconv/errors.rs @@ -243,7 +243,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { None, ) && suggested_name != assoc_name.name { - // We suggested constraining a type parameter, but the associated type on it + // We suggested constraining a type parameter, but the associated item on it // was also not an exact match, so we also suggest changing it. err.span_suggestion_verbose( assoc_name.span, @@ -258,16 +258,17 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { } } - // If we still couldn't find any associated type, and only one associated type exists, + // If we still couldn't find any associated item, and only one associated item exists, // suggests using it. if let [candidate_name] = all_candidate_names.as_slice() { - // this should still compile, except on `#![feature(associated_type_defaults)]` - // where it could suggests `type A = Self::A`, thus recursing infinitely - let applicability = if tcx.features().associated_type_defaults { - Applicability::Unspecified - } else { - Applicability::MaybeIncorrect - }; + // This should still compile, except on `#![feature(associated_type_defaults)]` + // where it could suggests `type A = Self::A`, thus recursing infinitely. 
+ let applicability = + if assoc_kind == ty::AssocKind::Type && tcx.features().associated_type_defaults { + Applicability::Unspecified + } else { + Applicability::MaybeIncorrect + }; err.sugg = Some(errors::AssocItemNotFoundSugg::Other { span: assoc_name.span, @@ -323,7 +324,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { }; // For equality bounds, we want to blame the term (RHS) instead of the item (LHS) since - // one can argue that that's more “untuitive” to the user. + // one can argue that that's more “intuitive” to the user. let (span, expected_because_label, expected, got) = if let Some(binding) = binding && let ConvertedBindingKind::Equality(term) = binding.kind { From 38a2a65f0b7bcf137bc4756c4fa1db1c3ba3b0c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Le=C3=B3n=20Orell=20Valerian=20Liehr?= Date: Thu, 28 Dec 2023 16:38:24 +0100 Subject: [PATCH 097/127] Remove astconv::ConvertedBinding --- .../rustc_hir_analysis/src/astconv/bounds.rs | 132 +++++++++--------- .../rustc_hir_analysis/src/astconv/errors.rs | 24 ++-- .../rustc_hir_analysis/src/astconv/mod.rs | 91 +++--------- 3 files changed, 96 insertions(+), 151 deletions(-) diff --git a/compiler/rustc_hir_analysis/src/astconv/bounds.rs b/compiler/rustc_hir_analysis/src/astconv/bounds.rs index 0c35ec99dae6..6940b4a50458 100644 --- a/compiler/rustc_hir_analysis/src/astconv/bounds.rs +++ b/compiler/rustc_hir_analysis/src/astconv/bounds.rs @@ -9,9 +9,7 @@ use rustc_span::{ErrorGuaranteed, Span}; use rustc_trait_selection::traits; use smallvec::SmallVec; -use crate::astconv::{ - AstConv, ConvertedBinding, ConvertedBindingKind, OnlySelfBounds, PredicateFilter, -}; +use crate::astconv::{AstConv, OnlySelfBounds, PredicateFilter}; use crate::bounds::Bounds; use crate::errors; @@ -238,7 +236,7 @@ impl<'tcx> dyn AstConv<'tcx> + '_ { &self, hir_ref_id: hir::HirId, trait_ref: ty::PolyTraitRef<'tcx>, - binding: &ConvertedBinding<'_, 'tcx>, + binding: &hir::TypeBinding<'tcx>, bounds: &mut Bounds<'tcx>, speculative: bool, 
dup_bindings: &mut FxIndexMap, @@ -263,21 +261,20 @@ impl<'tcx> dyn AstConv<'tcx> + '_ { let tcx = self.tcx(); - let assoc_kind = - if binding.gen_args.parenthesized == hir::GenericArgsParentheses::ReturnTypeNotation { - ty::AssocKind::Fn - } else if let ConvertedBindingKind::Equality(term) = binding.kind - && let ty::TermKind::Const(_) = term.node.unpack() - { - ty::AssocKind::Const - } else { - ty::AssocKind::Type - }; + let assoc_kind = if binding.gen_args.parenthesized + == hir::GenericArgsParentheses::ReturnTypeNotation + { + ty::AssocKind::Fn + } else if let hir::TypeBindingKind::Equality { term: hir::Term::Const(_) } = binding.kind { + ty::AssocKind::Const + } else { + ty::AssocKind::Type + }; let candidate = if self.trait_defines_associated_item_named( trait_ref.def_id(), assoc_kind, - binding.item_name, + binding.ident, ) { // Simple case: The assoc item is defined in the current trait. trait_ref @@ -289,14 +286,14 @@ impl<'tcx> dyn AstConv<'tcx> + '_ { trait_ref.skip_binder().print_only_trait_name(), None, assoc_kind, - binding.item_name, + binding.ident, path_span, - Some(&binding), + Some(binding), )? 
}; let (assoc_ident, def_scope) = - tcx.adjust_ident_and_get_scope(binding.item_name, candidate.def_id(), hir_ref_id); + tcx.adjust_ident_and_get_scope(binding.ident, candidate.def_id(), hir_ref_id); // We have already adjusted the item name above, so compare with `.normalize_to_macros_2_0()` // instead of calling `filter_by_name_and_kind` which would needlessly normalize the @@ -312,7 +309,7 @@ impl<'tcx> dyn AstConv<'tcx> + '_ { .dcx() .struct_span_err( binding.span, - format!("{} `{}` is private", assoc_item.kind, binding.item_name), + format!("{} `{}` is private", assoc_item.kind, binding.ident), ) .with_span_label(binding.span, format!("private {}", assoc_item.kind)) .emit(); @@ -327,7 +324,7 @@ impl<'tcx> dyn AstConv<'tcx> + '_ { tcx.dcx().emit_err(errors::ValueOfAssociatedStructAlreadySpecified { span: binding.span, prev_span: *prev_span, - item_name: binding.item_name, + item_name: binding.ident, def_path: tcx.def_path_str(assoc_item.container_id(tcx)), }); }) @@ -412,7 +409,7 @@ impl<'tcx> dyn AstConv<'tcx> + '_ { // parent arguments (the arguments of the trait) and the own arguments (the ones of // the associated item itself) and construct an alias type using them. candidate.map_bound(|trait_ref| { - let ident = Ident::new(assoc_item.name, binding.item_name.span); + let ident = Ident::new(assoc_item.name, binding.ident.span); let item_segment = hir::PathSegment { ident, hir_id: binding.hir_id, @@ -436,66 +433,67 @@ impl<'tcx> dyn AstConv<'tcx> + '_ { }) }; - if !speculative { - // Find any late-bound regions declared in `ty` that are not - // declared in the trait-ref or assoc_item. These are not well-formed. 
- // - // Example: - // - // for<'a> ::Item = &'a str // <-- 'a is bad - // for<'a> >::Output = &'a str // <-- 'a is ok - if let ConvertedBindingKind::Equality(term) = binding.kind { - let late_bound_in_projection_ty = - tcx.collect_constrained_late_bound_regions(&projection_ty); - let late_bound_in_term = - tcx.collect_referenced_late_bound_regions(&trait_ref.rebind(term.node)); - debug!(?late_bound_in_projection_ty); - debug!(?late_bound_in_term); - - // NOTE(associated_const_equality): This error should be impossible to trigger - // with associated const equality bounds. - // FIXME: point at the type params that don't have appropriate lifetimes: - // struct S1 Fn(&i32, &i32) -> &'a i32>(F); - // ---- ---- ^^^^^^^ - self.validate_late_bound_regions( - late_bound_in_projection_ty, - late_bound_in_term, - |br_name| { - struct_span_code_err!( - tcx.dcx(), - binding.span, - E0582, - "binding for associated type `{}` references {}, \ - which does not appear in the trait input types", - binding.item_name, - br_name - ) - }, - ); - } - } - match binding.kind { - ConvertedBindingKind::Equality(..) if let ty::AssocKind::Fn = assoc_kind => { + hir::TypeBindingKind::Equality { .. } if let ty::AssocKind::Fn = assoc_kind => { return Err(tcx.dcx().emit_err(crate::errors::ReturnTypeNotationEqualityBound { span: binding.span, })); } - ConvertedBindingKind::Equality(term) => { + hir::TypeBindingKind::Equality { term } => { + let term = match term { + hir::Term::Ty(ty) => self.ast_ty_to_ty(ty).into(), + hir::Term::Const(ct) => ty::Const::from_anon_const(tcx, ct.def_id).into(), + }; + + if !speculative { + // Find any late-bound regions declared in `ty` that are not + // declared in the trait-ref or assoc_item. These are not well-formed. 
+ // + // Example: + // + // for<'a> ::Item = &'a str // <-- 'a is bad + // for<'a> >::Output = &'a str // <-- 'a is ok + let late_bound_in_projection_ty = + tcx.collect_constrained_late_bound_regions(&projection_ty); + let late_bound_in_term = + tcx.collect_referenced_late_bound_regions(&trait_ref.rebind(term)); + debug!(?late_bound_in_projection_ty); + debug!(?late_bound_in_term); + + // FIXME: point at the type params that don't have appropriate lifetimes: + // struct S1 Fn(&i32, &i32) -> &'a i32>(F); + // ---- ---- ^^^^^^^ + // NOTE(associated_const_equality): This error should be impossible to trigger + // with associated const equality bounds. + self.validate_late_bound_regions( + late_bound_in_projection_ty, + late_bound_in_term, + |br_name| { + struct_span_code_err!( + tcx.dcx(), + binding.span, + E0582, + "binding for associated type `{}` references {}, \ + which does not appear in the trait input types", + binding.ident, + br_name + ) + }, + ); + } + // "Desugar" a constraint like `T: Iterator` this to // the "projection predicate" for: // // `::Item = u32` bounds.push_projection_bound( tcx, - projection_ty.map_bound(|projection_ty| ty::ProjectionPredicate { - projection_ty, - term: term.node, - }), + projection_ty + .map_bound(|projection_ty| ty::ProjectionPredicate { projection_ty, term }), binding.span, ); } - ConvertedBindingKind::Constraint(ast_bounds) => { + hir::TypeBindingKind::Constraint { bounds: ast_bounds } => { // "Desugar" a constraint like `T: Iterator` to // // `::Item: Debug` diff --git a/compiler/rustc_hir_analysis/src/astconv/errors.rs b/compiler/rustc_hir_analysis/src/astconv/errors.rs index f04114229bf9..ad34c31ef8fc 100644 --- a/compiler/rustc_hir_analysis/src/astconv/errors.rs +++ b/compiler/rustc_hir_analysis/src/astconv/errors.rs @@ -1,4 +1,4 @@ -use crate::astconv::{AstConv, ConvertedBindingKind}; +use crate::astconv::AstConv; use crate::errors::{ self, AssocTypeBindingNotAllowed, ManualImplementation, MissingTypeParams, 
ParenthesizedFnTraitExpansion, @@ -111,7 +111,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { assoc_kind: ty::AssocKind, assoc_name: Ident, span: Span, - binding: Option<&super::ConvertedBinding<'_, 'tcx>>, + binding: Option<&hir::TypeBinding<'tcx>>, ) -> ErrorGuaranteed where I: Iterator>, @@ -290,13 +290,13 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { assoc_kind: ty::AssocKind, ident: Ident, span: Span, - binding: Option<&super::ConvertedBinding<'_, 'tcx>>, + binding: Option<&hir::TypeBinding<'tcx>>, ) -> ErrorGuaranteed { let tcx = self.tcx(); let bound_on_assoc_const_label = if let ty::AssocKind::Const = assoc_item.kind && let Some(binding) = binding - && let ConvertedBindingKind::Constraint(_) = binding.kind + && let hir::TypeBindingKind::Constraint { .. } = binding.kind { let lo = if binding.gen_args.span_ext.is_dummy() { ident.span @@ -310,14 +310,14 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { // FIXME(associated_const_equality): This has quite a few false positives and negatives. let wrap_in_braces_sugg = if let Some(binding) = binding - && let ConvertedBindingKind::Equality(term) = binding.kind - && let ty::TermKind::Ty(ty) = term.node.unpack() + && let hir::TypeBindingKind::Equality { term: hir::Term::Ty(hir_ty) } = binding.kind + && let ty = self.ast_ty_to_ty(hir_ty) && (ty.is_enum() || ty.references_error()) && tcx.features().associated_const_equality { Some(errors::AssocKindMismatchWrapInBracesSugg { - lo: term.span.shrink_to_lo(), - hi: term.span.shrink_to_hi(), + lo: hir_ty.span.shrink_to_lo(), + hi: hir_ty.span.shrink_to_hi(), }) } else { None @@ -326,9 +326,13 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { // For equality bounds, we want to blame the term (RHS) instead of the item (LHS) since // one can argue that that's more “intuitive” to the user. 
let (span, expected_because_label, expected, got) = if let Some(binding) = binding - && let ConvertedBindingKind::Equality(term) = binding.kind + && let hir::TypeBindingKind::Equality { term } = binding.kind { - (term.span, Some(ident.span), assoc_item.kind, assoc_kind) + let span = match term { + hir::Term::Ty(ty) => ty.span, + hir::Term::Const(ct) => tcx.def_span(ct.def_id), + }; + (span, Some(ident.span), assoc_item.kind, assoc_kind) } else { (ident.span, None, assoc_kind, assoc_item.kind) }; diff --git a/compiler/rustc_hir_analysis/src/astconv/mod.rs b/compiler/rustc_hir_analysis/src/astconv/mod.rs index 94c49453cdcf..3ccf78567edd 100644 --- a/compiler/rustc_hir_analysis/src/astconv/mod.rs +++ b/compiler/rustc_hir_analysis/src/astconv/mod.rs @@ -35,7 +35,6 @@ use rustc_middle::ty::{ }; use rustc_session::lint::builtin::AMBIGUOUS_ASSOCIATED_ITEMS; use rustc_span::edit_distance::find_best_match_for_name; -use rustc_span::source_map::{respan, Spanned}; use rustc_span::symbol::{kw, Ident, Symbol}; use rustc_span::{sym, BytePos, Span, DUMMY_SP}; use rustc_target::spec::abi; @@ -151,21 +150,6 @@ pub trait AstConv<'tcx> { fn infcx(&self) -> Option<&InferCtxt<'tcx>>; } -#[derive(Debug)] -struct ConvertedBinding<'a, 'tcx> { - hir_id: hir::HirId, - item_name: Ident, - kind: ConvertedBindingKind<'a, 'tcx>, - gen_args: &'tcx GenericArgs<'tcx>, - span: Span, -} - -#[derive(Debug)] -enum ConvertedBindingKind<'a, 'tcx> { - Equality(Spanned>), - Constraint(&'a [hir::GenericBound<'tcx>]), -} - /// New-typed boolean indicating whether explicit late-bound lifetimes /// are present in a set of generic arguments. /// @@ -316,7 +300,9 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { /// Given the type/lifetime/const arguments provided to some path (along with /// an implicit `Self`, if this is a trait reference), returns the complete /// set of generic arguments. This may involve applying defaulted type parameters. 
- /// Constraints on associated types are created from `create_assoc_bindings_for_generic_args`. + /// + /// Constraints on associated types are not converted here but + /// separately in `add_predicates_for_ast_type_binding`. /// /// Example: /// @@ -329,8 +315,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { /// 2. The path in question is the path to the trait `std::ops::Index`, /// which will have been resolved to a `def_id` /// 3. The `generic_args` contains info on the `<...>` contents. The `usize` type - /// parameters are returned in the `GenericArgsRef`, the associated type bindings like - /// `Output = u32` are returned from `create_assoc_bindings_for_generic_args`. + /// parameters are returned in the `GenericArgsRef` + /// 4. Associated type bindings like `Output = u32` are contained in `generic_args.bindings`. /// /// Note that the type listing given here is *exactly* what the user provided. /// @@ -591,52 +577,6 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { (args, arg_count) } - fn create_assoc_bindings_for_generic_args<'a>( - &self, - generic_args: &'a hir::GenericArgs<'tcx>, - ) -> Vec> { - // Convert associated-type bindings or constraints into a separate vector. - // Example: Given this: - // - // T: Iterator - // - // The `T` is passed in as a self-type; the `Item = u32` is - // not a "type parameter" of the `Iterator` trait, but rather - // a restriction on `::Item`, so it is passed - // back separately. 
- let assoc_bindings = generic_args - .bindings - .iter() - .map(|binding| { - let kind = match &binding.kind { - hir::TypeBindingKind::Equality { term } => match term { - hir::Term::Ty(ty) => ConvertedBindingKind::Equality(respan( - ty.span, - self.ast_ty_to_ty(ty).into(), - )), - hir::Term::Const(c) => { - let span = self.tcx().def_span(c.def_id); - let c = Const::from_anon_const(self.tcx(), c.def_id); - ConvertedBindingKind::Equality(respan(span, c.into())) - } - }, - hir::TypeBindingKind::Constraint { bounds } => { - ConvertedBindingKind::Constraint(bounds) - } - }; - ConvertedBinding { - hir_id: binding.hir_id, - item_name: binding.ident, - kind, - gen_args: binding.gen_args, - span: binding.span, - } - }) - .collect(); - - assoc_bindings - } - pub fn create_args_for_associated_item( &self, span: Span, @@ -742,18 +682,16 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { let bound_vars = tcx.late_bound_vars(trait_ref.hir_ref_id); debug!(?bound_vars); - let assoc_bindings = self.create_assoc_bindings_for_generic_args(args); - let poly_trait_ref = ty::Binder::bind_with_vars( ty::TraitRef::new(tcx, trait_def_id, generic_args), bound_vars, ); - debug!(?poly_trait_ref, ?assoc_bindings); + debug!(?poly_trait_ref); bounds.push_trait_bound(tcx, poly_trait_ref, span, polarity); let mut dup_bindings = FxIndexMap::default(); - for binding in &assoc_bindings { + for binding in args.bindings { // Don't register additional associated type bounds for negative bounds, // since we should have emitten an error for them earlier, and they will // not be well-formed! @@ -1029,7 +967,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { assoc_kind: ty::AssocKind, assoc_name: Ident, span: Span, - binding: Option<&ConvertedBinding<'_, 'tcx>>, + binding: Option<&hir::TypeBinding<'tcx>>, ) -> Result, ErrorGuaranteed> where I: Iterator>, @@ -1069,7 +1007,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { // Provide a more specific error code index entry for equality bindings. 
err.code( if let Some(binding) = binding - && let ConvertedBindingKind::Equality(_) = binding.kind + && let hir::TypeBindingKind::Equality { .. } = binding.kind { E0222 } else { @@ -1094,16 +1032,21 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { ); if let Some(binding) = binding { match binding.kind { - ConvertedBindingKind::Equality(term) => { + hir::TypeBindingKind::Equality { term } => { + let term: ty::Term<'_> = match term { + hir::Term::Ty(ty) => self.ast_ty_to_ty(ty).into(), + hir::Term::Const(ct) => { + ty::Const::from_anon_const(tcx, ct.def_id).into() + } + }; // FIXME(#97583): This isn't syntactically well-formed! where_bounds.push(format!( " T: {trait}::{assoc_name} = {term}", trait = bound.print_only_trait_path(), - term = term.node, )); } // FIXME: Provide a suggestion. - ConvertedBindingKind::Constraint(_bounds) => {} + hir::TypeBindingKind::Constraint { bounds: _ } => {} } } else { err.span_suggestion_verbose( From 8677d64c724e30453e321b8022fb6495dc0f4129 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Le=C3=B3n=20Orell=20Valerian=20Liehr?= Date: Thu, 8 Feb 2024 12:54:07 +0100 Subject: [PATCH 098/127] Support weak alias types as self type of inherent impls --- .../src/coherence/inherent_impls.rs | 31 ++++++++++++- .../inherent-impls-conflicting.rs | 10 +++++ .../inherent-impls-conflicting.stderr | 11 +++++ .../inherent-impls-not-nominal.rs | 12 ++++++ .../inherent-impls-not-nominal.stderr | 11 +++++ .../inherent-impls-overflow.classic.stderr | 43 +++++++++++++++++++ .../inherent-impls-overflow.next.stderr | 38 ++++++++++++++++ .../inherent-impls-overflow.rs | 20 +++++++++ tests/ui/lazy-type-alias/inherent-impls.rs | 18 ++++++++ 9 files changed, 193 insertions(+), 1 deletion(-) create mode 100644 tests/ui/lazy-type-alias/inherent-impls-conflicting.rs create mode 100644 tests/ui/lazy-type-alias/inherent-impls-conflicting.stderr create mode 100644 tests/ui/lazy-type-alias/inherent-impls-not-nominal.rs create mode 100644 
tests/ui/lazy-type-alias/inherent-impls-not-nominal.stderr create mode 100644 tests/ui/lazy-type-alias/inherent-impls-overflow.classic.stderr create mode 100644 tests/ui/lazy-type-alias/inherent-impls-overflow.next.stderr create mode 100644 tests/ui/lazy-type-alias/inherent-impls-overflow.rs create mode 100644 tests/ui/lazy-type-alias/inherent-impls.rs diff --git a/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs b/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs index 0df5a57bc2c4..4823472cf961 100644 --- a/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs +++ b/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs @@ -144,6 +144,7 @@ impl<'tcx> InherentCollect<'tcx> { let id = id.owner_id.def_id; let item_span = self.tcx.def_span(id); let self_ty = self.tcx.type_of(id).instantiate_identity(); + let self_ty = peel_off_weak_aliases(self.tcx, self_ty); match *self_ty.kind() { ty::Adt(def, _) => self.check_def_id(id, self_ty, def.did()), ty::Foreign(did) => self.check_def_id(id, self_ty, did), @@ -166,7 +167,7 @@ impl<'tcx> InherentCollect<'tcx> { | ty::Never | ty::FnPtr(_) | ty::Tuple(..) => self.check_primitive_impl(id, self_ty), - ty::Alias(..) | ty::Param(_) => { + ty::Alias(ty::Projection | ty::Inherent | ty::Opaque, _) | ty::Param(_) => { Err(self.tcx.dcx().emit_err(errors::InherentNominal { span: item_span })) } ty::FnDef(..) @@ -174,6 +175,7 @@ impl<'tcx> InherentCollect<'tcx> { | ty::CoroutineClosure(..) | ty::Coroutine(..) | ty::CoroutineWitness(..) + | ty::Alias(ty::Weak, _) | ty::Bound(..) | ty::Placeholder(_) | ty::Infer(_) => { @@ -184,3 +186,30 @@ impl<'tcx> InherentCollect<'tcx> { } } } + +/// Peel off all weak alias types in this type until there are none left. +/// +///
+/// +/// This assumes that `ty` gets normalized later and that any overflows occurring +/// during said normalization get reported. +/// +///
+fn peel_off_weak_aliases<'tcx>(tcx: TyCtxt<'tcx>, mut ty: Ty<'tcx>) -> Ty<'tcx> { + let ty::Alias(ty::Weak, _) = ty.kind() else { return ty }; + + let limit = tcx.recursion_limit(); + let mut depth = 0; + + while let ty::Alias(ty::Weak, alias) = ty.kind() { + if !limit.value_within_limit(depth) { + let guar = tcx.dcx().delayed_bug("overflow expanding weak alias type"); + return Ty::new_error(tcx, guar); + } + + ty = tcx.type_of(alias.def_id).instantiate(tcx, alias.args); + depth += 1; + } + + ty +} diff --git a/tests/ui/lazy-type-alias/inherent-impls-conflicting.rs b/tests/ui/lazy-type-alias/inherent-impls-conflicting.rs new file mode 100644 index 000000000000..2adb04839ae2 --- /dev/null +++ b/tests/ui/lazy-type-alias/inherent-impls-conflicting.rs @@ -0,0 +1,10 @@ +#![feature(lazy_type_alias)] +#![allow(incomplete_features)] + +type Alias = Local; +struct Local; + +impl Alias { fn method() {} } //~ ERROR duplicate definitions with name `method` +impl Local { fn method() {} } + +fn main() {} diff --git a/tests/ui/lazy-type-alias/inherent-impls-conflicting.stderr b/tests/ui/lazy-type-alias/inherent-impls-conflicting.stderr new file mode 100644 index 000000000000..3f8dcef857fd --- /dev/null +++ b/tests/ui/lazy-type-alias/inherent-impls-conflicting.stderr @@ -0,0 +1,11 @@ +error[E0592]: duplicate definitions with name `method` + --> $DIR/inherent-impls-conflicting.rs:7:14 + | +LL | impl Alias { fn method() {} } + | ^^^^^^^^^^^ duplicate definitions for `method` +LL | impl Local { fn method() {} } + | ----------- other definition for `method` + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0592`. 
diff --git a/tests/ui/lazy-type-alias/inherent-impls-not-nominal.rs b/tests/ui/lazy-type-alias/inherent-impls-not-nominal.rs new file mode 100644 index 000000000000..0ec23bb7fb75 --- /dev/null +++ b/tests/ui/lazy-type-alias/inherent-impls-not-nominal.rs @@ -0,0 +1,12 @@ +#![feature(lazy_type_alias)] +#![allow(incomplete_features)] + +type Alias = <() as Trait>::Out; + +trait Trait { type Out; } +impl Trait for () { type Out = Local; } +struct Local; + +impl Alias {} //~ ERROR no nominal type found for inherent implementation + +fn main() {} diff --git a/tests/ui/lazy-type-alias/inherent-impls-not-nominal.stderr b/tests/ui/lazy-type-alias/inherent-impls-not-nominal.stderr new file mode 100644 index 000000000000..2936e70f5b4d --- /dev/null +++ b/tests/ui/lazy-type-alias/inherent-impls-not-nominal.stderr @@ -0,0 +1,11 @@ +error[E0118]: no nominal type found for inherent implementation + --> $DIR/inherent-impls-not-nominal.rs:10:1 + | +LL | impl Alias {} + | ^^^^^^^^^^ impl requires a nominal type + | + = note: either implement a trait on it or create a newtype to wrap it instead + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0118`. 
diff --git a/tests/ui/lazy-type-alias/inherent-impls-overflow.classic.stderr b/tests/ui/lazy-type-alias/inherent-impls-overflow.classic.stderr new file mode 100644 index 000000000000..1cace470627e --- /dev/null +++ b/tests/ui/lazy-type-alias/inherent-impls-overflow.classic.stderr @@ -0,0 +1,43 @@ +error[E0275]: overflow evaluating the requirement `Loop` + --> $DIR/inherent-impls-overflow.rs:7:13 + | +LL | type Loop = Loop; + | ^^^^ + | + = note: in case this is a recursive type alias, consider using a struct, enum, or union instead + +error[E0275]: overflow evaluating the requirement `Loop` + --> $DIR/inherent-impls-overflow.rs:9:1 + | +LL | impl Loop {} + | ^^^^^^^^^^^^ + | + = note: in case this is a recursive type alias, consider using a struct, enum, or union instead + +error[E0275]: overflow evaluating the requirement `Poly0<((((((...,),),),),),)>` + --> $DIR/inherent-impls-overflow.rs:11:17 + | +LL | type Poly0 = Poly1<(T,)>; + | ^^^^^^^^^^^ + | + = note: in case this is a recursive type alias, consider using a struct, enum, or union instead + +error[E0275]: overflow evaluating the requirement `Poly1<((((((...,),),),),),)>` + --> $DIR/inherent-impls-overflow.rs:14:17 + | +LL | type Poly1 = Poly0<(T,)>; + | ^^^^^^^^^^^ + | + = note: in case this is a recursive type alias, consider using a struct, enum, or union instead + +error[E0275]: overflow evaluating the requirement `Poly1<((((((...,),),),),),)>` + --> $DIR/inherent-impls-overflow.rs:18:1 + | +LL | impl Poly0<()> {} + | ^^^^^^^^^^^^^^^^^ + | + = note: in case this is a recursive type alias, consider using a struct, enum, or union instead + +error: aborting due to 5 previous errors + +For more information about this error, try `rustc --explain E0275`. 
diff --git a/tests/ui/lazy-type-alias/inherent-impls-overflow.next.stderr b/tests/ui/lazy-type-alias/inherent-impls-overflow.next.stderr new file mode 100644 index 000000000000..1a6259b5cf90 --- /dev/null +++ b/tests/ui/lazy-type-alias/inherent-impls-overflow.next.stderr @@ -0,0 +1,38 @@ +error[E0275]: overflow evaluating the requirement `Loop == _` + --> $DIR/inherent-impls-overflow.rs:9:6 + | +LL | impl Loop {} + | ^^^^ + | + = help: consider increasing the recursion limit by adding a `#![recursion_limit = "256"]` attribute to your crate (`inherent_impls_overflow`) + +error[E0392]: type parameter `T` is never used + --> $DIR/inherent-impls-overflow.rs:11:12 + | +LL | type Poly0 = Poly1<(T,)>; + | ^ unused type parameter + | + = help: consider removing `T` or referring to it in the body of the type alias + = help: if you intended `T` to be a const parameter, use `const T: /* Type */` instead + +error[E0392]: type parameter `T` is never used + --> $DIR/inherent-impls-overflow.rs:14:12 + | +LL | type Poly1 = Poly0<(T,)>; + | ^ unused type parameter + | + = help: consider removing `T` or referring to it in the body of the type alias + = help: if you intended `T` to be a const parameter, use `const T: /* Type */` instead + +error[E0275]: overflow evaluating the requirement `Poly0<()> == _` + --> $DIR/inherent-impls-overflow.rs:18:6 + | +LL | impl Poly0<()> {} + | ^^^^^^^^^ + | + = help: consider increasing the recursion limit by adding a `#![recursion_limit = "256"]` attribute to your crate (`inherent_impls_overflow`) + +error: aborting due to 4 previous errors + +Some errors have detailed explanations: E0275, E0392. +For more information about an error, try `rustc --explain E0275`. 
diff --git a/tests/ui/lazy-type-alias/inherent-impls-overflow.rs b/tests/ui/lazy-type-alias/inherent-impls-overflow.rs new file mode 100644 index 000000000000..b260dedeb074 --- /dev/null +++ b/tests/ui/lazy-type-alias/inherent-impls-overflow.rs @@ -0,0 +1,20 @@ +//@ revisions: classic next +//@[next] compile-flags: -Znext-solver + +#![feature(lazy_type_alias)] +#![allow(incomplete_features)] + +type Loop = Loop; //[classic]~ ERROR overflow evaluating the requirement + +impl Loop {} //~ ERROR overflow evaluating the requirement + +type Poly0 = Poly1<(T,)>; +//[classic]~^ ERROR overflow evaluating the requirement +//[next]~^^ ERROR type parameter `T` is never used +type Poly1 = Poly0<(T,)>; +//[classic]~^ ERROR overflow evaluating the requirement +//[next]~^^ ERROR type parameter `T` is never used + +impl Poly0<()> {} //~ ERROR overflow evaluating the requirement + +fn main() {} diff --git a/tests/ui/lazy-type-alias/inherent-impls.rs b/tests/ui/lazy-type-alias/inherent-impls.rs new file mode 100644 index 000000000000..835b70bf67a2 --- /dev/null +++ b/tests/ui/lazy-type-alias/inherent-impls.rs @@ -0,0 +1,18 @@ +//@ check-pass + +#![feature(lazy_type_alias)] +#![allow(incomplete_features)] + +type Alias = Local; +struct Local; + +impl Alias { + fn method(self) {} +} + +fn main() { + let _ = Local.method(); + let _ = Local::method; + let _ = Alias {}.method(); + let _ = Alias::method; +} From fde4556785ac5e07302a48fcea7ead71e711d5f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Le=C3=B3n=20Orell=20Valerian=20Liehr?= Date: Thu, 8 Feb 2024 14:08:58 +0100 Subject: [PATCH 099/127] Properly check constrainedness of gen params in the presence of weak alias types --- .../src/collect/predicates_of.rs | 2 +- .../src/constrained_generic_params.rs | 43 ++++++++++++++----- .../rustc_hir_analysis/src/impl_wf_check.rs | 4 +- .../src/impl_wf_check/min_specialization.rs | 8 ++-- .../ui/lazy-type-alias/constrained-params.rs | 27 ++++++++++++ .../unconstrained-param-due-to-overflow.rs | 8 
++++ ...unconstrained-param-due-to-overflow.stderr | 9 ++++ .../lazy-type-alias/unconstrained-params.rs | 12 ++++++ .../unconstrained-params.stderr | 9 ++++ 9 files changed, 105 insertions(+), 17 deletions(-) create mode 100644 tests/ui/lazy-type-alias/constrained-params.rs create mode 100644 tests/ui/lazy-type-alias/unconstrained-param-due-to-overflow.rs create mode 100644 tests/ui/lazy-type-alias/unconstrained-param-due-to-overflow.stderr create mode 100644 tests/ui/lazy-type-alias/unconstrained-params.rs create mode 100644 tests/ui/lazy-type-alias/unconstrained-params.stderr diff --git a/compiler/rustc_hir_analysis/src/collect/predicates_of.rs b/compiler/rustc_hir_analysis/src/collect/predicates_of.rs index 05755f98f202..351ac2eb7702 100644 --- a/compiler/rustc_hir_analysis/src/collect/predicates_of.rs +++ b/compiler/rustc_hir_analysis/src/collect/predicates_of.rs @@ -307,7 +307,7 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen tcx, &mut predicates, trait_ref, - &mut cgp::parameters_for_impl(self_ty, trait_ref), + &mut cgp::parameters_for_impl(tcx, self_ty, trait_ref), ); } diff --git a/compiler/rustc_hir_analysis/src/constrained_generic_params.rs b/compiler/rustc_hir_analysis/src/constrained_generic_params.rs index 05efad3ccb31..4ce43bb48871 100644 --- a/compiler/rustc_hir_analysis/src/constrained_generic_params.rs +++ b/compiler/rustc_hir_analysis/src/constrained_generic_params.rs @@ -1,4 +1,5 @@ use rustc_data_structures::fx::FxHashSet; +use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_middle::ty::visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor}; use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_span::Span; @@ -27,12 +28,13 @@ impl From for Parameter { /// Returns the set of parameters constrained by the impl header. 
pub fn parameters_for_impl<'tcx>( + tcx: TyCtxt<'tcx>, impl_self_ty: Ty<'tcx>, impl_trait_ref: Option>, ) -> FxHashSet { let vec = match impl_trait_ref { - Some(tr) => parameters_for(&tr, false), - None => parameters_for(&impl_self_ty, false), + Some(tr) => parameters_for(tcx, &tr, false), + None => parameters_for(tcx, &impl_self_ty, false), }; vec.into_iter().collect() } @@ -43,26 +45,47 @@ pub fn parameters_for_impl<'tcx>( /// of parameters whose values are needed in order to constrain `ty` - these /// differ, with the latter being a superset, in the presence of projections. pub fn parameters_for<'tcx>( + tcx: TyCtxt<'tcx>, t: &impl TypeVisitable>, include_nonconstraining: bool, ) -> Vec { - let mut collector = ParameterCollector { parameters: vec![], include_nonconstraining }; + let mut collector = + ParameterCollector { tcx, parameters: vec![], include_nonconstraining, depth: 0 }; t.visit_with(&mut collector); collector.parameters } -struct ParameterCollector { +struct ParameterCollector<'tcx> { + tcx: TyCtxt<'tcx>, parameters: Vec, include_nonconstraining: bool, + depth: usize, } -impl<'tcx> TypeVisitor> for ParameterCollector { +impl<'tcx> TypeVisitor> for ParameterCollector<'tcx> { fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow { match *t.kind() { - ty::Alias(..) if !self.include_nonconstraining => { - // projections are not injective + ty::Alias(ty::Projection | ty::Inherent | ty::Opaque, _) + if !self.include_nonconstraining => + { + // Projections are not injective in general. return ControlFlow::Continue(()); } + ty::Alias(ty::Weak, alias) if !self.include_nonconstraining => { + if !self.tcx.recursion_limit().value_within_limit(self.depth) { + // Other constituent types may still constrain some generic params, consider + // ` (Overflow, T)` for example. Therefore we want to continue instead of + // breaking. Only affects diagnostics. 
+ return ControlFlow::Continue(()); + } + self.depth += 1; + return ensure_sufficient_stack(|| { + self.tcx + .type_of(alias.def_id) + .instantiate(self.tcx, alias.args) + .visit_with(self) + }); + } ty::Param(data) => { self.parameters.push(Parameter::from(data)); } @@ -82,7 +105,7 @@ impl<'tcx> TypeVisitor> for ParameterCollector { fn visit_const(&mut self, c: ty::Const<'tcx>) -> ControlFlow { match c.kind() { ty::ConstKind::Unevaluated(..) if !self.include_nonconstraining => { - // Constant expressions are not injective + // Constant expressions are not injective in general. return c.ty().visit_with(self); } ty::ConstKind::Param(data) => { @@ -201,12 +224,12 @@ pub fn setup_constraining_predicates<'tcx>( // `<::Baz as Iterator>::Output = ::Output` // Then the projection only applies if `T` is known, but it still // does not determine `U`. - let inputs = parameters_for(&projection.projection_ty, true); + let inputs = parameters_for(tcx, &projection.projection_ty, true); let relies_only_on_inputs = inputs.iter().all(|p| input_parameters.contains(p)); if !relies_only_on_inputs { continue; } - input_parameters.extend(parameters_for(&projection.term, false)); + input_parameters.extend(parameters_for(tcx, &projection.term, false)); } else { continue; } diff --git a/compiler/rustc_hir_analysis/src/impl_wf_check.rs b/compiler/rustc_hir_analysis/src/impl_wf_check.rs index c072891e2952..b4cec1d9882b 100644 --- a/compiler/rustc_hir_analysis/src/impl_wf_check.rs +++ b/compiler/rustc_hir_analysis/src/impl_wf_check.rs @@ -94,7 +94,7 @@ fn enforce_impl_params_are_constrained( let impl_predicates = tcx.predicates_of(impl_def_id); let impl_trait_ref = tcx.impl_trait_ref(impl_def_id).map(ty::EarlyBinder::instantiate_identity); - let mut input_parameters = cgp::parameters_for_impl(impl_self_ty, impl_trait_ref); + let mut input_parameters = cgp::parameters_for_impl(tcx, impl_self_ty, impl_trait_ref); cgp::identify_constrained_generic_params( tcx, impl_predicates, @@ -111,7 +111,7 
@@ fn enforce_impl_params_are_constrained( match item.kind { ty::AssocKind::Type => { if item.defaultness(tcx).has_value() { - cgp::parameters_for(&tcx.type_of(def_id).instantiate_identity(), true) + cgp::parameters_for(tcx, &tcx.type_of(def_id).instantiate_identity(), true) } else { vec![] } diff --git a/compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs b/compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs index c8dedb0f3719..bd4fce81377d 100644 --- a/compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs +++ b/compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs @@ -266,15 +266,15 @@ fn unconstrained_parent_impl_args<'tcx>( continue; } - unconstrained_parameters.extend(cgp::parameters_for(&projection_ty, true)); + unconstrained_parameters.extend(cgp::parameters_for(tcx, &projection_ty, true)); - for param in cgp::parameters_for(&projected_ty, false) { + for param in cgp::parameters_for(tcx, &projected_ty, false) { if !unconstrained_parameters.contains(¶m) { constrained_params.insert(param.0); } } - unconstrained_parameters.extend(cgp::parameters_for(&projected_ty, true)); + unconstrained_parameters.extend(cgp::parameters_for(tcx, &projected_ty, true)); } } @@ -312,7 +312,7 @@ fn check_duplicate_params<'tcx>( parent_args: &Vec>, span: Span, ) -> Result<(), ErrorGuaranteed> { - let mut base_params = cgp::parameters_for(parent_args, true); + let mut base_params = cgp::parameters_for(tcx, parent_args, true); base_params.sort_by_key(|param| param.0); if let (_, [duplicate, ..]) = base_params.partition_dedup() { let param = impl1_args[duplicate.0 as usize]; diff --git a/tests/ui/lazy-type-alias/constrained-params.rs b/tests/ui/lazy-type-alias/constrained-params.rs new file mode 100644 index 000000000000..59693dd5461e --- /dev/null +++ b/tests/ui/lazy-type-alias/constrained-params.rs @@ -0,0 +1,27 @@ +//@ check-pass + +#![feature(lazy_type_alias)] +#![allow(incomplete_features)] + +type Injective = Local; 
+struct Local(T); + +impl Injective { + fn take(_: T) {} +} + +trait Trait { + type Out; + fn produce() -> Self::Out; +} + +impl Trait for Injective { + type Out = T; + fn produce() -> Self::Out { T::default() } +} + +fn main() { + Injective::take(0); + let _: String = Injective::produce(); + let _: bool = Local::produce(); +} diff --git a/tests/ui/lazy-type-alias/unconstrained-param-due-to-overflow.rs b/tests/ui/lazy-type-alias/unconstrained-param-due-to-overflow.rs new file mode 100644 index 000000000000..eceefa719ec8 --- /dev/null +++ b/tests/ui/lazy-type-alias/unconstrained-param-due-to-overflow.rs @@ -0,0 +1,8 @@ +#![feature(lazy_type_alias)] +#![allow(incomplete_features)] + +impl Loop {} //~ ERROR the type parameter `T` is not constrained + +type Loop = Loop; + +fn main() {} diff --git a/tests/ui/lazy-type-alias/unconstrained-param-due-to-overflow.stderr b/tests/ui/lazy-type-alias/unconstrained-param-due-to-overflow.stderr new file mode 100644 index 000000000000..9af6f5dda0b3 --- /dev/null +++ b/tests/ui/lazy-type-alias/unconstrained-param-due-to-overflow.stderr @@ -0,0 +1,9 @@ +error[E0207]: the type parameter `T` is not constrained by the impl trait, self type, or predicates + --> $DIR/unconstrained-param-due-to-overflow.rs:4:6 + | +LL | impl Loop {} + | ^ unconstrained type parameter + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0207`. 
diff --git a/tests/ui/lazy-type-alias/unconstrained-params.rs b/tests/ui/lazy-type-alias/unconstrained-params.rs new file mode 100644 index 000000000000..d58938b30701 --- /dev/null +++ b/tests/ui/lazy-type-alias/unconstrained-params.rs @@ -0,0 +1,12 @@ +#![feature(lazy_type_alias)] +#![allow(incomplete_features)] + +impl NotInjective {} //~ ERROR the type parameter `T` is not constrained + +type NotInjective = Local<::Out>; +struct Local(T); + +trait Discard { type Out; } +impl Discard for T { type Out = (); } + +fn main() {} diff --git a/tests/ui/lazy-type-alias/unconstrained-params.stderr b/tests/ui/lazy-type-alias/unconstrained-params.stderr new file mode 100644 index 000000000000..3c52a06c319d --- /dev/null +++ b/tests/ui/lazy-type-alias/unconstrained-params.stderr @@ -0,0 +1,9 @@ +error[E0207]: the type parameter `T` is not constrained by the impl trait, self type, or predicates + --> $DIR/unconstrained-params.rs:4:6 + | +LL | impl NotInjective {} + | ^ unconstrained type parameter + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0207`. From 6f1383cf5b036236dc7c72ff7bb68401f5e4169a Mon Sep 17 00:00:00 2001 From: hi-rustin Date: Sat, 17 Feb 2024 16:11:32 +0800 Subject: [PATCH 100/127] Remove unnecessary unit binding Signed-off-by: hi-rustin --- library/alloc/src/raw_vec.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs index 49f5f53f9b30..dd8d6f6c7e63 100644 --- a/library/alloc/src/raw_vec.rs +++ b/library/alloc/src/raw_vec.rs @@ -261,7 +261,7 @@ impl RawVec { // and could hypothetically handle differences between stride and size, but this memory // has already been allocated so we know it can't overflow and currently rust does not // support such types. So we can do better by skipping some checks and avoid an unwrap. 
- let _: () = const { assert!(mem::size_of::() % mem::align_of::() == 0) }; + const { assert!(mem::size_of::() % mem::align_of::() == 0) }; unsafe { let align = mem::align_of::(); let size = mem::size_of::().unchecked_mul(self.cap.0); @@ -465,7 +465,7 @@ impl RawVec { let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) }; // See current_memory() why this assert is here - let _: () = const { assert!(mem::size_of::() % mem::align_of::() == 0) }; + const { assert!(mem::size_of::() % mem::align_of::() == 0) }; // If shrinking to 0, deallocate the buffer. We don't reach this point // for the T::IS_ZST case since current_memory() will have returned From e3859d206ce49998f0f33e0c7fceca261aeaf7e8 Mon Sep 17 00:00:00 2001 From: Tshepang Mbambo Date: Sat, 17 Feb 2024 14:15:22 +0200 Subject: [PATCH 101/127] add test to guard against inaccurate diagnostic Also replaces an incomplete test --- tests/ui/macros/invalid-fragment-specifier.rs | 10 ++++++++++ .../macros/invalid-fragment-specifier.stderr | 18 ++++++++++++++++++ tests/ui/macros/macro-invalid-fragment-spec.rs | 8 -------- .../macros/macro-invalid-fragment-spec.stderr | 10 ---------- 4 files changed, 28 insertions(+), 18 deletions(-) create mode 100644 tests/ui/macros/invalid-fragment-specifier.rs create mode 100644 tests/ui/macros/invalid-fragment-specifier.stderr delete mode 100644 tests/ui/macros/macro-invalid-fragment-spec.rs delete mode 100644 tests/ui/macros/macro-invalid-fragment-spec.stderr diff --git a/tests/ui/macros/invalid-fragment-specifier.rs b/tests/ui/macros/invalid-fragment-specifier.rs new file mode 100644 index 000000000000..1daf0a95434f --- /dev/null +++ b/tests/ui/macros/invalid-fragment-specifier.rs @@ -0,0 +1,10 @@ +macro_rules! test { + ($wrong:id) => {}; +} //~^ ERROR: invalid fragment specifier `id` + +// guard against breaking raw identifier diagnostic +macro_rules! 
test_raw_identifer { + ($wrong:r#if) => {}; +} //~^ ERROR: invalid fragment specifier `r#if` + +fn main() {} diff --git a/tests/ui/macros/invalid-fragment-specifier.stderr b/tests/ui/macros/invalid-fragment-specifier.stderr new file mode 100644 index 000000000000..7516dbc3a087 --- /dev/null +++ b/tests/ui/macros/invalid-fragment-specifier.stderr @@ -0,0 +1,18 @@ +error: invalid fragment specifier `id` + --> $DIR/invalid-fragment-specifier.rs:2:6 + | +LL | ($wrong:id) => {}; + | ^^^^^^^^^ + | + = help: valid fragment specifiers are `ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, `literal`, `path`, `meta`, `tt`, `item` and `vis` + +error: invalid fragment specifier `r#if` + --> $DIR/invalid-fragment-specifier.rs:7:6 + | +LL | ($wrong:r#if) => {}; + | ^^^^^^^^^^^ + | + = help: valid fragment specifiers are `ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, `literal`, `path`, `meta`, `tt`, `item` and `vis` + +error: aborting due to 2 previous errors + diff --git a/tests/ui/macros/macro-invalid-fragment-spec.rs b/tests/ui/macros/macro-invalid-fragment-spec.rs deleted file mode 100644 index dc4d75096af8..000000000000 --- a/tests/ui/macros/macro-invalid-fragment-spec.rs +++ /dev/null @@ -1,8 +0,0 @@ -macro_rules! 
foo( - ($x:foo) => () - //~^ ERROR invalid fragment specifier -); - -fn main() { - foo!(foo); -} diff --git a/tests/ui/macros/macro-invalid-fragment-spec.stderr b/tests/ui/macros/macro-invalid-fragment-spec.stderr deleted file mode 100644 index 919111ede51d..000000000000 --- a/tests/ui/macros/macro-invalid-fragment-spec.stderr +++ /dev/null @@ -1,10 +0,0 @@ -error: invalid fragment specifier `foo` - --> $DIR/macro-invalid-fragment-spec.rs:2:6 - | -LL | ($x:foo) => () - | ^^^^^^ - | - = help: valid fragment specifiers are `ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, `literal`, `path`, `meta`, `tt`, `item` and `vis` - -error: aborting due to 1 previous error - From 62b789fba444476eeddb9ae575d69d66a53a23c6 Mon Sep 17 00:00:00 2001 From: clubby789 Date: Fri, 16 Feb 2024 18:48:09 +0000 Subject: [PATCH 102/127] Add more checks for `unnamed_field` during HIR analysis --- compiler/rustc_hir/src/hir.rs | 6 ++ compiler/rustc_hir_analysis/messages.ftl | 2 + .../rustc_hir_analysis/src/check/check.rs | 23 ++++--- compiler/rustc_hir_analysis/src/collect.rs | 10 ++- compiler/rustc_hir_analysis/src/errors.rs | 7 +++ .../ui/union/unnamed-fields/auxiliary/dep.rs | 18 ++++++ .../union/unnamed-fields/restrict_type_hir.rs | 44 +++++++++++++ .../unnamed-fields/restrict_type_hir.stderr | 62 +++++++++++++++++++ 8 files changed, 161 insertions(+), 11 deletions(-) create mode 100644 tests/ui/union/unnamed-fields/auxiliary/dep.rs create mode 100644 tests/ui/union/unnamed-fields/restrict_type_hir.rs create mode 100644 tests/ui/union/unnamed-fields/restrict_type_hir.stderr diff --git a/compiler/rustc_hir/src/hir.rs b/compiler/rustc_hir/src/hir.rs index 77044df9a409..fcb15925f6a7 100644 --- a/compiler/rustc_hir/src/hir.rs +++ b/compiler/rustc_hir/src/hir.rs @@ -2998,6 +2998,12 @@ impl<'hir> Item<'hir> { ItemId { owner_id: self.owner_id } } + /// Check if this is an [`ItemKind::Enum`], [`ItemKind::Struct`] or + /// [`ItemKind::Union`]. 
+ pub fn is_adt(&self) -> bool { + matches!(self.kind, ItemKind::Enum(..) | ItemKind::Struct(..) | ItemKind::Union(..)) + } + expect_methods_self_kind! { expect_extern_crate, Option, ItemKind::ExternCrate(s), *s; diff --git a/compiler/rustc_hir_analysis/messages.ftl b/compiler/rustc_hir_analysis/messages.ftl index a61cfd0e4ce9..f32b14aca23f 100644 --- a/compiler/rustc_hir_analysis/messages.ftl +++ b/compiler/rustc_hir_analysis/messages.ftl @@ -198,6 +198,8 @@ hir_analysis_invalid_union_field = hir_analysis_invalid_union_field_sugg = wrap the field type in `ManuallyDrop<...>` +hir_analysis_invalid_unnamed_field_ty = unnamed fields can only have struct or union types + hir_analysis_late_bound_const_in_apit = `impl Trait` can only mention const parameters from an fn or impl .label = const parameter declared here diff --git a/compiler/rustc_hir_analysis/src/check/check.rs b/compiler/rustc_hir_analysis/src/check/check.rs index 1410273e3bc9..2c367b15df2a 100644 --- a/compiler/rustc_hir_analysis/src/check/check.rs +++ b/compiler/rustc_hir_analysis/src/check/check.rs @@ -129,17 +129,20 @@ fn check_unnamed_fields(tcx: TyCtxt<'_>, def: ty::AdtDef<'_>) { for field in variant.fields.iter().filter(|f| f.is_unnamed()) { let field_ty = tcx.type_of(field.did).instantiate_identity(); if let Some(adt) = field_ty.ty_adt_def() - && !adt.is_anonymous() - && !adt.repr().c() + && !adt.is_enum() { - let field_ty_span = tcx.def_span(adt.did()); - tcx.dcx().emit_err(errors::UnnamedFieldsRepr::FieldMissingReprC { - span: tcx.def_span(field.did), - field_ty_span, - field_ty, - field_adt_kind: adt.descr(), - sugg_span: field_ty_span.shrink_to_lo(), - }); + if !adt.is_anonymous() && !adt.repr().c() { + let field_ty_span = tcx.def_span(adt.did()); + tcx.dcx().emit_err(errors::UnnamedFieldsRepr::FieldMissingReprC { + span: tcx.def_span(field.did), + field_ty_span, + field_ty, + field_adt_kind: adt.descr(), + sugg_span: field_ty_span.shrink_to_lo(), + }); + } + } else { + 
tcx.dcx().emit_err(errors::InvalidUnnamedFieldTy { span: tcx.def_span(field.did) }); } } } diff --git a/compiler/rustc_hir_analysis/src/collect.rs b/compiler/rustc_hir_analysis/src/collect.rs index 43f0af5bd1da..a5ef1490bce9 100644 --- a/compiler/rustc_hir_analysis/src/collect.rs +++ b/compiler/rustc_hir_analysis/src/collect.rs @@ -943,7 +943,15 @@ impl<'tcx> FieldUniquenessCheckContext<'tcx> { } } hir::TyKind::Path(hir::QPath::Resolved(_, hir::Path { res, .. })) => { - self.check_field_in_nested_adt(self.tcx.adt_def(res.def_id()), field.span); + // If this is a direct path to an ADT, we can check it + // If this is a type alias or non-ADT, `check_unnamed_fields` should verify it + if let Some(def_id) = res.opt_def_id() + && let Some(local) = def_id.as_local() + && let Node::Item(item) = self.tcx.hir_node_by_def_id(local) + && item.is_adt() + { + self.check_field_in_nested_adt(self.tcx.adt_def(def_id), field.span); + } } // Abort due to errors (there must be an error if an unnamed field // has any type kind other than an anonymous adt or a named adt) diff --git a/compiler/rustc_hir_analysis/src/errors.rs b/compiler/rustc_hir_analysis/src/errors.rs index 6a505b961974..3bd7687a5442 100644 --- a/compiler/rustc_hir_analysis/src/errors.rs +++ b/compiler/rustc_hir_analysis/src/errors.rs @@ -661,6 +661,13 @@ pub(crate) struct InvalidUnionField { pub note: (), } +#[derive(Diagnostic)] +#[diag(hir_analysis_invalid_unnamed_field_ty)] +pub struct InvalidUnnamedFieldTy { + #[primary_span] + pub span: Span, +} + #[derive(Diagnostic)] #[diag(hir_analysis_return_type_notation_on_non_rpitit)] pub(crate) struct ReturnTypeNotationOnNonRpitit<'tcx> { diff --git a/tests/ui/union/unnamed-fields/auxiliary/dep.rs b/tests/ui/union/unnamed-fields/auxiliary/dep.rs new file mode 100644 index 000000000000..a11f3e18f52a --- /dev/null +++ b/tests/ui/union/unnamed-fields/auxiliary/dep.rs @@ -0,0 +1,18 @@ +#[repr(C)] +pub struct GoodStruct(()); + +pub struct BadStruct(()); + +pub enum BadEnum { + 
A, + B, +} + +#[repr(C)] +pub enum BadEnum2 { + A, + B, +} + +pub type GoodAlias = GoodStruct; +pub type BadAlias = i32; diff --git a/tests/ui/union/unnamed-fields/restrict_type_hir.rs b/tests/ui/union/unnamed-fields/restrict_type_hir.rs new file mode 100644 index 000000000000..80e4608be82e --- /dev/null +++ b/tests/ui/union/unnamed-fields/restrict_type_hir.rs @@ -0,0 +1,44 @@ +//@ aux-build:dep.rs + +// test for #121151 + +#![allow(incomplete_features)] +#![feature(unnamed_fields)] + +extern crate dep; + +#[repr(C)] +struct A { + a: u8, +} + +enum BadEnum { + A, + B, +} + +#[repr(C)] +enum BadEnum2 { + A, + B, +} + +type MyStruct = A; +type MyI32 = i32; + +#[repr(C)] +struct L { + _: i32, //~ ERROR unnamed fields can only have struct or union types + _: MyI32, //~ ERROR unnamed fields can only have struct or union types + _: BadEnum, //~ ERROR unnamed fields can only have struct or union types + _: BadEnum2, //~ ERROR unnamed fields can only have struct or union types + _: MyStruct, + _: dep::BadStruct, //~ ERROR named type of unnamed field must have `#[repr(C)]` representation + _: dep::BadEnum, //~ ERROR unnamed fields can only have struct or union types + _: dep::BadEnum2, //~ ERROR unnamed fields can only have struct or union types + _: dep::BadAlias, //~ ERROR unnamed fields can only have struct or union types + _: dep::GoodAlias, + _: dep::GoodStruct, +} + +fn main() {} diff --git a/tests/ui/union/unnamed-fields/restrict_type_hir.stderr b/tests/ui/union/unnamed-fields/restrict_type_hir.stderr new file mode 100644 index 000000000000..3e80d7fbf5c0 --- /dev/null +++ b/tests/ui/union/unnamed-fields/restrict_type_hir.stderr @@ -0,0 +1,62 @@ +error: unnamed fields can only have struct or union types + --> $DIR/restrict_type_hir.rs:31:5 + | +LL | _: i32, + | ^^^^^^ + +error: unnamed fields can only have struct or union types + --> $DIR/restrict_type_hir.rs:32:5 + | +LL | _: MyI32, + | ^^^^^^^^ + +error: unnamed fields can only have struct or union types + --> 
$DIR/restrict_type_hir.rs:33:5 + | +LL | _: BadEnum, + | ^^^^^^^^^^ + +error: unnamed fields can only have struct or union types + --> $DIR/restrict_type_hir.rs:34:5 + | +LL | _: BadEnum2, + | ^^^^^^^^^^^ + +error: named type of unnamed field must have `#[repr(C)]` representation + --> $DIR/restrict_type_hir.rs:36:5 + | +LL | _: dep::BadStruct, + | ^^^^^^^^^^^^^^^^^ unnamed field defined here + | + ::: $DIR/auxiliary/dep.rs:4:1 + | +LL | pub struct BadStruct(()); + | -------------------- `BadStruct` defined here + | +help: add `#[repr(C)]` to this struct + --> $DIR/auxiliary/dep.rs:4:1 + | +LL + #[repr(C)] +LL | pub struct BadStruct(()); + | + +error: unnamed fields can only have struct or union types + --> $DIR/restrict_type_hir.rs:37:5 + | +LL | _: dep::BadEnum, + | ^^^^^^^^^^^^^^^ + +error: unnamed fields can only have struct or union types + --> $DIR/restrict_type_hir.rs:38:5 + | +LL | _: dep::BadEnum2, + | ^^^^^^^^^^^^^^^^ + +error: unnamed fields can only have struct or union types + --> $DIR/restrict_type_hir.rs:39:5 + | +LL | _: dep::BadAlias, + | ^^^^^^^^^^^^^^^^ + +error: aborting due to 8 previous errors + From d988d8f4ba069e11ace537feea4e4e226e7f4afe Mon Sep 17 00:00:00 2001 From: Urgau Date: Sat, 17 Feb 2024 16:45:59 +0100 Subject: [PATCH 103/127] Use better heuristic for printing Cargo specific diagnostics --- compiler/rustc_codegen_ssa/src/errors.rs | 7 +++++-- compiler/rustc_hir_typeck/src/errors.rs | 2 +- compiler/rustc_incremental/src/persist/fs.rs | 2 +- .../rustc_lint/src/context/diagnostics.rs | 4 ++-- compiler/rustc_parse/src/errors.rs | 2 +- compiler/rustc_session/src/utils.rs | 20 ++++++++++++++++++- ...uggest-switching-edition-on-await-cargo.rs | 2 +- tests/ui/check-cfg/cargo-feature.rs | 2 +- tests/ui/check-cfg/diagnotics.rs | 4 ++-- tests/ui/crate-loading/missing-std.rs | 2 +- 10 files changed, 34 insertions(+), 13 deletions(-) diff --git a/compiler/rustc_codegen_ssa/src/errors.rs b/compiler/rustc_codegen_ssa/src/errors.rs index 
3d7903b5efb0..e42a8bd9ed98 100644 --- a/compiler/rustc_codegen_ssa/src/errors.rs +++ b/compiler/rustc_codegen_ssa/src/errors.rs @@ -362,8 +362,11 @@ impl IntoDiagnostic<'_, G> for LinkingFailed<'_> { // which by now we have no way to translate. if contains_undefined_ref { diag.note(fluent::codegen_ssa_extern_funcs_not_found) - .note(fluent::codegen_ssa_specify_libraries_to_link) - .note(fluent::codegen_ssa_use_cargo_directive); + .note(fluent::codegen_ssa_specify_libraries_to_link); + + if rustc_session::utils::was_invoked_from_cargo() { + diag.note(fluent::codegen_ssa_use_cargo_directive); + } } diag } diff --git a/compiler/rustc_hir_typeck/src/errors.rs b/compiler/rustc_hir_typeck/src/errors.rs index 10e12d01b1fe..1af0b75bd23c 100644 --- a/compiler/rustc_hir_typeck/src/errors.rs +++ b/compiler/rustc_hir_typeck/src/errors.rs @@ -293,7 +293,7 @@ pub enum HelpUseLatestEdition { impl HelpUseLatestEdition { pub fn new() -> Self { let edition = LATEST_STABLE_EDITION; - if std::env::var_os("CARGO").is_some() { + if rustc_session::utils::was_invoked_from_cargo() { Self::Cargo { edition } } else { Self::Standalone { edition } diff --git a/compiler/rustc_incremental/src/persist/fs.rs b/compiler/rustc_incremental/src/persist/fs.rs index 2578f284dee7..23d29916922f 100644 --- a/compiler/rustc_incremental/src/persist/fs.rs +++ b/compiler/rustc_incremental/src/persist/fs.rs @@ -492,7 +492,7 @@ fn lock_directory( lock_err, session_dir, is_unsupported_lock, - is_cargo: std::env::var_os("CARGO").map(|_| ()), + is_cargo: rustc_session::utils::was_invoked_from_cargo().then_some(()), })) } } diff --git a/compiler/rustc_lint/src/context/diagnostics.rs b/compiler/rustc_lint/src/context/diagnostics.rs index 0fa61c5d87e7..5af2b6daec18 100644 --- a/compiler/rustc_lint/src/context/diagnostics.rs +++ b/compiler/rustc_lint/src/context/diagnostics.rs @@ -205,7 +205,7 @@ pub(super) fn builtin( Vec::new() }; - let is_from_cargo = std::env::var_os("CARGO").is_some(); + let is_from_cargo = 
rustc_session::utils::was_invoked_from_cargo(); let mut is_feature_cfg = name == sym::feature; if is_feature_cfg && is_from_cargo { @@ -340,7 +340,7 @@ pub(super) fn builtin( .copied() .flatten() .collect(); - let is_from_cargo = std::env::var_os("CARGO").is_some(); + let is_from_cargo = rustc_session::utils::was_invoked_from_cargo(); // Show the full list if all possible values for a given name, but don't do it // for names as the possibilities could be very long diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs index 3c3a8d6fbb9c..674f7218ea6d 100644 --- a/compiler/rustc_parse/src/errors.rs +++ b/compiler/rustc_parse/src/errors.rs @@ -2545,7 +2545,7 @@ pub enum HelpUseLatestEdition { impl HelpUseLatestEdition { pub fn new() -> Self { let edition = LATEST_STABLE_EDITION; - if std::env::var_os("CARGO").is_some() { + if rustc_session::utils::was_invoked_from_cargo() { Self::Cargo { edition } } else { Self::Standalone { edition } diff --git a/compiler/rustc_session/src/utils.rs b/compiler/rustc_session/src/utils.rs index f76c69af526e..50ebbdccf672 100644 --- a/compiler/rustc_session/src/utils.rs +++ b/compiler/rustc_session/src/utils.rs @@ -1,7 +1,10 @@ use crate::session::Session; use rustc_data_structures::profiling::VerboseTimingGuard; use rustc_fs_util::try_canonicalize; -use std::path::{Path, PathBuf}; +use std::{ + path::{Path, PathBuf}, + sync::OnceLock, +}; impl Session { pub fn timer(&self, what: &'static str) -> VerboseTimingGuard<'_> { @@ -158,3 +161,18 @@ pub fn extra_compiler_flags() -> Option<(Vec, bool)> { if !result.is_empty() { Some((result, excluded_cargo_defaults)) } else { None } } + +/// Returns whenever rustc was launched by Cargo as opposed to another build system. +/// +/// To be used in diagnostics to avoid printing Cargo specific suggestions to other +/// build systems (like Bazel, Buck2, Makefile, ...). 
+pub fn was_invoked_from_cargo() -> bool { + static FROM_CARGO: OnceLock = OnceLock::new(); + + // To be able to detect Cargo, we use the simplest and least intrusive + // way: we check whenever the `CARGO_CRATE_NAME` env is set. + // + // Note that it is common in Makefiles to define the `CARGO` env even + // though we may not have been called by Cargo, so we avoid using it. + *FROM_CARGO.get_or_init(|| std::env::var_os("CARGO_CRATE_NAME").is_some()) +} diff --git a/tests/ui/async-await/suggest-switching-edition-on-await-cargo.rs b/tests/ui/async-await/suggest-switching-edition-on-await-cargo.rs index e5a3d54c5d03..e1fae0f0e934 100644 --- a/tests/ui/async-await/suggest-switching-edition-on-await-cargo.rs +++ b/tests/ui/async-await/suggest-switching-edition-on-await-cargo.rs @@ -1,4 +1,4 @@ -//@ rustc-env:CARGO=/usr/bin/cargo +//@ rustc-env:CARGO_CRATE_NAME=foo use std::pin::Pin; use std::future::Future; diff --git a/tests/ui/check-cfg/cargo-feature.rs b/tests/ui/check-cfg/cargo-feature.rs index a91068ca05ae..ba451921d794 100644 --- a/tests/ui/check-cfg/cargo-feature.rs +++ b/tests/ui/check-cfg/cargo-feature.rs @@ -4,7 +4,7 @@ // //@ check-pass //@ revisions: some none -//@ rustc-env:CARGO=/usr/bin/cargo +//@ rustc-env:CARGO_CRATE_NAME=foo //@ compile-flags: -Z unstable-options //@ [none]compile-flags: --check-cfg=cfg(feature,values()) //@ [some]compile-flags: --check-cfg=cfg(feature,values("bitcode")) diff --git a/tests/ui/check-cfg/diagnotics.rs b/tests/ui/check-cfg/diagnotics.rs index 54138d158904..134bfcf8ef49 100644 --- a/tests/ui/check-cfg/diagnotics.rs +++ b/tests/ui/check-cfg/diagnotics.rs @@ -1,7 +1,7 @@ //@ check-pass //@ revisions: cargo rustc -//@ [rustc]unset-rustc-env:CARGO -//@ [cargo]rustc-env:CARGO=/usr/bin/cargo +//@ [rustc]unset-rustc-env:CARGO_CRATE_NAME +//@ [cargo]rustc-env:CARGO_CRATE_NAME=foo //@ compile-flags: --check-cfg=cfg(feature,values("foo")) --check-cfg=cfg(no_values) -Z unstable-options #[cfg(featur)] diff --git 
a/tests/ui/crate-loading/missing-std.rs b/tests/ui/crate-loading/missing-std.rs index ca9501cda3a9..aed8b0c530a6 100644 --- a/tests/ui/crate-loading/missing-std.rs +++ b/tests/ui/crate-loading/missing-std.rs @@ -1,6 +1,6 @@ //@ compile-flags: --target x86_64-unknown-uefi //@ needs-llvm-components: x86 -//@ rustc-env:CARGO=/usr/bin/cargo +//@ rustc-env:CARGO_CRATE_NAME=foo #![feature(no_core)] #![no_core] extern crate core; From a2dd4a3b5be7356c7c72387346e4299093964ea6 Mon Sep 17 00:00:00 2001 From: Caleb Zulawski Date: Sat, 17 Feb 2024 10:49:13 -0500 Subject: [PATCH 104/127] Minor fixes --- crates/core_simd/src/lib.rs | 1 - crates/core_simd/src/masks.rs | 6 +++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/crates/core_simd/src/lib.rs b/crates/core_simd/src/lib.rs index 4cad148f224c..a25723e11cef 100644 --- a/crates/core_simd/src/lib.rs +++ b/crates/core_simd/src/lib.rs @@ -1,6 +1,5 @@ #![no_std] #![feature( - core_intrinsics, const_intrinsic_copy, const_refs_to_cell, const_maybe_uninit_as_mut_ptr, diff --git a/crates/core_simd/src/masks.rs b/crates/core_simd/src/masks.rs index 4389079adb37..ad7c68fc7f6f 100644 --- a/crates/core_simd/src/masks.rs +++ b/crates/core_simd/src/masks.rs @@ -33,7 +33,7 @@ mod sealed { fn eq(self, other: Self) -> bool; - fn as_usize(self) -> usize; + fn to_usize(self) -> usize; type Unsigned: SimdElement; @@ -65,7 +65,7 @@ macro_rules! 
impl_element { fn eq(self, other: Self) -> bool { self == other } #[inline] - fn as_usize(self) -> usize { + fn to_usize(self) -> usize { self as usize } @@ -394,7 +394,7 @@ where if min_index.eq(T::TRUE) { None } else { - Some(min_index.as_usize()) + Some(min_index.to_usize()) } } } From 644bdfb2ac810c788300bfd79fcc0e0ec84cda5f Mon Sep 17 00:00:00 2001 From: Caleb Zulawski Date: Sat, 17 Feb 2024 11:08:08 -0500 Subject: [PATCH 105/127] Revert "Merge pull request #385 from workingjubilee/make-an-ass-out-of-u-and-me" This reverts commit 6ad779c3f6f8e935c12f5a9e488aeeaf0d829ff1, reversing changes made to b2e1bcba2c9febb16561420392be2ca483fcad7f. --- crates/core_simd/src/lib.rs | 1 - crates/core_simd/src/masks.rs | 5 +---- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/crates/core_simd/src/lib.rs b/crates/core_simd/src/lib.rs index a25723e11cef..6c66142f0c8d 100644 --- a/crates/core_simd/src/lib.rs +++ b/crates/core_simd/src/lib.rs @@ -5,7 +5,6 @@ const_maybe_uninit_as_mut_ptr, const_mut_refs, convert_float_to_int, - core_intrinsics, decl_macro, inline_const, intra_doc_pointers, diff --git a/crates/core_simd/src/masks.rs b/crates/core_simd/src/masks.rs index ad7c68fc7f6f..aad91d7acb72 100644 --- a/crates/core_simd/src/masks.rs +++ b/crates/core_simd/src/masks.rs @@ -174,10 +174,7 @@ where #[must_use = "method returns a new mask and does not mutate the original value"] pub unsafe fn from_int_unchecked(value: Simd) -> Self { // Safety: the caller must confirm this invariant - unsafe { - core::intrinsics::assume(::valid(value)); - Self(mask_impl::Mask::from_int_unchecked(value)) - } + unsafe { Self(mask_impl::Mask::from_int_unchecked(value)) } } /// Converts a vector of integers to a mask, where 0 represents `false` and -1 From d8439e90bd6c86fe4368807f5d1fc9952a36aeca Mon Sep 17 00:00:00 2001 From: Caleb Zulawski Date: Sat, 17 Feb 2024 11:11:58 -0500 Subject: [PATCH 106/127] Add back core_intrinsics --- crates/core_simd/src/lib.rs | 1 + 1 file changed, 1 
insertion(+) diff --git a/crates/core_simd/src/lib.rs b/crates/core_simd/src/lib.rs index 6c66142f0c8d..a25723e11cef 100644 --- a/crates/core_simd/src/lib.rs +++ b/crates/core_simd/src/lib.rs @@ -5,6 +5,7 @@ const_maybe_uninit_as_mut_ptr, const_mut_refs, convert_float_to_int, + core_intrinsics, decl_macro, inline_const, intra_doc_pointers, From 44b4d26273e6d18dcd6bbadd66e793c229e89a59 Mon Sep 17 00:00:00 2001 From: Caleb Zulawski Date: Sat, 17 Feb 2024 11:18:31 -0500 Subject: [PATCH 107/127] Revert "Revert "Merge pull request #385 from workingjubilee/make-an-ass-out-of-u-and-me"" This reverts commit 644bdfb2ac810c788300bfd79fcc0e0ec84cda5f. --- crates/core_simd/src/masks.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/crates/core_simd/src/masks.rs b/crates/core_simd/src/masks.rs index aad91d7acb72..ad7c68fc7f6f 100644 --- a/crates/core_simd/src/masks.rs +++ b/crates/core_simd/src/masks.rs @@ -174,7 +174,10 @@ where #[must_use = "method returns a new mask and does not mutate the original value"] pub unsafe fn from_int_unchecked(value: Simd) -> Self { // Safety: the caller must confirm this invariant - unsafe { Self(mask_impl::Mask::from_int_unchecked(value)) } + unsafe { + core::intrinsics::assume(::valid(value)); + Self(mask_impl::Mask::from_int_unchecked(value)) + } } /// Converts a vector of integers to a mask, where 0 represents `false` and -1 From aebf6f156056ed803afed2ad055094d2ff4fc0cb Mon Sep 17 00:00:00 2001 From: Caleb Zulawski Date: Sat, 17 Feb 2024 11:33:13 -0500 Subject: [PATCH 108/127] Use intrinsics directly to avoid recursion --- crates/core_simd/src/masks.rs | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/crates/core_simd/src/masks.rs b/crates/core_simd/src/masks.rs index ad7c68fc7f6f..e480c25a51e6 100644 --- a/crates/core_simd/src/masks.rs +++ b/crates/core_simd/src/masks.rs @@ -12,7 +12,7 @@ )] mod mask_impl; -use crate::simd::{cmp::SimdPartialEq, LaneCount, Simd, SimdCast, SimdElement, 
SupportedLaneCount}; +use crate::simd::{LaneCount, Simd, SimdCast, SimdElement, SupportedLaneCount}; use core::cmp::Ordering; use core::{fmt, mem}; @@ -58,7 +58,16 @@ macro_rules! impl_element { where LaneCount: SupportedLaneCount, { - (value.simd_eq(Simd::splat(0 as _)) | value.simd_eq(Simd::splat(-1 as _))).all() + // We can't use `Simd` directly, because `Simd`'s functions call this function and + // we will end up with an infinite loop. + // Safety: `value` is an integer vector + unsafe { + use core::intrinsics::simd; + let falses: Simd = simd::simd_eq(value, Simd::splat(0 as _)); + let trues: Simd = simd::simd_eq(value, Simd::splat(-1 as _)); + let valid: Simd = simd::simd_or(falses, trues); + simd::simd_reduce_all(valid) + } } #[inline] From afa70e6d92b90b54f1c31ca9838fdeb4c7033db7 Mon Sep 17 00:00:00 2001 From: Caleb Zulawski Date: Sat, 2 Dec 2023 10:49:21 -0500 Subject: [PATCH 109/127] Remove link to core::arch::x86_64 --- crates/core_simd/src/core_simd_docs.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/core_simd/src/core_simd_docs.md b/crates/core_simd/src/core_simd_docs.md index fa93155ff5ed..bf412e035b51 100644 --- a/crates/core_simd/src/core_simd_docs.md +++ b/crates/core_simd/src/core_simd_docs.md @@ -30,7 +30,7 @@ Instead, they map to a reasonable implementation of the operation for the target Consistency between targets is not compromised to use faster or fewer instructions. In some cases, `std::arch` will provide a faster function that has slightly different behavior than the `std::simd` equivalent. -For example, [`_mm_min_ps`](`core::arch::x86_64::_mm_min_ps`)[^1] can be slightly faster than [`SimdFloat::simd_min`](`num::SimdFloat::simd_min`), but does not conform to the IEEE standard also used by [`f32::min`]. +For example, `_mm_min_ps`[^1] can be slightly faster than [`SimdFloat::simd_min`](`num::SimdFloat::simd_min`), but does not conform to the IEEE standard also used by [`f32::min`]. 
When necessary, [`Simd`] can be converted to the types provided by `std::arch` to make use of target-specific functions. Many targets simply don't have SIMD, or don't support SIMD for a particular element type. From 64bd26f221e5f2ce198899635999d043f4dfc241 Mon Sep 17 00:00:00 2001 From: Urgau Date: Tue, 12 Dec 2023 23:26:45 +0100 Subject: [PATCH 110/127] Fix target_feature config in portable-simd --- crates/core_simd/src/swizzle_dyn.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/core_simd/src/swizzle_dyn.rs b/crates/core_simd/src/swizzle_dyn.rs index bd8a38e350d3..dac013cc98dc 100644 --- a/crates/core_simd/src/swizzle_dyn.rs +++ b/crates/core_simd/src/swizzle_dyn.rs @@ -55,7 +55,7 @@ where 16 => transize(vqtbl1q_u8, self, idxs), #[cfg(all(target_feature = "avx2", not(target_feature = "avx512vbmi")))] 32 => transize_raw(avx2_pshufb, self, idxs), - #[cfg(target_feature = "avx512vl,avx512vbmi")] + #[cfg(all(target_feature = "avx512vl", target_feature = "avx512vbmi"))] 32 => transize(x86::_mm256_permutexvar_epi8, self, idxs), // Notable absence: avx512bw shuffle // If avx512bw is available, odds of avx512vbmi are good From 9aec60e2c63c3c67649589a77a4e53de45cc565b Mon Sep 17 00:00:00 2001 From: Amanieu d'Antras Date: Tue, 30 Jan 2024 03:40:53 +0000 Subject: [PATCH 111/127] Disable conversions between portable_simd and stdarch on big-endian ARM stdarch no longer provide SIMD on big-endian ARM due to https://github.com/rust-lang/stdarch/issues/1484 --- crates/core_simd/src/vendor/arm.rs | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/crates/core_simd/src/vendor/arm.rs b/crates/core_simd/src/vendor/arm.rs index ff3b69ccf959..ee5c64213736 100644 --- a/crates/core_simd/src/vendor/arm.rs +++ b/crates/core_simd/src/vendor/arm.rs @@ -7,9 +7,12 @@ use core::arch::arm::*; #[cfg(target_arch = "aarch64")] use core::arch::aarch64::*; -#[cfg(any( - target_arch = "aarch64", - all(target_arch = "arm", target_feature = "v7"), 
+#[cfg(all( + any( + target_arch = "aarch64", + all(target_arch = "arm", target_feature = "v7"), + ), + target_endian = "little" ))] mod neon { use super::*; From eee4f1d83a29345dcd542a53d729c04bc53e656d Mon Sep 17 00:00:00 2001 From: Caleb Zulawski Date: Sat, 17 Feb 2024 19:10:17 -0500 Subject: [PATCH 112/127] Fix swizzle_dyn --- crates/core_simd/src/swizzle_dyn.rs | 44 ++++++++--------------------- 1 file changed, 12 insertions(+), 32 deletions(-) diff --git a/crates/core_simd/src/swizzle_dyn.rs b/crates/core_simd/src/swizzle_dyn.rs index dac013cc98dc..ae9ff6894b0a 100644 --- a/crates/core_simd/src/swizzle_dyn.rs +++ b/crates/core_simd/src/swizzle_dyn.rs @@ -44,7 +44,7 @@ where ))] 8 => transize(vtbl1_u8, self, idxs), #[cfg(target_feature = "ssse3")] - 16 => transize(x86::_mm_shuffle_epi8, self, idxs), + 16 => transize(x86::_mm_shuffle_epi8, self, zeroing_idxs(idxs)), #[cfg(target_feature = "simd128")] 16 => transize(wasm::i8x16_swizzle, self, idxs), #[cfg(all( @@ -54,9 +54,9 @@ where ))] 16 => transize(vqtbl1q_u8, self, idxs), #[cfg(all(target_feature = "avx2", not(target_feature = "avx512vbmi")))] - 32 => transize_raw(avx2_pshufb, self, idxs), + 32 => transize(avx2_pshufb, self, idxs), #[cfg(all(target_feature = "avx512vl", target_feature = "avx512vbmi"))] - 32 => transize(x86::_mm256_permutexvar_epi8, self, idxs), + 32 => transize(x86::_mm256_permutexvar_epi8, zeroing_idxs(idxs), self), // Notable absence: avx512bw shuffle // If avx512bw is available, odds of avx512vbmi are good // FIXME: initial AVX512VBMI variant didn't actually pass muster @@ -129,45 +129,25 @@ unsafe fn avx2_pshufb(bytes: Simd, idxs: Simd) -> Simd { #[inline(always)] unsafe fn transize( f: unsafe fn(T, T) -> T, - bytes: Simd, - idxs: Simd, + a: Simd, + b: Simd, ) -> Simd where LaneCount: SupportedLaneCount, { - let idxs = zeroing_idxs(idxs); // SAFETY: Same obligation to use this function as to use mem::transmute_copy. 
- unsafe { mem::transmute_copy(&f(mem::transmute_copy(&bytes), mem::transmute_copy(&idxs))) } + unsafe { mem::transmute_copy(&f(mem::transmute_copy(&a), mem::transmute_copy(&b))) } } -/// Make indices that yield 0 for this architecture +/// Make indices that yield 0 for x86 +#[cfg(any(target_arch = "x86", target_arch = "x86_64"))] +#[allow(unused)] #[inline(always)] fn zeroing_idxs(idxs: Simd) -> Simd where LaneCount: SupportedLaneCount, { - // On x86, make sure the top bit is set. - #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] - let idxs = { - use crate::simd::cmp::SimdPartialOrd; - idxs.simd_lt(Simd::splat(N as u8)) - .select(idxs, Simd::splat(u8::MAX)) - }; - // Simply do nothing on most architectures. - idxs -} - -/// As transize but no implicit call to `zeroing_idxs`. -#[allow(dead_code)] -#[inline(always)] -unsafe fn transize_raw( - f: unsafe fn(T, T) -> T, - bytes: Simd, - idxs: Simd, -) -> Simd -where - LaneCount: SupportedLaneCount, -{ - // SAFETY: Same obligation to use this function as to use mem::transmute_copy. - unsafe { mem::transmute_copy(&f(mem::transmute_copy(&bytes), mem::transmute_copy(&idxs))) } + use crate::simd::cmp::SimdPartialOrd; + idxs.simd_lt(Simd::splat(N as u8)) + .select(idxs, Simd::splat(u8::MAX)) } From 408eeae59d35cbcaab2cfb345d24373954e74fc5 Mon Sep 17 00:00:00 2001 From: Obei Sideg Date: Sat, 17 Feb 2024 22:01:56 +0300 Subject: [PATCH 113/127] Improve wording of static_mut_ref Rename `static_mut_ref` lint to `static_mut_refs`. 
--- .../example/mini_core_hello_world.rs | 4 +- .../example/mini_core_hello_world.rs | 4 +- .../src/error_codes/E0796.md | 26 +++--- compiler/rustc_hir_analysis/messages.ftl | 29 +++--- compiler/rustc_hir_analysis/src/check/errs.rs | 28 ++---- compiler/rustc_hir_analysis/src/errors.rs | 28 ++---- compiler/rustc_lint/src/lib.rs | 1 + compiler/rustc_lint_defs/src/builtin.rs | 8 +- library/panic_unwind/src/seh.rs | 10 +- library/std/src/panicking.rs | 5 +- .../sys/pal/common/thread_local/fast_local.rs | 5 +- .../pal/common/thread_local/static_local.rs | 5 +- library/std/src/thread/local.rs | 4 +- .../miri/tests/fail/tls/tls_static_dealloc.rs | 4 +- src/tools/miri/tests/pass/static_mut.rs | 4 +- src/tools/miri/tests/pass/tls/tls_static.rs | 4 +- tests/ui/abi/statics/static-mut-foreign.rs | 4 +- .../ui/abi/statics/static-mut-foreign.stderr | 22 ++--- .../borrowck/borrowck-access-permissions.rs | 2 +- .../borrowck-access-permissions.stderr | 12 +-- .../borrowck-unsafe-static-mutable-borrows.rs | 2 +- ...rowck-unsafe-static-mutable-borrows.stderr | 12 +-- tests/ui/borrowck/issue-20801.rs | 2 +- tests/ui/borrowck/issue-20801.stderr | 12 +-- ...ue-55492-borrowck-migrate-scans-parents.rs | 6 +- ...5492-borrowck-migrate-scans-parents.stderr | 32 +++---- tests/ui/consts/const_let_assign2.rs | 2 +- tests/ui/consts/const_let_assign2.stderr | 12 +-- .../const_refs_to_static_fail_invalid.rs | 6 +- .../const_refs_to_static_fail_invalid.stderr | 2 +- .../ui/consts/issue-17718-const-bad-values.rs | 2 +- .../const_refers_to_static_cross_crate.rs | 2 +- .../ui/consts/miri_unleashed/extern-static.rs | 2 +- .../mutable_references_err.32bit.stderr | 62 ++++++------- .../mutable_references_err.64bit.stderr | 62 ++++++------- .../miri_unleashed/mutable_references_err.rs | 13 ++- .../static-promoted-to-mutable-static.rs | 21 ++--- .../consts/static_mut_containing_mut_ref.rs | 2 +- .../consts/static_mut_containing_mut_ref2.rs | 2 +- .../issue-23338-ensure-param-drop-order.rs | 2 +- 
...issue-23338-ensure-param-drop-order.stderr | 12 +-- tests/ui/error-codes/E0017.rs | 2 +- tests/ui/error-codes/E0017.stderr | 12 +-- .../issues/issue-23611-enum-swap-in-drop.rs | 2 +- .../issue-23611-enum-swap-in-drop.stderr | 12 +-- tests/ui/issues/issue-54410.rs | 2 +- tests/ui/issues/issue-54410.stderr | 12 +-- ...ead-local-static-mut-borrow-outlives-fn.rs | 2 +- ...local-static-mut-borrow-outlives-fn.stderr | 12 +-- .../reference-of-mut-static-safe.e2021.stderr | 26 ------ .../reference-of-mut-static-safe.e2024.stderr | 15 --- .../reference-of-mut-static-unsafe-fn.rs | 15 +-- .../reference-of-mut-static-unsafe-fn.stderr | 70 ++++++++------ .../reference-of-mut-static.e2021.stderr | 64 ++++++------- .../reference-of-mut-static.e2024.stderr | 48 +++++----- tests/ui/static/reference-of-mut-static.rs | 26 +++--- .../reference-to-mut-static-safe.e2021.stderr | 26 ++++++ .../reference-to-mut-static-safe.e2024.stderr | 15 +++ ...afe.rs => reference-to-mut-static-safe.rs} | 4 +- .../reference-to-mut-static-unsafe-fn.rs | 26 ++++++ .../reference-to-mut-static-unsafe-fn.stderr | 75 +++++++++++++++ .../reference-to-mut-static.e2021.stderr | 91 +++++++++++++++++++ .../reference-to-mut-static.e2024.stderr | 75 +++++++++++++++ tests/ui/static/reference-to-mut-static.rs | 50 ++++++++++ tests/ui/static/safe-extern-statics-mut.rs | 4 +- .../ui/static/safe-extern-statics-mut.stderr | 22 ++--- tests/ui/statics/issue-15261.rs | 2 +- tests/ui/statics/issue-15261.stderr | 12 +-- tests/ui/statics/static-mut-xc.rs | 4 +- tests/ui/statics/static-mut-xc.stderr | 22 ++--- tests/ui/statics/static-recursive.rs | 2 +- tests/ui/statics/static-recursive.stderr | 12 +-- tests/ui/thread-local/thread-local-static.rs | 2 +- 73 files changed, 783 insertions(+), 460 deletions(-) delete mode 100644 tests/ui/static/reference-of-mut-static-safe.e2021.stderr delete mode 100644 tests/ui/static/reference-of-mut-static-safe.e2024.stderr create mode 100644 
tests/ui/static/reference-to-mut-static-safe.e2021.stderr create mode 100644 tests/ui/static/reference-to-mut-static-safe.e2024.stderr rename tests/ui/static/{reference-of-mut-static-safe.rs => reference-to-mut-static-safe.rs} (63%) create mode 100644 tests/ui/static/reference-to-mut-static-unsafe-fn.rs create mode 100644 tests/ui/static/reference-to-mut-static-unsafe-fn.stderr create mode 100644 tests/ui/static/reference-to-mut-static.e2021.stderr create mode 100644 tests/ui/static/reference-to-mut-static.e2024.stderr create mode 100644 tests/ui/static/reference-to-mut-static.rs diff --git a/compiler/rustc_codegen_cranelift/example/mini_core_hello_world.rs b/compiler/rustc_codegen_cranelift/example/mini_core_hello_world.rs index 2a7b1107ffca..8b0b9123ac7d 100644 --- a/compiler/rustc_codegen_cranelift/example/mini_core_hello_world.rs +++ b/compiler/rustc_codegen_cranelift/example/mini_core_hello_world.rs @@ -112,8 +112,8 @@ fn start( static mut NUM: u8 = 6 * 7; -// FIXME: Use `SyncUnsafeCell` instead of allowing `static_mut_ref` lint -#[allow(static_mut_ref)] +// FIXME: Use `SyncUnsafeCell` instead of allowing `static_mut_refs` lint +#[allow(static_mut_refs)] static NUM_REF: &'static u8 = unsafe { &NUM }; unsafe fn zeroed() -> T { diff --git a/compiler/rustc_codegen_gcc/example/mini_core_hello_world.rs b/compiler/rustc_codegen_gcc/example/mini_core_hello_world.rs index 9827e299f2a3..add77880716c 100644 --- a/compiler/rustc_codegen_gcc/example/mini_core_hello_world.rs +++ b/compiler/rustc_codegen_gcc/example/mini_core_hello_world.rs @@ -99,8 +99,8 @@ fn start( static mut NUM: u8 = 6 * 7; -// FIXME: Use `SyncUnsafeCell` instead of allowing `static_mut_ref` lint -#[allow(static_mut_ref)] +// FIXME: Use `SyncUnsafeCell` instead of allowing `static_mut_refs` lint +#[allow(static_mut_refs)] static NUM_REF: &'static u8 = unsafe { &NUM }; macro_rules! 
assert { diff --git a/compiler/rustc_error_codes/src/error_codes/E0796.md b/compiler/rustc_error_codes/src/error_codes/E0796.md index cea18f8db851..7ac429e52158 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0796.md +++ b/compiler/rustc_error_codes/src/error_codes/E0796.md @@ -1,22 +1,26 @@ -Reference of mutable static. +You have created a reference to a mutable static. Erroneous code example: ```compile_fail,edition2024,E0796 static mut X: i32 = 23; -static mut Y: i32 = 24; -unsafe { - let y = &X; - let ref x = X; - let (x, y) = (&X, &Y); - foo(&X); +fn work() { + let _val = unsafe { X }; } -fn foo<'a>(_x: &'a i32) {} +let x_ref = unsafe { &mut X }; +work(); +// The next line has Undefined Behavior! +// `x_ref` is a mutable reference and allows no aliases, +// but `work` has been reading the reference between +// the moment `x_ref` was created and when it was used. +// This violates the uniqueness of `x_ref`. +*x_ref = 42; ``` -Mutable statics can be written to by multiple threads: aliasing violations or -data races will cause undefined behavior. +A reference to a mutable static has lifetime `'static`. This is very dangerous +as it is easy to accidentally overlap the lifetime of that reference with +other, conflicting accesses to the same static. -Reference of mutable static is a hard error from 2024 edition. +References to mutable statics are a hard error in the 2024 edition. 
diff --git a/compiler/rustc_hir_analysis/messages.ftl b/compiler/rustc_hir_analysis/messages.ftl index a61cfd0e4ce9..202c5ba989ab 100644 --- a/compiler/rustc_hir_analysis/messages.ftl +++ b/compiler/rustc_hir_analysis/messages.ftl @@ -373,19 +373,24 @@ hir_analysis_start_not_target_feature = `#[start]` function is not allowed to ha hir_analysis_start_not_track_caller = `#[start]` function is not allowed to be `#[track_caller]` .label = `#[start]` function is not allowed to be `#[track_caller]` -hir_analysis_static_mut_ref = reference of mutable static is disallowed - .label = reference of mutable static - .note = mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior - .suggestion = shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer - .suggestion_mut = mutable references are dangerous since if there's any other pointer or reference used for that static while the reference lives, that's UB; use `addr_of_mut!` instead to create a raw pointer +hir_analysis_static_mut_ref = creating a {$shared} reference to a mutable static + .label = {$shared} reference to mutable static + .note = {$shared -> + [shared] this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior + *[mutable] this mutable reference has lifetime `'static`, but if the static gets accessed (read or written) by any other means, or any other reference is created, then any further use of this mutable reference is Undefined Behavior + } + .suggestion = use `addr_of!` instead to create a raw pointer + .suggestion_mut = use `addr_of_mut!` instead to create a raw pointer -hir_analysis_static_mut_ref_lint = {$shared}reference of mutable static is discouraged - .label = shared reference of mutable static - 
.label_mut = mutable reference of mutable static - .suggestion = shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer - .suggestion_mut = mutable references are dangerous since if there's any other pointer or reference used for that static while the reference lives, that's UB; use `addr_of_mut!` instead to create a raw pointer - .note = reference of mutable static is a hard error from 2024 edition - .why_note = mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior +hir_analysis_static_mut_refs_lint = creating a {$shared} reference to mutable static is discouraged + .label = {$shared} reference to mutable static + .suggestion = use `addr_of!` instead to create a raw pointer + .suggestion_mut = use `addr_of_mut!` instead to create a raw pointer + .note = this will be a hard error in the 2024 edition + .why_note = {$shared -> + [shared] this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior + *[mutable] this mutable reference has lifetime `'static`, but if the static gets accessed (read or written) by any other means, or any other reference is created, then any further use of this mutable reference is Undefined Behavior + } hir_analysis_static_specialize = cannot specialize on `'static` lifetime diff --git a/compiler/rustc_hir_analysis/src/check/errs.rs b/compiler/rustc_hir_analysis/src/check/errs.rs index 87a1f3d34258..4a7ace274c5b 100644 --- a/compiler/rustc_hir_analysis/src/check/errs.rs +++ b/compiler/rustc_hir_analysis/src/check/errs.rs @@ -1,6 +1,6 @@ use rustc_hir as hir; use rustc_hir_pretty::qpath_to_string; -use rustc_lint_defs::builtin::STATIC_MUT_REF; +use rustc_lint_defs::builtin::STATIC_MUT_REFS; use rustc_middle::ty::TyCtxt; use rustc_span::Span; 
use rustc_type_ir::Mutability; @@ -66,32 +66,24 @@ fn handle_static_mut_ref( hir_id: hir::HirId, ) { if e2024 { - let sugg = if mutable { - errors::StaticMutRefSugg::Mut { span, var } + let (sugg, shared) = if mutable { + (errors::StaticMutRefSugg::Mut { span, var }, "mutable") } else { - errors::StaticMutRefSugg::Shared { span, var } + (errors::StaticMutRefSugg::Shared { span, var }, "shared") }; - tcx.sess.parse_sess.dcx.emit_err(errors::StaticMutRef { span, sugg }); + tcx.sess.parse_sess.dcx.emit_err(errors::StaticMutRef { span, sugg, shared }); return; } - let (label, sugg, shared) = if mutable { - ( - errors::RefOfMutStaticLabel::Mut { span }, - errors::RefOfMutStaticSugg::Mut { span, var }, - "mutable ", - ) + let (sugg, shared) = if mutable { + (errors::RefOfMutStaticSugg::Mut { span, var }, "mutable") } else { - ( - errors::RefOfMutStaticLabel::Shared { span }, - errors::RefOfMutStaticSugg::Shared { span, var }, - "shared ", - ) + (errors::RefOfMutStaticSugg::Shared { span, var }, "shared") }; tcx.emit_node_span_lint( - STATIC_MUT_REF, + STATIC_MUT_REFS, hir_id, span, - errors::RefOfMutStatic { shared, why_note: (), label, sugg }, + errors::RefOfMutStatic { span, sugg, shared }, ); } diff --git a/compiler/rustc_hir_analysis/src/errors.rs b/compiler/rustc_hir_analysis/src/errors.rs index 6a505b961974..a8fd311df432 100644 --- a/compiler/rustc_hir_analysis/src/errors.rs +++ b/compiler/rustc_hir_analysis/src/errors.rs @@ -1455,12 +1455,13 @@ pub struct OnlyCurrentTraitsPointerSugg<'a> { #[derive(Diagnostic)] #[diag(hir_analysis_static_mut_ref, code = E0796)] #[note] -pub struct StaticMutRef { +pub struct StaticMutRef<'a> { #[primary_span] #[label] pub span: Span, #[subdiagnostic] pub sugg: StaticMutRefSugg, + pub shared: &'a str, } #[derive(Subdiagnostic)] @@ -1491,30 +1492,15 @@ pub enum StaticMutRefSugg { // STATIC_MUT_REF lint #[derive(LintDiagnostic)] -#[diag(hir_analysis_static_mut_ref_lint)] +#[diag(hir_analysis_static_mut_refs_lint)] #[note] 
+#[note(hir_analysis_why_note)] pub struct RefOfMutStatic<'a> { - pub shared: &'a str, - #[note(hir_analysis_why_note)] - pub why_note: (), - #[subdiagnostic] - pub label: RefOfMutStaticLabel, + #[label] + pub span: Span, #[subdiagnostic] pub sugg: RefOfMutStaticSugg, -} - -#[derive(Subdiagnostic)] -pub enum RefOfMutStaticLabel { - #[label(hir_analysis_label)] - Shared { - #[primary_span] - span: Span, - }, - #[label(hir_analysis_label_mut)] - Mut { - #[primary_span] - span: Span, - }, + pub shared: &'a str, } #[derive(Subdiagnostic)] diff --git a/compiler/rustc_lint/src/lib.rs b/compiler/rustc_lint/src/lib.rs index 85f9d3bd63ec..e50f4ca338bd 100644 --- a/compiler/rustc_lint/src/lib.rs +++ b/compiler/rustc_lint/src/lib.rs @@ -325,6 +325,7 @@ fn register_builtins(store: &mut LintStore) { store.register_renamed("or_patterns_back_compat", "rust_2021_incompatible_or_patterns"); store.register_renamed("non_fmt_panic", "non_fmt_panics"); store.register_renamed("unused_tuple_struct_fields", "dead_code"); + store.register_renamed("static_mut_ref", "static_mut_refs"); // These were moved to tool lints, but rustc still sees them when compiling normally, before // tool lints are registered, so `check_tool_name_for_backwards_compat` doesn't work. Use diff --git a/compiler/rustc_lint_defs/src/builtin.rs b/compiler/rustc_lint_defs/src/builtin.rs index 6a2a2c1e48e2..3f5d3c259715 100644 --- a/compiler/rustc_lint_defs/src/builtin.rs +++ b/compiler/rustc_lint_defs/src/builtin.rs @@ -89,7 +89,7 @@ declare_lint_pass! { SINGLE_USE_LIFETIMES, SOFT_UNSTABLE, STABLE_FEATURES, - STATIC_MUT_REF, + STATIC_MUT_REFS, SUSPICIOUS_AUTO_TRAIT_IMPLS, TEST_UNSTABLE_LINT, TEXT_DIRECTION_CODEPOINT_IN_COMMENT, @@ -1769,7 +1769,7 @@ declare_lint! { } declare_lint! { - /// The `static_mut_ref` lint checks for shared or mutable references + /// The `static_mut_refs` lint checks for shared or mutable references /// of mutable static inside `unsafe` blocks and `unsafe` functions. 
/// /// ### Example @@ -1807,9 +1807,9 @@ declare_lint! { /// Shared or mutable references of mutable static are almost always a mistake and /// can lead to undefined behavior and various other problems in your code. /// - /// This lint is "warn" by default on editions up to 2021, from 2024 there is + /// This lint is "warn" by default on editions up to 2021, in 2024 there is /// a hard error instead. - pub STATIC_MUT_REF, + pub STATIC_MUT_REFS, Warn, "shared references or mutable references of mutable static is discouraged", @future_incompatible = FutureIncompatibleInfo { diff --git a/library/panic_unwind/src/seh.rs b/library/panic_unwind/src/seh.rs index d3ba546d730d..c4f4d2fbec91 100644 --- a/library/panic_unwind/src/seh.rs +++ b/library/panic_unwind/src/seh.rs @@ -261,8 +261,9 @@ cfg_if::cfg_if! { } } -// FIXME: Use `SyncUnsafeCell` instead of allowing `static_mut_ref` lint -#[allow(static_mut_ref)] +// FIXME: Use `SyncUnsafeCell` instead of allowing `static_mut_refs` lint +#[cfg_attr(bootstrap, allow(static_mut_ref))] +#[cfg_attr(not(bootstrap), allow(static_mut_refs))] pub unsafe fn panic(data: Box) -> u32 { use core::intrinsics::atomic_store_seqcst; @@ -324,8 +325,9 @@ pub unsafe fn panic(data: Box) -> u32 { _CxxThrowException(throw_ptr, &mut THROW_INFO as *mut _ as *mut _); } -// FIXME: Use `SyncUnsafeCell` instead of allowing `static_mut_ref` lint -#[allow(static_mut_ref)] +// FIXME: Use `SyncUnsafeCell` instead of allowing `static_mut_refs` lint +#[cfg_attr(bootstrap, allow(static_mut_ref))] +#[cfg_attr(not(bootstrap), allow(static_mut_refs))] pub unsafe fn cleanup(payload: *mut u8) -> Box { // A null payload here means that we got here from the catch (...) of // __rust_try. This happens when a non-Rust foreign exception is caught. 
diff --git a/library/std/src/panicking.rs b/library/std/src/panicking.rs index 8294160b72ca..c8306c1b597a 100644 --- a/library/std/src/panicking.rs +++ b/library/std/src/panicking.rs @@ -337,8 +337,9 @@ pub mod panic_count { #[doc(hidden)] #[cfg(not(feature = "panic_immediate_abort"))] #[unstable(feature = "update_panic_count", issue = "none")] -// FIXME: Use `SyncUnsafeCell` instead of allowing `static_mut_ref` lint -#[allow(static_mut_ref)] +// FIXME: Use `SyncUnsafeCell` instead of allowing `static_mut_refs` lint +#[cfg_attr(bootstrap, allow(static_mut_ref))] +#[cfg_attr(not(bootstrap), allow(static_mut_refs))] pub mod panic_count { use crate::cell::Cell; use crate::sync::atomic::{AtomicUsize, Ordering}; diff --git a/library/std/src/sys/pal/common/thread_local/fast_local.rs b/library/std/src/sys/pal/common/thread_local/fast_local.rs index 04c0dd6f7509..646dcd7f3a3e 100644 --- a/library/std/src/sys/pal/common/thread_local/fast_local.rs +++ b/library/std/src/sys/pal/common/thread_local/fast_local.rs @@ -13,8 +13,9 @@ pub macro thread_local_inner { (@key $t:ty, const $init:expr) => {{ #[inline] #[deny(unsafe_op_in_unsafe_fn)] - // FIXME: Use `SyncUnsafeCell` instead of allowing `static_mut_ref` lint - #[allow(static_mut_ref)] + // FIXME: Use `SyncUnsafeCell` instead of allowing `static_mut_refs` lint + #[cfg_attr(bootstrap, allow(static_mut_ref))] + #[cfg_attr(not(bootstrap), allow(static_mut_refs))] unsafe fn __getit( _init: $crate::option::Option<&mut $crate::option::Option<$t>>, ) -> $crate::option::Option<&'static $t> { diff --git a/library/std/src/sys/pal/common/thread_local/static_local.rs b/library/std/src/sys/pal/common/thread_local/static_local.rs index 0dde78b14dbb..4f2b68689622 100644 --- a/library/std/src/sys/pal/common/thread_local/static_local.rs +++ b/library/std/src/sys/pal/common/thread_local/static_local.rs @@ -11,8 +11,9 @@ pub macro thread_local_inner { (@key $t:ty, const $init:expr) => {{ #[inline] // see comments below 
#[deny(unsafe_op_in_unsafe_fn)] - // FIXME: Use `SyncUnsafeCell` instead of allowing `static_mut_ref` lint - #[allow(static_mut_ref)] + // FIXME: Use `SyncUnsafeCell` instead of allowing `static_mut_refs` lint + #[cfg_attr(bootstrap, allow(static_mut_ref))] + #[cfg_attr(not(bootstrap), allow(static_mut_refs))] unsafe fn __getit( _init: $crate::option::Option<&mut $crate::option::Option<$t>>, ) -> $crate::option::Option<&'static $t> { diff --git a/library/std/src/thread/local.rs b/library/std/src/thread/local.rs index 83d5d63556fc..d1213e2f1668 100644 --- a/library/std/src/thread/local.rs +++ b/library/std/src/thread/local.rs @@ -180,8 +180,8 @@ impl fmt::Debug for LocalKey { #[stable(feature = "rust1", since = "1.0.0")] #[cfg_attr(not(test), rustc_diagnostic_item = "thread_local_macro")] #[allow_internal_unstable(thread_local_internals)] -// FIXME: Use `SyncUnsafeCell` instead of allowing `static_mut_ref` lint -#[allow(static_mut_ref)] +// FIXME: Use `SyncUnsafeCell` instead of allowing `static_mut_refs` lint +#[cfg_attr(not(bootstrap), allow(static_mut_refs))] macro_rules! thread_local { // empty (base case for the recursion) () => {}; diff --git a/src/tools/miri/tests/fail/tls/tls_static_dealloc.rs b/src/tools/miri/tests/fail/tls/tls_static_dealloc.rs index 762a8d85314f..d47a05d8475c 100644 --- a/src/tools/miri/tests/fail/tls/tls_static_dealloc.rs +++ b/src/tools/miri/tests/fail/tls/tls_static_dealloc.rs @@ -1,8 +1,8 @@ //! Ensure that thread-local statics get deallocated when the thread dies. 
#![feature(thread_local)] -// FIXME: Use `SyncUnsafeCell` instead of allowing `static_mut_ref` lint -#![allow(static_mut_ref)] +// FIXME: Use `SyncUnsafeCell` instead of allowing `static_mut_refs` lint +#![allow(static_mut_refs)] #[thread_local] static mut TLS: u8 = 0; diff --git a/src/tools/miri/tests/pass/static_mut.rs b/src/tools/miri/tests/pass/static_mut.rs index c1e58b70adb0..6b0c0297726f 100644 --- a/src/tools/miri/tests/pass/static_mut.rs +++ b/src/tools/miri/tests/pass/static_mut.rs @@ -1,7 +1,7 @@ static mut FOO: i32 = 42; -// FIXME: Use `SyncUnsafeCell` instead of allowing `static_mut_ref` lint -#[allow(static_mut_ref)] +// FIXME: Use `SyncUnsafeCell` instead of allowing `static_mut_refs` lint +#[allow(static_mut_refs)] static BAR: Foo = Foo(unsafe { &FOO as *const _ }); #[allow(dead_code)] diff --git a/src/tools/miri/tests/pass/tls/tls_static.rs b/src/tools/miri/tests/pass/tls/tls_static.rs index 9be00af47aa3..fea5bb1db5e6 100644 --- a/src/tools/miri/tests/pass/tls/tls_static.rs +++ b/src/tools/miri/tests/pass/tls/tls_static.rs @@ -8,8 +8,8 @@ //! test, we also check that thread-locals act as per-thread statics. 
#![feature(thread_local)] -// FIXME: Use `SyncUnsafeCell` instead of allowing `static_mut_ref` lint -#![allow(static_mut_ref)] +// FIXME: Use `SyncUnsafeCell` instead of allowing `static_mut_refs` lint +#![allow(static_mut_refs)] use std::thread; diff --git a/tests/ui/abi/statics/static-mut-foreign.rs b/tests/ui/abi/statics/static-mut-foreign.rs index fdd775da5784..f32ce8cf085a 100644 --- a/tests/ui/abi/statics/static-mut-foreign.rs +++ b/tests/ui/abi/statics/static-mut-foreign.rs @@ -33,9 +33,9 @@ unsafe fn run() { rust_dbg_static_mut = -3; assert_eq!(rust_dbg_static_mut, -3); static_bound(&rust_dbg_static_mut); - //~^ WARN shared reference of mutable static is discouraged [static_mut_ref] + //~^ WARN shared reference to mutable static is discouraged [static_mut_refs] static_bound_set(&mut rust_dbg_static_mut); - //~^ WARN mutable reference of mutable static is discouraged [static_mut_ref] + //~^ WARN mutable reference to mutable static is discouraged [static_mut_refs] } pub fn main() { diff --git a/tests/ui/abi/statics/static-mut-foreign.stderr b/tests/ui/abi/statics/static-mut-foreign.stderr index 144ac056f87e..f393088ff9f3 100644 --- a/tests/ui/abi/statics/static-mut-foreign.stderr +++ b/tests/ui/abi/statics/static-mut-foreign.stderr @@ -1,28 +1,28 @@ -warning: shared reference of mutable static is discouraged +warning: creating a shared reference to mutable static is discouraged --> $DIR/static-mut-foreign.rs:35:18 | LL | static_bound(&rust_dbg_static_mut); - | ^^^^^^^^^^^^^^^^^^^^ shared reference of mutable static + | ^^^^^^^^^^^^^^^^^^^^ shared reference to mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior - = note: `#[warn(static_mut_ref)]` on by default -help: shared references are dangerous since if there's any kind of mutation of that static 
while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer + = note: this will be a hard error in the 2024 edition + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior + = note: `#[warn(static_mut_refs)]` on by default +help: use `addr_of!` instead to create a raw pointer | LL | static_bound(addr_of!(rust_dbg_static_mut)); | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -warning: mutable reference of mutable static is discouraged +warning: creating a mutable reference to mutable static is discouraged --> $DIR/static-mut-foreign.rs:37:22 | LL | static_bound_set(&mut rust_dbg_static_mut); - | ^^^^^^^^^^^^^^^^^^^^^^^^ mutable reference of mutable static + | ^^^^^^^^^^^^^^^^^^^^^^^^ mutable reference to mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior -help: mutable references are dangerous since if there's any other pointer or reference used for that static while the reference lives, that's UB; use `addr_of_mut!` instead to create a raw pointer + = note: this will be a hard error in the 2024 edition + = note: this mutable reference has lifetime `'static`, but if the static gets accessed (read or written) by any other means, or any other reference is created, then any further use of this mutable reference is Undefined Behavior +help: use `addr_of_mut!` instead to create a raw pointer | LL | static_bound_set(addr_of_mut!(rust_dbg_static_mut)); | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests/ui/borrowck/borrowck-access-permissions.rs b/tests/ui/borrowck/borrowck-access-permissions.rs index 1638644103ba..be11286a523d 100644 --- a/tests/ui/borrowck/borrowck-access-permissions.rs +++ 
b/tests/ui/borrowck/borrowck-access-permissions.rs @@ -16,7 +16,7 @@ fn main() { let _y1 = &mut static_x; //~ ERROR [E0596] unsafe { let _y2 = &mut static_x_mut; - //~^ WARN mutable reference of mutable static is discouraged [static_mut_ref] + //~^ WARN mutable reference to mutable static is discouraged [static_mut_refs] } } diff --git a/tests/ui/borrowck/borrowck-access-permissions.stderr b/tests/ui/borrowck/borrowck-access-permissions.stderr index 93d92295dd9b..11e2b63bd6cd 100644 --- a/tests/ui/borrowck/borrowck-access-permissions.stderr +++ b/tests/ui/borrowck/borrowck-access-permissions.stderr @@ -1,14 +1,14 @@ -warning: mutable reference of mutable static is discouraged +warning: creating a mutable reference to mutable static is discouraged --> $DIR/borrowck-access-permissions.rs:18:23 | LL | let _y2 = &mut static_x_mut; - | ^^^^^^^^^^^^^^^^^ mutable reference of mutable static + | ^^^^^^^^^^^^^^^^^ mutable reference to mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior - = note: `#[warn(static_mut_ref)]` on by default -help: mutable references are dangerous since if there's any other pointer or reference used for that static while the reference lives, that's UB; use `addr_of_mut!` instead to create a raw pointer + = note: this will be a hard error in the 2024 edition + = note: this mutable reference has lifetime `'static`, but if the static gets accessed (read or written) by any other means, or any other reference is created, then any further use of this mutable reference is Undefined Behavior + = note: `#[warn(static_mut_refs)]` on by default +help: use `addr_of_mut!` instead to create a raw pointer | LL | let _y2 = addr_of_mut!(static_x_mut); | ~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests/ui/borrowck/borrowck-unsafe-static-mutable-borrows.rs 
b/tests/ui/borrowck/borrowck-unsafe-static-mutable-borrows.rs index a89cad20f971..b09c96ada8ab 100644 --- a/tests/ui/borrowck/borrowck-unsafe-static-mutable-borrows.rs +++ b/tests/ui/borrowck/borrowck-unsafe-static-mutable-borrows.rs @@ -17,7 +17,7 @@ impl Foo { fn main() { unsafe { let sfoo: *mut Foo = &mut SFOO; - //~^ WARN mutable reference of mutable static is discouraged [static_mut_ref] + //~^ WARN mutable reference to mutable static is discouraged [static_mut_refs] let x = (*sfoo).x(); (*sfoo).x[1] += 1; *x += 1; diff --git a/tests/ui/borrowck/borrowck-unsafe-static-mutable-borrows.stderr b/tests/ui/borrowck/borrowck-unsafe-static-mutable-borrows.stderr index 7a3824f79a4c..354d70eb1ad4 100644 --- a/tests/ui/borrowck/borrowck-unsafe-static-mutable-borrows.stderr +++ b/tests/ui/borrowck/borrowck-unsafe-static-mutable-borrows.stderr @@ -1,14 +1,14 @@ -warning: mutable reference of mutable static is discouraged +warning: creating a mutable reference to mutable static is discouraged --> $DIR/borrowck-unsafe-static-mutable-borrows.rs:19:30 | LL | let sfoo: *mut Foo = &mut SFOO; - | ^^^^^^^^^ mutable reference of mutable static + | ^^^^^^^^^ mutable reference to mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior - = note: `#[warn(static_mut_ref)]` on by default -help: mutable references are dangerous since if there's any other pointer or reference used for that static while the reference lives, that's UB; use `addr_of_mut!` instead to create a raw pointer + = note: this will be a hard error in the 2024 edition + = note: this mutable reference has lifetime `'static`, but if the static gets accessed (read or written) by any other means, or any other reference is created, then any further use of this mutable reference is Undefined Behavior + = note: 
`#[warn(static_mut_refs)]` on by default +help: use `addr_of_mut!` instead to create a raw pointer | LL | let sfoo: *mut Foo = addr_of_mut!(SFOO); | ~~~~~~~~~~~~~~~~~~ diff --git a/tests/ui/borrowck/issue-20801.rs b/tests/ui/borrowck/issue-20801.rs index ec83af5d5dfc..7e3d3703dc76 100644 --- a/tests/ui/borrowck/issue-20801.rs +++ b/tests/ui/borrowck/issue-20801.rs @@ -12,7 +12,7 @@ fn imm_ref() -> &'static T { fn mut_ref() -> &'static mut T { unsafe { &mut GLOBAL_MUT_T } - //~^ WARN mutable reference of mutable static is discouraged [static_mut_ref] + //~^ WARN mutable reference to mutable static is discouraged [static_mut_refs] } fn mut_ptr() -> *mut T { diff --git a/tests/ui/borrowck/issue-20801.stderr b/tests/ui/borrowck/issue-20801.stderr index b2bee2d88039..97294afd3df9 100644 --- a/tests/ui/borrowck/issue-20801.stderr +++ b/tests/ui/borrowck/issue-20801.stderr @@ -1,14 +1,14 @@ -warning: mutable reference of mutable static is discouraged +warning: creating a mutable reference to mutable static is discouraged --> $DIR/issue-20801.rs:14:14 | LL | unsafe { &mut GLOBAL_MUT_T } - | ^^^^^^^^^^^^^^^^^ mutable reference of mutable static + | ^^^^^^^^^^^^^^^^^ mutable reference to mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior - = note: `#[warn(static_mut_ref)]` on by default -help: mutable references are dangerous since if there's any other pointer or reference used for that static while the reference lives, that's UB; use `addr_of_mut!` instead to create a raw pointer + = note: this will be a hard error in the 2024 edition + = note: this mutable reference has lifetime `'static`, but if the static gets accessed (read or written) by any other means, or any other reference is created, then any further use of this mutable reference is Undefined Behavior + 
= note: `#[warn(static_mut_refs)]` on by default +help: use `addr_of_mut!` instead to create a raw pointer | LL | unsafe { addr_of_mut!(GLOBAL_MUT_T) } | ~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests/ui/borrowck/issue-55492-borrowck-migrate-scans-parents.rs b/tests/ui/borrowck/issue-55492-borrowck-migrate-scans-parents.rs index 9b172b413191..c3909d059633 100644 --- a/tests/ui/borrowck/issue-55492-borrowck-migrate-scans-parents.rs +++ b/tests/ui/borrowck/issue-55492-borrowck-migrate-scans-parents.rs @@ -10,7 +10,7 @@ mod borrowck_closures_unique { //~^ ERROR is not declared as mutable unsafe { c1(&mut Y); - //~^ WARN mutable reference of mutable static is discouraged [static_mut_ref] + //~^ WARN mutable reference to mutable static is discouraged [static_mut_refs] } } } @@ -25,7 +25,7 @@ mod borrowck_closures_unique_grandparent { }; unsafe { c1(&mut Z); - //~^ WARN mutable reference of mutable static is discouraged [static_mut_ref] + //~^ WARN mutable reference to mutable static is discouraged [static_mut_refs] } } } @@ -62,7 +62,7 @@ fn main() { static mut X: isize = 2; unsafe { borrowck_closures_unique::e(&mut X); - //~^ WARN mutable reference of mutable static is discouraged [static_mut_ref] + //~^ WARN mutable reference to mutable static is discouraged [static_mut_refs] } mutability_errors::capture_assign_whole((1000,)); diff --git a/tests/ui/borrowck/issue-55492-borrowck-migrate-scans-parents.stderr b/tests/ui/borrowck/issue-55492-borrowck-migrate-scans-parents.stderr index e4e4947fce1c..098a2964e9fc 100644 --- a/tests/ui/borrowck/issue-55492-borrowck-migrate-scans-parents.stderr +++ b/tests/ui/borrowck/issue-55492-borrowck-migrate-scans-parents.stderr @@ -1,42 +1,42 @@ -warning: mutable reference of mutable static is discouraged +warning: creating a mutable reference to mutable static is discouraged --> $DIR/issue-55492-borrowck-migrate-scans-parents.rs:12:16 | LL | c1(&mut Y); - | ^^^^^^ mutable reference of mutable static + | ^^^^^^ mutable reference to 
mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior - = note: `#[warn(static_mut_ref)]` on by default -help: mutable references are dangerous since if there's any other pointer or reference used for that static while the reference lives, that's UB; use `addr_of_mut!` instead to create a raw pointer + = note: this will be a hard error in the 2024 edition + = note: this mutable reference has lifetime `'static`, but if the static gets accessed (read or written) by any other means, or any other reference is created, then any further use of this mutable reference is Undefined Behavior + = note: `#[warn(static_mut_refs)]` on by default +help: use `addr_of_mut!` instead to create a raw pointer | LL | c1(addr_of_mut!(Y)); | ~~~~~~~~~~~~~~~ -warning: mutable reference of mutable static is discouraged +warning: creating a mutable reference to mutable static is discouraged --> $DIR/issue-55492-borrowck-migrate-scans-parents.rs:27:16 | LL | c1(&mut Z); - | ^^^^^^ mutable reference of mutable static + | ^^^^^^ mutable reference to mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior -help: mutable references are dangerous since if there's any other pointer or reference used for that static while the reference lives, that's UB; use `addr_of_mut!` instead to create a raw pointer + = note: this will be a hard error in the 2024 edition + = note: this mutable reference has lifetime `'static`, but if the static gets accessed (read or written) by any other means, or any other reference is created, then any further use of this mutable reference is Undefined Behavior 
+help: use `addr_of_mut!` instead to create a raw pointer | LL | c1(addr_of_mut!(Z)); | ~~~~~~~~~~~~~~~ -warning: mutable reference of mutable static is discouraged +warning: creating a mutable reference to mutable static is discouraged --> $DIR/issue-55492-borrowck-migrate-scans-parents.rs:64:37 | LL | borrowck_closures_unique::e(&mut X); - | ^^^^^^ mutable reference of mutable static + | ^^^^^^ mutable reference to mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior -help: mutable references are dangerous since if there's any other pointer or reference used for that static while the reference lives, that's UB; use `addr_of_mut!` instead to create a raw pointer + = note: this will be a hard error in the 2024 edition + = note: this mutable reference has lifetime `'static`, but if the static gets accessed (read or written) by any other means, or any other reference is created, then any further use of this mutable reference is Undefined Behavior +help: use `addr_of_mut!` instead to create a raw pointer | LL | borrowck_closures_unique::e(addr_of_mut!(X)); | ~~~~~~~~~~~~~~~ diff --git a/tests/ui/consts/const_let_assign2.rs b/tests/ui/consts/const_let_assign2.rs index f239507d2454..e8ebba7b2088 100644 --- a/tests/ui/consts/const_let_assign2.rs +++ b/tests/ui/consts/const_let_assign2.rs @@ -16,7 +16,7 @@ static mut BB: AA = AA::new(); fn main() { let ptr = unsafe { &mut BB }; - //~^ WARN mutable reference of mutable static is discouraged [static_mut_ref] + //~^ WARN mutable reference to mutable static is discouraged [static_mut_refs] for a in ptr.data.iter() { println!("{}", a); } diff --git a/tests/ui/consts/const_let_assign2.stderr b/tests/ui/consts/const_let_assign2.stderr index 2764153a8a59..5ae8fd353dd7 100644 --- a/tests/ui/consts/const_let_assign2.stderr 
+++ b/tests/ui/consts/const_let_assign2.stderr @@ -1,14 +1,14 @@ -warning: mutable reference of mutable static is discouraged +warning: creating a mutable reference to mutable static is discouraged --> $DIR/const_let_assign2.rs:18:24 | LL | let ptr = unsafe { &mut BB }; - | ^^^^^^^ mutable reference of mutable static + | ^^^^^^^ mutable reference to mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior - = note: `#[warn(static_mut_ref)]` on by default -help: mutable references are dangerous since if there's any other pointer or reference used for that static while the reference lives, that's UB; use `addr_of_mut!` instead to create a raw pointer + = note: this will be a hard error in the 2024 edition + = note: this mutable reference has lifetime `'static`, but if the static gets accessed (read or written) by any other means, or any other reference is created, then any further use of this mutable reference is Undefined Behavior + = note: `#[warn(static_mut_refs)]` on by default +help: use `addr_of_mut!` instead to create a raw pointer | LL | let ptr = unsafe { addr_of_mut!(BB) }; | ~~~~~~~~~~~~~~~~ diff --git a/tests/ui/consts/const_refs_to_static_fail_invalid.rs b/tests/ui/consts/const_refs_to_static_fail_invalid.rs index 363a6da0901c..be1574af588a 100644 --- a/tests/ui/consts/const_refs_to_static_fail_invalid.rs +++ b/tests/ui/consts/const_refs_to_static_fail_invalid.rs @@ -1,7 +1,7 @@ //@ normalize-stderr-test "(the raw bytes of the constant) \(size: [0-9]*, align: [0-9]*\)" -> "$1 (size: $$SIZE, align: $$ALIGN)" //@ normalize-stderr-test "([0-9a-f][0-9a-f] |╾─*ALLOC[0-9]+(\+[a-z0-9]+)?()?─*╼ )+ *│.*" -> "HEX_DUMP" #![feature(const_refs_to_static)] -#![allow(static_mut_ref)] +#![allow(static_mut_refs)] fn invalid() { static S: i8 = 10; @@ -43,8 +43,8 @@ 
fn mutable() { // This *must not build*, the constant we are matching against // could change its value! match &42 { - C => {}, //~ERROR: could not evaluate constant pattern - _ => {}, + C => {} //~ERROR: could not evaluate constant pattern + _ => {} } } diff --git a/tests/ui/consts/const_refs_to_static_fail_invalid.stderr b/tests/ui/consts/const_refs_to_static_fail_invalid.stderr index 082f8532444a..4ff15f0c28b8 100644 --- a/tests/ui/consts/const_refs_to_static_fail_invalid.stderr +++ b/tests/ui/consts/const_refs_to_static_fail_invalid.stderr @@ -46,7 +46,7 @@ LL | const C: &i32 = unsafe { &S_MUT }; error: could not evaluate constant pattern --> $DIR/const_refs_to_static_fail_invalid.rs:46:9 | -LL | C => {}, +LL | C => {} | ^ error: aborting due to 6 previous errors diff --git a/tests/ui/consts/issue-17718-const-bad-values.rs b/tests/ui/consts/issue-17718-const-bad-values.rs index e112a346b656..33347d8df622 100644 --- a/tests/ui/consts/issue-17718-const-bad-values.rs +++ b/tests/ui/consts/issue-17718-const-bad-values.rs @@ -1,4 +1,4 @@ -#![allow(static_mut_ref)] +#![allow(static_mut_refs)] const C1: &'static mut [usize] = &mut []; //~^ ERROR: mutable references are not allowed diff --git a/tests/ui/consts/miri_unleashed/const_refers_to_static_cross_crate.rs b/tests/ui/consts/miri_unleashed/const_refers_to_static_cross_crate.rs index 6ec44aab2c1b..5e7845e4e82f 100644 --- a/tests/ui/consts/miri_unleashed/const_refers_to_static_cross_crate.rs +++ b/tests/ui/consts/miri_unleashed/const_refers_to_static_cross_crate.rs @@ -3,7 +3,7 @@ //@ normalize-stderr-test "(the raw bytes of the constant) \(size: [0-9]*, align: [0-9]*\)" -> "$1 (size: $$SIZE, align: $$ALIGN)" //@ normalize-stderr-test "([0-9a-f][0-9a-f] |╾─*ALLOC[0-9]+(\+[a-z0-9]+)?()?─*╼ )+ *│.*" -> "HEX_DUMP" #![feature(exclusive_range_pattern, half_open_range_patterns_in_slices)] -#![allow(static_mut_ref)] +#![allow(static_mut_refs)] extern crate static_cross_crate; diff --git 
a/tests/ui/consts/miri_unleashed/extern-static.rs b/tests/ui/consts/miri_unleashed/extern-static.rs index 1a523cc8e312..c9d9397518ee 100644 --- a/tests/ui/consts/miri_unleashed/extern-static.rs +++ b/tests/ui/consts/miri_unleashed/extern-static.rs @@ -1,6 +1,6 @@ //@ compile-flags: -Zunleash-the-miri-inside-of-you #![feature(thread_local)] -#![allow(static_mut_ref)] +#![allow(static_mut_refs)] extern "C" { static mut DATA: u8; diff --git a/tests/ui/consts/miri_unleashed/mutable_references_err.32bit.stderr b/tests/ui/consts/miri_unleashed/mutable_references_err.32bit.stderr index 82739c08cf1b..ea7573bf2176 100644 --- a/tests/ui/consts/miri_unleashed/mutable_references_err.32bit.stderr +++ b/tests/ui/consts/miri_unleashed/mutable_references_err.32bit.stderr @@ -5,13 +5,13 @@ LL | const MUH: Meh = Meh { | ^^^^^^^^^^^^^^ error: encountered mutable pointer in final value of constant - --> $DIR/mutable_references_err.rs:27:1 + --> $DIR/mutable_references_err.rs:28:1 | LL | const SNEAKY: &dyn Sync = &Synced { x: UnsafeCell::new(42) }; | ^^^^^^^^^^^^^^^^^^^^^^^ error[E0080]: it is undefined behavior to use this value - --> $DIR/mutable_references_err.rs:32:1 + --> $DIR/mutable_references_err.rs:33:1 | LL | const SUBTLE: &mut i32 = unsafe { &mut FOO }; | ^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered mutable reference in a `const` or `static` @@ -22,13 +22,13 @@ LL | const SUBTLE: &mut i32 = unsafe { &mut FOO }; } error: encountered mutable pointer in final value of constant - --> $DIR/mutable_references_err.rs:35:1 + --> $DIR/mutable_references_err.rs:36:1 | LL | const BLUNT: &mut i32 = &mut 42; | ^^^^^^^^^^^^^^^^^^^^^ error[E0080]: it is undefined behavior to use this value - --> $DIR/mutable_references_err.rs:40:1 + --> $DIR/mutable_references_err.rs:41:1 | LL | static mut MUT_TO_READONLY: &mut i32 = unsafe { &mut *(&READONLY as *const _ as *mut _) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered mutable reference or box 
pointing to read-only memory @@ -39,7 +39,7 @@ LL | static mut MUT_TO_READONLY: &mut i32 = unsafe { &mut *(&READONLY as *const } error[E0080]: it is undefined behavior to use this value - --> $DIR/mutable_references_err.rs:47:1 + --> $DIR/mutable_references_err.rs:48:1 | LL | const POINTS_TO_MUTABLE1: &i32 = unsafe { &MUTABLE }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered reference to mutable memory in `const` @@ -50,49 +50,49 @@ LL | const POINTS_TO_MUTABLE1: &i32 = unsafe { &MUTABLE }; } note: erroneous constant encountered - --> $DIR/mutable_references_err.rs:49:34 + --> $DIR/mutable_references_err.rs:50:34 | LL | const READS_FROM_MUTABLE: i32 = *POINTS_TO_MUTABLE1; | ^^^^^^^^^^^^^^^^^^ error[E0080]: evaluation of constant value failed - --> $DIR/mutable_references_err.rs:51:43 + --> $DIR/mutable_references_err.rs:52:43 | LL | const POINTS_TO_MUTABLE2: &i32 = unsafe { &*MUTABLE_REF }; | ^^^^^^^^^^^^^ constant accesses mutable global memory error: encountered mutable pointer in final value of constant - --> $DIR/mutable_references_err.rs:55:1 + --> $DIR/mutable_references_err.rs:56:1 | LL | const POINTS_TO_MUTABLE_INNER: *const i32 = &mut 42 as *mut _ as *const _; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: encountered mutable pointer in final value of constant - --> $DIR/mutable_references_err.rs:57:1 + --> $DIR/mutable_references_err.rs:58:1 | LL | const POINTS_TO_MUTABLE_INNER2: *const i32 = &mut 42 as *const _; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: encountered mutable pointer in final value of constant - --> $DIR/mutable_references_err.rs:59:1 + --> $DIR/mutable_references_err.rs:60:1 | LL | const INTERIOR_MUTABLE_BEHIND_RAW: *mut i32 = &UnsafeCell::new(42) as *const _ as *mut _; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: encountered mutable pointer in final value of constant - --> $DIR/mutable_references_err.rs:69:1 + --> $DIR/mutable_references_err.rs:72:1 | LL | const RAW_SYNC: SyncPtr = SyncPtr { 
x: &AtomicI32::new(42) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: encountered mutable pointer in final value of constant - --> $DIR/mutable_references_err.rs:71:1 + --> $DIR/mutable_references_err.rs:74:1 | -LL | const RAW_MUT_CAST: SyncPtr = SyncPtr { x : &mut 42 as *mut _ as *const _ }; +LL | const RAW_MUT_CAST: SyncPtr = SyncPtr { x: &mut 42 as *mut _ as *const _ }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: encountered mutable pointer in final value of constant - --> $DIR/mutable_references_err.rs:73:1 + --> $DIR/mutable_references_err.rs:76:1 | LL | const RAW_MUT_COERCE: SyncPtr = SyncPtr { x: &mut 0 }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -100,77 +100,77 @@ LL | const RAW_MUT_COERCE: SyncPtr = SyncPtr { x: &mut 0 }; warning: skipping const checks | help: skipping check that does not even have a feature gate - --> $DIR/mutable_references_err.rs:18:8 + --> $DIR/mutable_references_err.rs:19:8 | LL | x: &UnsafeCell::new(42), | ^^^^^^^^^^^^^^^^^^^^ help: skipping check that does not even have a feature gate - --> $DIR/mutable_references_err.rs:27:27 + --> $DIR/mutable_references_err.rs:28:27 | LL | const SNEAKY: &dyn Sync = &Synced { x: UnsafeCell::new(42) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: skipping check for `const_refs_to_static` feature - --> $DIR/mutable_references_err.rs:32:40 + --> $DIR/mutable_references_err.rs:33:40 | LL | const SUBTLE: &mut i32 = unsafe { &mut FOO }; | ^^^ help: skipping check for `const_mut_refs` feature - --> $DIR/mutable_references_err.rs:32:35 + --> $DIR/mutable_references_err.rs:33:35 | LL | const SUBTLE: &mut i32 = unsafe { &mut FOO }; | ^^^^^^^^ help: skipping check that does not even have a feature gate - --> $DIR/mutable_references_err.rs:35:25 + --> $DIR/mutable_references_err.rs:36:25 | LL | const BLUNT: &mut i32 = &mut 42; | ^^^^^^^ help: skipping check for `const_mut_refs` feature - --> $DIR/mutable_references_err.rs:40:49 + --> $DIR/mutable_references_err.rs:41:49 | LL | static mut MUT_TO_READONLY: &mut i32 
= unsafe { &mut *(&READONLY as *const _ as *mut _) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: skipping check for `const_mut_refs` feature - --> $DIR/mutable_references_err.rs:40:49 + --> $DIR/mutable_references_err.rs:41:49 | LL | static mut MUT_TO_READONLY: &mut i32 = unsafe { &mut *(&READONLY as *const _ as *mut _) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: skipping check for `const_refs_to_static` feature - --> $DIR/mutable_references_err.rs:47:44 + --> $DIR/mutable_references_err.rs:48:44 | LL | const POINTS_TO_MUTABLE1: &i32 = unsafe { &MUTABLE }; | ^^^^^^^ help: skipping check for `const_refs_to_static` feature - --> $DIR/mutable_references_err.rs:51:45 + --> $DIR/mutable_references_err.rs:52:45 | LL | const POINTS_TO_MUTABLE2: &i32 = unsafe { &*MUTABLE_REF }; | ^^^^^^^^^^^ help: skipping check that does not even have a feature gate - --> $DIR/mutable_references_err.rs:55:45 + --> $DIR/mutable_references_err.rs:56:45 | LL | const POINTS_TO_MUTABLE_INNER: *const i32 = &mut 42 as *mut _ as *const _; | ^^^^^^^ help: skipping check that does not even have a feature gate - --> $DIR/mutable_references_err.rs:57:46 + --> $DIR/mutable_references_err.rs:58:46 | LL | const POINTS_TO_MUTABLE_INNER2: *const i32 = &mut 42 as *const _; | ^^^^^^^ help: skipping check that does not even have a feature gate - --> $DIR/mutable_references_err.rs:59:47 + --> $DIR/mutable_references_err.rs:60:47 | LL | const INTERIOR_MUTABLE_BEHIND_RAW: *mut i32 = &UnsafeCell::new(42) as *const _ as *mut _; | ^^^^^^^^^^^^^^^^^^^^ help: skipping check that does not even have a feature gate - --> $DIR/mutable_references_err.rs:69:51 + --> $DIR/mutable_references_err.rs:72:51 | LL | const RAW_SYNC: SyncPtr = SyncPtr { x: &AtomicI32::new(42) }; | ^^^^^^^^^^^^^^^^^^^ help: skipping check that does not even have a feature gate - --> $DIR/mutable_references_err.rs:71:50 + --> $DIR/mutable_references_err.rs:74:49 | -LL | const RAW_MUT_CAST: SyncPtr = SyncPtr { x : &mut 42 as *mut _ as 
*const _ }; - | ^^^^^^^ +LL | const RAW_MUT_CAST: SyncPtr = SyncPtr { x: &mut 42 as *mut _ as *const _ }; + | ^^^^^^^ help: skipping check that does not even have a feature gate - --> $DIR/mutable_references_err.rs:73:51 + --> $DIR/mutable_references_err.rs:76:51 | LL | const RAW_MUT_COERCE: SyncPtr = SyncPtr { x: &mut 0 }; | ^^^^^^ diff --git a/tests/ui/consts/miri_unleashed/mutable_references_err.64bit.stderr b/tests/ui/consts/miri_unleashed/mutable_references_err.64bit.stderr index 844483d88e93..2b5e8643f3b9 100644 --- a/tests/ui/consts/miri_unleashed/mutable_references_err.64bit.stderr +++ b/tests/ui/consts/miri_unleashed/mutable_references_err.64bit.stderr @@ -5,13 +5,13 @@ LL | const MUH: Meh = Meh { | ^^^^^^^^^^^^^^ error: encountered mutable pointer in final value of constant - --> $DIR/mutable_references_err.rs:27:1 + --> $DIR/mutable_references_err.rs:28:1 | LL | const SNEAKY: &dyn Sync = &Synced { x: UnsafeCell::new(42) }; | ^^^^^^^^^^^^^^^^^^^^^^^ error[E0080]: it is undefined behavior to use this value - --> $DIR/mutable_references_err.rs:32:1 + --> $DIR/mutable_references_err.rs:33:1 | LL | const SUBTLE: &mut i32 = unsafe { &mut FOO }; | ^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered mutable reference in a `const` or `static` @@ -22,13 +22,13 @@ LL | const SUBTLE: &mut i32 = unsafe { &mut FOO }; } error: encountered mutable pointer in final value of constant - --> $DIR/mutable_references_err.rs:35:1 + --> $DIR/mutable_references_err.rs:36:1 | LL | const BLUNT: &mut i32 = &mut 42; | ^^^^^^^^^^^^^^^^^^^^^ error[E0080]: it is undefined behavior to use this value - --> $DIR/mutable_references_err.rs:40:1 + --> $DIR/mutable_references_err.rs:41:1 | LL | static mut MUT_TO_READONLY: &mut i32 = unsafe { &mut *(&READONLY as *const _ as *mut _) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered mutable reference or box pointing to read-only memory @@ -39,7 +39,7 @@ LL | static mut MUT_TO_READONLY: &mut i32 = unsafe 
{ &mut *(&READONLY as *const } error[E0080]: it is undefined behavior to use this value - --> $DIR/mutable_references_err.rs:47:1 + --> $DIR/mutable_references_err.rs:48:1 | LL | const POINTS_TO_MUTABLE1: &i32 = unsafe { &MUTABLE }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered reference to mutable memory in `const` @@ -50,49 +50,49 @@ LL | const POINTS_TO_MUTABLE1: &i32 = unsafe { &MUTABLE }; } note: erroneous constant encountered - --> $DIR/mutable_references_err.rs:49:34 + --> $DIR/mutable_references_err.rs:50:34 | LL | const READS_FROM_MUTABLE: i32 = *POINTS_TO_MUTABLE1; | ^^^^^^^^^^^^^^^^^^ error[E0080]: evaluation of constant value failed - --> $DIR/mutable_references_err.rs:51:43 + --> $DIR/mutable_references_err.rs:52:43 | LL | const POINTS_TO_MUTABLE2: &i32 = unsafe { &*MUTABLE_REF }; | ^^^^^^^^^^^^^ constant accesses mutable global memory error: encountered mutable pointer in final value of constant - --> $DIR/mutable_references_err.rs:55:1 + --> $DIR/mutable_references_err.rs:56:1 | LL | const POINTS_TO_MUTABLE_INNER: *const i32 = &mut 42 as *mut _ as *const _; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: encountered mutable pointer in final value of constant - --> $DIR/mutable_references_err.rs:57:1 + --> $DIR/mutable_references_err.rs:58:1 | LL | const POINTS_TO_MUTABLE_INNER2: *const i32 = &mut 42 as *const _; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: encountered mutable pointer in final value of constant - --> $DIR/mutable_references_err.rs:59:1 + --> $DIR/mutable_references_err.rs:60:1 | LL | const INTERIOR_MUTABLE_BEHIND_RAW: *mut i32 = &UnsafeCell::new(42) as *const _ as *mut _; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: encountered mutable pointer in final value of constant - --> $DIR/mutable_references_err.rs:69:1 + --> $DIR/mutable_references_err.rs:72:1 | LL | const RAW_SYNC: SyncPtr = SyncPtr { x: &AtomicI32::new(42) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: encountered mutable pointer 
in final value of constant - --> $DIR/mutable_references_err.rs:71:1 + --> $DIR/mutable_references_err.rs:74:1 | -LL | const RAW_MUT_CAST: SyncPtr = SyncPtr { x : &mut 42 as *mut _ as *const _ }; +LL | const RAW_MUT_CAST: SyncPtr = SyncPtr { x: &mut 42 as *mut _ as *const _ }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: encountered mutable pointer in final value of constant - --> $DIR/mutable_references_err.rs:73:1 + --> $DIR/mutable_references_err.rs:76:1 | LL | const RAW_MUT_COERCE: SyncPtr = SyncPtr { x: &mut 0 }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -100,77 +100,77 @@ LL | const RAW_MUT_COERCE: SyncPtr = SyncPtr { x: &mut 0 }; warning: skipping const checks | help: skipping check that does not even have a feature gate - --> $DIR/mutable_references_err.rs:18:8 + --> $DIR/mutable_references_err.rs:19:8 | LL | x: &UnsafeCell::new(42), | ^^^^^^^^^^^^^^^^^^^^ help: skipping check that does not even have a feature gate - --> $DIR/mutable_references_err.rs:27:27 + --> $DIR/mutable_references_err.rs:28:27 | LL | const SNEAKY: &dyn Sync = &Synced { x: UnsafeCell::new(42) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: skipping check for `const_refs_to_static` feature - --> $DIR/mutable_references_err.rs:32:40 + --> $DIR/mutable_references_err.rs:33:40 | LL | const SUBTLE: &mut i32 = unsafe { &mut FOO }; | ^^^ help: skipping check for `const_mut_refs` feature - --> $DIR/mutable_references_err.rs:32:35 + --> $DIR/mutable_references_err.rs:33:35 | LL | const SUBTLE: &mut i32 = unsafe { &mut FOO }; | ^^^^^^^^ help: skipping check that does not even have a feature gate - --> $DIR/mutable_references_err.rs:35:25 + --> $DIR/mutable_references_err.rs:36:25 | LL | const BLUNT: &mut i32 = &mut 42; | ^^^^^^^ help: skipping check for `const_mut_refs` feature - --> $DIR/mutable_references_err.rs:40:49 + --> $DIR/mutable_references_err.rs:41:49 | LL | static mut MUT_TO_READONLY: &mut i32 = unsafe { &mut *(&READONLY as *const _ as *mut _) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 
help: skipping check for `const_mut_refs` feature - --> $DIR/mutable_references_err.rs:40:49 + --> $DIR/mutable_references_err.rs:41:49 | LL | static mut MUT_TO_READONLY: &mut i32 = unsafe { &mut *(&READONLY as *const _ as *mut _) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: skipping check for `const_refs_to_static` feature - --> $DIR/mutable_references_err.rs:47:44 + --> $DIR/mutable_references_err.rs:48:44 | LL | const POINTS_TO_MUTABLE1: &i32 = unsafe { &MUTABLE }; | ^^^^^^^ help: skipping check for `const_refs_to_static` feature - --> $DIR/mutable_references_err.rs:51:45 + --> $DIR/mutable_references_err.rs:52:45 | LL | const POINTS_TO_MUTABLE2: &i32 = unsafe { &*MUTABLE_REF }; | ^^^^^^^^^^^ help: skipping check that does not even have a feature gate - --> $DIR/mutable_references_err.rs:55:45 + --> $DIR/mutable_references_err.rs:56:45 | LL | const POINTS_TO_MUTABLE_INNER: *const i32 = &mut 42 as *mut _ as *const _; | ^^^^^^^ help: skipping check that does not even have a feature gate - --> $DIR/mutable_references_err.rs:57:46 + --> $DIR/mutable_references_err.rs:58:46 | LL | const POINTS_TO_MUTABLE_INNER2: *const i32 = &mut 42 as *const _; | ^^^^^^^ help: skipping check that does not even have a feature gate - --> $DIR/mutable_references_err.rs:59:47 + --> $DIR/mutable_references_err.rs:60:47 | LL | const INTERIOR_MUTABLE_BEHIND_RAW: *mut i32 = &UnsafeCell::new(42) as *const _ as *mut _; | ^^^^^^^^^^^^^^^^^^^^ help: skipping check that does not even have a feature gate - --> $DIR/mutable_references_err.rs:69:51 + --> $DIR/mutable_references_err.rs:72:51 | LL | const RAW_SYNC: SyncPtr = SyncPtr { x: &AtomicI32::new(42) }; | ^^^^^^^^^^^^^^^^^^^ help: skipping check that does not even have a feature gate - --> $DIR/mutable_references_err.rs:71:50 + --> $DIR/mutable_references_err.rs:74:49 | -LL | const RAW_MUT_CAST: SyncPtr = SyncPtr { x : &mut 42 as *mut _ as *const _ }; - | ^^^^^^^ +LL | const RAW_MUT_CAST: SyncPtr = SyncPtr { x: &mut 42 as *mut _ as 
*const _ }; + | ^^^^^^^ help: skipping check that does not even have a feature gate - --> $DIR/mutable_references_err.rs:73:51 + --> $DIR/mutable_references_err.rs:76:51 | LL | const RAW_MUT_COERCE: SyncPtr = SyncPtr { x: &mut 0 }; | ^^^^^^ diff --git a/tests/ui/consts/miri_unleashed/mutable_references_err.rs b/tests/ui/consts/miri_unleashed/mutable_references_err.rs index 2075adad6f70..feb2c825380d 100644 --- a/tests/ui/consts/miri_unleashed/mutable_references_err.rs +++ b/tests/ui/consts/miri_unleashed/mutable_references_err.rs @@ -1,9 +1,9 @@ //@ stderr-per-bitwidth //@ compile-flags: -Zunleash-the-miri-inside-of-you -#![allow(invalid_reference_casting, static_mut_ref)] +#![allow(invalid_reference_casting, static_mut_refs)] -use std::sync::atomic::*; use std::cell::UnsafeCell; +use std::sync::atomic::*; // this test ensures that our mutability story is sound @@ -14,7 +14,8 @@ unsafe impl Sync for Meh {} // the following will never be ok! no interior mut behind consts, because // all allocs interned here will be marked immutable. -const MUH: Meh = Meh { //~ ERROR: mutable pointer in final value +const MUH: Meh = Meh { + //~^ ERROR encountered mutable pointer in final value of constant x: &UnsafeCell::new(42), }; @@ -59,7 +60,9 @@ const POINTS_TO_MUTABLE_INNER2: *const i32 = &mut 42 as *const _; const INTERIOR_MUTABLE_BEHIND_RAW: *mut i32 = &UnsafeCell::new(42) as *const _ as *mut _; //~^ ERROR: mutable pointer in final value -struct SyncPtr { x : *const T } +struct SyncPtr { + x: *const T, +} unsafe impl Sync for SyncPtr {} // These pass the lifetime checks because of the "tail expression" / "outer scope" rule. @@ -68,7 +71,7 @@ unsafe impl Sync for SyncPtr {} // (Also see `static-no-inner-mut` for similar tests on `static`.) 
const RAW_SYNC: SyncPtr = SyncPtr { x: &AtomicI32::new(42) }; //~^ ERROR mutable pointer in final value -const RAW_MUT_CAST: SyncPtr = SyncPtr { x : &mut 42 as *mut _ as *const _ }; +const RAW_MUT_CAST: SyncPtr = SyncPtr { x: &mut 42 as *mut _ as *const _ }; //~^ ERROR mutable pointer in final value const RAW_MUT_COERCE: SyncPtr = SyncPtr { x: &mut 0 }; //~^ ERROR mutable pointer in final value diff --git a/tests/ui/consts/static-promoted-to-mutable-static.rs b/tests/ui/consts/static-promoted-to-mutable-static.rs index 1cf72781e458..9eb9e1b8dfbe 100644 --- a/tests/ui/consts/static-promoted-to-mutable-static.rs +++ b/tests/ui/consts/static-promoted-to-mutable-static.rs @@ -1,8 +1,9 @@ //@ check-pass -#![allow(non_camel_case_types, non_upper_case_globals, static_mut_ref)] + +#![allow(non_camel_case_types, non_upper_case_globals, static_mut_refs)] pub struct wl_interface { - pub version: i32 + pub version: i32, } pub struct Interface { @@ -10,20 +11,14 @@ pub struct Interface { pub c_ptr: Option<&'static wl_interface>, } -pub static mut wl_callback_interface: wl_interface = wl_interface { - version: 0, -}; +pub static mut wl_callback_interface: wl_interface = wl_interface { version: 0 }; -pub static WL_CALLBACK_INTERFACE: Interface = Interface { - other_interfaces: &[], - c_ptr: Some(unsafe { &wl_callback_interface }), -}; +pub static WL_CALLBACK_INTERFACE: Interface = + Interface { other_interfaces: &[], c_ptr: Some(unsafe { &wl_callback_interface }) }; // This static contains a promoted that points to a static that points to a mutable static. -pub static WL_SURFACE_INTERFACE: Interface = Interface { - other_interfaces: &[&WL_CALLBACK_INTERFACE], - c_ptr: None, -}; +pub static WL_SURFACE_INTERFACE: Interface = + Interface { other_interfaces: &[&WL_CALLBACK_INTERFACE], c_ptr: None }; // And another variant of the same thing, this time with interior mutability. 
use std::sync::OnceLock; diff --git a/tests/ui/consts/static_mut_containing_mut_ref.rs b/tests/ui/consts/static_mut_containing_mut_ref.rs index 710328d6aa79..874a047d807b 100644 --- a/tests/ui/consts/static_mut_containing_mut_ref.rs +++ b/tests/ui/consts/static_mut_containing_mut_ref.rs @@ -1,5 +1,5 @@ //@ build-pass (FIXME(62277): could be check-pass?) -#![allow(static_mut_ref)] +#![allow(static_mut_refs)] static mut STDERR_BUFFER_SPACE: [u8; 42] = [0u8; 42]; diff --git a/tests/ui/consts/static_mut_containing_mut_ref2.rs b/tests/ui/consts/static_mut_containing_mut_ref2.rs index b5110623606b..547f6449f130 100644 --- a/tests/ui/consts/static_mut_containing_mut_ref2.rs +++ b/tests/ui/consts/static_mut_containing_mut_ref2.rs @@ -1,5 +1,5 @@ //@ revisions: stock mut_refs -#![allow(static_mut_ref)] +#![allow(static_mut_refs)] #![cfg_attr(mut_refs, feature(const_mut_refs))] static mut STDERR_BUFFER_SPACE: u8 = 0; diff --git a/tests/ui/drop/issue-23338-ensure-param-drop-order.rs b/tests/ui/drop/issue-23338-ensure-param-drop-order.rs index f283b33f6458..1fa68a2e7381 100644 --- a/tests/ui/drop/issue-23338-ensure-param-drop-order.rs +++ b/tests/ui/drop/issue-23338-ensure-param-drop-order.rs @@ -91,7 +91,7 @@ pub mod d { pub fn max_width() -> u32 { unsafe { (mem::size_of_val(&trails) * 8) as u32 - //~^ WARN shared reference of mutable static is discouraged [static_mut_ref] + //~^ WARN shared reference to mutable static is discouraged [static_mut_refs] } } diff --git a/tests/ui/drop/issue-23338-ensure-param-drop-order.stderr b/tests/ui/drop/issue-23338-ensure-param-drop-order.stderr index fd36ccbcbee4..de1194e74b40 100644 --- a/tests/ui/drop/issue-23338-ensure-param-drop-order.stderr +++ b/tests/ui/drop/issue-23338-ensure-param-drop-order.stderr @@ -1,14 +1,14 @@ -warning: shared reference of mutable static is discouraged +warning: creating a shared reference to mutable static is discouraged --> $DIR/issue-23338-ensure-param-drop-order.rs:93:31 | LL | 
(mem::size_of_val(&trails) * 8) as u32 - | ^^^^^^^ shared reference of mutable static + | ^^^^^^^ shared reference to mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior - = note: `#[warn(static_mut_ref)]` on by default -help: shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer + = note: this will be a hard error in the 2024 edition + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior + = note: `#[warn(static_mut_refs)]` on by default +help: use `addr_of!` instead to create a raw pointer | LL | (mem::size_of_val(addr_of!(trails)) * 8) as u32 | ~~~~~~~~~~~~~~~~ diff --git a/tests/ui/error-codes/E0017.rs b/tests/ui/error-codes/E0017.rs index c128c2779e24..144340b35129 100644 --- a/tests/ui/error-codes/E0017.rs +++ b/tests/ui/error-codes/E0017.rs @@ -13,6 +13,6 @@ static CONST_REF: &'static mut i32 = &mut C; //~ ERROR mutable references are no //~| WARN taking a mutable static STATIC_MUT_REF: &'static mut i32 = unsafe { &mut M }; -//~^ WARN mutable reference of mutable static is discouraged [static_mut_ref] +//~^ WARN mutable reference to mutable static is discouraged [static_mut_refs] fn main() {} diff --git a/tests/ui/error-codes/E0017.stderr b/tests/ui/error-codes/E0017.stderr index eb626a7fe3a9..982ce52764e7 100644 --- a/tests/ui/error-codes/E0017.stderr +++ b/tests/ui/error-codes/E0017.stderr @@ -1,14 +1,14 @@ -warning: mutable reference of mutable static is discouraged +warning: creating a mutable reference to mutable static is discouraged --> $DIR/E0017.rs:15:52 | LL | static STATIC_MUT_REF: 
&'static mut i32 = unsafe { &mut M }; - | ^^^^^^ mutable reference of mutable static + | ^^^^^^ mutable reference to mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior - = note: `#[warn(static_mut_ref)]` on by default -help: mutable references are dangerous since if there's any other pointer or reference used for that static while the reference lives, that's UB; use `addr_of_mut!` instead to create a raw pointer + = note: this will be a hard error in the 2024 edition + = note: this mutable reference has lifetime `'static`, but if the static gets accessed (read or written) by any other means, or any other reference is created, then any further use of this mutable reference is Undefined Behavior + = note: `#[warn(static_mut_refs)]` on by default +help: use `addr_of_mut!` instead to create a raw pointer | LL | static STATIC_MUT_REF: &'static mut i32 = unsafe { addr_of_mut!(M) }; | ~~~~~~~~~~~~~~~ diff --git a/tests/ui/issues/issue-23611-enum-swap-in-drop.rs b/tests/ui/issues/issue-23611-enum-swap-in-drop.rs index 980a2c01f23a..1afaff0f735b 100644 --- a/tests/ui/issues/issue-23611-enum-swap-in-drop.rs +++ b/tests/ui/issues/issue-23611-enum-swap-in-drop.rs @@ -187,7 +187,7 @@ pub mod d { pub fn max_width() -> u32 { unsafe { (mem::size_of_val(&trails) * 8) as u32 - //~^ WARN shared reference of mutable static is discouraged [static_mut_ref] + //~^ WARN shared reference to mutable static is discouraged [static_mut_refs] } } diff --git a/tests/ui/issues/issue-23611-enum-swap-in-drop.stderr b/tests/ui/issues/issue-23611-enum-swap-in-drop.stderr index 14a986a33326..bdf46abea8a1 100644 --- a/tests/ui/issues/issue-23611-enum-swap-in-drop.stderr +++ b/tests/ui/issues/issue-23611-enum-swap-in-drop.stderr @@ -1,14 +1,14 @@ -warning: shared reference of mutable static 
is discouraged +warning: creating a shared reference to mutable static is discouraged --> $DIR/issue-23611-enum-swap-in-drop.rs:189:31 | LL | (mem::size_of_val(&trails) * 8) as u32 - | ^^^^^^^ shared reference of mutable static + | ^^^^^^^ shared reference to mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior - = note: `#[warn(static_mut_ref)]` on by default -help: shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer + = note: this will be a hard error in the 2024 edition + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior + = note: `#[warn(static_mut_refs)]` on by default +help: use `addr_of!` instead to create a raw pointer | LL | (mem::size_of_val(addr_of!(trails)) * 8) as u32 | ~~~~~~~~~~~~~~~~ diff --git a/tests/ui/issues/issue-54410.rs b/tests/ui/issues/issue-54410.rs index 51eea3ad9ac4..208be6f221c2 100644 --- a/tests/ui/issues/issue-54410.rs +++ b/tests/ui/issues/issue-54410.rs @@ -5,5 +5,5 @@ extern "C" { fn main() { println!("{:p}", unsafe { &symbol }); - //~^ WARN: shared reference of mutable static is discouraged + //~^ WARN creating a shared reference to mutable static is discouraged [static_mut_refs] } diff --git a/tests/ui/issues/issue-54410.stderr b/tests/ui/issues/issue-54410.stderr index 941c1be3eab5..7cc67ab72c39 100644 --- a/tests/ui/issues/issue-54410.stderr +++ b/tests/ui/issues/issue-54410.stderr @@ -6,17 +6,17 @@ LL | pub static mut symbol: [i8]; | = help: the trait `Sized` is not implemented for `[i8]` -warning: shared reference of mutable static is discouraged +warning: 
creating a shared reference to mutable static is discouraged --> $DIR/issue-54410.rs:7:31 | LL | println!("{:p}", unsafe { &symbol }); - | ^^^^^^^ shared reference of mutable static + | ^^^^^^^ shared reference to mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior - = note: `#[warn(static_mut_ref)]` on by default -help: shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer + = note: this will be a hard error in the 2024 edition + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior + = note: `#[warn(static_mut_refs)]` on by default +help: use `addr_of!` instead to create a raw pointer | LL | println!("{:p}", unsafe { addr_of!(symbol) }); | ~~~~~~~~~~~~~~~~ diff --git a/tests/ui/nll/borrowck-thread-local-static-mut-borrow-outlives-fn.rs b/tests/ui/nll/borrowck-thread-local-static-mut-borrow-outlives-fn.rs index fd49b2322657..be882085c5c9 100644 --- a/tests/ui/nll/borrowck-thread-local-static-mut-borrow-outlives-fn.rs +++ b/tests/ui/nll/borrowck-thread-local-static-mut-borrow-outlives-fn.rs @@ -15,7 +15,7 @@ struct S1 { impl S1 { fn new(_x: u64) -> S1 { S1 { a: unsafe { &mut X1 } } - //~^ WARN mutable reference of mutable static is discouraged [static_mut_ref] + //~^ WARN mutable reference to mutable static is discouraged [static_mut_refs] } } diff --git a/tests/ui/nll/borrowck-thread-local-static-mut-borrow-outlives-fn.stderr b/tests/ui/nll/borrowck-thread-local-static-mut-borrow-outlives-fn.stderr index 17217cd5859d..82065cc06ea1 100644 --- 
a/tests/ui/nll/borrowck-thread-local-static-mut-borrow-outlives-fn.stderr +++ b/tests/ui/nll/borrowck-thread-local-static-mut-borrow-outlives-fn.stderr @@ -1,14 +1,14 @@ -warning: mutable reference of mutable static is discouraged +warning: creating a mutable reference to mutable static is discouraged --> $DIR/borrowck-thread-local-static-mut-borrow-outlives-fn.rs:17:26 | LL | S1 { a: unsafe { &mut X1 } } - | ^^^^^^^ mutable reference of mutable static + | ^^^^^^^ mutable reference to mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior - = note: `#[warn(static_mut_ref)]` on by default -help: mutable references are dangerous since if there's any other pointer or reference used for that static while the reference lives, that's UB; use `addr_of_mut!` instead to create a raw pointer + = note: this will be a hard error in the 2024 edition + = note: this mutable reference has lifetime `'static`, but if the static gets accessed (read or written) by any other means, or any other reference is created, then any further use of this mutable reference is Undefined Behavior + = note: `#[warn(static_mut_refs)]` on by default +help: use `addr_of_mut!` instead to create a raw pointer | LL | S1 { a: unsafe { addr_of_mut!(X1) } } | ~~~~~~~~~~~~~~~~ diff --git a/tests/ui/static/reference-of-mut-static-safe.e2021.stderr b/tests/ui/static/reference-of-mut-static-safe.e2021.stderr deleted file mode 100644 index 16f47ace3a93..000000000000 --- a/tests/ui/static/reference-of-mut-static-safe.e2021.stderr +++ /dev/null @@ -1,26 +0,0 @@ -warning: shared reference of mutable static is discouraged - --> $DIR/reference-of-mut-static-safe.rs:9:14 - | -LL | let _x = &X; - | ^^ shared reference of mutable static - | - = note: for more information, see issue #114447 - = note: reference of 
mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior - = note: `#[warn(static_mut_ref)]` on by default -help: shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer - | -LL | let _x = addr_of!(X); - | ~~~~~~~~~~~ - -error[E0133]: use of mutable static is unsafe and requires unsafe function or block - --> $DIR/reference-of-mut-static-safe.rs:9:15 - | -LL | let _x = &X; - | ^ use of mutable static - | - = note: mutable statics can be mutated by multiple threads: aliasing violations or data races will cause undefined behavior - -error: aborting due to 1 previous error; 1 warning emitted - -For more information about this error, try `rustc --explain E0133`. diff --git a/tests/ui/static/reference-of-mut-static-safe.e2024.stderr b/tests/ui/static/reference-of-mut-static-safe.e2024.stderr deleted file mode 100644 index 53f81179de55..000000000000 --- a/tests/ui/static/reference-of-mut-static-safe.e2024.stderr +++ /dev/null @@ -1,15 +0,0 @@ -error[E0796]: reference of mutable static is disallowed - --> $DIR/reference-of-mut-static-safe.rs:9:14 - | -LL | let _x = &X; - | ^^ reference of mutable static - | - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior -help: shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer - | -LL | let _x = addr_of!(X); - | ~~~~~~~~~~~ - -error: aborting due to 1 previous error - -For more information about this error, try `rustc --explain E0796`. 
diff --git a/tests/ui/static/reference-of-mut-static-unsafe-fn.rs b/tests/ui/static/reference-of-mut-static-unsafe-fn.rs index 8f3b3eb77453..5652703a2718 100644 --- a/tests/ui/static/reference-of-mut-static-unsafe-fn.rs +++ b/tests/ui/static/reference-of-mut-static-unsafe-fn.rs @@ -7,17 +7,20 @@ unsafe fn _foo() { static mut Y: i32 = 1; let _y = &X; - //~^ ERROR reference of mutable static is disallowed + //~^ ERROR creating a shared reference to a mutable static [E0796] let ref _a = X; - //~^ ERROR reference of mutable static is disallowed + //~^ ERROR creating a shared reference to a mutable static [E0796] - let (_b, _c) = (&X, &Y); - //~^ ERROR reference of mutable static is disallowed - //~^^ ERROR reference of mutable static is disallowed + let ref mut _a = X; + //~^ ERROR creating a mutable reference to a mutable static [E0796] + + let (_b, _c) = (&X, &mut Y); + //~^ ERROR creating a shared reference to a mutable static [E0796] + //~^^ ERROR creating a mutable reference to a mutable static [E0796] foo(&X); - //~^ ERROR reference of mutable static is disallowed + //~^ ERROR creating a shared reference to a mutable static [E0796] } fn foo<'a>(_x: &'a i32) {} diff --git a/tests/ui/static/reference-of-mut-static-unsafe-fn.stderr b/tests/ui/static/reference-of-mut-static-unsafe-fn.stderr index 5c6fdedfa96f..5675d313e076 100644 --- a/tests/ui/static/reference-of-mut-static-unsafe-fn.stderr +++ b/tests/ui/static/reference-of-mut-static-unsafe-fn.stderr @@ -1,63 +1,75 @@ -error[E0796]: reference of mutable static is disallowed +error[E0796]: creating a shared reference to a mutable static --> $DIR/reference-of-mut-static-unsafe-fn.rs:9:14 | LL | let _y = &X; - | ^^ reference of mutable static + | ^^ shared reference to mutable static | - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior -help: shared references are dangerous since if there's any kind of mutation of that static while the 
reference lives, that's UB; use `addr_of!` instead to create a raw pointer + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer | LL | let _y = addr_of!(X); | ~~~~~~~~~~~ -error[E0796]: reference of mutable static is disallowed +error[E0796]: creating a shared reference to a mutable static --> $DIR/reference-of-mut-static-unsafe-fn.rs:12:18 | LL | let ref _a = X; - | ^ reference of mutable static + | ^ shared reference to mutable static | - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior -help: shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer | LL | let ref _a = addr_of!(X); | ~~~~~~~~~~~ -error[E0796]: reference of mutable static is disallowed - --> $DIR/reference-of-mut-static-unsafe-fn.rs:15:21 +error[E0796]: creating a mutable reference to a mutable static + --> $DIR/reference-of-mut-static-unsafe-fn.rs:15:22 | -LL | let (_b, _c) = (&X, &Y); - | ^^ reference of mutable static +LL | let ref mut _a = X; + | ^ mutable reference to mutable static | - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior -help: shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer + = note: this mutable reference has lifetime `'static`, but if the static gets 
accessed (read or written) by any other means, or any other reference is created, then any further use of this mutable reference is Undefined Behavior +help: use `addr_of_mut!` instead to create a raw pointer | -LL | let (_b, _c) = (addr_of!(X), &Y); +LL | let ref mut _a = addr_of_mut!(X); + | ~~~~~~~~~~~~~~~ + +error[E0796]: creating a shared reference to a mutable static + --> $DIR/reference-of-mut-static-unsafe-fn.rs:18:21 + | +LL | let (_b, _c) = (&X, &mut Y); + | ^^ shared reference to mutable static + | + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer + | +LL | let (_b, _c) = (addr_of!(X), &mut Y); | ~~~~~~~~~~~ -error[E0796]: reference of mutable static is disallowed - --> $DIR/reference-of-mut-static-unsafe-fn.rs:15:25 +error[E0796]: creating a mutable reference to a mutable static + --> $DIR/reference-of-mut-static-unsafe-fn.rs:18:25 | -LL | let (_b, _c) = (&X, &Y); - | ^^ reference of mutable static +LL | let (_b, _c) = (&X, &mut Y); + | ^^^^^^ mutable reference to mutable static | - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior -help: shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer + = note: this mutable reference has lifetime `'static`, but if the static gets accessed (read or written) by any other means, or any other reference is created, then any further use of this mutable reference is Undefined Behavior +help: use `addr_of_mut!` instead to create a raw pointer | -LL | let (_b, _c) = (&X, addr_of!(Y)); - | ~~~~~~~~~~~ +LL | let (_b, _c) = (&X, addr_of_mut!(Y)); + | ~~~~~~~~~~~~~~~ -error[E0796]: reference of mutable static is disallowed - --> 
$DIR/reference-of-mut-static-unsafe-fn.rs:19:9 +error[E0796]: creating a shared reference to a mutable static + --> $DIR/reference-of-mut-static-unsafe-fn.rs:22:9 | LL | foo(&X); - | ^^ reference of mutable static + | ^^ shared reference to mutable static | - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior -help: shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer | LL | foo(addr_of!(X)); | ~~~~~~~~~~~ -error: aborting due to 5 previous errors +error: aborting due to 6 previous errors For more information about this error, try `rustc --explain E0796`. 
diff --git a/tests/ui/static/reference-of-mut-static.e2021.stderr b/tests/ui/static/reference-of-mut-static.e2021.stderr index 77a6b3d304bd..f7ad51b61571 100644 --- a/tests/ui/static/reference-of-mut-static.e2021.stderr +++ b/tests/ui/static/reference-of-mut-static.e2021.stderr @@ -1,88 +1,88 @@ -error: shared reference of mutable static is discouraged +error: creating a shared reference to mutable static is discouraged --> $DIR/reference-of-mut-static.rs:16:18 | LL | let _y = &X; - | ^^ shared reference of mutable static + | ^^ shared reference to mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior + = note: this will be a hard error in the 2024 edition + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior note: the lint level is defined here --> $DIR/reference-of-mut-static.rs:6:9 | -LL | #![deny(static_mut_ref)] - | ^^^^^^^^^^^^^^ -help: shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer +LL | #![deny(static_mut_refs)] + | ^^^^^^^^^^^^^^^ +help: use `addr_of!` instead to create a raw pointer | LL | let _y = addr_of!(X); | ~~~~~~~~~~~ -error: mutable reference of mutable static is discouraged +error: creating a mutable reference to mutable static is discouraged --> $DIR/reference-of-mut-static.rs:20:18 | LL | let _y = &mut X; - | ^^^^^^ mutable reference of mutable static + | ^^^^^^ mutable reference to mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple 
threads: aliasing violations or data races will cause undefined behavior -help: mutable references are dangerous since if there's any other pointer or reference used for that static while the reference lives, that's UB; use `addr_of_mut!` instead to create a raw pointer + = note: this will be a hard error in the 2024 edition + = note: this mutable reference has lifetime `'static`, but if the static gets accessed (read or written) by any other means, or any other reference is created, then any further use of this mutable reference is Undefined Behavior +help: use `addr_of_mut!` instead to create a raw pointer | LL | let _y = addr_of_mut!(X); | ~~~~~~~~~~~~~~~ -error: shared reference of mutable static is discouraged +error: creating a shared reference to mutable static is discouraged --> $DIR/reference-of-mut-static.rs:28:22 | LL | let ref _a = X; - | ^ shared reference of mutable static + | ^ shared reference to mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior -help: shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer + = note: this will be a hard error in the 2024 edition + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer | LL | let ref _a = addr_of!(X); | ~~~~~~~~~~~ -error: shared reference of mutable static is discouraged +error: creating a shared reference to mutable static is discouraged --> $DIR/reference-of-mut-static.rs:32:25 | LL | let (_b, _c) = (&X, &Y); - | ^^ shared reference of mutable static + | ^^ shared reference to mutable 
static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior -help: shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer + = note: this will be a hard error in the 2024 edition + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer | LL | let (_b, _c) = (addr_of!(X), &Y); | ~~~~~~~~~~~ -error: shared reference of mutable static is discouraged +error: creating a shared reference to mutable static is discouraged --> $DIR/reference-of-mut-static.rs:32:29 | LL | let (_b, _c) = (&X, &Y); - | ^^ shared reference of mutable static + | ^^ shared reference to mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior -help: shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer + = note: this will be a hard error in the 2024 edition + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer | LL | let (_b, _c) = (&X, addr_of!(Y)); | ~~~~~~~~~~~ -error: shared reference of mutable static is discouraged +error: creating a shared reference to mutable static is discouraged 
--> $DIR/reference-of-mut-static.rs:38:13 | LL | foo(&X); - | ^^ shared reference of mutable static + | ^^ shared reference to mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior -help: shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer + = note: this will be a hard error in the 2024 edition + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer | LL | foo(addr_of!(X)); | ~~~~~~~~~~~ diff --git a/tests/ui/static/reference-of-mut-static.e2024.stderr b/tests/ui/static/reference-of-mut-static.e2024.stderr index f445ec65a5d2..6205c10ac416 100644 --- a/tests/ui/static/reference-of-mut-static.e2024.stderr +++ b/tests/ui/static/reference-of-mut-static.e2024.stderr @@ -1,71 +1,71 @@ -error[E0796]: reference of mutable static is disallowed +error[E0796]: creating a shared reference to a mutable static --> $DIR/reference-of-mut-static.rs:16:18 | LL | let _y = &X; - | ^^ reference of mutable static + | ^^ shared reference to mutable static | - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior -help: shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` 
instead to create a raw pointer | LL | let _y = addr_of!(X); | ~~~~~~~~~~~ -error[E0796]: reference of mutable static is disallowed +error[E0796]: creating a mutable reference to a mutable static --> $DIR/reference-of-mut-static.rs:20:18 | LL | let _y = &mut X; - | ^^^^^^ reference of mutable static + | ^^^^^^ mutable reference to mutable static | - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior -help: mutable references are dangerous since if there's any other pointer or reference used for that static while the reference lives, that's UB; use `addr_of_mut!` instead to create a raw pointer + = note: this mutable reference has lifetime `'static`, but if the static gets accessed (read or written) by any other means, or any other reference is created, then any further use of this mutable reference is Undefined Behavior +help: use `addr_of_mut!` instead to create a raw pointer | LL | let _y = addr_of_mut!(X); | ~~~~~~~~~~~~~~~ -error[E0796]: reference of mutable static is disallowed +error[E0796]: creating a shared reference to a mutable static --> $DIR/reference-of-mut-static.rs:28:22 | LL | let ref _a = X; - | ^ reference of mutable static + | ^ shared reference to mutable static | - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior -help: shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer | LL | let ref _a = addr_of!(X); | ~~~~~~~~~~~ -error[E0796]: reference of mutable static is disallowed +error[E0796]: creating a shared reference to a mutable 
static --> $DIR/reference-of-mut-static.rs:32:25 | LL | let (_b, _c) = (&X, &Y); - | ^^ reference of mutable static + | ^^ shared reference to mutable static | - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior -help: shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer | LL | let (_b, _c) = (addr_of!(X), &Y); | ~~~~~~~~~~~ -error[E0796]: reference of mutable static is disallowed +error[E0796]: creating a shared reference to a mutable static --> $DIR/reference-of-mut-static.rs:32:29 | LL | let (_b, _c) = (&X, &Y); - | ^^ reference of mutable static + | ^^ shared reference to mutable static | - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior -help: shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer | LL | let (_b, _c) = (&X, addr_of!(Y)); | ~~~~~~~~~~~ -error[E0796]: reference of mutable static is disallowed +error[E0796]: creating a shared reference to a mutable static --> $DIR/reference-of-mut-static.rs:38:13 | LL | foo(&X); - | ^^ reference of mutable static + | ^^ shared reference to mutable static | - = note: mutable statics can be written to by multiple threads: aliasing violations or 
data races will cause undefined behavior -help: shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer | LL | foo(addr_of!(X)); | ~~~~~~~~~~~ diff --git a/tests/ui/static/reference-of-mut-static.rs b/tests/ui/static/reference-of-mut-static.rs index 166303f0257f..af2cab7dd872 100644 --- a/tests/ui/static/reference-of-mut-static.rs +++ b/tests/ui/static/reference-of-mut-static.rs @@ -3,7 +3,7 @@ //@ [e2021] edition:2021 //@ [e2024] compile-flags: --edition 2024 -Z unstable-options -#![deny(static_mut_ref)] +#![deny(static_mut_refs)] use std::ptr::{addr_of, addr_of_mut}; @@ -14,30 +14,30 @@ fn main() { unsafe { let _y = &X; - //[e2024]~^ ERROR reference of mutable static is disallowed - //[e2021]~^^ ERROR shared reference of mutable static is discouraged [static_mut_ref] + //[e2024]~^ ERROR creating a shared reference to a mutable static [E0796] + //[e2021]~^^ ERROR shared reference to mutable static is discouraged [static_mut_refs] let _y = &mut X; - //[e2024]~^ ERROR reference of mutable static is disallowed - //[e2021]~^^ ERROR mutable reference of mutable static is discouraged [static_mut_ref] + //[e2024]~^ ERROR creating a mutable reference to a mutable static [E0796] + //[e2021]~^^ ERROR mutable reference to mutable static is discouraged [static_mut_refs] let _z = addr_of_mut!(X); let _p = addr_of!(X); let ref _a = X; - //[e2024]~^ ERROR reference of mutable static is disallowed - //[e2021]~^^ ERROR shared reference of mutable static is discouraged [static_mut_ref] + //[e2024]~^ ERROR creating a shared reference to a mutable static [E0796] + //[e2021]~^^ ERROR shared reference to mutable static is 
discouraged [static_mut_refs] let (_b, _c) = (&X, &Y); - //[e2024]~^ ERROR reference of mutable static is disallowed - //[e2021]~^^ ERROR shared reference of mutable static is discouraged [static_mut_ref] - //[e2024]~^^^ ERROR reference of mutable static is disallowed - //[e2021]~^^^^ ERROR shared reference of mutable static is discouraged [static_mut_ref] + //[e2024]~^ ERROR creating a shared reference to a mutable static [E0796] + //[e2021]~^^ ERROR shared reference to mutable static is discouraged [static_mut_refs] + //[e2024]~^^^ ERROR creating a shared reference to a mutable static [E0796] + //[e2021]~^^^^ ERROR shared reference to mutable static is discouraged [static_mut_refs] foo(&X); - //[e2024]~^ ERROR reference of mutable static is disallowed - //[e2021]~^^ ERROR shared reference of mutable static is discouraged [static_mut_ref] + //[e2024]~^ ERROR creating a shared reference to a mutable static [E0796] + //[e2021]~^^ ERROR shared reference to mutable static is discouraged [static_mut_refs] static mut Z: &[i32; 3] = &[0, 1, 2]; diff --git a/tests/ui/static/reference-to-mut-static-safe.e2021.stderr b/tests/ui/static/reference-to-mut-static-safe.e2021.stderr new file mode 100644 index 000000000000..9ea34290e36d --- /dev/null +++ b/tests/ui/static/reference-to-mut-static-safe.e2021.stderr @@ -0,0 +1,26 @@ +warning: creating a shared reference to mutable static is discouraged + --> $DIR/reference-to-mut-static-safe.rs:9:14 + | +LL | let _x = &X; + | ^^ shared reference to mutable static + | + = note: for more information, see issue #114447 + = note: this will be a hard error in the 2024 edition + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior + = note: `#[warn(static_mut_refs)]` on by default +help: use `addr_of!` instead to create a raw pointer + | +LL | let _x = addr_of!(X); + | ~~~~~~~~~~~ + +error[E0133]: use 
of mutable static is unsafe and requires unsafe function or block + --> $DIR/reference-to-mut-static-safe.rs:9:15 + | +LL | let _x = &X; + | ^ use of mutable static + | + = note: mutable statics can be mutated by multiple threads: aliasing violations or data races will cause undefined behavior + +error: aborting due to 1 previous error; 1 warning emitted + +For more information about this error, try `rustc --explain E0133`. diff --git a/tests/ui/static/reference-to-mut-static-safe.e2024.stderr b/tests/ui/static/reference-to-mut-static-safe.e2024.stderr new file mode 100644 index 000000000000..607c1bba1352 --- /dev/null +++ b/tests/ui/static/reference-to-mut-static-safe.e2024.stderr @@ -0,0 +1,15 @@ +error[E0796]: creating a shared reference to a mutable static + --> $DIR/reference-to-mut-static-safe.rs:9:14 + | +LL | let _x = &X; + | ^^ shared reference to mutable static + | + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer + | +LL | let _x = addr_of!(X); + | ~~~~~~~~~~~ + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0796`. 
diff --git a/tests/ui/static/reference-of-mut-static-safe.rs b/tests/ui/static/reference-to-mut-static-safe.rs similarity index 63% rename from tests/ui/static/reference-of-mut-static-safe.rs rename to tests/ui/static/reference-to-mut-static-safe.rs index d113d0ee48d2..de4f4be8f762 100644 --- a/tests/ui/static/reference-of-mut-static-safe.rs +++ b/tests/ui/static/reference-to-mut-static-safe.rs @@ -7,7 +7,7 @@ fn main() { static mut X: i32 = 1; let _x = &X; - //[e2024]~^ reference of mutable static is disallowed [E0796] + //[e2024]~^ creating a shared reference to a mutable static [E0796] //[e2021]~^^ use of mutable static is unsafe and requires unsafe function or block [E0133] - //[e2021]~^^^ shared reference of mutable static is discouraged [static_mut_ref] + //[e2021]~^^^ shared reference to mutable static is discouraged [static_mut_refs] } diff --git a/tests/ui/static/reference-to-mut-static-unsafe-fn.rs b/tests/ui/static/reference-to-mut-static-unsafe-fn.rs new file mode 100644 index 000000000000..5652703a2718 --- /dev/null +++ b/tests/ui/static/reference-to-mut-static-unsafe-fn.rs @@ -0,0 +1,26 @@ +//@ compile-flags: --edition 2024 -Z unstable-options + +fn main() {} + +unsafe fn _foo() { + static mut X: i32 = 1; + static mut Y: i32 = 1; + + let _y = &X; + //~^ ERROR creating a shared reference to a mutable static [E0796] + + let ref _a = X; + //~^ ERROR creating a shared reference to a mutable static [E0796] + + let ref mut _a = X; + //~^ ERROR creating a mutable reference to a mutable static [E0796] + + let (_b, _c) = (&X, &mut Y); + //~^ ERROR creating a shared reference to a mutable static [E0796] + //~^^ ERROR creating a mutable reference to a mutable static [E0796] + + foo(&X); + //~^ ERROR creating a shared reference to a mutable static [E0796] +} + +fn foo<'a>(_x: &'a i32) {} diff --git a/tests/ui/static/reference-to-mut-static-unsafe-fn.stderr b/tests/ui/static/reference-to-mut-static-unsafe-fn.stderr new file mode 100644 index 
000000000000..77d2aa5d1aec --- /dev/null +++ b/tests/ui/static/reference-to-mut-static-unsafe-fn.stderr @@ -0,0 +1,75 @@ +error[E0796]: creating a shared reference to a mutable static + --> $DIR/reference-to-mut-static-unsafe-fn.rs:9:14 + | +LL | let _y = &X; + | ^^ shared reference to mutable static + | + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer + | +LL | let _y = addr_of!(X); + | ~~~~~~~~~~~ + +error[E0796]: creating a shared reference to a mutable static + --> $DIR/reference-to-mut-static-unsafe-fn.rs:12:18 + | +LL | let ref _a = X; + | ^ shared reference to mutable static + | + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer + | +LL | let ref _a = addr_of!(X); + | ~~~~~~~~~~~ + +error[E0796]: creating a mutable reference to a mutable static + --> $DIR/reference-to-mut-static-unsafe-fn.rs:15:22 + | +LL | let ref mut _a = X; + | ^ mutable reference to mutable static + | + = note: this mutable reference has lifetime `'static`, but if the static gets accessed (read or written) by any other means, or any other reference is created, then any further use of this mutable reference is Undefined Behavior +help: use `addr_of_mut!` instead to create a raw pointer + | +LL | let ref mut _a = addr_of_mut!(X); + | ~~~~~~~~~~~~~~~ + +error[E0796]: creating a shared reference to a mutable static + --> $DIR/reference-to-mut-static-unsafe-fn.rs:18:21 + | +LL | let (_b, _c) = (&X, &mut Y); + | ^^ shared reference to mutable static + | + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use 
of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer + | +LL | let (_b, _c) = (addr_of!(X), &mut Y); + | ~~~~~~~~~~~ + +error[E0796]: creating a mutable reference to a mutable static + --> $DIR/reference-to-mut-static-unsafe-fn.rs:18:25 + | +LL | let (_b, _c) = (&X, &mut Y); + | ^^^^^^ mutable reference to mutable static + | + = note: this mutable reference has lifetime `'static`, but if the static gets accessed (read or written) by any other means, or any other reference is created, then any further use of this mutable reference is Undefined Behavior +help: use `addr_of_mut!` instead to create a raw pointer + | +LL | let (_b, _c) = (&X, addr_of_mut!(Y)); + | ~~~~~~~~~~~~~~~ + +error[E0796]: creating a shared reference to a mutable static + --> $DIR/reference-to-mut-static-unsafe-fn.rs:22:9 + | +LL | foo(&X); + | ^^ shared reference to mutable static + | + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer + | +LL | foo(addr_of!(X)); + | ~~~~~~~~~~~ + +error: aborting due to 6 previous errors + +For more information about this error, try `rustc --explain E0796`. 
diff --git a/tests/ui/static/reference-to-mut-static.e2021.stderr b/tests/ui/static/reference-to-mut-static.e2021.stderr new file mode 100644 index 000000000000..f477e5ac6c5c --- /dev/null +++ b/tests/ui/static/reference-to-mut-static.e2021.stderr @@ -0,0 +1,91 @@ +error: creating a shared reference to mutable static is discouraged + --> $DIR/reference-to-mut-static.rs:16:18 + | +LL | let _y = &X; + | ^^ shared reference to mutable static + | + = note: for more information, see issue #114447 + = note: this will be a hard error in the 2024 edition + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +note: the lint level is defined here + --> $DIR/reference-to-mut-static.rs:6:9 + | +LL | #![deny(static_mut_refs)] + | ^^^^^^^^^^^^^^^ +help: use `addr_of!` instead to create a raw pointer + | +LL | let _y = addr_of!(X); + | ~~~~~~~~~~~ + +error: creating a mutable reference to mutable static is discouraged + --> $DIR/reference-to-mut-static.rs:20:18 + | +LL | let _y = &mut X; + | ^^^^^^ mutable reference to mutable static + | + = note: for more information, see issue #114447 + = note: this will be a hard error in the 2024 edition + = note: this mutable reference has lifetime `'static`, but if the static gets accessed (read or written) by any other means, or any other reference is created, then any further use of this mutable reference is Undefined Behavior +help: use `addr_of_mut!` instead to create a raw pointer + | +LL | let _y = addr_of_mut!(X); + | ~~~~~~~~~~~~~~~ + +error: creating a shared reference to mutable static is discouraged + --> $DIR/reference-to-mut-static.rs:28:22 + | +LL | let ref _a = X; + | ^ shared reference to mutable static + | + = note: for more information, see issue #114447 + = note: this will be a hard error in the 2024 edition + = note: this shared reference has lifetime `'static`, but if the static 
ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer + | +LL | let ref _a = addr_of!(X); + | ~~~~~~~~~~~ + +error: creating a shared reference to mutable static is discouraged + --> $DIR/reference-to-mut-static.rs:32:25 + | +LL | let (_b, _c) = (&X, &Y); + | ^^ shared reference to mutable static + | + = note: for more information, see issue #114447 + = note: this will be a hard error in the 2024 edition + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer + | +LL | let (_b, _c) = (addr_of!(X), &Y); + | ~~~~~~~~~~~ + +error: creating a shared reference to mutable static is discouraged + --> $DIR/reference-to-mut-static.rs:32:29 + | +LL | let (_b, _c) = (&X, &Y); + | ^^ shared reference to mutable static + | + = note: for more information, see issue #114447 + = note: this will be a hard error in the 2024 edition + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer + | +LL | let (_b, _c) = (&X, addr_of!(Y)); + | ~~~~~~~~~~~ + +error: creating a shared reference to mutable static is discouraged + --> $DIR/reference-to-mut-static.rs:38:13 + | +LL | foo(&X); + | ^^ shared reference to mutable static + | + = note: for more information, see issue #114447 + = note: this will be a hard error in the 2024 edition + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer + | +LL | 
foo(addr_of!(X)); + | ~~~~~~~~~~~ + +error: aborting due to 6 previous errors + diff --git a/tests/ui/static/reference-to-mut-static.e2024.stderr b/tests/ui/static/reference-to-mut-static.e2024.stderr new file mode 100644 index 000000000000..b18e214e84fe --- /dev/null +++ b/tests/ui/static/reference-to-mut-static.e2024.stderr @@ -0,0 +1,75 @@ +error[E0796]: creating a shared reference to a mutable static + --> $DIR/reference-to-mut-static.rs:16:18 + | +LL | let _y = &X; + | ^^ shared reference to mutable static + | + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer + | +LL | let _y = addr_of!(X); + | ~~~~~~~~~~~ + +error[E0796]: creating a mutable reference to a mutable static + --> $DIR/reference-to-mut-static.rs:20:18 + | +LL | let _y = &mut X; + | ^^^^^^ mutable reference to mutable static + | + = note: this mutable reference has lifetime `'static`, but if the static gets accessed (read or written) by any other means, or any other reference is created, then any further use of this mutable reference is Undefined Behavior +help: use `addr_of_mut!` instead to create a raw pointer + | +LL | let _y = addr_of_mut!(X); + | ~~~~~~~~~~~~~~~ + +error[E0796]: creating a shared reference to a mutable static + --> $DIR/reference-to-mut-static.rs:28:22 + | +LL | let ref _a = X; + | ^ shared reference to mutable static + | + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer + | +LL | let ref _a = addr_of!(X); + | ~~~~~~~~~~~ + +error[E0796]: creating a shared reference to a mutable static + --> $DIR/reference-to-mut-static.rs:32:25 + | +LL | let (_b, _c) = (&X, &Y); + | ^^ shared 
reference to mutable static + | + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer + | +LL | let (_b, _c) = (addr_of!(X), &Y); + | ~~~~~~~~~~~ + +error[E0796]: creating a shared reference to a mutable static + --> $DIR/reference-to-mut-static.rs:32:29 + | +LL | let (_b, _c) = (&X, &Y); + | ^^ shared reference to mutable static + | + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer + | +LL | let (_b, _c) = (&X, addr_of!(Y)); + | ~~~~~~~~~~~ + +error[E0796]: creating a shared reference to a mutable static + --> $DIR/reference-to-mut-static.rs:38:13 + | +LL | foo(&X); + | ^^ shared reference to mutable static + | + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer + | +LL | foo(addr_of!(X)); + | ~~~~~~~~~~~ + +error: aborting due to 6 previous errors + +For more information about this error, try `rustc --explain E0796`. 
diff --git a/tests/ui/static/reference-to-mut-static.rs b/tests/ui/static/reference-to-mut-static.rs new file mode 100644 index 000000000000..af2cab7dd872 --- /dev/null +++ b/tests/ui/static/reference-to-mut-static.rs @@ -0,0 +1,50 @@ +//@ revisions: e2021 e2024 + +//@ [e2021] edition:2021 +//@ [e2024] compile-flags: --edition 2024 -Z unstable-options + +#![deny(static_mut_refs)] + +use std::ptr::{addr_of, addr_of_mut}; + +fn main() { + static mut X: i32 = 1; + + static mut Y: i32 = 1; + + unsafe { + let _y = &X; + //[e2024]~^ ERROR creating a shared reference to a mutable static [E0796] + //[e2021]~^^ ERROR shared reference to mutable static is discouraged [static_mut_refs] + + let _y = &mut X; + //[e2024]~^ ERROR creating a mutable reference to a mutable static [E0796] + //[e2021]~^^ ERROR mutable reference to mutable static is discouraged [static_mut_refs] + + let _z = addr_of_mut!(X); + + let _p = addr_of!(X); + + let ref _a = X; + //[e2024]~^ ERROR creating a shared reference to a mutable static [E0796] + //[e2021]~^^ ERROR shared reference to mutable static is discouraged [static_mut_refs] + + let (_b, _c) = (&X, &Y); + //[e2024]~^ ERROR creating a shared reference to a mutable static [E0796] + //[e2021]~^^ ERROR shared reference to mutable static is discouraged [static_mut_refs] + //[e2024]~^^^ ERROR creating a shared reference to a mutable static [E0796] + //[e2021]~^^^^ ERROR shared reference to mutable static is discouraged [static_mut_refs] + + foo(&X); + //[e2024]~^ ERROR creating a shared reference to a mutable static [E0796] + //[e2021]~^^ ERROR shared reference to mutable static is discouraged [static_mut_refs] + + static mut Z: &[i32; 3] = &[0, 1, 2]; + + let _ = Z.len(); + let _ = Z[0]; + let _ = format!("{:?}", Z); + } +} + +fn foo<'a>(_x: &'a i32) {} diff --git a/tests/ui/static/safe-extern-statics-mut.rs b/tests/ui/static/safe-extern-statics-mut.rs index 8aa0b47a3116..05a1bee8891e 100644 --- a/tests/ui/static/safe-extern-statics-mut.rs +++ 
b/tests/ui/static/safe-extern-statics-mut.rs @@ -10,8 +10,8 @@ extern "C" { fn main() { let b = B; //~ ERROR use of mutable static is unsafe let rb = &B; //~ ERROR use of mutable static is unsafe - //~^ WARN shared reference of mutable static is discouraged [static_mut_ref] + //~^ WARN shared reference to mutable static is discouraged [static_mut_refs] let xb = XB; //~ ERROR use of mutable static is unsafe let xrb = &XB; //~ ERROR use of mutable static is unsafe - //~^ WARN shared reference of mutable static is discouraged [static_mut_ref] + //~^ WARN shared reference to mutable static is discouraged [static_mut_refs] } diff --git a/tests/ui/static/safe-extern-statics-mut.stderr b/tests/ui/static/safe-extern-statics-mut.stderr index eda353ce6736..9a4b651405f2 100644 --- a/tests/ui/static/safe-extern-statics-mut.stderr +++ b/tests/ui/static/safe-extern-statics-mut.stderr @@ -1,28 +1,28 @@ -warning: shared reference of mutable static is discouraged +warning: creating a shared reference to mutable static is discouraged --> $DIR/safe-extern-statics-mut.rs:12:14 | LL | let rb = &B; - | ^^ shared reference of mutable static + | ^^ shared reference to mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior - = note: `#[warn(static_mut_ref)]` on by default -help: shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer + = note: this will be a hard error in the 2024 edition + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior + = note: `#[warn(static_mut_refs)]` on by default +help: use `addr_of!` instead to create 
a raw pointer | LL | let rb = addr_of!(B); | ~~~~~~~~~~~ -warning: shared reference of mutable static is discouraged +warning: creating a shared reference to mutable static is discouraged --> $DIR/safe-extern-statics-mut.rs:15:15 | LL | let xrb = &XB; - | ^^^ shared reference of mutable static + | ^^^ shared reference to mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior -help: shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer + = note: this will be a hard error in the 2024 edition + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior +help: use `addr_of!` instead to create a raw pointer | LL | let xrb = addr_of!(XB); | ~~~~~~~~~~~~ diff --git a/tests/ui/statics/issue-15261.rs b/tests/ui/statics/issue-15261.rs index 71eeb2a6d261..e168abce0784 100644 --- a/tests/ui/statics/issue-15261.rs +++ b/tests/ui/statics/issue-15261.rs @@ -7,6 +7,6 @@ static mut n_mut: usize = 0; static n: &'static usize = unsafe { &n_mut }; -//~^ WARN shared reference of mutable static is discouraged [static_mut_ref] +//~^ WARN shared reference to mutable static is discouraged [static_mut_refs] fn main() {} diff --git a/tests/ui/statics/issue-15261.stderr b/tests/ui/statics/issue-15261.stderr index 72d88ce1b383..c31793f3d8f1 100644 --- a/tests/ui/statics/issue-15261.stderr +++ b/tests/ui/statics/issue-15261.stderr @@ -1,14 +1,14 @@ -warning: shared reference of mutable static is discouraged +warning: creating a shared reference to mutable static is discouraged --> $DIR/issue-15261.rs:9:37 | LL | static n: &'static usize = 
unsafe { &n_mut }; - | ^^^^^^ shared reference of mutable static + | ^^^^^^ shared reference to mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior - = note: `#[warn(static_mut_ref)]` on by default -help: shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer + = note: this will be a hard error in the 2024 edition + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior + = note: `#[warn(static_mut_refs)]` on by default +help: use `addr_of!` instead to create a raw pointer | LL | static n: &'static usize = unsafe { addr_of!(n_mut) }; | ~~~~~~~~~~~~~~~ diff --git a/tests/ui/statics/static-mut-xc.rs b/tests/ui/statics/static-mut-xc.rs index 75a4faed83d4..a772d4151f7c 100644 --- a/tests/ui/statics/static-mut-xc.rs +++ b/tests/ui/statics/static-mut-xc.rs @@ -26,9 +26,9 @@ unsafe fn run() { static_mut_xc::a = -3; assert_eq!(static_mut_xc::a, -3); static_bound(&static_mut_xc::a); - //~^ WARN shared reference of mutable static is discouraged [static_mut_ref] + //~^ WARN shared reference to mutable static is discouraged [static_mut_refs] static_bound_set(&mut static_mut_xc::a); - //~^ WARN mutable reference of mutable static is discouraged [static_mut_ref] + //~^ WARN mutable reference to mutable static is discouraged [static_mut_refs] } pub fn main() { diff --git a/tests/ui/statics/static-mut-xc.stderr b/tests/ui/statics/static-mut-xc.stderr index 37aa336bc50f..d381328c0717 100644 --- a/tests/ui/statics/static-mut-xc.stderr +++ b/tests/ui/statics/static-mut-xc.stderr @@ -1,28 +1,28 @@ -warning: shared reference 
of mutable static is discouraged +warning: creating a shared reference to mutable static is discouraged --> $DIR/static-mut-xc.rs:28:18 | LL | static_bound(&static_mut_xc::a); - | ^^^^^^^^^^^^^^^^^ shared reference of mutable static + | ^^^^^^^^^^^^^^^^^ shared reference to mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior - = note: `#[warn(static_mut_ref)]` on by default -help: shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to create a raw pointer + = note: this will be a hard error in the 2024 edition + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior + = note: `#[warn(static_mut_refs)]` on by default +help: use `addr_of!` instead to create a raw pointer | LL | static_bound(addr_of!(static_mut_xc::a)); | ~~~~~~~~~~~~~~~~~~~~~~~~~~ -warning: mutable reference of mutable static is discouraged +warning: creating a mutable reference to mutable static is discouraged --> $DIR/static-mut-xc.rs:30:22 | LL | static_bound_set(&mut static_mut_xc::a); - | ^^^^^^^^^^^^^^^^^^^^^ mutable reference of mutable static + | ^^^^^^^^^^^^^^^^^^^^^ mutable reference to mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior -help: mutable references are dangerous since if there's any other pointer or reference used for that static while the reference lives, that's UB; use `addr_of_mut!` instead to create a raw pointer + = note: 
this will be a hard error in the 2024 edition + = note: this mutable reference has lifetime `'static`, but if the static gets accessed (read or written) by any other means, or any other reference is created, then any further use of this mutable reference is Undefined Behavior +help: use `addr_of_mut!` instead to create a raw pointer | LL | static_bound_set(addr_of_mut!(static_mut_xc::a)); | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests/ui/statics/static-recursive.rs b/tests/ui/statics/static-recursive.rs index f504e2a79f00..29b80818b7d4 100644 --- a/tests/ui/statics/static-recursive.rs +++ b/tests/ui/statics/static-recursive.rs @@ -1,7 +1,7 @@ //@ run-pass static mut S: *const u8 = unsafe { &S as *const *const u8 as *const u8 }; -//~^ WARN shared reference of mutable static is discouraged [static_mut_ref] +//~^ WARN shared reference to mutable static is discouraged [static_mut_refs] struct StaticDoubleLinked { prev: &'static StaticDoubleLinked, diff --git a/tests/ui/statics/static-recursive.stderr b/tests/ui/statics/static-recursive.stderr index 15888e5c68d8..cd285c6c2a46 100644 --- a/tests/ui/statics/static-recursive.stderr +++ b/tests/ui/statics/static-recursive.stderr @@ -1,14 +1,14 @@ -warning: shared reference of mutable static is discouraged +warning: creating a shared reference to mutable static is discouraged --> $DIR/static-recursive.rs:3:36 | LL | static mut S: *const u8 = unsafe { &S as *const *const u8 as *const u8 }; - | ^^ shared reference of mutable static + | ^^ shared reference to mutable static | = note: for more information, see issue #114447 - = note: reference of mutable static is a hard error from 2024 edition - = note: mutable statics can be written to by multiple threads: aliasing violations or data races will cause undefined behavior - = note: `#[warn(static_mut_ref)]` on by default -help: shared references are dangerous since if there's any kind of mutation of that static while the reference lives, that's UB; use `addr_of!` instead to 
create a raw pointer + = note: this will be a hard error in the 2024 edition + = note: this shared reference has lifetime `'static`, but if the static ever gets mutated, or a mutable reference is created, then any further use of this shared reference is Undefined Behavior + = note: `#[warn(static_mut_refs)]` on by default +help: use `addr_of!` instead to create a raw pointer | LL | static mut S: *const u8 = unsafe { addr_of!(S) as *const *const u8 as *const u8 }; | ~~~~~~~~~~~ diff --git a/tests/ui/thread-local/thread-local-static.rs b/tests/ui/thread-local/thread-local-static.rs index a1b72323f710..05df0471b143 100644 --- a/tests/ui/thread-local/thread-local-static.rs +++ b/tests/ui/thread-local/thread-local-static.rs @@ -2,7 +2,7 @@ #![feature(thread_local)] #![feature(const_swap)] -#![allow(static_mut_ref)] +#![allow(static_mut_refs)] #[thread_local] static mut STATIC_VAR_2: [u32; 8] = [4; 8]; From 5793f8203046da13726db57a5418c3e49ce66902 Mon Sep 17 00:00:00 2001 From: Scott McMurray Date: Sat, 17 Feb 2024 15:26:45 -0800 Subject: [PATCH 114/127] Add help to `hir_analysis_unrecognized_intrinsic_function` To help remind forgetful people like me what step they forgot. 
--- compiler/rustc_hir_analysis/messages.ftl | 1 + compiler/rustc_hir_analysis/src/errors.rs | 1 + tests/ui/error-codes/E0093.stderr | 2 ++ tests/ui/feature-gates/feature-gate-abi.stderr | 4 ++++ tests/ui/feature-gates/feature-gate-intrinsics.stderr | 4 ++++ tests/ui/intrinsics-always-extern.stderr | 2 ++ 6 files changed, 14 insertions(+) diff --git a/compiler/rustc_hir_analysis/messages.ftl b/compiler/rustc_hir_analysis/messages.ftl index a61cfd0e4ce9..7468b38ce1a6 100644 --- a/compiler/rustc_hir_analysis/messages.ftl +++ b/compiler/rustc_hir_analysis/messages.ftl @@ -462,6 +462,7 @@ hir_analysis_unrecognized_atomic_operation = hir_analysis_unrecognized_intrinsic_function = unrecognized intrinsic function: `{$name}` .label = unrecognized intrinsic + .help = if you're adding an intrinsic, be sure to update `check_intrinsic_type` hir_analysis_unused_associated_type_bounds = unnecessary associated type bound for not object safe associated type diff --git a/compiler/rustc_hir_analysis/src/errors.rs b/compiler/rustc_hir_analysis/src/errors.rs index 6a505b961974..cd4096a9d477 100644 --- a/compiler/rustc_hir_analysis/src/errors.rs +++ b/compiler/rustc_hir_analysis/src/errors.rs @@ -143,6 +143,7 @@ pub struct WrongNumberOfGenericArgumentsToIntrinsic<'a> { #[derive(Diagnostic)] #[diag(hir_analysis_unrecognized_intrinsic_function, code = E0093)] +#[help] pub struct UnrecognizedIntrinsicFunction { #[primary_span] #[label] diff --git a/tests/ui/error-codes/E0093.stderr b/tests/ui/error-codes/E0093.stderr index 387e0c55d4d2..51c367b343ab 100644 --- a/tests/ui/error-codes/E0093.stderr +++ b/tests/ui/error-codes/E0093.stderr @@ -3,6 +3,8 @@ error[E0093]: unrecognized intrinsic function: `foo` | LL | fn foo(); | ^^^^^^^^^ unrecognized intrinsic + | + = help: if you're adding an intrinsic, be sure to update `check_intrinsic_type` error: aborting due to 1 previous error diff --git a/tests/ui/feature-gates/feature-gate-abi.stderr b/tests/ui/feature-gates/feature-gate-abi.stderr 
index aa60434d9fe6..3fd1e1189abc 100644 --- a/tests/ui/feature-gates/feature-gate-abi.stderr +++ b/tests/ui/feature-gates/feature-gate-abi.stderr @@ -187,12 +187,16 @@ error[E0093]: unrecognized intrinsic function: `f1` | LL | extern "rust-intrinsic" fn f1() {} | ^^ unrecognized intrinsic + | + = help: if you're adding an intrinsic, be sure to update `check_intrinsic_type` error[E0093]: unrecognized intrinsic function: `f2` --> $DIR/feature-gate-abi.rs:18:32 | LL | extern "platform-intrinsic" fn f2() {} | ^^ unrecognized intrinsic + | + = help: if you're adding an intrinsic, be sure to update `check_intrinsic_type` error: intrinsic must be in `extern "rust-intrinsic" { ... }` block --> $DIR/feature-gate-abi.rs:25:32 diff --git a/tests/ui/feature-gates/feature-gate-intrinsics.stderr b/tests/ui/feature-gates/feature-gate-intrinsics.stderr index 78c21843adb0..583a4a1a84eb 100644 --- a/tests/ui/feature-gates/feature-gate-intrinsics.stderr +++ b/tests/ui/feature-gates/feature-gate-intrinsics.stderr @@ -21,12 +21,16 @@ error[E0093]: unrecognized intrinsic function: `bar` | LL | fn bar(); | ^^^^^^^^^ unrecognized intrinsic + | + = help: if you're adding an intrinsic, be sure to update `check_intrinsic_type` error[E0093]: unrecognized intrinsic function: `baz` --> $DIR/feature-gate-intrinsics.rs:5:28 | LL | extern "rust-intrinsic" fn baz() {} | ^^^ unrecognized intrinsic + | + = help: if you're adding an intrinsic, be sure to update `check_intrinsic_type` error: intrinsic must be in `extern "rust-intrinsic" { ... 
}` block --> $DIR/feature-gate-intrinsics.rs:5:34 diff --git a/tests/ui/intrinsics-always-extern.stderr b/tests/ui/intrinsics-always-extern.stderr index 32468f99197f..1f7bb5a3b0de 100644 --- a/tests/ui/intrinsics-always-extern.stderr +++ b/tests/ui/intrinsics-always-extern.stderr @@ -9,6 +9,8 @@ error[E0093]: unrecognized intrinsic function: `hello` | LL | extern "rust-intrinsic" fn hello() { | ^^^^^ unrecognized intrinsic + | + = help: if you're adding an intrinsic, be sure to update `check_intrinsic_type` error: intrinsic must be in `extern "rust-intrinsic" { ... }` block --> $DIR/intrinsics-always-extern.rs:8:43 From 9f8d05f29f9df6ff46455d7e9e3b25464c1e28ec Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Tue, 26 Dec 2023 16:30:31 +0300 Subject: [PATCH 115/127] macro_rules: Preserve all metavariable spans in a global side table --- compiler/rustc_expand/src/lib.rs | 1 + compiler/rustc_expand/src/mbe/transcribe.rs | 55 +++++++++++--- compiler/rustc_span/src/lib.rs | 75 +++++++++++++++---- tests/coverage/no_spans.cov-map | 8 ++ tests/coverage/no_spans_if_not.cov-map | 12 +++ .../anon-params-edition-hygiene.rs | 4 +- .../anon-params-edition-hygiene.stderr | 23 ------ .../edition-keywords-2018-2018-parsing.rs | 4 +- .../edition-keywords-2018-2018-parsing.stderr | 6 +- ...mat-args-capture-first-literal-is-macro.rs | 2 +- ...args-capture-first-literal-is-macro.stderr | 8 +- tests/ui/lint/wide_pointer_comparisons.rs | 4 +- tests/ui/lint/wide_pointer_comparisons.stderr | 14 ++-- tests/ui/macros/issue-118786.rs | 4 +- tests/ui/macros/issue-118786.stderr | 10 +-- .../method-on-ambiguous-numeric-type.stderr | 4 +- tests/ui/parser/issues/issue-44406.stderr | 2 +- .../issue-68091-unicode-ident-after-if.stderr | 5 ++ ...092-unicode-ident-after-incomplete-expr.rs | 4 +- ...unicode-ident-after-incomplete-expr.stderr | 6 +- .../capture-macro-rules-invoke.stdout | 2 +- .../edition-lint-infer-outlives-macro.fixed | 16 ++-- .../edition-lint-infer-outlives-macro.rs | 12 ++- 
.../edition-lint-infer-outlives-macro.stderr | 45 +++++++---- tests/ui/span/macro-span-replacement.rs | 4 +- tests/ui/span/macro-span-replacement.stderr | 7 +- ...k-fn-with-more-than-65535-arguments.stderr | 4 +- ...ssue-116473-ice-wrong-span-variant-args.rs | 4 +- ...-116473-ice-wrong-span-variant-args.stderr | 26 ++++--- 29 files changed, 239 insertions(+), 132 deletions(-) delete mode 100644 tests/ui/anon-params/anon-params-edition-hygiene.stderr diff --git a/compiler/rustc_expand/src/lib.rs b/compiler/rustc_expand/src/lib.rs index 4da86d77dc88..0e73abc9ed85 100644 --- a/compiler/rustc_expand/src/lib.rs +++ b/compiler/rustc_expand/src/lib.rs @@ -6,6 +6,7 @@ #![feature(if_let_guard)] #![feature(let_chains)] #![feature(macro_metavar_expr)] +#![feature(map_try_insert)] #![feature(proc_macro_diagnostic)] #![feature(proc_macro_internals)] #![feature(proc_macro_span)] diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs index 434891ebc76e..519e4a634d8a 100644 --- a/compiler/rustc_expand/src/mbe/transcribe.rs +++ b/compiler/rustc_expand/src/mbe/transcribe.rs @@ -13,7 +13,7 @@ use rustc_errors::DiagnosticBuilder; use rustc_errors::{pluralize, PResult}; use rustc_span::hygiene::{LocalExpnId, Transparency}; use rustc_span::symbol::{sym, Ident, MacroRulesNormalizedIdent}; -use rustc_span::{Span, SyntaxContext}; +use rustc_span::{with_metavar_spans, Span, SyntaxContext}; use smallvec::{smallvec, SmallVec}; use std::mem; @@ -254,7 +254,8 @@ pub(super) fn transcribe<'a>( MatchedTokenTree(tt) => { // `tt`s are emitted into the output stream directly as "raw tokens", // without wrapping them into groups. 
- result.push(maybe_use_metavar_location(cx, &stack, sp, tt)); + let tt = maybe_use_metavar_location(cx, &stack, sp, tt, &mut marker); + result.push(tt); } MatchedNonterminal(nt) => { // Other variables are emitted into the output stream as groups with @@ -319,6 +320,17 @@ pub(super) fn transcribe<'a>( } } +/// Store the metavariable span for this original span into a side table. +/// FIXME: Try to put the metavariable span into `SpanData` instead of a side table (#118517). +/// An optimal encoding for inlined spans will need to be selected to minimize regressions. +/// The side table approach is relatively good, but not perfect due to collisions. +/// In particular, collisions happen when token is passed as an argument through several macro +/// calls, like in recursive macros. +/// The old heuristic below is used to improve spans in case of collisions, but diagnostics are +/// still degraded sometimes in those cases. +/// +/// The old heuristic: +/// /// Usually metavariables `$var` produce interpolated tokens, which have an additional place for /// keeping both the original span and the metavariable span. For `tt` metavariables that's not the /// case however, and there's no place for keeping a second span. So we try to give the single @@ -338,15 +350,12 @@ pub(super) fn transcribe<'a>( /// These are typically used for passing larger amounts of code, and tokens in that code usually /// combine with each other and not with tokens outside of the sequence. /// - The metavariable span comes from a different crate, then we prefer the more local span. -/// -/// FIXME: Find a way to keep both original and metavariable spans for all tokens without -/// regressing compilation time too much. Several experiments for adding such spans were made in -/// the past (PR #95580, #118517, #118671) and all showed some regressions. 
fn maybe_use_metavar_location( cx: &ExtCtxt<'_>, stack: &[Frame<'_>], - metavar_span: Span, + mut metavar_span: Span, orig_tt: &TokenTree, + marker: &mut Marker, ) -> TokenTree { let undelimited_seq = matches!( stack.last(), @@ -357,18 +366,44 @@ fn maybe_use_metavar_location( .. }) ); - if undelimited_seq || cx.source_map().is_imported(metavar_span) { + if undelimited_seq { + // Do not record metavar spans for tokens from undelimited sequences, for perf reasons. return orig_tt.clone(); } + let insert = |mspans: &mut FxHashMap<_, _>, s, ms| match mspans.try_insert(s, ms) { + Ok(_) => true, + Err(err) => *err.entry.get() == ms, // Tried to insert the same span, still success + }; + marker.visit_span(&mut metavar_span); + let no_collision = match orig_tt { + TokenTree::Token(token, ..) => { + with_metavar_spans(|mspans| insert(mspans, token.span, metavar_span)) + } + TokenTree::Delimited(dspan, ..) => with_metavar_spans(|mspans| { + insert(mspans, dspan.open, metavar_span) + && insert(mspans, dspan.close, metavar_span) + && insert(mspans, dspan.entire(), metavar_span) + }), + }; + if no_collision || cx.source_map().is_imported(metavar_span) { + return orig_tt.clone(); + } + + // Setting metavar spans for the heuristic spans gives better opportunities for combining them + // with neighboring spans even despite their different syntactic contexts. 
match orig_tt { TokenTree::Token(Token { kind, span }, spacing) => { let span = metavar_span.with_ctxt(span.ctxt()); + with_metavar_spans(|mspans| insert(mspans, span, metavar_span)); TokenTree::Token(Token { kind: kind.clone(), span }, *spacing) } TokenTree::Delimited(dspan, dspacing, delimiter, tts) => { - let open = metavar_span.shrink_to_lo().with_ctxt(dspan.open.ctxt()); - let close = metavar_span.shrink_to_hi().with_ctxt(dspan.close.ctxt()); + let open = metavar_span.with_ctxt(dspan.open.ctxt()); + let close = metavar_span.with_ctxt(dspan.close.ctxt()); + with_metavar_spans(|mspans| { + insert(mspans, open, metavar_span) && insert(mspans, close, metavar_span) + }); let dspan = DelimSpan::from_pair(open, close); TokenTree::Delimited(dspan, *dspacing, *delimiter, tts.clone()) } diff --git a/compiler/rustc_span/src/lib.rs b/compiler/rustc_span/src/lib.rs index 49ae9f16c8e5..616a7ccc7c64 100644 --- a/compiler/rustc_span/src/lib.rs +++ b/compiler/rustc_span/src/lib.rs @@ -72,6 +72,7 @@ pub mod fatal_error; pub mod profiling; +use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::stable_hasher::{Hash128, Hash64, HashStable, StableHasher}; use rustc_data_structures::sync::{FreezeLock, FreezeWriteGuard, Lock, Lrc}; @@ -98,6 +99,9 @@ mod tests; pub struct SessionGlobals { symbol_interner: symbol::Interner, span_interner: Lock, + /// Maps a macro argument token into use of the corresponding metavariable in the macro body. + /// Collisions are possible and processed in `maybe_use_metavar_location` on best effort basis. + metavar_spans: Lock>, hygiene_data: Lock, /// A reference to the source map in the `Session`. 
It's an `Option` @@ -115,6 +119,7 @@ impl SessionGlobals { SessionGlobals { symbol_interner: symbol::Interner::fresh(), span_interner: Lock::new(span_encoding::SpanInterner::default()), + metavar_spans: Default::default(), hygiene_data: Lock::new(hygiene::HygieneData::new(edition)), source_map: Lock::new(None), } @@ -168,6 +173,11 @@ pub fn create_default_session_globals_then(f: impl FnOnce() -> R) -> R { // deserialization. scoped_tls::scoped_thread_local!(static SESSION_GLOBALS: SessionGlobals); +#[inline] +pub fn with_metavar_spans(f: impl FnOnce(&mut FxHashMap) -> R) -> R { + with_session_globals(|session_globals| f(&mut session_globals.metavar_spans.lock())) +} + // FIXME: We should use this enum or something like it to get rid of the // use of magic `/rust/1.x/...` paths across the board. #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Decodable)] @@ -824,29 +834,64 @@ impl Span { ) } + /// Check if you can select metavar spans for the given spans to get matching contexts. + fn try_metavars(a: SpanData, b: SpanData, a_orig: Span, b_orig: Span) -> (SpanData, SpanData) { + let get = |mspans: &FxHashMap<_, _>, s| mspans.get(&s).copied(); + match with_metavar_spans(|mspans| (get(mspans, a_orig), get(mspans, b_orig))) { + (None, None) => {} + (Some(meta_a), None) => { + let meta_a = meta_a.data(); + if meta_a.ctxt == b.ctxt { + return (meta_a, b); + } + } + (None, Some(meta_b)) => { + let meta_b = meta_b.data(); + if a.ctxt == meta_b.ctxt { + return (a, meta_b); + } + } + (Some(meta_a), Some(meta_b)) => { + let meta_b = meta_b.data(); + if a.ctxt == meta_b.ctxt { + return (a, meta_b); + } + let meta_a = meta_a.data(); + if meta_a.ctxt == b.ctxt { + return (meta_a, b); + } else if meta_a.ctxt == meta_b.ctxt { + return (meta_a, meta_b); + } + } + } + + (a, b) + } + /// Prepare two spans to a combine operation like `to` or `between`. 
- /// FIXME: consider using declarative macro metavariable spans for the given spans if they are - /// better suitable for combining (#119412). fn prepare_to_combine( a_orig: Span, b_orig: Span, ) -> Result<(SpanData, SpanData, Option), Span> { let (a, b) = (a_orig.data(), b_orig.data()); - - if a.ctxt != b.ctxt { - // Context mismatches usually happen when procedural macros combine spans copied from - // the macro input with spans produced by the macro (`Span::*_site`). - // In that case we consider the combined span to be produced by the macro and return - // the original macro-produced span as the result. - // Otherwise we just fall back to returning the first span. - // Combining locations typically doesn't make sense in case of context mismatches. - // `is_root` here is a fast path optimization. - let a_is_callsite = a.ctxt.is_root() || a.ctxt == b.span().source_callsite().ctxt(); - return Err(if a_is_callsite { b_orig } else { a_orig }); + if a.ctxt == b.ctxt { + return Ok((a, b, if a.parent == b.parent { a.parent } else { None })); } - let parent = if a.parent == b.parent { a.parent } else { None }; - Ok((a, b, parent)) + let (a, b) = Span::try_metavars(a, b, a_orig, b_orig); + if a.ctxt == b.ctxt { + return Ok((a, b, if a.parent == b.parent { a.parent } else { None })); + } + + // Context mismatches usually happen when procedural macros combine spans copied from + // the macro input with spans produced by the macro (`Span::*_site`). + // In that case we consider the combined span to be produced by the macro and return + // the original macro-produced span as the result. + // Otherwise we just fall back to returning the first span. + // Combining locations typically doesn't make sense in case of context mismatches. + // `is_root` here is a fast path optimization. 
+ let a_is_callsite = a.ctxt.is_root() || a.ctxt == b.span().source_callsite().ctxt(); + Err(if a_is_callsite { b_orig } else { a_orig }) } /// This span, but in a larger context, may switch to the metavariable span if suitable. diff --git a/tests/coverage/no_spans.cov-map b/tests/coverage/no_spans.cov-map index 9915fc52e6db..30171c3f3193 100644 --- a/tests/coverage/no_spans.cov-map +++ b/tests/coverage/no_spans.cov-map @@ -1,3 +1,11 @@ +Function name: no_spans::affected_function +Raw bytes (9): 0x[01, 01, 00, 01, 01, 1a, 1c, 00, 1d] +Number of files: 1 +- file 0 => global file 1 +Number of expressions: 0 +Number of file 0 mappings: 1 +- Code(Counter(0)) at (prev + 26, 28) to (start + 0, 29) + Function name: no_spans::affected_function::{closure#0} Raw bytes (9): 0x[01, 01, 00, 01, 01, 1b, 0c, 00, 0e] Number of files: 1 diff --git a/tests/coverage/no_spans_if_not.cov-map b/tests/coverage/no_spans_if_not.cov-map index 5277267ec1b0..bc3e14eddd54 100644 --- a/tests/coverage/no_spans_if_not.cov-map +++ b/tests/coverage/no_spans_if_not.cov-map @@ -1,3 +1,15 @@ +Function name: no_spans_if_not::affected_function +Raw bytes (21): 0x[01, 01, 01, 01, 00, 03, 01, 16, 1c, 01, 12, 02, 02, 0d, 00, 0f, 00, 02, 0d, 00, 0f] +Number of files: 1 +- file 0 => global file 1 +Number of expressions: 1 +- expression 0 operands: lhs = Counter(0), rhs = Zero +Number of file 0 mappings: 3 +- Code(Counter(0)) at (prev + 22, 28) to (start + 1, 18) +- Code(Expression(0, Sub)) at (prev + 2, 13) to (start + 0, 15) + = (c0 - Zero) +- Code(Zero) at (prev + 2, 13) to (start + 0, 15) + Function name: no_spans_if_not::main Raw bytes (9): 0x[01, 01, 00, 01, 01, 0b, 01, 02, 02] Number of files: 1 diff --git a/tests/ui/anon-params/anon-params-edition-hygiene.rs b/tests/ui/anon-params/anon-params-edition-hygiene.rs index 607412f44c4c..56e7336a7a52 100644 --- a/tests/ui/anon-params/anon-params-edition-hygiene.rs +++ b/tests/ui/anon-params/anon-params-edition-hygiene.rs @@ -1,3 +1,4 @@ +//@ check-pass //@ 
edition:2018 //@ aux-build:anon-params-edition-hygiene.rs @@ -8,7 +9,6 @@ extern crate anon_params_edition_hygiene; generate_trait_2015_ident!(u8); -// FIXME: Edition hygiene doesn't work correctly with `tt`s in this case. -generate_trait_2015_tt!(u8); //~ ERROR expected one of `:`, `@`, or `|`, found `)` +generate_trait_2015_tt!(u8); fn main() {} diff --git a/tests/ui/anon-params/anon-params-edition-hygiene.stderr b/tests/ui/anon-params/anon-params-edition-hygiene.stderr deleted file mode 100644 index 373d7c6aebb5..000000000000 --- a/tests/ui/anon-params/anon-params-edition-hygiene.stderr +++ /dev/null @@ -1,23 +0,0 @@ -error: expected one of `:`, `@`, or `|`, found `)` - --> $DIR/anon-params-edition-hygiene.rs:12:1 - | -LL | generate_trait_2015_tt!(u8); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected one of `:`, `@`, or `|` - | - = note: anonymous parameters are removed in the 2018 edition (see RFC 1685) - = note: this error originates in the macro `generate_trait_2015_tt` (in Nightly builds, run with -Z macro-backtrace for more info) -help: if this is a `self` type, give it a parameter name - | -LL | generate_trait_2015_tt!(self: u8); - | +++++ -help: if this is a parameter name, give it a type - | -LL | generate_trait_2015_tt!(u8: TypeName); - | ++++++++++ -help: if this is a type, explicitly ignore the parameter name - | -LL | generate_trait_2015_tt!(_: u8); - | ++ - -error: aborting due to 1 previous error - diff --git a/tests/ui/editions/edition-keywords-2018-2018-parsing.rs b/tests/ui/editions/edition-keywords-2018-2018-parsing.rs index e3eed775a5fe..4975246fa942 100644 --- a/tests/ui/editions/edition-keywords-2018-2018-parsing.rs +++ b/tests/ui/editions/edition-keywords-2018-2018-parsing.rs @@ -16,7 +16,7 @@ macro_rules! local_passes_ident { ($i: ident) => ($i) //~ ERROR macro expansion ends with an incomplete expression } macro_rules! 
local_passes_tt { - ($i: tt) => ($i) //~ ERROR macro expansion ends with an incomplete expression + ($i: tt) => ($i) } pub fn check_async() { @@ -34,7 +34,7 @@ pub fn check_async() { if passes_tt!(r#async) == 1 {} // OK if local_passes_ident!(async) == 1 {} // Error reported above in the macro if local_passes_ident!(r#async) == 1 {} // OK - if local_passes_tt!(async) == 1 {} // Error reported above in the macro + if local_passes_tt!(async) == 1 {} //~ ERROR macro expansion ends with an incomplete expression if local_passes_tt!(r#async) == 1 {} // OK module::async(); //~ ERROR expected identifier, found keyword `async` module::r#async(); // OK diff --git a/tests/ui/editions/edition-keywords-2018-2018-parsing.stderr b/tests/ui/editions/edition-keywords-2018-2018-parsing.stderr index 6f08cff433b1..4bbe1597233c 100644 --- a/tests/ui/editions/edition-keywords-2018-2018-parsing.stderr +++ b/tests/ui/editions/edition-keywords-2018-2018-parsing.stderr @@ -68,10 +68,10 @@ LL | ($i: ident) => ($i) | ^ expected one of `move`, `|`, or `||` error: macro expansion ends with an incomplete expression: expected one of `move`, `|`, or `||` - --> $DIR/edition-keywords-2018-2018-parsing.rs:19:20 + --> $DIR/edition-keywords-2018-2018-parsing.rs:37:30 | -LL | ($i: tt) => ($i) - | ^ expected one of `move`, `|`, or `||` +LL | if local_passes_tt!(async) == 1 {} + | ^ expected one of `move`, `|`, or `||` error[E0308]: mismatched types --> $DIR/edition-keywords-2018-2018-parsing.rs:42:33 diff --git a/tests/ui/fmt/format-args-capture-first-literal-is-macro.rs b/tests/ui/fmt/format-args-capture-first-literal-is-macro.rs index 5afd21a17e55..3a0b7ba46662 100644 --- a/tests/ui/fmt/format-args-capture-first-literal-is-macro.rs +++ b/tests/ui/fmt/format-args-capture-first-literal-is-macro.rs @@ -6,7 +6,6 @@ extern crate format_string_proc_macro; macro_rules! 
identity_mbe { ($tt:tt) => { $tt - //~^ ERROR there is no argument named `a` }; } @@ -16,6 +15,7 @@ fn main() { format!(identity_pm!("{a}")); //~^ ERROR there is no argument named `a` format!(identity_mbe!("{a}")); + //~^ ERROR there is no argument named `a` format!(concat!("{a}")); //~^ ERROR there is no argument named `a` } diff --git a/tests/ui/fmt/format-args-capture-first-literal-is-macro.stderr b/tests/ui/fmt/format-args-capture-first-literal-is-macro.stderr index 4cf3afad7b8f..e399361579ff 100644 --- a/tests/ui/fmt/format-args-capture-first-literal-is-macro.stderr +++ b/tests/ui/fmt/format-args-capture-first-literal-is-macro.stderr @@ -1,5 +1,5 @@ error: there is no argument named `a` - --> $DIR/format-args-capture-first-literal-is-macro.rs:16:26 + --> $DIR/format-args-capture-first-literal-is-macro.rs:15:26 | LL | format!(identity_pm!("{a}")); | ^^^^^ @@ -8,10 +8,10 @@ LL | format!(identity_pm!("{a}")); = note: to avoid ambiguity, `format_args!` cannot capture variables when the format string is expanded from a macro error: there is no argument named `a` - --> $DIR/format-args-capture-first-literal-is-macro.rs:8:9 + --> $DIR/format-args-capture-first-literal-is-macro.rs:17:27 | -LL | $tt - | ^^^ +LL | format!(identity_mbe!("{a}")); + | ^^^^^ | = note: did you intend to capture a variable `a` from the surrounding scope? = note: to avoid ambiguity, `format_args!` cannot capture variables when the format string is expanded from a macro diff --git a/tests/ui/lint/wide_pointer_comparisons.rs b/tests/ui/lint/wide_pointer_comparisons.rs index 37807776d2f1..313690010754 100644 --- a/tests/ui/lint/wide_pointer_comparisons.rs +++ b/tests/ui/lint/wide_pointer_comparisons.rs @@ -110,10 +110,12 @@ fn main() { { macro_rules! cmp { ($a:tt, $b:tt) => { $a == $b } - //~^ WARN ambiguous wide pointer comparison } + // FIXME: This lint uses some custom span combination logic. + // Rewrite it to adapt to the new metavariable span rules. 
cmp!(a, b); + //~^ WARN ambiguous wide pointer comparison } { diff --git a/tests/ui/lint/wide_pointer_comparisons.stderr b/tests/ui/lint/wide_pointer_comparisons.stderr index 349ff467d0fb..6ef117c63c5b 100644 --- a/tests/ui/lint/wide_pointer_comparisons.stderr +++ b/tests/ui/lint/wide_pointer_comparisons.stderr @@ -421,18 +421,18 @@ LL | std::ptr::eq(*a, *b) | ~~~~~~~~~~~~~ ~ + warning: ambiguous wide pointer comparison, the comparison includes metadata which may not be expected - --> $DIR/wide_pointer_comparisons.rs:112:33 + --> $DIR/wide_pointer_comparisons.rs:117:14 | -LL | ($a:tt, $b:tt) => { $a == $b } - | ^^^^^^^^ +LL | cmp!(a, b); + | ^^^^ | help: use `std::ptr::addr_eq` or untyped pointers to only compare their addresses | -LL | ($a:tt, $b:tt) => { std::ptr::addr_eq($a, $b) } - | ++++++++++++++++++ ~ + +LL | cmp!(std::ptr::addr_eq(a, b)); + | ++++++++++++++++++ ~ + warning: ambiguous wide pointer comparison, the comparison includes metadata which may not be expected - --> $DIR/wide_pointer_comparisons.rs:121:39 + --> $DIR/wide_pointer_comparisons.rs:123:39 | LL | ($a:ident, $b:ident) => { $a == $b } | ^^^^^^^^ @@ -447,7 +447,7 @@ LL | ($a:ident, $b:ident) => { std::ptr::addr_eq($a, $b) } | ++++++++++++++++++ ~ + warning: ambiguous wide pointer comparison, the comparison includes metadata which may not be expected - --> $DIR/wide_pointer_comparisons.rs:131:37 + --> $DIR/wide_pointer_comparisons.rs:133:37 | LL | ($a:expr, $b:expr) => { $a == $b } | ^^ diff --git a/tests/ui/macros/issue-118786.rs b/tests/ui/macros/issue-118786.rs index cc6c751813b3..97454c9de079 100644 --- a/tests/ui/macros/issue-118786.rs +++ b/tests/ui/macros/issue-118786.rs @@ -5,8 +5,7 @@ macro_rules! make_macro { ($macro_name:tt) => { macro_rules! 
$macro_name { - //~^ ERROR macros that expand to items must be delimited with braces or followed by a semicolon - //~| ERROR macro expansion ignores token `{` and any following + //~^ ERROR macro expansion ignores token `{` and any following //~| ERROR cannot find macro `macro_rules` in this scope () => {} } @@ -14,3 +13,4 @@ macro_rules! make_macro { } make_macro!((meow)); +//~^ ERROR macros that expand to items must be delimited with braces or followed by a semicolon diff --git a/tests/ui/macros/issue-118786.stderr b/tests/ui/macros/issue-118786.stderr index 1a8ac9340daa..03e65c94ba73 100644 --- a/tests/ui/macros/issue-118786.stderr +++ b/tests/ui/macros/issue-118786.stderr @@ -1,13 +1,13 @@ error: macros that expand to items must be delimited with braces or followed by a semicolon - --> $DIR/issue-118786.rs:7:22 + --> $DIR/issue-118786.rs:15:13 | -LL | macro_rules! $macro_name { - | ^^^^^^^^^^^ +LL | make_macro!((meow)); + | ^^^^^^ | help: change the delimiters to curly braces | -LL | macro_rules! {$macro_name} { - | + + +LL | make_macro!({meow}); + | ~ ~ help: add a semicolon | LL | macro_rules! 
$macro_name; { diff --git a/tests/ui/methods/method-on-ambiguous-numeric-type.stderr b/tests/ui/methods/method-on-ambiguous-numeric-type.stderr index 060595e1d406..124270402727 100644 --- a/tests/ui/methods/method-on-ambiguous-numeric-type.stderr +++ b/tests/ui/methods/method-on-ambiguous-numeric-type.stderr @@ -47,8 +47,8 @@ LL | local_bar_tt.pow(2); | help: you must specify a type for this binding, like `i32` | -LL | ($tt:tt) => { let $tt: i32 = 42; } - | +++++ +LL | local_mac_tt!(local_bar_tt: i32); + | +++++ error[E0689]: can't call method `pow` on ambiguous numeric type `{integer}` --> $DIR/method-on-ambiguous-numeric-type.rs:37:9 diff --git a/tests/ui/parser/issues/issue-44406.stderr b/tests/ui/parser/issues/issue-44406.stderr index d005f116e124..78cde9b6dcac 100644 --- a/tests/ui/parser/issues/issue-44406.stderr +++ b/tests/ui/parser/issues/issue-44406.stderr @@ -15,7 +15,7 @@ LL | bar { baz: $rest } help: if `bar` is a function, use the arguments directly | LL - bar(baz: $rest) -LL + bar(: $rest) +LL + bar($rest) | error: aborting due to 1 previous error diff --git a/tests/ui/parser/issues/issue-68091-unicode-ident-after-if.stderr b/tests/ui/parser/issues/issue-68091-unicode-ident-after-if.stderr index 8e125864b8bc..a68fae1a36ec 100644 --- a/tests/ui/parser/issues/issue-68091-unicode-ident-after-if.stderr +++ b/tests/ui/parser/issues/issue-68091-unicode-ident-after-if.stderr @@ -5,6 +5,11 @@ LL | $($c)ö* {} | ^ - if this block is the condition of the `if` expression, then it must be followed by another block | | | expected condition here +... 
+LL | x!(if); + | ------ in this macro invocation + | + = note: this error originates in the macro `x` (in Nightly builds, run with -Z macro-backtrace for more info) error: aborting due to 1 previous error diff --git a/tests/ui/parser/issues/issue-68092-unicode-ident-after-incomplete-expr.rs b/tests/ui/parser/issues/issue-68092-unicode-ident-after-incomplete-expr.rs index 1a90b4724d49..d489df85c44d 100644 --- a/tests/ui/parser/issues/issue-68092-unicode-ident-after-incomplete-expr.rs +++ b/tests/ui/parser/issues/issue-68092-unicode-ident-after-incomplete-expr.rs @@ -1,9 +1,9 @@ macro_rules! x { ($($c:tt)*) => { - $($c)ö* //~ ERROR macro expansion ends with an incomplete expression: expected expression + $($c)ö* }; } fn main() { - x!(!); + x!(!); //~ ERROR macro expansion ends with an incomplete expression: expected expression } diff --git a/tests/ui/parser/issues/issue-68092-unicode-ident-after-incomplete-expr.stderr b/tests/ui/parser/issues/issue-68092-unicode-ident-after-incomplete-expr.stderr index 15aa62e08108..3a4b0cbf2a5b 100644 --- a/tests/ui/parser/issues/issue-68092-unicode-ident-after-incomplete-expr.stderr +++ b/tests/ui/parser/issues/issue-68092-unicode-ident-after-incomplete-expr.stderr @@ -1,8 +1,8 @@ error: macro expansion ends with an incomplete expression: expected expression - --> $DIR/issue-68092-unicode-ident-after-incomplete-expr.rs:3:13 + --> $DIR/issue-68092-unicode-ident-after-incomplete-expr.rs:8:9 | -LL | $($c)ö* - | ^ expected expression +LL | x!(!); + | ^ expected expression error: aborting due to 1 previous error diff --git a/tests/ui/proc-macro/capture-macro-rules-invoke.stdout b/tests/ui/proc-macro/capture-macro-rules-invoke.stdout index bbab08bca499..71e34119ba7e 100644 --- a/tests/ui/proc-macro/capture-macro-rules-invoke.stdout +++ b/tests/ui/proc-macro/capture-macro-rules-invoke.stdout @@ -271,7 +271,7 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: $DIR/capture-macro-rules-invoke.rs:47:19: 47:20 (#0), }, ], - span: 
$DIR/capture-macro-rules-invoke.rs:15:60: 15:63 (#0), + span: $DIR/capture-macro-rules-invoke.rs:47:13: 47:22 (#0), }, Punct { ch: ',', diff --git a/tests/ui/rust-2018/edition-lint-infer-outlives-macro.fixed b/tests/ui/rust-2018/edition-lint-infer-outlives-macro.fixed index 38d8a36bcdfa..435857224ac3 100644 --- a/tests/ui/rust-2018/edition-lint-infer-outlives-macro.fixed +++ b/tests/ui/rust-2018/edition-lint-infer-outlives-macro.fixed @@ -111,10 +111,13 @@ mod everything_outside_with_tt_inner { mod everything_outside_with_tt_outer { macro_rules! m { ($b:lifetime $colon:tt $a:tt) => { - struct Foo<$a, $b >(&$a &$b ()); + // FIXME: replacement span is corrupted due to a collision in metavar span table. + // struct Foo<$a, $b $colon $a>(&$a &$b ()); + // ^ ERROR: outlives requirements can be inferred + struct Bar<$a, $b>(&$a &$b ()) ; + //~^ ERROR: outlives requirements can be inferred + struct Baz<$a, $b>(&$a &$b ()) where (): Sized, ; //~^ ERROR: outlives requirements can be inferred - struct Bar<$a, $b>(&$a &$b ()) where $b $colon $a; - struct Baz<$a, $b>(&$a &$b ()) where (): Sized, $b $colon $a; } } m!('b: 'a); @@ -123,9 +126,10 @@ mod everything_outside_with_tt_outer { mod everything_outside_with_tt_both { macro_rules! m { ($b:tt $colon:tt $a:tt) => { - struct Foo<$a, $b >(&$a &$b ()); - //~^ ERROR: outlives requirements can be inferred - struct Bar<$a, $b>(&$a &$b ()) where ; + // FIXME: replacement span is corrupted due to a collision in metavar span table. 
+ // struct Foo<$a, $b $colon $a>(&$a &$b ()); + // ^ ERROR: outlives requirements can be inferred + struct Bar<$a, $b>(&$a &$b ()) ; //~^ ERROR: outlives requirements can be inferred struct Baz<$a, $b>(&$a &$b ()) where (): Sized, ; //~^ ERROR: outlives requirements can be inferred diff --git a/tests/ui/rust-2018/edition-lint-infer-outlives-macro.rs b/tests/ui/rust-2018/edition-lint-infer-outlives-macro.rs index 60eedf7b069a..6c879231a168 100644 --- a/tests/ui/rust-2018/edition-lint-infer-outlives-macro.rs +++ b/tests/ui/rust-2018/edition-lint-infer-outlives-macro.rs @@ -111,10 +111,13 @@ mod everything_outside_with_tt_inner { mod everything_outside_with_tt_outer { macro_rules! m { ($b:lifetime $colon:tt $a:tt) => { - struct Foo<$a, $b $colon $a>(&$a &$b ()); - //~^ ERROR: outlives requirements can be inferred + // FIXME: replacement span is corrupted due to a collision in metavar span table. + // struct Foo<$a, $b $colon $a>(&$a &$b ()); + // ^ ERROR: outlives requirements can be inferred struct Bar<$a, $b>(&$a &$b ()) where $b $colon $a; + //~^ ERROR: outlives requirements can be inferred struct Baz<$a, $b>(&$a &$b ()) where (): Sized, $b $colon $a; + //~^ ERROR: outlives requirements can be inferred } } m!('b: 'a); @@ -123,8 +126,9 @@ mod everything_outside_with_tt_outer { mod everything_outside_with_tt_both { macro_rules! m { ($b:tt $colon:tt $a:tt) => { - struct Foo<$a, $b $colon $a>(&$a &$b ()); - //~^ ERROR: outlives requirements can be inferred + // FIXME: replacement span is corrupted due to a collision in metavar span table. 
+ // struct Foo<$a, $b $colon $a>(&$a &$b ()); + // ^ ERROR: outlives requirements can be inferred struct Bar<$a, $b>(&$a &$b ()) where $b $colon $a; //~^ ERROR: outlives requirements can be inferred struct Baz<$a, $b>(&$a &$b ()) where (): Sized, $b $colon $a; diff --git a/tests/ui/rust-2018/edition-lint-infer-outlives-macro.stderr b/tests/ui/rust-2018/edition-lint-infer-outlives-macro.stderr index 734ae6879789..d684911be397 100644 --- a/tests/ui/rust-2018/edition-lint-infer-outlives-macro.stderr +++ b/tests/ui/rust-2018/edition-lint-infer-outlives-macro.stderr @@ -83,25 +83,40 @@ LL | m!('b: 'a); = note: this error originates in the macro `m` (in Nightly builds, run with -Z macro-backtrace for more info) error: outlives requirements can be inferred - --> $DIR/edition-lint-infer-outlives-macro.rs:114:31 - | -LL | struct Foo<$a, $b $colon $a>(&$a &$b ()); - | ^^^^^^^^^ help: remove this bound - -error: outlives requirements can be inferred - --> $DIR/edition-lint-infer-outlives-macro.rs:126:31 - | -LL | struct Foo<$a, $b $colon $a>(&$a &$b ()); - | ^^^^^^^^^ help: remove this bound - -error: outlives requirements can be inferred - --> $DIR/edition-lint-infer-outlives-macro.rs:128:50 + --> $DIR/edition-lint-infer-outlives-macro.rs:117:44 | LL | struct Bar<$a, $b>(&$a &$b ()) where $b $colon $a; - | ^^^^^^^^^^^^ help: remove this bound + | ^^^^^^^^^^^^^^^^^^ help: remove this bound +... +LL | m!('b: 'a); + | ---------- in this macro invocation + | + = note: this error originates in the macro `m` (in Nightly builds, run with -Z macro-backtrace for more info) error: outlives requirements can be inferred - --> $DIR/edition-lint-infer-outlives-macro.rs:130:61 + --> $DIR/edition-lint-infer-outlives-macro.rs:119:61 + | +LL | struct Baz<$a, $b>(&$a &$b ()) where (): Sized, $b $colon $a; + | ^^^^^^^^^^^^ help: remove this bound +... 
+LL | m!('b: 'a); + | ---------- in this macro invocation + | + = note: this error originates in the macro `m` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: outlives requirements can be inferred + --> $DIR/edition-lint-infer-outlives-macro.rs:132:44 + | +LL | struct Bar<$a, $b>(&$a &$b ()) where $b $colon $a; + | ^^^^^^^^^^^^^^^^^^ help: remove this bound +... +LL | m!('b: 'a); + | ---------- in this macro invocation + | + = note: this error originates in the macro `m` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: outlives requirements can be inferred + --> $DIR/edition-lint-infer-outlives-macro.rs:134:61 | LL | struct Baz<$a, $b>(&$a &$b ()) where (): Sized, $b $colon $a; | ^^^^^^^^^^^^ help: remove this bound diff --git a/tests/ui/span/macro-span-replacement.rs b/tests/ui/span/macro-span-replacement.rs index e6fcfa4c7090..16f4018cf3e6 100644 --- a/tests/ui/span/macro-span-replacement.rs +++ b/tests/ui/span/macro-span-replacement.rs @@ -4,10 +4,10 @@ macro_rules! m { ($a:tt $b:tt) => { - $b $a; //~ WARN struct `S` is never constructed + $b $a; } } fn main() { - m!(S struct); + m!(S struct); //~ WARN struct `S` is never constructed } diff --git a/tests/ui/span/macro-span-replacement.stderr b/tests/ui/span/macro-span-replacement.stderr index 5dd563428897..6248db112f88 100644 --- a/tests/ui/span/macro-span-replacement.stderr +++ b/tests/ui/span/macro-span-replacement.stderr @@ -1,11 +1,8 @@ warning: struct `S` is never constructed - --> $DIR/macro-span-replacement.rs:7:12 + --> $DIR/macro-span-replacement.rs:12:8 | -LL | $b $a; - | ^^ -... 
LL | m!(S struct); - | ------------ in this macro invocation + | ^ | note: the lint level is defined here --> $DIR/macro-span-replacement.rs:3:9 diff --git a/tests/ui/type/type-check/issue-88577-check-fn-with-more-than-65535-arguments.stderr b/tests/ui/type/type-check/issue-88577-check-fn-with-more-than-65535-arguments.stderr index 1ef02321e15c..1a3848bbcf58 100644 --- a/tests/ui/type/type-check/issue-88577-check-fn-with-more-than-65535-arguments.stderr +++ b/tests/ui/type/type-check/issue-88577-check-fn-with-more-than-65535-arguments.stderr @@ -1,8 +1,8 @@ error: function can not have more than 65535 arguments - --> $DIR/issue-88577-check-fn-with-more-than-65535-arguments.rs:6:22 + --> $DIR/issue-88577-check-fn-with-more-than-65535-arguments.rs:6:17 | LL | fn _f($($t: ()),*) {} - | ^ + | ^^^^^^ ... LL | many_args!{[_]########## ######} | -------------------------------- in this macro invocation diff --git a/tests/ui/typeck/issue-116473-ice-wrong-span-variant-args.rs b/tests/ui/typeck/issue-116473-ice-wrong-span-variant-args.rs index c319c63eda26..cc66b5fd6f2b 100644 --- a/tests/ui/typeck/issue-116473-ice-wrong-span-variant-args.rs +++ b/tests/ui/typeck/issue-116473-ice-wrong-span-variant-args.rs @@ -58,12 +58,12 @@ macro_rules! nested2_ident { // instead of the enum variant macro_rules! nested1_tt_args_in_first_macro { () => (nested2_tt_args_in_first_macro!(i32, u32)); + //~^ ERROR type arguments are not allowed on this type } macro_rules! 
nested2_tt_args_in_first_macro { ($arg1:tt, $arg2:tt) => (if let EnumUnit::VariantB::<$arg1, $arg2> {} - //~^ ERROR type arguments are not allowed on this type - //~| ERROR mismatched types + //~^ ERROR mismatched types = 5 { true } else { false }); } diff --git a/tests/ui/typeck/issue-116473-ice-wrong-span-variant-args.stderr b/tests/ui/typeck/issue-116473-ice-wrong-span-variant-args.stderr index b17936ee3d35..cb7666657ef4 100644 --- a/tests/ui/typeck/issue-116473-ice-wrong-span-variant-args.stderr +++ b/tests/ui/typeck/issue-116473-ice-wrong-span-variant-args.stderr @@ -1,10 +1,10 @@ error[E0109]: type arguments are not allowed on this type --> $DIR/issue-116473-ice-wrong-span-variant-args.rs:15:51 | +LL | () => (recursive_tt!(VariantB)); + | -------- not allowed on this type LL | ($variant:tt) => (if let EnumUnit::$variant:: {} = 5 { true } else { false }); - | -------- ^^^ ^^^ type argument not allowed - | | - | not allowed on this type + | ^^^ ^^^ type argument not allowed ... LL | recursive_tt!(); | --------------- in this macro invocation @@ -69,10 +69,11 @@ LL | recursive_ident!(); error[E0109]: type arguments are not allowed on this type --> $DIR/issue-116473-ice-wrong-span-variant-args.rs:38:51 | +LL | () => (nested2_tt!(VariantB)); + | -------- not allowed on this type +... LL | ($variant:tt) => (if let EnumUnit::$variant:: {} = 5 { true } else { false }); - | -------- ^^^ ^^^ type argument not allowed - | | - | not allowed on this type + | ^^^ ^^^ type argument not allowed ... 
LL | nested1_tt!(); | ------------- in this macro invocation @@ -136,12 +137,13 @@ LL | nested1_ident!(); = note: this error originates in the macro `nested2_ident` which comes from the expansion of the macro `nested1_ident` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0109]: type arguments are not allowed on this type - --> $DIR/issue-116473-ice-wrong-span-variant-args.rs:64:58 + --> $DIR/issue-116473-ice-wrong-span-variant-args.rs:60:44 | +LL | () => (nested2_tt_args_in_first_macro!(i32, u32)); + | ^^^ ^^^ type argument not allowed +... LL | ($arg1:tt, $arg2:tt) => (if let EnumUnit::VariantB::<$arg1, $arg2> {} - | -------- ^^^^^ ^^^^^ type argument not allowed - | | - | not allowed on this type + | -------- not allowed on this type ... LL | nested1_tt_args_in_first_macro!(); | --------------------------------- in this macro invocation @@ -155,11 +157,11 @@ LL + ($arg1:tt, $arg2:tt) => (if let EnumUnit::<$arg1, $arg2>::VariantB {} | error[E0308]: mismatched types - --> $DIR/issue-116473-ice-wrong-span-variant-args.rs:64:37 + --> $DIR/issue-116473-ice-wrong-span-variant-args.rs:65:37 | LL | ($arg1:tt, $arg2:tt) => (if let EnumUnit::VariantB::<$arg1, $arg2> {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected integer, found `Enum<(), ()>` -... +LL | LL | = 5 { true } else { false }); | - this expression has type `{integer}` ... From a61126cef6c4083d57e22835033eb2eefdd31bac Mon Sep 17 00:00:00 2001 From: surechen Date: Fri, 10 Nov 2023 10:11:24 +0800 Subject: [PATCH 116/127] By tracking import use types to check whether it is scope uses or the other situations like module-relative uses, we can do more accurate redundant import checking. 
fixes #117448 For example unnecessary imports in std::prelude that can be eliminated: ```rust use std::option::Option::Some;//~ WARNING the item `Some` is imported redundantly use std::option::Option::None; //~ WARNING the item `None` is imported redundantly ``` --- .../rustc_data_structures/src/owned_slice.rs | 12 +-- .../rustc_hir_analysis/src/check/check.rs | 3 +- .../missing_cast_for_variadic_arg.rs | 2 +- .../structured_errors/sized_unsized_cast.rs | 2 +- .../wrong_number_of_generic_args.rs | 4 +- compiler/rustc_hir_typeck/src/expr.rs | 2 +- .../rustc_hir_typeck/src/fn_ctxt/checks.rs | 2 +- compiler/rustc_hir_typeck/src/lib.rs | 2 +- .../infer/error_reporting/need_type_info.rs | 2 +- compiler/rustc_metadata/src/rmeta/decoder.rs | 2 +- compiler/rustc_middle/src/mir/syntax.rs | 2 +- .../rustc_resolve/src/build_reduced_graph.rs | 14 ++-- compiler/rustc_resolve/src/check_unused.rs | 46 +++++++++--- compiler/rustc_resolve/src/def_collector.rs | 2 +- compiler/rustc_resolve/src/diagnostics.rs | 4 +- compiler/rustc_resolve/src/ident.rs | 10 +-- compiler/rustc_resolve/src/imports.rs | 75 ++++++++++--------- compiler/rustc_resolve/src/late.rs | 13 ++-- compiler/rustc_resolve/src/lib.rs | 51 ++++++++++--- compiler/rustc_resolve/src/macros.rs | 4 +- compiler/rustc_session/src/session.rs | 2 +- compiler/rustc_span/src/source_map/tests.rs | 2 - library/std/src/os/unix/fs.rs | 1 - src/tools/clippy/src/driver.rs | 1 - .../crates/hir-def/src/body/pretty.rs | 2 +- .../crates/hir-def/src/import_map.rs | 2 +- .../crates/hir-def/src/item_tree/lower.rs | 8 +- .../crates/hir-def/src/item_tree/pretty.rs | 5 +- .../crates/hir-def/src/nameres.rs | 2 +- .../src/diagnostics/match_check/pat_util.rs | 2 +- .../crates/hir-ty/src/mir/eval/shim.rs | 6 +- .../crates/hir-ty/src/mir/lower.rs | 10 +-- .../hir-ty/src/mir/lower/pattern_matching.rs | 2 +- .../src/handlers/generate_delegate_methods.rs | 2 +- .../src/completions/item_list/trait_impl.rs | 2 +- .../crates/ide-db/src/symbol_index.rs | 2 
+- .../rust-analyzer/src/cargo_target_spec.rs | 2 +- .../crates/rust-analyzer/src/cli/lsif.rs | 2 +- .../salsa/salsa-macros/src/query_group.rs | 1 - .../rust-analyzer/crates/salsa/src/debug.rs | 1 - .../rust-analyzer/crates/salsa/src/derived.rs | 1 - .../rust-analyzer/crates/salsa/src/input.rs | 1 - .../crates/salsa/src/interned.rs | 1 - tests/ui/lint/unused/lint-unused-imports.rs | 2 - .../ui/lint/unused/lint-unused-imports.stderr | 26 +------ .../use-redundant-prelude-rust-2015.rs | 19 +++++ .../use-redundant-prelude-rust-2015.stderr | 44 +++++++++++ .../use-redundant-prelude-rust-2021.rs | 11 +++ .../use-redundant-prelude-rust-2021.stderr | 26 +++++++ tests/ui/rust-2018/remove-extern-crate.fixed | 1 + tests/ui/rust-2018/remove-extern-crate.rs | 1 + tests/ui/rust-2018/remove-extern-crate.stderr | 6 +- 52 files changed, 283 insertions(+), 167 deletions(-) create mode 100644 tests/ui/lint/use-redundant/use-redundant-prelude-rust-2015.rs create mode 100644 tests/ui/lint/use-redundant/use-redundant-prelude-rust-2015.stderr create mode 100644 tests/ui/lint/use-redundant/use-redundant-prelude-rust-2021.rs create mode 100644 tests/ui/lint/use-redundant/use-redundant-prelude-rust-2021.stderr diff --git a/compiler/rustc_data_structures/src/owned_slice.rs b/compiler/rustc_data_structures/src/owned_slice.rs index cbb3047d8841..bb6647958606 100644 --- a/compiler/rustc_data_structures/src/owned_slice.rs +++ b/compiler/rustc_data_structures/src/owned_slice.rs @@ -4,7 +4,7 @@ use crate::sync::Lrc; // Use our fake Send/Sync traits when on not parallel compiler, // so that `OwnedSlice` only implements/requires Send/Sync // for parallel compiler builds. -use crate::sync::{Send, Sync}; +use crate::sync; /// An owned slice. /// @@ -33,7 +33,7 @@ pub struct OwnedSlice { // \/ // ⊂(´・◡・⊂ )∘˚˳° (I am the phantom remnant of #97770) #[expect(dead_code)] - owner: Lrc, + owner: Lrc, } /// Makes an [`OwnedSlice`] out of an `owner` and a `slicer` function. 
@@ -60,7 +60,7 @@ pub struct OwnedSlice { /// ``` pub fn slice_owned(owner: O, slicer: F) -> OwnedSlice where - O: Send + Sync + 'static, + O: sync::Send + sync::Sync + 'static, F: FnOnce(&O) -> &[u8], { try_slice_owned(owner, |x| Ok::<_, !>(slicer(x))).into_ok() @@ -71,7 +71,7 @@ where /// See [`slice_owned`] for the infallible version. pub fn try_slice_owned(owner: O, slicer: F) -> Result where - O: Send + Sync + 'static, + O: sync::Send + sync::Sync + 'static, F: FnOnce(&O) -> Result<&[u8], E>, { // We wrap the owner of the bytes in, so it doesn't move. @@ -139,11 +139,11 @@ impl Borrow<[u8]> for OwnedSlice { // Safety: `OwnedSlice` is conceptually `(&'self.1 [u8], Arc)`, which is `Send` #[cfg(parallel_compiler)] -unsafe impl Send for OwnedSlice {} +unsafe impl sync::Send for OwnedSlice {} // Safety: `OwnedSlice` is conceptually `(&'self.1 [u8], Arc)`, which is `Sync` #[cfg(parallel_compiler)] -unsafe impl Sync for OwnedSlice {} +unsafe impl sync::Sync for OwnedSlice {} #[cfg(test)] mod tests; diff --git a/compiler/rustc_hir_analysis/src/check/check.rs b/compiler/rustc_hir_analysis/src/check/check.rs index 1410273e3bc9..0da417c6ea60 100644 --- a/compiler/rustc_hir_analysis/src/check/check.rs +++ b/compiler/rustc_hir_analysis/src/check/check.rs @@ -5,10 +5,9 @@ use super::compare_impl_item::check_type_bounds; use super::compare_impl_item::{compare_impl_method, compare_impl_ty}; use super::*; use rustc_attr as attr; -use rustc_errors::{codes::*, ErrorGuaranteed, MultiSpan}; +use rustc_errors::{codes::*, MultiSpan}; use rustc_hir as hir; use rustc_hir::def::{CtorKind, DefKind}; -use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_hir::Node; use rustc_infer::infer::{RegionVariableOrigin, TyCtxtInferExt}; use rustc_infer::traits::{Obligation, TraitEngineExt as _}; diff --git a/compiler/rustc_hir_analysis/src/structured_errors/missing_cast_for_variadic_arg.rs b/compiler/rustc_hir_analysis/src/structured_errors/missing_cast_for_variadic_arg.rs index 
50b4ef623ac7..363b9ba6996f 100644 --- a/compiler/rustc_hir_analysis/src/structured_errors/missing_cast_for_variadic_arg.rs +++ b/compiler/rustc_hir_analysis/src/structured_errors/missing_cast_for_variadic_arg.rs @@ -1,5 +1,5 @@ use crate::{errors, structured_errors::StructuredDiagnostic}; -use rustc_errors::{codes::*, DiagnosticBuilder, ErrCode}; +use rustc_errors::{codes::*, DiagnosticBuilder}; use rustc_middle::ty::{Ty, TypeVisitableExt}; use rustc_session::Session; use rustc_span::Span; diff --git a/compiler/rustc_hir_analysis/src/structured_errors/sized_unsized_cast.rs b/compiler/rustc_hir_analysis/src/structured_errors/sized_unsized_cast.rs index 54d54a2af93a..052c2807a2e6 100644 --- a/compiler/rustc_hir_analysis/src/structured_errors/sized_unsized_cast.rs +++ b/compiler/rustc_hir_analysis/src/structured_errors/sized_unsized_cast.rs @@ -1,5 +1,5 @@ use crate::{errors, structured_errors::StructuredDiagnostic}; -use rustc_errors::{codes::*, DiagnosticBuilder, ErrCode}; +use rustc_errors::{codes::*, DiagnosticBuilder}; use rustc_middle::ty::{Ty, TypeVisitableExt}; use rustc_session::Session; use rustc_span::Span; diff --git a/compiler/rustc_hir_analysis/src/structured_errors/wrong_number_of_generic_args.rs b/compiler/rustc_hir_analysis/src/structured_errors/wrong_number_of_generic_args.rs index 52ff8b2e45d7..f9d57d402b10 100644 --- a/compiler/rustc_hir_analysis/src/structured_errors/wrong_number_of_generic_args.rs +++ b/compiler/rustc_hir_analysis/src/structured_errors/wrong_number_of_generic_args.rs @@ -1,7 +1,5 @@ use crate::structured_errors::StructuredDiagnostic; -use rustc_errors::{ - codes::*, pluralize, Applicability, Diagnostic, DiagnosticBuilder, ErrCode, MultiSpan, -}; +use rustc_errors::{codes::*, pluralize, Applicability, Diagnostic, DiagnosticBuilder, MultiSpan}; use rustc_hir as hir; use rustc_middle::ty::{self as ty, AssocItems, AssocKind, TyCtxt}; use rustc_session::Session; diff --git a/compiler/rustc_hir_typeck/src/expr.rs 
b/compiler/rustc_hir_typeck/src/expr.rs index ed19d00d6411..a946d59ff2b6 100644 --- a/compiler/rustc_hir_typeck/src/expr.rs +++ b/compiler/rustc_hir_typeck/src/expr.rs @@ -27,7 +27,7 @@ use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_data_structures::unord::UnordMap; use rustc_errors::{ codes::*, pluralize, struct_span_code_err, AddToDiagnostic, Applicability, Diagnostic, - DiagnosticBuilder, ErrCode, ErrorGuaranteed, StashKey, + DiagnosticBuilder, ErrorGuaranteed, StashKey, }; use rustc_hir as hir; use rustc_hir::def::{CtorKind, DefKind, Res}; diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs index dd527666d555..d8d3d45dd40c 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs @@ -13,7 +13,7 @@ use itertools::Itertools; use rustc_ast as ast; use rustc_data_structures::fx::FxIndexSet; use rustc_errors::{ - codes::*, pluralize, Applicability, Diagnostic, ErrCode, ErrorGuaranteed, MultiSpan, StashKey, + codes::*, pluralize, Applicability, Diagnostic, ErrorGuaranteed, MultiSpan, StashKey, }; use rustc_hir as hir; use rustc_hir::def::{CtorOf, DefKind, Res}; diff --git a/compiler/rustc_hir_typeck/src/lib.rs b/compiler/rustc_hir_typeck/src/lib.rs index 629c2f2a971a..70ddd6b2f4cf 100644 --- a/compiler/rustc_hir_typeck/src/lib.rs +++ b/compiler/rustc_hir_typeck/src/lib.rs @@ -53,7 +53,7 @@ use crate::expectation::Expectation; use crate::fn_ctxt::LoweredTy; use crate::gather_locals::GatherLocalsVisitor; use rustc_data_structures::unord::UnordSet; -use rustc_errors::{codes::*, struct_span_code_err, ErrCode, ErrorGuaranteed}; +use rustc_errors::{codes::*, struct_span_code_err, ErrorGuaranteed}; use rustc_hir as hir; use rustc_hir::def::{DefKind, Res}; use rustc_hir::intravisit::Visitor; diff --git a/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs b/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs index 
c637ab31cd21..af722b206262 100644 --- a/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs +++ b/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs @@ -5,7 +5,7 @@ use crate::errors::{ use crate::infer::error_reporting::TypeErrCtxt; use crate::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; use crate::infer::InferCtxt; -use rustc_errors::{codes::*, DiagnosticBuilder, ErrCode, IntoDiagnosticArg}; +use rustc_errors::{codes::*, DiagnosticBuilder, IntoDiagnosticArg}; use rustc_hir as hir; use rustc_hir::def::Res; use rustc_hir::def::{CtorOf, DefKind, Namespace}; diff --git a/compiler/rustc_metadata/src/rmeta/decoder.rs b/compiler/rustc_metadata/src/rmeta/decoder.rs index 2cfbaff35ef2..e7c80639a0d7 100644 --- a/compiler/rustc_metadata/src/rmeta/decoder.rs +++ b/compiler/rustc_metadata/src/rmeta/decoder.rs @@ -13,7 +13,7 @@ use rustc_data_structures::unhash::UnhashMap; use rustc_expand::base::{SyntaxExtension, SyntaxExtensionKind}; use rustc_expand::proc_macro::{AttrProcMacro, BangProcMacro, DeriveProcMacro}; use rustc_hir::def::Res; -use rustc_hir::def_id::{DefIdMap, CRATE_DEF_INDEX, LOCAL_CRATE}; +use rustc_hir::def_id::{CRATE_DEF_INDEX, LOCAL_CRATE}; use rustc_hir::definitions::{DefPath, DefPathData}; use rustc_hir::diagnostic_items::DiagnosticItems; use rustc_index::Idx; diff --git a/compiler/rustc_middle/src/mir/syntax.rs b/compiler/rustc_middle/src/mir/syntax.rs index 40af453f6ce6..5c9857b9c53d 100644 --- a/compiler/rustc_middle/src/mir/syntax.rs +++ b/compiler/rustc_middle/src/mir/syntax.rs @@ -15,7 +15,7 @@ use crate::ty::{Region, UserTypeAnnotationIndex}; use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece}; use rustc_data_structures::packed::Pu128; use rustc_hir::def_id::DefId; -use rustc_hir::{self, CoroutineKind}; +use rustc_hir::CoroutineKind; use rustc_index::IndexVec; use rustc_span::source_map::Spanned; use rustc_target::abi::{FieldIdx, VariantIdx}; diff --git 
a/compiler/rustc_resolve/src/build_reduced_graph.rs b/compiler/rustc_resolve/src/build_reduced_graph.rs index 469d15e42143..1c4aeefcbcfd 100644 --- a/compiler/rustc_resolve/src/build_reduced_graph.rs +++ b/compiler/rustc_resolve/src/build_reduced_graph.rs @@ -8,11 +8,11 @@ use crate::def_collector::collect_definitions; use crate::imports::{ImportData, ImportKind}; use crate::macros::{MacroRulesBinding, MacroRulesScope, MacroRulesScopeRef}; -use crate::Namespace::{self, MacroNS, TypeNS, ValueNS}; +use crate::Namespace::{MacroNS, TypeNS, ValueNS}; use crate::{errors, BindingKey, MacroData, NameBindingData}; use crate::{Determinacy, ExternPreludeEntry, Finalize, Module, ModuleKind, ModuleOrUniformRoot}; -use crate::{NameBinding, NameBindingKind, ParentScope, PathResult, PerNS, ResolutionError}; -use crate::{Resolver, ResolverArenas, Segment, ToNameBinding, VisResolutionError}; +use crate::{NameBinding, NameBindingKind, ParentScope, PathResult, ResolutionError}; +use crate::{Resolver, ResolverArenas, Segment, ToNameBinding, Used, VisResolutionError}; use rustc_ast::visit::{self, AssocCtxt, Visitor}; use rustc_ast::{self as ast, AssocItem, AssocItemKind, MetaItemKind, StmtKind}; @@ -362,7 +362,7 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> { root_span, root_id, vis: Cell::new(Some(vis)), - used: Cell::new(false), + used: Default::default(), }); self.r.indeterminate_imports.push(import); @@ -885,7 +885,7 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> { span: item.span, module_path: Vec::new(), vis: Cell::new(Some(vis)), - used: Cell::new(used), + used: Cell::new(used.then_some(Used::Other)), }); self.r.potentially_unused_imports.push(import); let imported_binding = self.r.import(binding, import); @@ -1090,7 +1090,7 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> { span, module_path: Vec::new(), vis: Cell::new(Some(ty::Visibility::Restricted(CRATE_DEF_ID))), - used: Cell::new(false), + used: Default::default(), }) }; @@ -1261,7 
+1261,7 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> { span, module_path: Vec::new(), vis: Cell::new(Some(vis)), - used: Cell::new(true), + used: Cell::new(Some(Used::Other)), }); let import_binding = self.r.import(binding, import); self.r.define(self.r.graph_root, ident, MacroNS, import_binding); diff --git a/compiler/rustc_resolve/src/check_unused.rs b/compiler/rustc_resolve/src/check_unused.rs index c14788b841d2..37cc50f69438 100644 --- a/compiler/rustc_resolve/src/check_unused.rs +++ b/compiler/rustc_resolve/src/check_unused.rs @@ -27,9 +27,10 @@ use crate::imports::ImportKind; use crate::module_to_string; use crate::Resolver; +use crate::NameBindingKind; use rustc_ast as ast; use rustc_ast::visit::{self, Visitor}; -use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; +use rustc_data_structures::fx::{FxHashMap, FxIndexMap, FxIndexSet}; use rustc_data_structures::unord::UnordSet; use rustc_errors::{pluralize, MultiSpan}; use rustc_hir::def::{DefKind, Res}; @@ -38,14 +39,14 @@ use rustc_session::lint::BuiltinLintDiagnostics; use rustc_span::symbol::{kw, Ident}; use rustc_span::{Span, DUMMY_SP}; -struct UnusedImport<'a> { - use_tree: &'a ast::UseTree, +struct UnusedImport { + use_tree: ast::UseTree, use_tree_id: ast::NodeId, item_span: Span, unused: UnordSet, } -impl<'a> UnusedImport<'a> { +impl UnusedImport { fn add(&mut self, id: ast::NodeId) { self.unused.insert(id); } @@ -54,7 +55,7 @@ impl<'a> UnusedImport<'a> { struct UnusedImportCheckVisitor<'a, 'b, 'tcx> { r: &'a mut Resolver<'b, 'tcx>, /// All the (so far) unused imports, grouped path list - unused_imports: FxIndexMap>, + unused_imports: FxIndexMap, extern_crate_items: Vec, base_use_tree: Option<&'a ast::UseTree>, base_id: ast::NodeId, @@ -100,9 +101,9 @@ impl<'a, 'b, 'tcx> UnusedImportCheckVisitor<'a, 'b, 'tcx> { } } - fn unused_import(&mut self, id: ast::NodeId) -> &mut UnusedImport<'a> { + fn unused_import(&mut self, id: ast::NodeId) -> &mut UnusedImport { let use_tree_id = 
self.base_id; - let use_tree = self.base_use_tree.unwrap(); + let use_tree = self.base_use_tree.unwrap().clone(); let item_span = self.item_span; self.unused_imports.entry(id).or_insert_with(|| UnusedImport { @@ -197,7 +198,7 @@ enum UnusedSpanResult { } fn calc_unused_spans( - unused_import: &UnusedImport<'_>, + unused_import: &UnusedImport, use_tree: &ast::UseTree, use_tree_id: ast::NodeId, ) -> UnusedSpanResult { @@ -287,7 +288,7 @@ impl Resolver<'_, '_> { for import in self.potentially_unused_imports.iter() { match import.kind { - _ if import.used.get() + _ if import.used.get().is_some() || import.expect_vis().is_public() || import.span.is_dummy() => { @@ -336,7 +337,7 @@ impl Resolver<'_, '_> { for unused in visitor.unused_imports.values() { let mut fixes = Vec::new(); - let spans = match calc_unused_spans(unused, unused.use_tree, unused.use_tree_id) { + let spans = match calc_unused_spans(unused, &unused.use_tree, unused.use_tree_id) { UnusedSpanResult::Used => continue, UnusedSpanResult::FlatUnused(span, remove) => { fixes.push((remove, String::new())); @@ -483,5 +484,30 @@ impl Resolver<'_, '_> { BuiltinLintDiagnostics::ExternCrateNotIdiomatic { vis_span, ident_span }, ); } + + let unused_imports = visitor.unused_imports; + let mut check_redundant_imports = FxIndexSet::default(); + for module in self.arenas.local_modules().iter() { + for (_key, resolution) in self.resolutions(*module).borrow().iter() { + let resolution = resolution.borrow(); + + if let Some(binding) = resolution.binding + && let NameBindingKind::Import { import, .. } = binding.kind + && let ImportKind::Single { id, .. 
} = import.kind + { + if let Some(unused_import) = unused_imports.get(&import.root_id) + && unused_import.unused.contains(&id) + { + continue; + } + + check_redundant_imports.insert(import); + } + } + } + + for import in check_redundant_imports { + self.check_for_redundant_imports(import); + } } } diff --git a/compiler/rustc_resolve/src/def_collector.rs b/compiler/rustc_resolve/src/def_collector.rs index 45aea585f97a..12bf462a6fdf 100644 --- a/compiler/rustc_resolve/src/def_collector.rs +++ b/compiler/rustc_resolve/src/def_collector.rs @@ -1,5 +1,5 @@ use crate::{ImplTraitContext, Resolver}; -use rustc_ast::visit::{self, FnKind}; +use rustc_ast::visit::FnKind; use rustc_ast::*; use rustc_expand::expand::AstFragment; use rustc_hir::def::{CtorKind, CtorOf, DefKind}; diff --git a/compiler/rustc_resolve/src/diagnostics.rs b/compiler/rustc_resolve/src/diagnostics.rs index 737481c78db6..4b978fefa107 100644 --- a/compiler/rustc_resolve/src/diagnostics.rs +++ b/compiler/rustc_resolve/src/diagnostics.rs @@ -33,8 +33,8 @@ use crate::errors::{AddedMacroUse, ChangeImportBinding, ChangeImportBindingSugge use crate::errors::{ConsiderAddingADerive, ExplicitUnsafeTraits, MaybeMissingMacroRulesName}; use crate::imports::{Import, ImportKind}; use crate::late::{PatternSource, Rib}; -use crate::path_names_to_string; use crate::{errors as errs, BindingKey}; +use crate::{path_names_to_string, Used}; use crate::{AmbiguityError, AmbiguityErrorMisc, AmbiguityKind, BindingError, Finalize}; use crate::{HasGenericParams, MacroRulesScope, Module, ModuleKind, ModuleOrUniformRoot}; use crate::{LexicalScopeBinding, NameBinding, NameBindingKind, PrivacyError, VisResolutionError}; @@ -1503,7 +1503,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { ); // Silence the 'unused import' warning we might get, // since this diagnostic already covers that import. 
- self.record_use(ident, binding, false); + self.record_use(ident, binding, Used::Other); return; } } diff --git a/compiler/rustc_resolve/src/ident.rs b/compiler/rustc_resolve/src/ident.rs index e47f2bdd4d2b..4583f991cabd 100644 --- a/compiler/rustc_resolve/src/ident.rs +++ b/compiler/rustc_resolve/src/ident.rs @@ -12,8 +12,8 @@ use rustc_span::Span; use crate::errors::{ParamKindInEnumDiscriminant, ParamKindInNonTrivialAnonConst}; use crate::late::{ConstantHasGenerics, NoConstantGenericsReason, PathSource, Rib, RibKind}; use crate::macros::{sub_namespace_match, MacroRulesScope}; -use crate::BindingKey; use crate::{errors, AmbiguityError, AmbiguityErrorMisc, AmbiguityKind, Determinacy, Finalize}; +use crate::{BindingKey, Used}; use crate::{ImportKind, LexicalScopeBinding, Module, ModuleKind, ModuleOrUniformRoot}; use crate::{NameBinding, NameBindingKind, ParentScope, PathResult, PrivacyError, Res}; use crate::{ResolutionError, Resolver, Scope, ScopeSet, Segment, ToNameBinding, Weak}; @@ -339,7 +339,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { ident, ns, parent_scope, - finalize, + finalize.map(|finalize| Finalize { used: Used::Scope, ..finalize }), ignore_binding, ); if let Ok(binding) = item { @@ -506,7 +506,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { ns, adjusted_parent_scope, !matches!(scope_set, ScopeSet::Late(..)), - finalize, + finalize.map(|finalize| Finalize { used: Used::Scope, ..finalize }), ignore_binding, ); match binding { @@ -857,7 +857,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { .into_iter() .find_map(|binding| if binding == ignore_binding { None } else { binding }); - if let Some(Finalize { path_span, report_private, .. }) = finalize { + if let Some(Finalize { path_span, report_private, used, .. 
}) = finalize { let Some(binding) = binding else { return Err((Determined, Weak::No)); }; @@ -901,7 +901,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } } - self.record_use(ident, binding, restricted_shadowing); + self.record_use(ident, binding, used); return Ok(binding); } diff --git a/compiler/rustc_resolve/src/imports.rs b/compiler/rustc_resolve/src/imports.rs index dcd01b9b3341..9bfca0f17988 100644 --- a/compiler/rustc_resolve/src/imports.rs +++ b/compiler/rustc_resolve/src/imports.rs @@ -10,9 +10,9 @@ use crate::errors::{ use crate::Determinacy::{self, *}; use crate::Namespace::*; use crate::{module_to_string, names_to_string, ImportSuggestion}; -use crate::{AmbiguityKind, BindingKey, ModuleKind, ResolutionError, Resolver, Segment}; +use crate::{AmbiguityKind, BindingKey, ResolutionError, Resolver, Segment}; use crate::{Finalize, Module, ModuleOrUniformRoot, ParentScope, PerNS, ScopeSet}; -use crate::{NameBinding, NameBindingData, NameBindingKind, PathResult}; +use crate::{NameBinding, NameBindingData, NameBindingKind, PathResult, Used}; use rustc_ast::NodeId; use rustc_data_structures::fx::FxHashSet; @@ -173,7 +173,7 @@ pub(crate) struct ImportData<'a> { /// The resolution of `module_path`. 
pub imported_module: Cell>>, pub vis: Cell>, - pub used: Cell, + pub used: Cell>, } /// All imports are unique and allocated on a same arena, @@ -286,7 +286,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } self.arenas.alloc_name_binding(NameBindingData { - kind: NameBindingKind::Import { binding, import, used: Cell::new(false) }, + kind: NameBindingKind::Import { binding, import }, ambiguity: None, warn_ambiguity: false, span: import.span, @@ -485,9 +485,9 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { resolution.single_imports.remove(&import); }) }); - self.record_use(target, dummy_binding, false); + self.record_use(target, dummy_binding, Used::Other); } else if import.imported_module.get().is_none() { - import.used.set(true); + import.used.set(Some(Used::Other)); if let Some(id) = import.id() { self.used_imports.insert(id); } @@ -1056,11 +1056,12 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { && initial_binding.is_extern_crate() && !initial_binding.is_import() { - this.record_use( - ident, - target_binding, - import.module_path.is_empty(), - ); + let used = if import.module_path.is_empty() { + Used::Scope + } else { + Used::Other + }; + this.record_use(ident, target_binding, used); } } initial_binding.res() @@ -1299,22 +1300,23 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } }); - self.check_for_redundant_imports(ident, import, source_bindings, target_bindings, target); - debug!("(resolving single import) successfully resolved import"); None } - fn check_for_redundant_imports( - &mut self, - ident: Ident, - import: Import<'a>, - source_bindings: &PerNS, Determinacy>>>, - target_bindings: &PerNS>>>, - target: Ident, - ) { + pub(crate) fn check_for_redundant_imports(&mut self, import: Import<'a>) { // This function is only called for single imports. - let ImportKind::Single { id, .. } = import.kind else { unreachable!() }; + let ImportKind::Single { + source, target, ref source_bindings, ref target_bindings, id, .. 
+ } = import.kind + else { + unreachable!() + }; + + // Skip if the import is of the form `use source as target` and source != target. + if source != target { + return; + } // Skip if the import was produced by a macro. if import.parent_scope.expansion != LocalExpnId::ROOT { @@ -1323,16 +1325,20 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { // Skip if we are inside a named module (in contrast to an anonymous // module defined by a block). - if let ModuleKind::Def(..) = import.parent_scope.module.kind { + // Skip if the import is public or was used through non scope-based resolution, + // e.g. through a module-relative path. + if import.used.get() == Some(Used::Other) + || self.effective_visibilities.is_exported(self.local_def_id(id)) + { return; } - let mut is_redundant = PerNS { value_ns: None, type_ns: None, macro_ns: None }; + let mut is_redundant = true; let mut redundant_span = PerNS { value_ns: None, type_ns: None, macro_ns: None }; self.per_ns(|this, ns| { - if let Ok(binding) = source_bindings[ns].get() { + if is_redundant && let Ok(binding) = source_bindings[ns].get() { if binding.res() == Res::Err { return; } @@ -1346,18 +1352,19 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { target_bindings[ns].get(), ) { Ok(other_binding) => { - is_redundant[ns] = Some( - binding.res() == other_binding.res() && !other_binding.is_ambiguity(), - ); - redundant_span[ns] = Some((other_binding.span, other_binding.is_import())); + is_redundant = + binding.res() == other_binding.res() && !other_binding.is_ambiguity(); + if is_redundant { + redundant_span[ns] = + Some((other_binding.span, other_binding.is_import())); + } } - Err(_) => is_redundant[ns] = Some(false), + Err(_) => is_redundant = false, } } }); - if !is_redundant.is_empty() && is_redundant.present_items().all(|is_redundant| is_redundant) - { + if is_redundant && !redundant_span.is_empty() { let mut redundant_spans: Vec<_> = redundant_span.present_items().collect(); redundant_spans.sort(); redundant_spans.dedup(); @@ -1365,8 
+1372,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { UNUSED_IMPORTS, id, import.span, - format!("the item `{ident}` is imported redundantly"), - BuiltinLintDiagnostics::RedundantImport(redundant_spans, ident), + format!("the item `{source}` is imported redundantly"), + BuiltinLintDiagnostics::RedundantImport(redundant_spans, source), ); } } diff --git a/compiler/rustc_resolve/src/late.rs b/compiler/rustc_resolve/src/late.rs index 3ea4df1d2a43..1cf3fecc2898 100644 --- a/compiler/rustc_resolve/src/late.rs +++ b/compiler/rustc_resolve/src/late.rs @@ -7,23 +7,22 @@ //! `build_reduced_graph.rs`, `macros.rs` and `imports.rs`. use crate::errors::ImportsCannotReferTo; -use crate::BindingKey; use crate::{path_names_to_string, rustdoc, BindingError, Finalize, LexicalScopeBinding}; +use crate::{BindingKey, Used}; use crate::{Module, ModuleOrUniformRoot, NameBinding, ParentScope, PathResult}; use crate::{ResolutionError, Resolver, Segment, UseError}; use rustc_ast::ptr::P; -use rustc_ast::visit::{self, AssocCtxt, BoundKind, FnCtxt, FnKind, Visitor}; +use rustc_ast::visit::{AssocCtxt, BoundKind, FnCtxt, FnKind, Visitor}; use rustc_ast::*; use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap}; use rustc_errors::{ - codes::*, struct_span_code_err, Applicability, DiagnosticArgValue, ErrCode, IntoDiagnosticArg, - StashKey, + codes::*, struct_span_code_err, Applicability, DiagnosticArgValue, IntoDiagnosticArg, StashKey, }; use rustc_hir::def::Namespace::{self, *}; use rustc_hir::def::{self, CtorKind, DefKind, LifetimeRes, NonMacroAttrKind, PartialRes, PerNS}; use rustc_hir::def_id::{DefId, LocalDefId, CRATE_DEF_ID, LOCAL_CRATE}; -use rustc_hir::{BindingAnnotation, PrimTy, TraitCandidate}; +use rustc_hir::{PrimTy, TraitCandidate}; use rustc_metadata::creader::CStore; use rustc_middle::middle::resolve_bound_vars::Set1; use rustc_middle::{bug, span_bug}; @@ -3623,7 +3622,7 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> { // whether they can be 
shadowed by fresh bindings or not, so force an error. // issues/33118#issuecomment-233962221 (see below) still applies here, // but we have to ignore it for backward compatibility. - self.r.record_use(ident, binding, false); + self.r.record_use(ident, binding, Used::Other); return None; } LexicalScopeBinding::Item(binding) => (binding.res(), Some(binding)), @@ -3638,7 +3637,7 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> { ) if is_syntactic_ambiguity => { // Disambiguate in favor of a unit struct/variant or constant pattern. if let Some(binding) = binding { - self.r.record_use(ident, binding, false); + self.r.record_use(ident, binding, Used::Other); } Some(res) } diff --git a/compiler/rustc_resolve/src/lib.rs b/compiler/rustc_resolve/src/lib.rs index bf811c7a4bb8..1c625f49e3e9 100644 --- a/compiler/rustc_resolve/src/lib.rs +++ b/compiler/rustc_resolve/src/lib.rs @@ -175,6 +175,23 @@ enum ImplTraitContext { Universal, } +/// Used for tracking import use types which will be used for redundant import checking. +/// ### Used::Scope Example +/// ```rust,compile_fail +/// #![deny(unused_imports)] +/// use std::mem::drop; +/// fn main() { +/// let s = Box::new(32); +/// drop(s); +/// } +/// ``` +/// Used::Other is for other situations like module-relative uses. 
+#[derive(Clone, Copy, PartialEq, PartialOrd, Debug)] +enum Used { + Scope, + Other, +} + #[derive(Debug)] struct BindingError { name: Symbol, @@ -695,7 +712,7 @@ impl<'a> ToNameBinding<'a> for NameBinding<'a> { enum NameBindingKind<'a> { Res(Res), Module(Module<'a>), - Import { binding: NameBinding<'a>, import: Import<'a>, used: Cell }, + Import { binding: NameBinding<'a>, import: Import<'a> }, } impl<'a> NameBindingKind<'a> { @@ -1784,15 +1801,15 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { false } - fn record_use(&mut self, ident: Ident, used_binding: NameBinding<'a>, is_lexical_scope: bool) { - self.record_use_inner(ident, used_binding, is_lexical_scope, used_binding.warn_ambiguity); + fn record_use(&mut self, ident: Ident, used_binding: NameBinding<'a>, used: Used) { + self.record_use_inner(ident, used_binding, used, used_binding.warn_ambiguity); } fn record_use_inner( &mut self, ident: Ident, used_binding: NameBinding<'a>, - is_lexical_scope: bool, + used: Used, warn_ambiguity: bool, ) { if let Some((b2, kind)) = used_binding.ambiguity { @@ -1810,27 +1827,35 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { self.ambiguity_errors.push(ambiguity_error); } } - if let NameBindingKind::Import { import, binding, ref used } = used_binding.kind { + if let NameBindingKind::Import { import, binding } = used_binding.kind { if let ImportKind::MacroUse { warn_private: true } = import.kind { let msg = format!("macro `{ident}` is private"); self.lint_buffer().buffer_lint(PRIVATE_MACRO_USE, import.root_id, ident.span, msg); } // Avoid marking `extern crate` items that refer to a name from extern prelude, // but not introduce it, as used if they are accessed from lexical scope. 
- if is_lexical_scope { + if used == Used::Scope { if let Some(entry) = self.extern_prelude.get(&ident.normalize_to_macros_2_0()) { if !entry.introduced_by_item && entry.binding == Some(used_binding) { return; } } } - used.set(true); - import.used.set(true); + let old_used = import.used.get(); + let new_used = Some(used); + if new_used > old_used { + import.used.set(new_used); + } if let Some(id) = import.id() { self.used_imports.insert(id); } self.add_to_glob_map(import, ident); - self.record_use_inner(ident, binding, false, warn_ambiguity || binding.warn_ambiguity); + self.record_use_inner( + ident, + binding, + Used::Other, + warn_ambiguity || binding.warn_ambiguity, + ); } } @@ -1985,7 +2010,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { if !entry.is_import() { self.crate_loader(|c| c.process_path_extern(ident.name, ident.span)); } else if entry.introduced_by_item { - self.record_use(ident, binding, false); + self.record_use(ident, binding, Used::Other); } } binding @@ -2115,7 +2140,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let is_import = name_binding.is_import(); let span = name_binding.span; if let Res::Def(DefKind::Fn, _) = res { - self.record_use(ident, name_binding, false); + self.record_use(ident, name_binding, Used::Other); } self.main_def = Some(MainDefinition { res, is_import, span }); } @@ -2176,6 +2201,8 @@ struct Finalize { /// Whether to report privacy errors or silently return "no resolution" for them, /// similarly to speculative resolution. report_private: bool, + /// Tracks whether an item is used in scope or used relatively to a module. 
+ used: Used, } impl Finalize { @@ -2184,7 +2211,7 @@ impl Finalize { } fn with_root_span(node_id: NodeId, path_span: Span, root_span: Span) -> Finalize { - Finalize { node_id, path_span, root_span, report_private: true } + Finalize { node_id, path_span, root_span, report_private: true, used: Used::Other } } } diff --git a/compiler/rustc_resolve/src/macros.rs b/compiler/rustc_resolve/src/macros.rs index 170cc1268c39..3fccdec8aa0b 100644 --- a/compiler/rustc_resolve/src/macros.rs +++ b/compiler/rustc_resolve/src/macros.rs @@ -5,7 +5,7 @@ use crate::errors::CannotDetermineMacroResolution; use crate::errors::{self, AddAsNonDerive, CannotFindIdentInThisScope}; use crate::errors::{MacroExpectedFound, RemoveSurroundingDerive}; use crate::Namespace::*; -use crate::{BuiltinMacroState, Determinacy, MacroData}; +use crate::{BuiltinMacroState, Determinacy, MacroData, Used}; use crate::{DeriveData, Finalize, ParentScope, ResolutionError, Resolver, ScopeSet}; use crate::{ModuleKind, ModuleOrUniformRoot, NameBinding, PathResult, Segment, ToNameBinding}; use rustc_ast::expand::StrippedCfgItem; @@ -794,7 +794,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { ) { Ok(binding) => { let initial_res = initial_binding.map(|initial_binding| { - self.record_use(ident, initial_binding, false); + self.record_use(ident, initial_binding, Used::Other); initial_binding.res() }); let res = binding.res(); diff --git a/compiler/rustc_session/src/session.rs b/compiler/rustc_session/src/session.rs index 25c20be8e627..9d1133c487f6 100644 --- a/compiler/rustc_session/src/session.rs +++ b/compiler/rustc_session/src/session.rs @@ -22,7 +22,7 @@ use rustc_errors::emitter::{DynEmitter, HumanEmitter, HumanReadableErrorType}; use rustc_errors::json::JsonEmitter; use rustc_errors::registry::Registry; use rustc_errors::{ - codes::*, fallback_fluent_bundle, DiagCtxt, DiagnosticBuilder, DiagnosticMessage, ErrCode, + codes::*, fallback_fluent_bundle, DiagCtxt, DiagnosticBuilder, DiagnosticMessage, ErrorGuaranteed, 
FatalAbort, FluentBundle, IntoDiagnostic, LazyFallbackBundle, TerminalUrl, }; use rustc_macros::HashStable_Generic; diff --git a/compiler/rustc_span/src/source_map/tests.rs b/compiler/rustc_span/src/source_map/tests.rs index 51f8aa04e8a7..81a9e4706883 100644 --- a/compiler/rustc_span/src/source_map/tests.rs +++ b/compiler/rustc_span/src/source_map/tests.rs @@ -1,7 +1,5 @@ use super::*; -use rustc_data_structures::sync::FreezeLock; - fn init_source_map() -> SourceMap { let sm = SourceMap::new(FilePathMapping::empty()); sm.new_source_file(PathBuf::from("blork.rs").into(), "first line.\nsecond line".to_string()); diff --git a/library/std/src/os/unix/fs.rs b/library/std/src/os/unix/fs.rs index e995d5133f8a..058e9b90cc7a 100644 --- a/library/std/src/os/unix/fs.rs +++ b/library/std/src/os/unix/fs.rs @@ -397,7 +397,6 @@ pub trait OpenOptionsExt { /// /// ```no_run /// # #![feature(rustc_private)] - /// use libc; /// use std::fs::OpenOptions; /// use std::os::unix::fs::OpenOptionsExt; /// diff --git a/src/tools/clippy/src/driver.rs b/src/tools/clippy/src/driver.rs index bdf1cf0a2240..85867d1511d9 100644 --- a/src/tools/clippy/src/driver.rs +++ b/src/tools/clippy/src/driver.rs @@ -28,7 +28,6 @@ use std::fs::read_to_string; use std::ops::Deref; use std::path::Path; use std::process::exit; -use std::string::ToString; use anstream::println; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs index 4afb40865170..6afb46a2ddd8 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs @@ -6,7 +6,7 @@ use itertools::Itertools; use crate::{ hir::{ - Array, BindingAnnotation, BindingId, CaptureBy, ClosureKind, Literal, LiteralOrConst, + Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, LiteralOrConst, Movability, Statement, }, pretty::{print_generic_args, print_path, print_type_ref}, diff --git 
a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs index 98982c7db840..38cfcf0f2811 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs @@ -3,7 +3,7 @@ use std::{fmt, hash::BuildHasherDefault}; use base_db::CrateId; -use fst::{self, raw::IndexedValue, Automaton, Streamer}; +use fst::{raw::IndexedValue, Automaton, Streamer}; use hir_expand::name::Name; use indexmap::IndexMap; use itertools::Itertools; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs index e0aa3ae61235..b51cb5de0f49 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs @@ -2,12 +2,12 @@ use std::collections::hash_map::Entry; -use hir_expand::{ast_id_map::AstIdMap, span_map::SpanMapRef, HirFileId}; -use syntax::ast::{self, HasModuleItem, HasTypeBounds, IsString}; +use hir_expand::{ast_id_map::AstIdMap, span_map::SpanMapRef}; +use syntax::ast::{HasModuleItem, HasTypeBounds, IsString}; use crate::{ - generics::{GenericParams, GenericParamsCollector, TypeParamData, TypeParamProvenance}, - type_ref::{LifetimeRef, TraitBoundModifier, TraitRef}, + generics::{GenericParamsCollector, TypeParamData, TypeParamProvenance}, + type_ref::{LifetimeRef, TraitBoundModifier}, LocalLifetimeParamId, LocalTypeOrConstParamId, }; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs index 0086b7180b2b..dae876f7ecbc 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs @@ -1,13 +1,12 @@ //! `ItemTree` debug printer. 
-use std::fmt::{self, Write}; +use std::fmt::Write; use span::ErasedFileAstId; use crate::{ - generics::{TypeOrConstParamData, WherePredicate, WherePredicateTypeTarget}, + generics::{WherePredicate, WherePredicateTypeTarget}, pretty::{print_path, print_type_bounds, print_type_ref}, - visibility::RawVisibility, }; use super::*; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs index 2a9390e79780..a2eca066438a 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs @@ -57,7 +57,7 @@ pub mod proc_macro; #[cfg(test)] mod tests; -use std::{cmp::Ord, ops::Deref}; +use std::ops::Deref; use base_db::{CrateId, Edition, FileId}; use hir_expand::{ diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_util.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_util.rs index 217454499ef6..c6a26cdd1d0f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_util.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_util.rs @@ -2,7 +2,7 @@ //! //! 
Originates from `rustc_hir::pat_util` -use std::iter::{Enumerate, ExactSizeIterator}; +use std::iter::Enumerate; pub(crate) struct EnumerateAndAdjust { enumerate: Enumerate, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs index d68803fe2801..fbe6a982d6f8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs @@ -4,11 +4,7 @@ use std::cmp; use chalk_ir::TyKind; -use hir_def::{ - builtin_type::{BuiltinInt, BuiltinUint}, - resolver::HasResolver, -}; -use hir_expand::mod_path::ModPath; +use hir_def::builtin_type::{BuiltinInt, BuiltinUint}; use super::*; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs index 1572a6d497c5..f0cb0afd5ac6 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs @@ -1,6 +1,6 @@ //! 
This module generates a polymorphic MIR from a hir body -use std::{fmt::Write, iter, mem}; +use std::{fmt::Write, mem}; use base_db::{salsa::Cycle, FileId}; use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind}; @@ -14,23 +14,19 @@ use hir_def::{ lang_item::{LangItem, LangItemTarget}, path::Path, resolver::{resolver_for_expr, HasResolver, ResolveValueResult, ValueNs}, - AdtId, DefWithBodyId, EnumVariantId, GeneralConstId, HasModule, ItemContainerId, LocalFieldId, + AdtId, EnumVariantId, GeneralConstId, HasModule, ItemContainerId, LocalFieldId, Lookup, TraitId, TupleId, TypeOrConstParamId, }; use hir_expand::name::Name; -use la_arena::ArenaMap; -use rustc_hash::FxHashMap; use syntax::TextRange; use triomphe::Arc; use crate::{ consteval::ConstEvalError, - db::{HirDatabase, InternedClosure}, - display::HirDisplay, + db::InternedClosure, infer::{CaptureKind, CapturedItem, TypeMismatch}, inhabitedness::is_ty_uninhabited_from, layout::LayoutError, - mapping::ToChalk, static_lifetime, traits::FnTrait, utils::{generics, ClosureSubst}, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs index 8202bac532f7..02b1494062fe 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs @@ -1,6 +1,6 @@ //! 
MIR lowering for patterns -use hir_def::{hir::LiteralOrConst, resolver::HasResolver, AssocItemId}; +use hir_def::AssocItemId; use crate::BindingMode; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs index 4f2df5633c3c..38f40b8d58b4 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs @@ -1,4 +1,4 @@ -use hir::{self, HasCrate, HasVisibility}; +use hir::{HasCrate, HasVisibility}; use ide_db::{path_transform::PathTransform, FxHashSet}; use syntax::{ ast::{ diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs index 3c4b89ca742e..7394d63be586 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs @@ -31,7 +31,7 @@ //! } //! 
``` -use hir::{self, HasAttrs}; +use hir::HasAttrs; use ide_db::{ documentation::HasDocs, path_transform::PathTransform, syntax_helpers::insert_whitespace_into_node, traits::get_missing_assoc_items, SymbolKind, diff --git a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs index 92c09089e1f1..722161282fe6 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs @@ -31,7 +31,7 @@ use base_db::{ salsa::{self, ParallelDatabase}, SourceDatabaseExt, SourceRootId, Upcast, }; -use fst::{self, raw::IndexedValue, Automaton, Streamer}; +use fst::{raw::IndexedValue, Automaton, Streamer}; use hir::{ db::HirDatabase, import_map::{AssocSearchMode, SearchMode}, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs index 0190ca3cab85..879e259d0e4f 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs @@ -4,7 +4,7 @@ use std::mem; use cfg::{CfgAtom, CfgExpr}; use ide::{Cancellable, CrateId, FileId, RunnableKind, TestId}; -use project_model::{self, CargoFeatures, ManifestPath, TargetKind}; +use project_model::{CargoFeatures, ManifestPath, TargetKind}; use rustc_hash::FxHashSet; use vfs::AbsPathBuf; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs index 1424a775777f..5e810463db6c 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs @@ -13,7 +13,7 @@ use ide_db::{ LineIndexDatabase, }; use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice}; -use lsp_types::{self, lsif}; +use lsp_types::lsif; use project_model::{CargoConfig, ProjectManifest, 
ProjectWorkspace, RustLibSource}; use rustc_hash::FxHashMap; use vfs::{AbsPathBuf, Vfs}; diff --git a/src/tools/rust-analyzer/crates/salsa/salsa-macros/src/query_group.rs b/src/tools/rust-analyzer/crates/salsa/salsa-macros/src/query_group.rs index e535d7ed0438..5d1678ef1200 100644 --- a/src/tools/rust-analyzer/crates/salsa/salsa-macros/src/query_group.rs +++ b/src/tools/rust-analyzer/crates/salsa/salsa-macros/src/query_group.rs @@ -1,5 +1,4 @@ //! -use std::{convert::TryFrom, iter::FromIterator}; use crate::parenthesized::Parenthesized; use heck::ToUpperCamelCase; diff --git a/src/tools/rust-analyzer/crates/salsa/src/debug.rs b/src/tools/rust-analyzer/crates/salsa/src/debug.rs index 0925ddb3d85b..5f113541f04c 100644 --- a/src/tools/rust-analyzer/crates/salsa/src/debug.rs +++ b/src/tools/rust-analyzer/crates/salsa/src/debug.rs @@ -5,7 +5,6 @@ use crate::durability::Durability; use crate::plumbing::QueryStorageOps; use crate::Query; use crate::QueryTable; -use std::iter::FromIterator; /// Additional methods on queries that can be used to "peek into" /// their current state. 
These methods are meant for debugging and diff --git a/src/tools/rust-analyzer/crates/salsa/src/derived.rs b/src/tools/rust-analyzer/crates/salsa/src/derived.rs index c381e66e087b..d63167100581 100644 --- a/src/tools/rust-analyzer/crates/salsa/src/derived.rs +++ b/src/tools/rust-analyzer/crates/salsa/src/derived.rs @@ -13,7 +13,6 @@ use crate::Runtime; use crate::{Database, DatabaseKeyIndex, QueryDb, Revision}; use parking_lot::RwLock; use std::borrow::Borrow; -use std::convert::TryFrom; use std::hash::Hash; use std::marker::PhantomData; use triomphe::Arc; diff --git a/src/tools/rust-analyzer/crates/salsa/src/input.rs b/src/tools/rust-analyzer/crates/salsa/src/input.rs index 4e8fca6149b7..c2539570e0f9 100644 --- a/src/tools/rust-analyzer/crates/salsa/src/input.rs +++ b/src/tools/rust-analyzer/crates/salsa/src/input.rs @@ -14,7 +14,6 @@ use crate::Runtime; use crate::{DatabaseKeyIndex, QueryDb}; use indexmap::map::Entry; use parking_lot::RwLock; -use std::convert::TryFrom; use std::iter; use tracing::debug; diff --git a/src/tools/rust-analyzer/crates/salsa/src/interned.rs b/src/tools/rust-analyzer/crates/salsa/src/interned.rs index 731839e9598c..822219f51859 100644 --- a/src/tools/rust-analyzer/crates/salsa/src/interned.rs +++ b/src/tools/rust-analyzer/crates/salsa/src/interned.rs @@ -13,7 +13,6 @@ use crate::{Database, DatabaseKeyIndex, QueryDb}; use parking_lot::RwLock; use rustc_hash::FxHashMap; use std::collections::hash_map::Entry; -use std::convert::From; use std::fmt::Debug; use std::hash::Hash; use triomphe::Arc; diff --git a/tests/ui/lint/unused/lint-unused-imports.rs b/tests/ui/lint/unused/lint-unused-imports.rs index 4fa6511c97e1..88f2baa5da9b 100644 --- a/tests/ui/lint/unused/lint-unused-imports.rs +++ b/tests/ui/lint/unused/lint-unused-imports.rs @@ -66,7 +66,6 @@ pub mod bar { fn g() { use self::g; //~ ERROR unused import: `self::g` - //~^ ERROR the item `g` is imported redundantly fn f() { self::g(); } @@ -76,7 +75,6 @@ fn g() { 
#[allow(unused_variables)] fn h() { use test2::foo; //~ ERROR unused import: `test2::foo` - //~^ ERROR the item `foo` is imported redundantly let foo = 0; } diff --git a/tests/ui/lint/unused/lint-unused-imports.stderr b/tests/ui/lint/unused/lint-unused-imports.stderr index 0574ca4569fb..07684a84a64f 100644 --- a/tests/ui/lint/unused/lint-unused-imports.stderr +++ b/tests/ui/lint/unused/lint-unused-imports.stderr @@ -34,36 +34,14 @@ error: unused import: `foo::Square` LL | use foo::Square; | ^^^^^^^^^^^ -error: the item `g` is imported redundantly - --> $DIR/lint-unused-imports.rs:68:9 - | -LL | / fn g() { -LL | | use self::g; - | | ^^^^^^^ -LL | | -LL | | fn f() { -LL | | self::g(); -LL | | } -LL | | } - | |_- the item `g` is already defined here - error: unused import: `self::g` --> $DIR/lint-unused-imports.rs:68:9 | LL | use self::g; | ^^^^^^^ -error: the item `foo` is imported redundantly - --> $DIR/lint-unused-imports.rs:78:9 - | -LL | use test2::{foo, bar}; - | --- the item `foo` is already imported here -... 
-LL | use test2::foo; - | ^^^^^^^^^^ - error: unused import: `test2::foo` - --> $DIR/lint-unused-imports.rs:78:9 + --> $DIR/lint-unused-imports.rs:77:9 | LL | use test2::foo; | ^^^^^^^^^^ @@ -74,5 +52,5 @@ error: unused import: `test::B2` LL | use test::B2; | ^^^^^^^^ -error: aborting due to 10 previous errors +error: aborting due to 8 previous errors diff --git a/tests/ui/lint/use-redundant/use-redundant-prelude-rust-2015.rs b/tests/ui/lint/use-redundant/use-redundant-prelude-rust-2015.rs new file mode 100644 index 000000000000..ae5118b2729b --- /dev/null +++ b/tests/ui/lint/use-redundant/use-redundant-prelude-rust-2015.rs @@ -0,0 +1,19 @@ +//@ check-pass +#![warn(unused_imports)] + + +use std::option::Option::Some;//~ WARNING the item `Some` is imported redundantly +use std::option::Option::None; //~ WARNING the item `None` is imported redundantly + +use std::result::Result::Ok;//~ WARNING the item `Ok` is imported redundantly +use std::result::Result::Err;//~ WARNING the item `Err` is imported redundantly +use std::convert::{TryFrom, TryInto}; + +fn main() { + let _a: Option = Some(1); + let _b: Option = None; + let _c: Result = Ok(1); + let _d: Result = Err("error"); + let _e: Result = 8u8.try_into(); + let _f: Result = i32::try_from(8u8); +} diff --git a/tests/ui/lint/use-redundant/use-redundant-prelude-rust-2015.stderr b/tests/ui/lint/use-redundant/use-redundant-prelude-rust-2015.stderr new file mode 100644 index 000000000000..1b09df911eb0 --- /dev/null +++ b/tests/ui/lint/use-redundant/use-redundant-prelude-rust-2015.stderr @@ -0,0 +1,44 @@ +warning: the item `Some` is imported redundantly + --> $DIR/use-redundant-prelude-rust-2015.rs:5:5 + | +LL | use std::option::Option::Some; + | ^^^^^^^^^^^^^^^^^^^^^^^^^ + --> $SRC_DIR/std/src/prelude/mod.rs:LL:COL + | + = note: the item `Some` is already defined here + | +note: the lint level is defined here + --> $DIR/use-redundant-prelude-rust-2015.rs:2:9 + | +LL | #![warn(unused_imports)] + | ^^^^^^^^^^^^^^ + 
+warning: the item `None` is imported redundantly + --> $DIR/use-redundant-prelude-rust-2015.rs:6:5 + | +LL | use std::option::Option::None; + | ^^^^^^^^^^^^^^^^^^^^^^^^^ + --> $SRC_DIR/std/src/prelude/mod.rs:LL:COL + | + = note: the item `None` is already defined here + +warning: the item `Ok` is imported redundantly + --> $DIR/use-redundant-prelude-rust-2015.rs:8:5 + | +LL | use std::result::Result::Ok; + | ^^^^^^^^^^^^^^^^^^^^^^^ + --> $SRC_DIR/std/src/prelude/mod.rs:LL:COL + | + = note: the item `Ok` is already defined here + +warning: the item `Err` is imported redundantly + --> $DIR/use-redundant-prelude-rust-2015.rs:9:5 + | +LL | use std::result::Result::Err; + | ^^^^^^^^^^^^^^^^^^^^^^^^ + --> $SRC_DIR/std/src/prelude/mod.rs:LL:COL + | + = note: the item `Err` is already defined here + +warning: 4 warnings emitted + diff --git a/tests/ui/lint/use-redundant/use-redundant-prelude-rust-2021.rs b/tests/ui/lint/use-redundant/use-redundant-prelude-rust-2021.rs new file mode 100644 index 000000000000..cb4dcb6c0bd6 --- /dev/null +++ b/tests/ui/lint/use-redundant/use-redundant-prelude-rust-2021.rs @@ -0,0 +1,11 @@ +//@ check-pass +//@ edition:2021 +#![warn(unused_imports)] + +use std::convert::TryFrom;//~ WARNING the item `TryFrom` is imported redundantly +use std::convert::TryInto;//~ WARNING the item `TryInto` is imported redundantly + +fn main() { + let _e: Result = 8u8.try_into(); + let _f: Result = i32::try_from(8u8); +} diff --git a/tests/ui/lint/use-redundant/use-redundant-prelude-rust-2021.stderr b/tests/ui/lint/use-redundant/use-redundant-prelude-rust-2021.stderr new file mode 100644 index 000000000000..542356dc996d --- /dev/null +++ b/tests/ui/lint/use-redundant/use-redundant-prelude-rust-2021.stderr @@ -0,0 +1,26 @@ +warning: the item `TryFrom` is imported redundantly + --> $DIR/use-redundant-prelude-rust-2021.rs:5:5 + | +LL | use std::convert::TryFrom; + | ^^^^^^^^^^^^^^^^^^^^^ + --> $SRC_DIR/std/src/prelude/mod.rs:LL:COL + | + = note: the item `TryFrom` 
is already defined here + | +note: the lint level is defined here + --> $DIR/use-redundant-prelude-rust-2021.rs:3:9 + | +LL | #![warn(unused_imports)] + | ^^^^^^^^^^^^^^ + +warning: the item `TryInto` is imported redundantly + --> $DIR/use-redundant-prelude-rust-2021.rs:6:5 + | +LL | use std::convert::TryInto; + | ^^^^^^^^^^^^^^^^^^^^^ + --> $SRC_DIR/std/src/prelude/mod.rs:LL:COL + | + = note: the item `TryInto` is already defined here + +warning: 2 warnings emitted + diff --git a/tests/ui/rust-2018/remove-extern-crate.fixed b/tests/ui/rust-2018/remove-extern-crate.fixed index f025ccfeaeb3..19b1dc6fb013 100644 --- a/tests/ui/rust-2018/remove-extern-crate.fixed +++ b/tests/ui/rust-2018/remove-extern-crate.fixed @@ -6,6 +6,7 @@ #![warn(rust_2018_idioms)] #![allow(dropping_copy_types)] +#![allow(unused_imports)] //~ WARNING unused extern crate // Shouldn't suggest changing to `use`, as `another_name` diff --git a/tests/ui/rust-2018/remove-extern-crate.rs b/tests/ui/rust-2018/remove-extern-crate.rs index 0312964d5f6f..88ef858da147 100644 --- a/tests/ui/rust-2018/remove-extern-crate.rs +++ b/tests/ui/rust-2018/remove-extern-crate.rs @@ -6,6 +6,7 @@ #![warn(rust_2018_idioms)] #![allow(dropping_copy_types)] +#![allow(unused_imports)] extern crate core; //~ WARNING unused extern crate // Shouldn't suggest changing to `use`, as `another_name` diff --git a/tests/ui/rust-2018/remove-extern-crate.stderr b/tests/ui/rust-2018/remove-extern-crate.stderr index f752cac8ed63..020db9975c09 100644 --- a/tests/ui/rust-2018/remove-extern-crate.stderr +++ b/tests/ui/rust-2018/remove-extern-crate.stderr @@ -1,5 +1,5 @@ warning: unused extern crate - --> $DIR/remove-extern-crate.rs:10:1 + --> $DIR/remove-extern-crate.rs:11:1 | LL | extern crate core; | ^^^^^^^^^^^^^^^^^^ help: remove it @@ -12,7 +12,7 @@ LL | #![warn(rust_2018_idioms)] = note: `#[warn(unused_extern_crates)]` implied by `#[warn(rust_2018_idioms)]` warning: `extern crate` is not idiomatic in the new edition - --> 
$DIR/remove-extern-crate.rs:34:5 + --> $DIR/remove-extern-crate.rs:35:5 | LL | extern crate core; | ^^^^^^^^^^^^^^^^^^ @@ -23,7 +23,7 @@ LL | use core; | ~~~ warning: `extern crate` is not idiomatic in the new edition - --> $DIR/remove-extern-crate.rs:44:5 + --> $DIR/remove-extern-crate.rs:45:5 | LL | pub extern crate core; | ^^^^^^^^^^^^^^^^^^^^^^ From 61479730826b83c92285d60b8571b8f5ce428129 Mon Sep 17 00:00:00 2001 From: Tshepang Mbambo Date: Sun, 18 Feb 2024 10:41:31 +0200 Subject: [PATCH 117/127] remove extraneous text from example config --- config.example.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config.example.toml b/config.example.toml index 098811195d7c..975fcd71fc94 100644 --- a/config.example.toml +++ b/config.example.toml @@ -829,7 +829,7 @@ # target triples containing `-none`, `nvptx`, `switch`, or `-uefi`. #no-std = (bool) -# This is an array of the codegen backends that will be compiled a rustc +# This is an array of the codegen backends that will be # compiled for this target, overriding the global rust.codegen-backends option. # See that option for more info. 
#codegen-backends = rust.codegen-backends (array) From 50e316bdc0f5ebcd35831c983b0f1856ec48b519 Mon Sep 17 00:00:00 2001 From: Gurinder Singh Date: Sun, 18 Feb 2024 15:28:58 +0530 Subject: [PATCH 118/127] Remove const_prop.rs and move its content to const_prop_lint.rs and dataflow_const_prop.rs where it is used --- .../rustc_mir_transform/src/const_prop.rs | 161 ------------------ .../src/const_prop_lint.rs | 132 +++++++++++++- .../src/dataflow_const_prop.rs | 27 ++- compiler/rustc_mir_transform/src/lib.rs | 1 - 4 files changed, 153 insertions(+), 168 deletions(-) delete mode 100644 compiler/rustc_mir_transform/src/const_prop.rs diff --git a/compiler/rustc_mir_transform/src/const_prop.rs b/compiler/rustc_mir_transform/src/const_prop.rs deleted file mode 100644 index eba62aae60f7..000000000000 --- a/compiler/rustc_mir_transform/src/const_prop.rs +++ /dev/null @@ -1,161 +0,0 @@ -//! Propagates constants for early reporting of statically known -//! assertion failures - -use rustc_index::bit_set::BitSet; -use rustc_index::IndexVec; -use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor}; -use rustc_middle::mir::*; -use rustc_middle::ty::{ParamEnv, TyCtxt}; -use rustc_target::abi::Size; - -/// The maximum number of bytes that we'll allocate space for a local or the return value. -/// Needed for #66397, because otherwise we eval into large places and that can cause OOM or just -/// Severely regress performance. -const MAX_ALLOC_LIMIT: u64 = 1024; - -/// Macro for machine-specific `InterpError` without allocation. -/// (These will never be shown to the user, but they help diagnose ICEs.) -pub(crate) macro throw_machine_stop_str($($tt:tt)*) {{ - // We make a new local type for it. The type itself does not carry any information, - // but its vtable (for the `MachineStopType` trait) does. - #[derive(Debug)] - struct Zst; - // Printing this type shows the desired string. 
- impl std::fmt::Display for Zst { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, $($tt)*) - } - } - - impl rustc_middle::mir::interpret::MachineStopType for Zst { - fn diagnostic_message(&self) -> rustc_errors::DiagnosticMessage { - self.to_string().into() - } - - fn add_args( - self: Box, - _: &mut dyn FnMut(rustc_errors::DiagnosticArgName, rustc_errors::DiagnosticArgValue), - ) {} - } - throw_machine_stop!(Zst) -}} - -/// The mode that `ConstProp` is allowed to run in for a given `Local`. -#[derive(Clone, Copy, Debug, PartialEq)] -pub enum ConstPropMode { - /// The `Local` can be propagated into and reads of this `Local` can also be propagated. - FullConstProp, - /// The `Local` can only be propagated into and from its own block. - OnlyInsideOwnBlock, - /// The `Local` cannot be part of propagation at all. Any statement - /// referencing it either for reading or writing will not get propagated. - NoPropagation, -} - -pub struct CanConstProp { - can_const_prop: IndexVec, - // False at the beginning. Once set, no more assignments are allowed to that local. - found_assignment: BitSet, -} - -impl CanConstProp { - /// Returns true if `local` can be propagated - pub fn check<'tcx>( - tcx: TyCtxt<'tcx>, - param_env: ParamEnv<'tcx>, - body: &Body<'tcx>, - ) -> IndexVec { - let mut cpv = CanConstProp { - can_const_prop: IndexVec::from_elem(ConstPropMode::FullConstProp, &body.local_decls), - found_assignment: BitSet::new_empty(body.local_decls.len()), - }; - for (local, val) in cpv.can_const_prop.iter_enumerated_mut() { - let ty = body.local_decls[local].ty; - match tcx.layout_of(param_env.and(ty)) { - Ok(layout) if layout.size < Size::from_bytes(MAX_ALLOC_LIMIT) => {} - // Either the layout fails to compute, then we can't use this local anyway - // or the local is too large, then we don't want to. - _ => { - *val = ConstPropMode::NoPropagation; - continue; - } - } - } - // Consider that arguments are assigned on entry. 
- for arg in body.args_iter() { - cpv.found_assignment.insert(arg); - } - cpv.visit_body(body); - cpv.can_const_prop - } -} - -impl<'tcx> Visitor<'tcx> for CanConstProp { - fn visit_place(&mut self, place: &Place<'tcx>, mut context: PlaceContext, loc: Location) { - use rustc_middle::mir::visit::PlaceContext::*; - - // Dereferencing just read the addess of `place.local`. - if place.projection.first() == Some(&PlaceElem::Deref) { - context = NonMutatingUse(NonMutatingUseContext::Copy); - } - - self.visit_local(place.local, context, loc); - self.visit_projection(place.as_ref(), context, loc); - } - - fn visit_local(&mut self, local: Local, context: PlaceContext, _: Location) { - use rustc_middle::mir::visit::PlaceContext::*; - match context { - // These are just stores, where the storing is not propagatable, but there may be later - // mutations of the same local via `Store` - | MutatingUse(MutatingUseContext::Call) - | MutatingUse(MutatingUseContext::AsmOutput) - | MutatingUse(MutatingUseContext::Deinit) - // Actual store that can possibly even propagate a value - | MutatingUse(MutatingUseContext::Store) - | MutatingUse(MutatingUseContext::SetDiscriminant) => { - if !self.found_assignment.insert(local) { - match &mut self.can_const_prop[local] { - // If the local can only get propagated in its own block, then we don't have - // to worry about multiple assignments, as we'll nuke the const state at the - // end of the block anyway, and inside the block we overwrite previous - // states as applicable. - ConstPropMode::OnlyInsideOwnBlock => {} - ConstPropMode::NoPropagation => {} - other @ ConstPropMode::FullConstProp => { - trace!( - "local {:?} can't be propagated because of multiple assignments. 
Previous state: {:?}", - local, other, - ); - *other = ConstPropMode::OnlyInsideOwnBlock; - } - } - } - } - // Reading constants is allowed an arbitrary number of times - NonMutatingUse(NonMutatingUseContext::Copy) - | NonMutatingUse(NonMutatingUseContext::Move) - | NonMutatingUse(NonMutatingUseContext::Inspect) - | NonMutatingUse(NonMutatingUseContext::PlaceMention) - | NonUse(_) => {} - - // These could be propagated with a smarter analysis or just some careful thinking about - // whether they'd be fine right now. - MutatingUse(MutatingUseContext::Yield) - | MutatingUse(MutatingUseContext::Drop) - | MutatingUse(MutatingUseContext::Retag) - // These can't ever be propagated under any scheme, as we can't reason about indirect - // mutation. - | NonMutatingUse(NonMutatingUseContext::SharedBorrow) - | NonMutatingUse(NonMutatingUseContext::FakeBorrow) - | NonMutatingUse(NonMutatingUseContext::AddressOf) - | MutatingUse(MutatingUseContext::Borrow) - | MutatingUse(MutatingUseContext::AddressOf) => { - trace!("local {:?} can't be propagated because it's used: {:?}", local, context); - self.can_const_prop[local] = ConstPropMode::NoPropagation; - } - MutatingUse(MutatingUseContext::Projection) - | NonMutatingUse(NonMutatingUseContext::Projection) => bug!("visit_place should not pass {context:?} for {local:?}"), - } - } -} diff --git a/compiler/rustc_mir_transform/src/const_prop_lint.rs b/compiler/rustc_mir_transform/src/const_prop_lint.rs index 6212155a8fe9..6f2ef8f9a4fd 100644 --- a/compiler/rustc_mir_transform/src/const_prop_lint.rs +++ b/compiler/rustc_mir_transform/src/const_prop_lint.rs @@ -9,17 +9,14 @@ use rustc_const_eval::interpret::{ use rustc_data_structures::fx::FxHashSet; use rustc_hir::def::DefKind; use rustc_hir::HirId; -use rustc_index::bit_set::BitSet; -use rustc_index::{Idx, IndexVec}; -use rustc_middle::mir::visit::Visitor; +use rustc_index::{bit_set::BitSet, Idx, IndexVec}; +use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, 
PlaceContext, Visitor}; use rustc_middle::mir::*; use rustc_middle::ty::layout::{LayoutError, LayoutOf, LayoutOfHelpers, TyAndLayout}; use rustc_middle::ty::{self, ConstInt, ParamEnv, ScalarInt, Ty, TyCtxt, TypeVisitableExt}; use rustc_span::Span; use rustc_target::abi::{Abi, FieldIdx, HasDataLayout, Size, TargetDataLayout, VariantIdx}; -use crate::const_prop::CanConstProp; -use crate::const_prop::ConstPropMode; use crate::dataflow_const_prop::DummyMachine; use crate::errors::{AssertLint, AssertLintKind}; use crate::MirLint; @@ -849,3 +846,128 @@ impl<'tcx> Visitor<'tcx> for ConstPropagator<'_, 'tcx> { } } } + +/// The maximum number of bytes that we'll allocate space for a local or the return value. +/// Needed for #66397, because otherwise we eval into large places and that can cause OOM or just +/// Severely regress performance. +const MAX_ALLOC_LIMIT: u64 = 1024; + +/// The mode that `ConstProp` is allowed to run in for a given `Local`. +#[derive(Clone, Copy, Debug, PartialEq)] +pub enum ConstPropMode { + /// The `Local` can be propagated into and reads of this `Local` can also be propagated. + FullConstProp, + /// The `Local` can only be propagated into and from its own block. + OnlyInsideOwnBlock, + /// The `Local` cannot be part of propagation at all. Any statement + /// referencing it either for reading or writing will not get propagated. + NoPropagation, +} + +pub struct CanConstProp { + can_const_prop: IndexVec, + // False at the beginning. Once set, no more assignments are allowed to that local. 
+ found_assignment: BitSet, +} + +impl CanConstProp { + /// Returns true if `local` can be propagated + pub fn check<'tcx>( + tcx: TyCtxt<'tcx>, + param_env: ParamEnv<'tcx>, + body: &Body<'tcx>, + ) -> IndexVec { + let mut cpv = CanConstProp { + can_const_prop: IndexVec::from_elem(ConstPropMode::FullConstProp, &body.local_decls), + found_assignment: BitSet::new_empty(body.local_decls.len()), + }; + for (local, val) in cpv.can_const_prop.iter_enumerated_mut() { + let ty = body.local_decls[local].ty; + match tcx.layout_of(param_env.and(ty)) { + Ok(layout) if layout.size < Size::from_bytes(MAX_ALLOC_LIMIT) => {} + // Either the layout fails to compute, then we can't use this local anyway + // or the local is too large, then we don't want to. + _ => { + *val = ConstPropMode::NoPropagation; + continue; + } + } + } + // Consider that arguments are assigned on entry. + for arg in body.args_iter() { + cpv.found_assignment.insert(arg); + } + cpv.visit_body(body); + cpv.can_const_prop + } +} + +impl<'tcx> Visitor<'tcx> for CanConstProp { + fn visit_place(&mut self, place: &Place<'tcx>, mut context: PlaceContext, loc: Location) { + use rustc_middle::mir::visit::PlaceContext::*; + + // Dereferencing just read the addess of `place.local`. 
+ if place.projection.first() == Some(&PlaceElem::Deref) { + context = NonMutatingUse(NonMutatingUseContext::Copy); + } + + self.visit_local(place.local, context, loc); + self.visit_projection(place.as_ref(), context, loc); + } + + fn visit_local(&mut self, local: Local, context: PlaceContext, _: Location) { + use rustc_middle::mir::visit::PlaceContext::*; + match context { + // These are just stores, where the storing is not propagatable, but there may be later + // mutations of the same local via `Store` + | MutatingUse(MutatingUseContext::Call) + | MutatingUse(MutatingUseContext::AsmOutput) + | MutatingUse(MutatingUseContext::Deinit) + // Actual store that can possibly even propagate a value + | MutatingUse(MutatingUseContext::Store) + | MutatingUse(MutatingUseContext::SetDiscriminant) => { + if !self.found_assignment.insert(local) { + match &mut self.can_const_prop[local] { + // If the local can only get propagated in its own block, then we don't have + // to worry about multiple assignments, as we'll nuke the const state at the + // end of the block anyway, and inside the block we overwrite previous + // states as applicable. + ConstPropMode::OnlyInsideOwnBlock => {} + ConstPropMode::NoPropagation => {} + other @ ConstPropMode::FullConstProp => { + trace!( + "local {:?} can't be propagated because of multiple assignments. Previous state: {:?}", + local, other, + ); + *other = ConstPropMode::OnlyInsideOwnBlock; + } + } + } + } + // Reading constants is allowed an arbitrary number of times + NonMutatingUse(NonMutatingUseContext::Copy) + | NonMutatingUse(NonMutatingUseContext::Move) + | NonMutatingUse(NonMutatingUseContext::Inspect) + | NonMutatingUse(NonMutatingUseContext::PlaceMention) + | NonUse(_) => {} + + // These could be propagated with a smarter analysis or just some careful thinking about + // whether they'd be fine right now. 
+ MutatingUse(MutatingUseContext::Yield) + | MutatingUse(MutatingUseContext::Drop) + | MutatingUse(MutatingUseContext::Retag) + // These can't ever be propagated under any scheme, as we can't reason about indirect + // mutation. + | NonMutatingUse(NonMutatingUseContext::SharedBorrow) + | NonMutatingUse(NonMutatingUseContext::FakeBorrow) + | NonMutatingUse(NonMutatingUseContext::AddressOf) + | MutatingUse(MutatingUseContext::Borrow) + | MutatingUse(MutatingUseContext::AddressOf) => { + trace!("local {:?} can't be propagated because it's used: {:?}", local, context); + self.can_const_prop[local] = ConstPropMode::NoPropagation; + } + MutatingUse(MutatingUseContext::Projection) + | NonMutatingUse(NonMutatingUseContext::Projection) => bug!("visit_place should not pass {context:?} for {local:?}"), + } + } +} diff --git a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs index 86e99a8a5b5c..9dc7a50eca9f 100644 --- a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs +++ b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs @@ -21,7 +21,32 @@ use rustc_span::def_id::DefId; use rustc_span::DUMMY_SP; use rustc_target::abi::{Abi, FieldIdx, Size, VariantIdx, FIRST_VARIANT}; -use crate::const_prop::throw_machine_stop_str; +/// Macro for machine-specific `InterpError` without allocation. +/// (These will never be shown to the user, but they help diagnose ICEs.) +pub(crate) macro throw_machine_stop_str($($tt:tt)*) {{ + // We make a new local type for it. The type itself does not carry any information, + // but its vtable (for the `MachineStopType` trait) does. + #[derive(Debug)] + struct Zst; + // Printing this type shows the desired string. 
+ impl std::fmt::Display for Zst { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, $($tt)*) + } + } + + impl rustc_middle::mir::interpret::MachineStopType for Zst { + fn diagnostic_message(&self) -> rustc_errors::DiagnosticMessage { + self.to_string().into() + } + + fn add_args( + self: Box, + _: &mut dyn FnMut(rustc_errors::DiagnosticArgName, rustc_errors::DiagnosticArgValue), + ) {} + } + throw_machine_stop!(Zst) +}} // These constants are somewhat random guesses and have not been optimized. // If `tcx.sess.mir_opt_level() >= 4`, we ignore the limits (this can become very expensive). diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs index 7f0e6f90dbb1..74b36eb5ee89 100644 --- a/compiler/rustc_mir_transform/src/lib.rs +++ b/compiler/rustc_mir_transform/src/lib.rs @@ -59,7 +59,6 @@ mod remove_place_mention; mod add_subtyping_projections; pub mod cleanup_post_borrowck; mod const_debuginfo; -mod const_prop; mod const_prop_lint; mod copy_prop; mod coroutine; From 5e2a7ac47a817b598aec3848efa8e674beebbcbf Mon Sep 17 00:00:00 2001 From: klensy Date: Sun, 18 Feb 2024 14:40:19 +0300 Subject: [PATCH 119/127] opt-dist: bump derive_builder to dedupe darling* and remove one more syn 1.* dep --- Cargo.lock | 73 +++++++++-------------------------- src/tools/opt-dist/Cargo.toml | 2 +- 2 files changed, 20 insertions(+), 55 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f9ad78e37951..77469cb29b70 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -931,43 +931,19 @@ dependencies = [ [[package]] name = "darling" -version = "0.14.4" +version = "0.20.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b750cb3417fd1b327431a470f388520309479ab0bf5e323505daf0290cd3850" +checksum = "c376d08ea6aa96aafe61237c7200d1241cb177b7d3a542d791f2d118e9cbb955" dependencies = [ - "darling_core 0.14.4", - "darling_macro 0.14.4", -] - -[[package]] -name = "darling" -version = "0.20.5" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc5d6b04b3fd0ba9926f945895de7d806260a2d7431ba82e7edaecb043c4c6b8" -dependencies = [ - "darling_core 0.20.5", - "darling_macro 0.20.5", + "darling_core", + "darling_macro", ] [[package]] name = "darling_core" -version = "0.14.4" +version = "0.20.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "strsim 0.10.0", - "syn 1.0.109", -] - -[[package]] -name = "darling_core" -version = "0.20.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04e48a959bcd5c761246f5d090ebc2fbf7b9cd527a492b07a67510c108f1e7e3" +checksum = "33043dcd19068b8192064c704b3f83eb464f91f1ff527b44a4e2b08d9cdb8855" dependencies = [ "fnv", "ident_case", @@ -979,22 +955,11 @@ dependencies = [ [[package]] name = "darling_macro" -version = "0.14.4" +version = "0.20.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e" +checksum = "c5a91391accf613803c2a9bf9abccdbaa07c54b4244a5b64883f9c3c137c86be" dependencies = [ - "darling_core 0.14.4", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "darling_macro" -version = "0.20.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d1545d67a2149e1d93b7e5c7752dce5a7426eb5d1357ddcfd89336b94444f77" -dependencies = [ - "darling_core 0.20.5", + "darling_core", "quote", "syn 2.0.48", ] @@ -1036,33 +1001,33 @@ dependencies = [ [[package]] name = "derive_builder" -version = "0.12.0" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d67778784b508018359cbc8696edb3db78160bab2c2a28ba7f56ef6932997f8" +checksum = "0350b5cb0331628a5916d6c5c0b72e97393b8b6b03b47a9284f4e7f5a405ffd7" dependencies = [ "derive_builder_macro", ] [[package]] name = 
"derive_builder_core" -version = "0.12.0" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c11bdc11a0c47bc7d37d582b5285da6849c96681023680b906673c5707af7b0f" +checksum = "d48cda787f839151732d396ac69e3473923d54312c070ee21e9effcaa8ca0b1d" dependencies = [ - "darling 0.14.4", + "darling", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.48", ] [[package]] name = "derive_builder_macro" -version = "0.12.0" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebcda35c7a396850a55ffeac740804b40ffec779b98fffbb1738f4033f0ee79e" +checksum = "206868b8242f27cecce124c19fd88157fbd0dd334df2587f36417bafbc85097b" dependencies = [ "derive_builder_core", - "syn 1.0.109", + "syn 2.0.48", ] [[package]] @@ -1084,7 +1049,7 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4e8ef033054e131169b8f0f9a7af8f5533a9436fadf3c500ed547f730f07090d" dependencies = [ - "darling 0.20.5", + "darling", "proc-macro2", "quote", "syn 2.0.48", diff --git a/src/tools/opt-dist/Cargo.toml b/src/tools/opt-dist/Cargo.toml index 9e852b0645a2..eb6f889a6f30 100644 --- a/src/tools/opt-dist/Cargo.toml +++ b/src/tools/opt-dist/Cargo.toml @@ -21,6 +21,6 @@ serde = { version = "1", features = ["derive"] } serde_json = "1" glob = "0.3" tempfile = "3.5" -derive_builder = "0.12" +derive_builder = "0.20" clap = { version = "4", features = ["derive"] } tabled = { version = "0.13", default-features = false, features = ["std"] } From 35fe26757adce8cd8d1b9e7f4107a0ac9a7ab27b Mon Sep 17 00:00:00 2001 From: klensy Date: Sun, 18 Feb 2024 16:02:16 +0300 Subject: [PATCH 120/127] windows bump to 0.52 --- Cargo.lock | 7 ++++--- compiler/rustc_codegen_ssa/Cargo.toml | 2 +- compiler/rustc_data_structures/Cargo.toml | 2 +- compiler/rustc_data_structures/src/flock/windows.rs | 1 - compiler/rustc_data_structures/src/profiling.rs | 6 ++---- compiler/rustc_driver_impl/Cargo.toml | 2 +- 
compiler/rustc_errors/Cargo.toml | 2 +- compiler/rustc_errors/src/lock.rs | 6 ++++-- compiler/rustc_session/Cargo.toml | 2 +- compiler/rustc_session/src/filesearch.rs | 1 - src/tools/compiletest/Cargo.toml | 2 +- 11 files changed, 16 insertions(+), 17 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 77469cb29b70..ad1bad7d4f38 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6176,11 +6176,12 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" +checksum = "e48a53791691ab099e5e2ad123536d0fff50652600abaf43bbf952894110d0be" dependencies = [ - "windows-targets 0.48.5", + "windows-core", + "windows-targets 0.52.0", ] [[package]] diff --git a/compiler/rustc_codegen_ssa/Cargo.toml b/compiler/rustc_codegen_ssa/Cargo.toml index c4aaf421444e..e144b1dc1bd8 100644 --- a/compiler/rustc_codegen_ssa/Cargo.toml +++ b/compiler/rustc_codegen_ssa/Cargo.toml @@ -51,5 +51,5 @@ default-features = false features = ["read_core", "elf", "macho", "pe", "xcoff", "unaligned", "archive", "write"] [target.'cfg(windows)'.dependencies.windows] -version = "0.48.0" +version = "0.52.0" features = ["Win32_Globalization"] diff --git a/compiler/rustc_data_structures/Cargo.toml b/compiler/rustc_data_structures/Cargo.toml index 0635d8552ae5..80b6e72e49ba 100644 --- a/compiler/rustc_data_structures/Cargo.toml +++ b/compiler/rustc_data_structures/Cargo.toml @@ -33,7 +33,7 @@ tracing = "0.1" version = "0.12" [target.'cfg(windows)'.dependencies.windows] -version = "0.48.0" +version = "0.52.0" features = [ "Win32_Foundation", "Win32_Storage_FileSystem", diff --git a/compiler/rustc_data_structures/src/flock/windows.rs b/compiler/rustc_data_structures/src/flock/windows.rs index da128f464a60..9be1065135ac 100644 --- a/compiler/rustc_data_structures/src/flock/windows.rs +++ 
b/compiler/rustc_data_structures/src/flock/windows.rs @@ -69,7 +69,6 @@ impl Lock { &mut overlapped, ) } - .ok() .map_err(|e| { let err = io::Error::from_raw_os_error(e.code().0); debug!("failed acquiring file lock: {}", err); diff --git a/compiler/rustc_data_structures/src/profiling.rs b/compiler/rustc_data_structures/src/profiling.rs index e29d4811980c..2569684df3fc 100644 --- a/compiler/rustc_data_structures/src/profiling.rs +++ b/compiler/rustc_data_structures/src/profiling.rs @@ -866,16 +866,14 @@ cfg_match! { use std::mem; use windows::{ - // FIXME: change back to K32GetProcessMemoryInfo when windows crate - // updated to 0.49.0+ to drop dependency on psapi.dll - Win32::System::ProcessStatus::{GetProcessMemoryInfo, PROCESS_MEMORY_COUNTERS}, + Win32::System::ProcessStatus::{K32GetProcessMemoryInfo, PROCESS_MEMORY_COUNTERS}, Win32::System::Threading::GetCurrentProcess, }; let mut pmc = PROCESS_MEMORY_COUNTERS::default(); let pmc_size = mem::size_of_val(&pmc); unsafe { - GetProcessMemoryInfo( + K32GetProcessMemoryInfo( GetCurrentProcess(), &mut pmc, pmc_size as u32, diff --git a/compiler/rustc_driver_impl/Cargo.toml b/compiler/rustc_driver_impl/Cargo.toml index bfdd871455c9..242aa06fe3e3 100644 --- a/compiler/rustc_driver_impl/Cargo.toml +++ b/compiler/rustc_driver_impl/Cargo.toml @@ -60,7 +60,7 @@ libc = "0.2" # tidy-alphabetical-end [target.'cfg(windows)'.dependencies.windows] -version = "0.48.0" +version = "0.52.0" features = [ "Win32_System_Diagnostics_Debug", ] diff --git a/compiler/rustc_errors/Cargo.toml b/compiler/rustc_errors/Cargo.toml index a2d1fd2a9241..cc114fdcd8c3 100644 --- a/compiler/rustc_errors/Cargo.toml +++ b/compiler/rustc_errors/Cargo.toml @@ -30,7 +30,7 @@ unicode-width = "0.1.4" # tidy-alphabetical-end [target.'cfg(windows)'.dependencies.windows] -version = "0.48.0" +version = "0.52.0" features = [ "Win32_Foundation", "Win32_Security", diff --git a/compiler/rustc_errors/src/lock.rs b/compiler/rustc_errors/src/lock.rs index 
bd5cf49b56bd..0aeb511214bb 100644 --- a/compiler/rustc_errors/src/lock.rs +++ b/compiler/rustc_errors/src/lock.rs @@ -27,7 +27,8 @@ pub fn acquire_global_lock(name: &str) -> Box { impl Drop for Handle { fn drop(&mut self) { unsafe { - CloseHandle(self.0); + // FIXME can panic here + CloseHandle(self.0).unwrap(); } } } @@ -37,7 +38,8 @@ pub fn acquire_global_lock(name: &str) -> Box { impl Drop for Guard { fn drop(&mut self) { unsafe { - ReleaseMutex((self.0).0); + // FIXME can panic here + ReleaseMutex((self.0).0).unwrap(); } } } diff --git a/compiler/rustc_session/Cargo.toml b/compiler/rustc_session/Cargo.toml index ba8f67982f51..721a9275d018 100644 --- a/compiler/rustc_session/Cargo.toml +++ b/compiler/rustc_session/Cargo.toml @@ -30,7 +30,7 @@ libc = "0.2" # tidy-alphabetical-end [target.'cfg(windows)'.dependencies.windows] -version = "0.48.0" +version = "0.52.0" features = [ "Win32_Foundation", "Win32_System_LibraryLoader", diff --git a/compiler/rustc_session/src/filesearch.rs b/compiler/rustc_session/src/filesearch.rs index 6e459ac45d35..2456b4b3a15c 100644 --- a/compiler/rustc_session/src/filesearch.rs +++ b/compiler/rustc_session/src/filesearch.rs @@ -143,7 +143,6 @@ fn current_dll_path() -> Result { &mut module, ) } - .ok() .map_err(|e| e.to_string())?; let mut filename = vec![0; 1024]; diff --git a/src/tools/compiletest/Cargo.toml b/src/tools/compiletest/Cargo.toml index 31c6353e6754..62d0fcc1a608 100644 --- a/src/tools/compiletest/Cargo.toml +++ b/src/tools/compiletest/Cargo.toml @@ -34,7 +34,7 @@ libc = "0.2" miow = "0.6" [target.'cfg(windows)'.dependencies.windows] -version = "0.48.0" +version = "0.52.0" features = [ "Win32_Foundation", "Win32_System_Diagnostics_Debug", From 92386223a1e2e16e27f7ab56663bbe19febc410c Mon Sep 17 00:00:00 2001 From: klensy Date: Sun, 18 Feb 2024 16:17:56 +0300 Subject: [PATCH 121/127] tidy: add windows-core --- src/tools/tidy/src/deps.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/tools/tidy/src/deps.rs 
b/src/tools/tidy/src/deps.rs index d4642deb2c69..b36b6da308ec 100644 --- a/src/tools/tidy/src/deps.rs +++ b/src/tools/tidy/src/deps.rs @@ -384,6 +384,7 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[ "winapi-util", "winapi-x86_64-pc-windows-gnu", "windows", + "windows-core", "windows-sys", "windows-targets", "windows_aarch64_gnullvm", From 6be93ccbee1caf4aaf2d9f5d386b9dccdb385ca8 Mon Sep 17 00:00:00 2001 From: SabrinaJewson Date: Sun, 18 Feb 2024 14:04:27 +0000 Subject: [PATCH 122/127] Add uncontroversial syscall doc aliases to std docs --- library/std/src/env.rs | 3 ++- library/std/src/fs.rs | 18 ++++++++++++++++-- library/std/src/os/wasi/fs.rs | 14 ++++++++++++++ 3 files changed, 32 insertions(+), 3 deletions(-) diff --git a/library/std/src/env.rs b/library/std/src/env.rs index 30ac0512348c..5bd20ebe208c 100644 --- a/library/std/src/env.rs +++ b/library/std/src/env.rs @@ -78,7 +78,7 @@ pub fn current_dir() -> io::Result { /// assert!(env::set_current_dir(&root).is_ok()); /// println!("Successfully changed working directory to {}!", root.display()); /// ``` -#[doc(alias = "chdir")] +#[doc(alias = "chdir", alias = "SetCurrentDirectory", alias = "SetCurrentDirectoryW")] #[stable(feature = "env", since = "1.0.0")] pub fn set_current_dir>(path: P) -> io::Result<()> { os_imp::chdir(path.as_ref()) @@ -655,6 +655,7 @@ pub fn home_dir() -> Option { /// } /// ``` #[must_use] +#[doc(alias = "GetTempPath", alias = "GetTempPath2")] #[stable(feature = "env", since = "1.0.0")] pub fn temp_dir() -> PathBuf { os_imp::temp_dir() diff --git a/library/std/src/fs.rs b/library/std/src/fs.rs index db8de1b1e3de..6b1dd1b5af4c 100644 --- a/library/std/src/fs.rs +++ b/library/std/src/fs.rs @@ -656,6 +656,7 @@ impl File { /// /// Note that this method alters the permissions of the underlying file, /// even though it takes `&self` rather than `&mut self`. 
+ #[doc(alias = "fchmod", alias = "SetFileInformationByHandle")] #[stable(feature = "set_permissions_atomic", since = "1.16.0")] pub fn set_permissions(&self, perm: Permissions) -> io::Result<()> { self.inner.set_permissions(perm.0) @@ -1314,6 +1315,7 @@ impl Metadata { /// Ok(()) /// } /// ``` + #[doc(alias = "mtime", alias = "ftLastWriteTime")] #[stable(feature = "fs_time", since = "1.10.0")] pub fn modified(&self) -> io::Result { self.0.modified().map(FromInner::from_inner) @@ -1349,6 +1351,7 @@ impl Metadata { /// Ok(()) /// } /// ``` + #[doc(alias = "atime", alias = "ftLastAccessTime")] #[stable(feature = "fs_time", since = "1.10.0")] pub fn accessed(&self) -> io::Result { self.0.accessed().map(FromInner::from_inner) @@ -1381,6 +1384,7 @@ impl Metadata { /// Ok(()) /// } /// ``` + #[doc(alias = "btime", alias = "birthtime", alias = "ftCreationTime")] #[stable(feature = "fs_time", since = "1.10.0")] pub fn created(&self) -> io::Result { self.0.created().map(FromInner::from_inner) @@ -1879,6 +1883,7 @@ impl AsInner for DirEntry { /// Ok(()) /// } /// ``` +#[doc(alias = "rm", alias = "unlink", alias = "DeleteFile")] #[stable(feature = "rust1", since = "1.0.0")] pub fn remove_file>(path: P) -> io::Result<()> { fs_imp::unlink(path.as_ref()) @@ -1917,6 +1922,7 @@ pub fn remove_file>(path: P) -> io::Result<()> { /// Ok(()) /// } /// ``` +#[doc(alias = "stat")] #[stable(feature = "rust1", since = "1.0.0")] pub fn metadata>(path: P) -> io::Result { fs_imp::stat(path.as_ref()).map(Metadata) @@ -1951,6 +1957,7 @@ pub fn metadata>(path: P) -> io::Result { /// Ok(()) /// } /// ``` +#[doc(alias = "lstat")] #[stable(feature = "symlink_metadata", since = "1.1.0")] pub fn symlink_metadata>(path: P) -> io::Result { fs_imp::lstat(path.as_ref()).map(Metadata) @@ -1994,6 +2001,7 @@ pub fn symlink_metadata>(path: P) -> io::Result { /// Ok(()) /// } /// ``` +#[doc(alias = "mv", alias = "MoveFile", alias = "MoveFileEx")] #[stable(feature = "rust1", since = "1.0.0")] pub fn rename, Q: 
AsRef>(from: P, to: Q) -> io::Result<()> { fs_imp::rename(from.as_ref(), to.as_ref()) @@ -2052,6 +2060,9 @@ pub fn rename, Q: AsRef>(from: P, to: Q) -> io::Result<()> /// Ok(()) /// } /// ``` +#[doc(alias = "cp")] +#[doc(alias = "CopyFile", alias = "CopyFileEx")] +#[doc(alias = "fclonefileat", alias = "fcopyfile")] #[stable(feature = "rust1", since = "1.0.0")] pub fn copy, Q: AsRef>(from: P, to: Q) -> io::Result { fs_imp::copy(from.as_ref(), to.as_ref()) @@ -2096,6 +2107,7 @@ pub fn copy, Q: AsRef>(from: P, to: Q) -> io::Result { /// Ok(()) /// } /// ``` +#[doc(alias = "CreateHardLink", alias = "linkat")] #[stable(feature = "rust1", since = "1.0.0")] pub fn hard_link, Q: AsRef>(original: P, link: Q) -> io::Result<()> { fs_imp::link(original.as_ref(), link.as_ref()) @@ -2245,7 +2257,7 @@ pub fn canonicalize>(path: P) -> io::Result { /// Ok(()) /// } /// ``` -#[doc(alias = "mkdir")] +#[doc(alias = "mkdir", alias = "CreateDirectory")] #[stable(feature = "rust1", since = "1.0.0")] #[cfg_attr(not(test), rustc_diagnostic_item = "fs_create_dir")] pub fn create_dir>(path: P) -> io::Result<()> { @@ -2326,7 +2338,7 @@ pub fn create_dir_all>(path: P) -> io::Result<()> { /// Ok(()) /// } /// ``` -#[doc(alias = "rmdir")] +#[doc(alias = "rmdir", alias = "RemoveDirectory")] #[stable(feature = "rust1", since = "1.0.0")] pub fn remove_dir>(path: P) -> io::Result<()> { fs_imp::rmdir(path.as_ref()) @@ -2449,6 +2461,7 @@ pub fn remove_dir_all>(path: P) -> io::Result<()> { /// Ok(()) /// } /// ``` +#[doc(alias = "ls", alias = "opendir", alias = "FindFirstFile", alias = "FindNextFile")] #[stable(feature = "rust1", since = "1.0.0")] pub fn read_dir>(path: P) -> io::Result { fs_imp::readdir(path.as_ref()).map(ReadDir) @@ -2484,6 +2497,7 @@ pub fn read_dir>(path: P) -> io::Result { /// Ok(()) /// } /// ``` +#[doc(alias = "chmod", alias = "SetFileAttributes")] #[stable(feature = "set_permissions", since = "1.1.0")] pub fn set_permissions>(path: P, perm: Permissions) -> io::Result<()> { 
fs_imp::set_perm(path.as_ref(), perm.0) diff --git a/library/std/src/os/wasi/fs.rs b/library/std/src/os/wasi/fs.rs index 3da8c835511a..4525c3aa9147 100644 --- a/library/std/src/os/wasi/fs.rs +++ b/library/std/src/os/wasi/fs.rs @@ -173,51 +173,61 @@ pub trait FileExt { /// /// This corresponds to the `fd_tell` syscall and is similar to /// `seek` where you offset 0 bytes from the current position. + #[doc(alias = "fd_tell")] fn tell(&self) -> io::Result; /// Adjust the flags associated with this file. /// /// This corresponds to the `fd_fdstat_set_flags` syscall. + #[doc(alias = "fd_fdstat_set_flags")] fn fdstat_set_flags(&self, flags: u16) -> io::Result<()>; /// Adjust the rights associated with this file. /// /// This corresponds to the `fd_fdstat_set_rights` syscall. + #[doc(alias = "fd_fdstat_set_rights")] fn fdstat_set_rights(&self, rights: u64, inheriting: u64) -> io::Result<()>; /// Provide file advisory information on a file descriptor. /// /// This corresponds to the `fd_advise` syscall. + #[doc(alias = "fd_advise")] fn advise(&self, offset: u64, len: u64, advice: u8) -> io::Result<()>; /// Force the allocation of space in a file. /// /// This corresponds to the `fd_allocate` syscall. + #[doc(alias = "fd_allocate")] fn allocate(&self, offset: u64, len: u64) -> io::Result<()>; /// Create a directory. /// /// This corresponds to the `path_create_directory` syscall. + #[doc(alias = "path_create_directory")] fn create_directory>(&self, dir: P) -> io::Result<()>; /// Read the contents of a symbolic link. /// /// This corresponds to the `path_readlink` syscall. + #[doc(alias = "path_readlink")] fn read_link>(&self, path: P) -> io::Result; /// Return the attributes of a file or directory. /// /// This corresponds to the `path_filestat_get` syscall. + #[doc(alias = "path_filestat_get")] fn metadata_at>(&self, lookup_flags: u32, path: P) -> io::Result; /// Unlink a file. /// /// This corresponds to the `path_unlink_file` syscall. 
+ #[doc(alias = "path_unlink_file")] fn remove_file>(&self, path: P) -> io::Result<()>; /// Remove a directory. /// /// This corresponds to the `path_remove_directory` syscall. + #[doc(alias = "path_remove_directory")] fn remove_directory>(&self, path: P) -> io::Result<()>; } @@ -359,6 +369,7 @@ pub trait OpenOptionsExt { /// Open a file or directory. /// /// This corresponds to the `path_open` syscall. + #[doc(alias = "path_open")] fn open_at>(&self, file: &File, path: P) -> io::Result; } @@ -500,6 +511,7 @@ impl DirEntryExt for fs::DirEntry { /// Create a hard link. /// /// This corresponds to the `path_link` syscall. +#[doc(alias = "path_link")] pub fn link, U: AsRef>( old_fd: &File, old_flags: u32, @@ -518,6 +530,7 @@ pub fn link, U: AsRef>( /// Rename a file or directory. /// /// This corresponds to the `path_rename` syscall. +#[doc(alias = "path_rename")] pub fn rename, U: AsRef>( old_fd: &File, old_path: P, @@ -534,6 +547,7 @@ pub fn rename, U: AsRef>( /// Create a symbolic link. /// /// This corresponds to the `path_symlink` syscall. +#[doc(alias = "path_symlink")] pub fn symlink, U: AsRef>( old_path: P, fd: &File, From 12d7bac2a4ed307b10b0d339b013cddd9401b512 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Sun, 18 Feb 2024 20:51:45 +0300 Subject: [PATCH 123/127] Update coverage-run tests --- tests/coverage/no_spans.coverage | 2 +- tests/coverage/no_spans_if_not.coverage | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/coverage/no_spans.coverage b/tests/coverage/no_spans.coverage index e55177698a26..b117c32ffd0d 100644 --- a/tests/coverage/no_spans.coverage +++ b/tests/coverage/no_spans.coverage @@ -23,7 +23,7 @@ LL| |} LL| | LL| |macro_that_defines_a_function! 
{ - LL| | fn affected_function() { + LL| 1| fn affected_function() { LL| 1| || () LL| | } LL| |} diff --git a/tests/coverage/no_spans_if_not.coverage b/tests/coverage/no_spans_if_not.coverage index 1b6bbc75a04e..d235568db654 100644 --- a/tests/coverage/no_spans_if_not.coverage +++ b/tests/coverage/no_spans_if_not.coverage @@ -19,11 +19,11 @@ LL| |} LL| | LL| |macro_that_defines_a_function! { - LL| | fn affected_function() { - LL| | if !false { - LL| | () + LL| 1| fn affected_function() { + LL| 1| if !false { + LL| 1| () LL| | } else { - LL| | () + LL| 0| () LL| | } LL| | } LL| |} From 8daf1375437eb381e10fd4a88da5526c7b3ae64b Mon Sep 17 00:00:00 2001 From: David Thomas Date: Wed, 14 Feb 2024 17:41:45 +0000 Subject: [PATCH 124/127] Reduce monomorphisation bloat in small_c_string --- .../std/src/sys/pal/common/small_c_string.rs | 23 ++++++++++++------- .../tests/fail/shims/fs/isolated_file.stderr | 1 + 2 files changed, 16 insertions(+), 8 deletions(-) diff --git a/library/std/src/sys/pal/common/small_c_string.rs b/library/std/src/sys/pal/common/small_c_string.rs index af9b18e372da..2312e6e5ee29 100644 --- a/library/std/src/sys/pal/common/small_c_string.rs +++ b/library/std/src/sys/pal/common/small_c_string.rs @@ -17,20 +17,27 @@ const NUL_ERR: io::Error = #[inline] pub fn run_path_with_cstr(path: &Path, f: F) -> io::Result where - F: FnOnce(&CStr) -> io::Result, + F: FnMut(&CStr) -> io::Result, { run_with_cstr(path.as_os_str().as_encoded_bytes(), f) } #[inline] -pub fn run_with_cstr(bytes: &[u8], f: F) -> io::Result +pub fn run_with_cstr(bytes: &[u8], mut f: F) -> io::Result where - F: FnOnce(&CStr) -> io::Result, + F: FnMut(&CStr) -> io::Result, { if bytes.len() >= MAX_STACK_ALLOCATION { - return run_with_cstr_allocating(bytes, f); + run_with_cstr_allocating(bytes, &mut f) + } else { + unsafe { run_with_cstr_stack(bytes, &mut f) } } +} +unsafe fn run_with_cstr_stack( + bytes: &[u8], + f: &mut dyn FnMut(&CStr) -> io::Result, +) -> io::Result { let mut buf = 
MaybeUninit::<[u8; MAX_STACK_ALLOCATION]>::uninit(); let buf_ptr = buf.as_mut_ptr() as *mut u8; @@ -47,10 +54,10 @@ where #[cold] #[inline(never)] -fn run_with_cstr_allocating(bytes: &[u8], f: F) -> io::Result -where - F: FnOnce(&CStr) -> io::Result, -{ +fn run_with_cstr_allocating( + bytes: &[u8], + f: &mut dyn FnMut(&CStr) -> io::Result, +) -> io::Result { match CString::new(bytes) { Ok(s) => f(&s), Err(_) => Err(NUL_ERR), diff --git a/src/tools/miri/tests/fail/shims/fs/isolated_file.stderr b/src/tools/miri/tests/fail/shims/fs/isolated_file.stderr index ec670c4a391e..4596367488bc 100644 --- a/src/tools/miri/tests/fail/shims/fs/isolated_file.stderr +++ b/src/tools/miri/tests/fail/shims/fs/isolated_file.stderr @@ -11,6 +11,7 @@ LL | let fd = cvt_r(|| unsafe { open64(path.as_ptr(), flags, opts.mode a = note: inside `std::sys::pal::PLATFORM::cvt_r::` at RUSTLIB/std/src/sys/pal/PLATFORM/mod.rs:LL:CC = note: inside `std::sys::pal::PLATFORM::fs::File::open_c` at RUSTLIB/std/src/sys/pal/PLATFORM/fs.rs:LL:CC = note: inside closure at RUSTLIB/std/src/sys/pal/PLATFORM/fs.rs:LL:CC + = note: inside `std::sys::pal::PLATFORM::small_c_string::run_with_cstr_stack::` at RUSTLIB/std/src/sys/pal/PLATFORM/small_c_string.rs:LL:CC = note: inside `std::sys::pal::PLATFORM::small_c_string::run_with_cstr::` at RUSTLIB/std/src/sys/pal/PLATFORM/small_c_string.rs:LL:CC = note: inside `std::sys::pal::PLATFORM::small_c_string::run_path_with_cstr::` at RUSTLIB/std/src/sys/pal/PLATFORM/small_c_string.rs:LL:CC = note: inside `std::sys::pal::PLATFORM::fs::File::open` at RUSTLIB/std/src/sys/pal/PLATFORM/fs.rs:LL:CC From 04334394337e19e632eba32ae1dfd554f5831b42 Mon Sep 17 00:00:00 2001 From: David Thomas Date: Wed, 14 Feb 2024 20:03:55 +0000 Subject: [PATCH 125/127] Add some comments to prevent regression --- library/std/src/sys/pal/common/small_c_string.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/library/std/src/sys/pal/common/small_c_string.rs 
b/library/std/src/sys/pal/common/small_c_string.rs index 2312e6e5ee29..8e5ecdea9a8b 100644 --- a/library/std/src/sys/pal/common/small_c_string.rs +++ b/library/std/src/sys/pal/common/small_c_string.rs @@ -27,6 +27,8 @@ pub fn run_with_cstr(bytes: &[u8], mut f: F) -> io::Result where F: FnMut(&CStr) -> io::Result, { + // Dispatch and dyn erase the closure type to prevent mono bloat. + // See https://github.com/rust-lang/rust/pull/121101. if bytes.len() >= MAX_STACK_ALLOCATION { run_with_cstr_allocating(bytes, &mut f) } else { @@ -34,6 +36,9 @@ where } } +/// # Safety +/// +/// `bytes` must have a length less than `MAX_STACK_ALLOCATION`. unsafe fn run_with_cstr_stack( bytes: &[u8], f: &mut dyn FnMut(&CStr) -> io::Result, From dbb15fb45d45615b443f3fbf009af047719005ff Mon Sep 17 00:00:00 2001 From: David Thomas Date: Sat, 17 Feb 2024 13:36:00 +0000 Subject: [PATCH 126/127] Dyn erase at call site --- .../std/src/sys/pal/common/small_c_string.rs | 21 +++------- library/std/src/sys/pal/common/tests.rs | 12 +++--- library/std/src/sys/pal/hermit/fs.rs | 4 +- library/std/src/sys/pal/solid/os.rs | 8 ++-- library/std/src/sys/pal/unix/fs.rs | 42 +++++++++---------- library/std/src/sys/pal/unix/os.rs | 10 ++--- library/std/src/sys/pal/wasi/fs.rs | 2 +- library/std/src/sys/pal/wasi/os.rs | 10 ++--- library/std/src/sys_common/net.rs | 2 +- .../tests/fail/shims/fs/isolated_file.stderr | 4 +- 10 files changed, 53 insertions(+), 62 deletions(-) diff --git a/library/std/src/sys/pal/common/small_c_string.rs b/library/std/src/sys/pal/common/small_c_string.rs index 8e5ecdea9a8b..37812fc0659a 100644 --- a/library/std/src/sys/pal/common/small_c_string.rs +++ b/library/std/src/sys/pal/common/small_c_string.rs @@ -15,24 +15,18 @@ const NUL_ERR: io::Error = io::const_io_error!(io::ErrorKind::InvalidInput, "file name contained an unexpected NUL byte"); #[inline] -pub fn run_path_with_cstr(path: &Path, f: F) -> io::Result -where - F: FnMut(&CStr) -> io::Result, -{ +pub fn 
run_path_with_cstr(path: &Path, f: &dyn Fn(&CStr) -> io::Result) -> io::Result { run_with_cstr(path.as_os_str().as_encoded_bytes(), f) } #[inline] -pub fn run_with_cstr(bytes: &[u8], mut f: F) -> io::Result -where - F: FnMut(&CStr) -> io::Result, -{ +pub fn run_with_cstr(bytes: &[u8], f: &dyn Fn(&CStr) -> io::Result) -> io::Result { // Dispatch and dyn erase the closure type to prevent mono bloat. // See https://github.com/rust-lang/rust/pull/121101. if bytes.len() >= MAX_STACK_ALLOCATION { - run_with_cstr_allocating(bytes, &mut f) + run_with_cstr_allocating(bytes, f) } else { - unsafe { run_with_cstr_stack(bytes, &mut f) } + unsafe { run_with_cstr_stack(bytes, f) } } } @@ -41,7 +35,7 @@ where /// `bytes` must have a length less than `MAX_STACK_ALLOCATION`. unsafe fn run_with_cstr_stack( bytes: &[u8], - f: &mut dyn FnMut(&CStr) -> io::Result, + f: &dyn Fn(&CStr) -> io::Result, ) -> io::Result { let mut buf = MaybeUninit::<[u8; MAX_STACK_ALLOCATION]>::uninit(); let buf_ptr = buf.as_mut_ptr() as *mut u8; @@ -59,10 +53,7 @@ unsafe fn run_with_cstr_stack( #[cold] #[inline(never)] -fn run_with_cstr_allocating( - bytes: &[u8], - f: &mut dyn FnMut(&CStr) -> io::Result, -) -> io::Result { +fn run_with_cstr_allocating(bytes: &[u8], f: &dyn Fn(&CStr) -> io::Result) -> io::Result { match CString::new(bytes) { Ok(s) => f(&s), Err(_) => Err(NUL_ERR), diff --git a/library/std/src/sys/pal/common/tests.rs b/library/std/src/sys/pal/common/tests.rs index 32dc18ee1cff..e72d02203da1 100644 --- a/library/std/src/sys/pal/common/tests.rs +++ b/library/std/src/sys/pal/common/tests.rs @@ -7,7 +7,7 @@ use core::iter::repeat; #[test] fn stack_allocation_works() { let path = Path::new("abc"); - let result = run_path_with_cstr(path, |p| { + let result = run_path_with_cstr(path, &|p| { assert_eq!(p, &*CString::new(path.as_os_str().as_encoded_bytes()).unwrap()); Ok(42) }); @@ -17,14 +17,14 @@ fn stack_allocation_works() { #[test] fn stack_allocation_fails() { let path = Path::new("ab\0"); - 
assert!(run_path_with_cstr::<(), _>(path, |_| unreachable!()).is_err()); + assert!(run_path_with_cstr::<()>(path, &|_| unreachable!()).is_err()); } #[test] fn heap_allocation_works() { let path = repeat("a").take(384).collect::(); let path = Path::new(&path); - let result = run_path_with_cstr(path, |p| { + let result = run_path_with_cstr(path, &|p| { assert_eq!(p, &*CString::new(path.as_os_str().as_encoded_bytes()).unwrap()); Ok(42) }); @@ -36,7 +36,7 @@ fn heap_allocation_fails() { let mut path = repeat("a").take(384).collect::(); path.push('\0'); let path = Path::new(&path); - assert!(run_path_with_cstr::<(), _>(path, |_| unreachable!()).is_err()); + assert!(run_path_with_cstr::<()>(path, &|_| unreachable!()).is_err()); } #[bench] @@ -44,7 +44,7 @@ fn bench_stack_path_alloc(b: &mut test::Bencher) { let path = repeat("a").take(383).collect::(); let p = Path::new(&path); b.iter(|| { - run_path_with_cstr(p, |cstr| { + run_path_with_cstr(p, &|cstr| { black_box(cstr); Ok(()) }) @@ -57,7 +57,7 @@ fn bench_heap_path_alloc(b: &mut test::Bencher) { let path = repeat("a").take(384).collect::(); let p = Path::new(&path); b.iter(|| { - run_path_with_cstr(p, |cstr| { + run_path_with_cstr(p, &|cstr| { black_box(cstr); Ok(()) }) diff --git a/library/std/src/sys/pal/hermit/fs.rs b/library/std/src/sys/pal/hermit/fs.rs index 694482a8a30a..d4da53fd3df9 100644 --- a/library/std/src/sys/pal/hermit/fs.rs +++ b/library/std/src/sys/pal/hermit/fs.rs @@ -269,7 +269,7 @@ impl OpenOptions { impl File { pub fn open(path: &Path, opts: &OpenOptions) -> io::Result { - run_path_with_cstr(path, |path| File::open_c(&path, opts)) + run_path_with_cstr(path, &|path| File::open_c(&path, opts)) } pub fn open_c(path: &CStr, opts: &OpenOptions) -> io::Result { @@ -421,7 +421,7 @@ pub fn readdir(_p: &Path) -> io::Result { } pub fn unlink(path: &Path) -> io::Result<()> { - run_path_with_cstr(path, |path| cvt(unsafe { abi::unlink(path.as_ptr()) }).map(|_| ())) + run_path_with_cstr(path, &|path| cvt(unsafe { 
abi::unlink(path.as_ptr()) }).map(|_| ())) } pub fn rename(_old: &Path, _new: &Path) -> io::Result<()> { diff --git a/library/std/src/sys/pal/solid/os.rs b/library/std/src/sys/pal/solid/os.rs index ff81544ba915..5ceab3b27e0a 100644 --- a/library/std/src/sys/pal/solid/os.rs +++ b/library/std/src/sys/pal/solid/os.rs @@ -172,7 +172,7 @@ pub fn env() -> Env { pub fn getenv(k: &OsStr) -> Option { // environment variables with a nul byte can't be set, so their value is // always None as well - run_with_cstr(k.as_bytes(), |k| { + run_with_cstr(k.as_bytes(), &|k| { let _guard = env_read_lock(); let v = unsafe { libc::getenv(k.as_ptr()) } as *const libc::c_char; @@ -190,8 +190,8 @@ pub fn getenv(k: &OsStr) -> Option { } pub fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> { - run_with_cstr(k.as_bytes(), |k| { - run_with_cstr(v.as_bytes(), |v| { + run_with_cstr(k.as_bytes(), &|k| { + run_with_cstr(v.as_bytes(), &|v| { let _guard = ENV_LOCK.write(); cvt_env(unsafe { libc::setenv(k.as_ptr(), v.as_ptr(), 1) }).map(drop) }) @@ -199,7 +199,7 @@ pub fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> { } pub fn unsetenv(n: &OsStr) -> io::Result<()> { - run_with_cstr(n.as_bytes(), |nbuf| { + run_with_cstr(n.as_bytes(), &|nbuf| { let _guard = ENV_LOCK.write(); cvt_env(unsafe { libc::unsetenv(nbuf.as_ptr()) }).map(drop) }) diff --git a/library/std/src/sys/pal/unix/fs.rs b/library/std/src/sys/pal/unix/fs.rs index 6d0b892ea2f4..c75323ef7757 100644 --- a/library/std/src/sys/pal/unix/fs.rs +++ b/library/std/src/sys/pal/unix/fs.rs @@ -1118,7 +1118,7 @@ impl OpenOptions { impl File { pub fn open(path: &Path, opts: &OpenOptions) -> io::Result { - run_path_with_cstr(path, |path| File::open_c(path, opts)) + run_path_with_cstr(path, &|path| File::open_c(path, opts)) } pub fn open_c(path: &CStr, opts: &OpenOptions) -> io::Result { @@ -1394,7 +1394,7 @@ impl DirBuilder { } pub fn mkdir(&self, p: &Path) -> io::Result<()> { - run_path_with_cstr(p, |p| cvt(unsafe { libc::mkdir(p.as_ptr(), self.mode) 
}).map(|_| ())) + run_path_with_cstr(p, &|p| cvt(unsafe { libc::mkdir(p.as_ptr(), self.mode) }).map(|_| ())) } pub fn set_mode(&mut self, mode: u32) { @@ -1575,7 +1575,7 @@ impl fmt::Debug for File { } pub fn readdir(path: &Path) -> io::Result { - let ptr = run_path_with_cstr(path, |p| unsafe { Ok(libc::opendir(p.as_ptr())) })?; + let ptr = run_path_with_cstr(path, &|p| unsafe { Ok(libc::opendir(p.as_ptr())) })?; if ptr.is_null() { Err(Error::last_os_error()) } else { @@ -1586,27 +1586,27 @@ pub fn readdir(path: &Path) -> io::Result { } pub fn unlink(p: &Path) -> io::Result<()> { - run_path_with_cstr(p, |p| cvt(unsafe { libc::unlink(p.as_ptr()) }).map(|_| ())) + run_path_with_cstr(p, &|p| cvt(unsafe { libc::unlink(p.as_ptr()) }).map(|_| ())) } pub fn rename(old: &Path, new: &Path) -> io::Result<()> { - run_path_with_cstr(old, |old| { - run_path_with_cstr(new, |new| { + run_path_with_cstr(old, &|old| { + run_path_with_cstr(new, &|new| { cvt(unsafe { libc::rename(old.as_ptr(), new.as_ptr()) }).map(|_| ()) }) }) } pub fn set_perm(p: &Path, perm: FilePermissions) -> io::Result<()> { - run_path_with_cstr(p, |p| cvt_r(|| unsafe { libc::chmod(p.as_ptr(), perm.mode) }).map(|_| ())) + run_path_with_cstr(p, &|p| cvt_r(|| unsafe { libc::chmod(p.as_ptr(), perm.mode) }).map(|_| ())) } pub fn rmdir(p: &Path) -> io::Result<()> { - run_path_with_cstr(p, |p| cvt(unsafe { libc::rmdir(p.as_ptr()) }).map(|_| ())) + run_path_with_cstr(p, &|p| cvt(unsafe { libc::rmdir(p.as_ptr()) }).map(|_| ())) } pub fn readlink(p: &Path) -> io::Result { - run_path_with_cstr(p, |c_path| { + run_path_with_cstr(p, &|c_path| { let p = c_path.as_ptr(); let mut buf = Vec::with_capacity(256); @@ -1635,16 +1635,16 @@ pub fn readlink(p: &Path) -> io::Result { } pub fn symlink(original: &Path, link: &Path) -> io::Result<()> { - run_path_with_cstr(original, |original| { - run_path_with_cstr(link, |link| { + run_path_with_cstr(original, &|original| { + run_path_with_cstr(link, &|link| { cvt(unsafe { 
libc::symlink(original.as_ptr(), link.as_ptr()) }).map(|_| ()) }) }) } pub fn link(original: &Path, link: &Path) -> io::Result<()> { - run_path_with_cstr(original, |original| { - run_path_with_cstr(link, |link| { + run_path_with_cstr(original, &|original| { + run_path_with_cstr(link, &|link| { cfg_if::cfg_if! { if #[cfg(any(target_os = "vxworks", target_os = "redox", target_os = "android", target_os = "espidf", target_os = "horizon", target_os = "vita"))] { // VxWorks, Redox and ESP-IDF lack `linkat`, so use `link` instead. POSIX leaves @@ -1678,7 +1678,7 @@ pub fn link(original: &Path, link: &Path) -> io::Result<()> { } pub fn stat(p: &Path) -> io::Result { - run_path_with_cstr(p, |p| { + run_path_with_cstr(p, &|p| { cfg_has_statx! { if let Some(ret) = unsafe { try_statx( libc::AT_FDCWD, @@ -1697,7 +1697,7 @@ pub fn stat(p: &Path) -> io::Result { } pub fn lstat(p: &Path) -> io::Result { - run_path_with_cstr(p, |p| { + run_path_with_cstr(p, &|p| { cfg_has_statx! { if let Some(ret) = unsafe { try_statx( libc::AT_FDCWD, @@ -1716,7 +1716,7 @@ pub fn lstat(p: &Path) -> io::Result { } pub fn canonicalize(p: &Path) -> io::Result { - let r = run_path_with_cstr(p, |path| unsafe { + let r = run_path_with_cstr(p, &|path| unsafe { Ok(libc::realpath(path.as_ptr(), ptr::null_mut())) })?; if r.is_null() { @@ -1879,7 +1879,7 @@ pub fn copy(from: &Path, to: &Path) -> io::Result { // Opportunistically attempt to create a copy-on-write clone of `from` // using `fclonefileat`. 
if HAS_FCLONEFILEAT.load(Ordering::Relaxed) { - let clonefile_result = run_path_with_cstr(to, |to| { + let clonefile_result = run_path_with_cstr(to, &|to| { cvt(unsafe { fclonefileat(reader.as_raw_fd(), libc::AT_FDCWD, to.as_ptr(), 0) }) }); match clonefile_result { @@ -1925,7 +1925,7 @@ pub fn copy(from: &Path, to: &Path) -> io::Result { } pub fn chown(path: &Path, uid: u32, gid: u32) -> io::Result<()> { - run_path_with_cstr(path, |path| { + run_path_with_cstr(path, &|path| { cvt(unsafe { libc::chown(path.as_ptr(), uid as libc::uid_t, gid as libc::gid_t) }) .map(|_| ()) }) @@ -1937,7 +1937,7 @@ pub fn fchown(fd: c_int, uid: u32, gid: u32) -> io::Result<()> { } pub fn lchown(path: &Path, uid: u32, gid: u32) -> io::Result<()> { - run_path_with_cstr(path, |path| { + run_path_with_cstr(path, &|path| { cvt(unsafe { libc::lchown(path.as_ptr(), uid as libc::uid_t, gid as libc::gid_t) }) .map(|_| ()) }) @@ -1945,7 +1945,7 @@ pub fn lchown(path: &Path, uid: u32, gid: u32) -> io::Result<()> { #[cfg(not(any(target_os = "fuchsia", target_os = "vxworks")))] pub fn chroot(dir: &Path) -> io::Result<()> { - run_path_with_cstr(dir, |dir| cvt(unsafe { libc::chroot(dir.as_ptr()) }).map(|_| ())) + run_path_with_cstr(dir, &|dir| cvt(unsafe { libc::chroot(dir.as_ptr()) }).map(|_| ())) } pub use remove_dir_impl::remove_dir_all; @@ -2140,7 +2140,7 @@ mod remove_dir_impl { if attr.file_type().is_symlink() { crate::fs::remove_file(p) } else { - run_path_with_cstr(p, |p| remove_dir_all_recursive(None, &p)) + run_path_with_cstr(p, &|p| remove_dir_all_recursive(None, &p)) } } diff --git a/library/std/src/sys/pal/unix/os.rs b/library/std/src/sys/pal/unix/os.rs index 881b3a25c515..af2b9db46854 100644 --- a/library/std/src/sys/pal/unix/os.rs +++ b/library/std/src/sys/pal/unix/os.rs @@ -186,7 +186,7 @@ pub fn chdir(_p: &path::Path) -> io::Result<()> { #[cfg(not(target_os = "espidf"))] pub fn chdir(p: &path::Path) -> io::Result<()> { - let result = run_path_with_cstr(p, |p| unsafe { 
Ok(libc::chdir(p.as_ptr())) })?; + let result = run_path_with_cstr(p, &|p| unsafe { Ok(libc::chdir(p.as_ptr())) })?; if result == 0 { Ok(()) } else { Err(io::Error::last_os_error()) } } @@ -643,7 +643,7 @@ pub fn env() -> Env { pub fn getenv(k: &OsStr) -> Option { // environment variables with a nul byte can't be set, so their value is // always None as well - run_with_cstr(k.as_bytes(), |k| { + run_with_cstr(k.as_bytes(), &|k| { let _guard = env_read_lock(); let v = unsafe { libc::getenv(k.as_ptr()) } as *const libc::c_char; @@ -661,8 +661,8 @@ pub fn getenv(k: &OsStr) -> Option { } pub fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> { - run_with_cstr(k.as_bytes(), |k| { - run_with_cstr(v.as_bytes(), |v| { + run_with_cstr(k.as_bytes(), &|k| { + run_with_cstr(v.as_bytes(), &|v| { let _guard = ENV_LOCK.write(); cvt(unsafe { libc::setenv(k.as_ptr(), v.as_ptr(), 1) }).map(drop) }) @@ -670,7 +670,7 @@ pub fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> { } pub fn unsetenv(n: &OsStr) -> io::Result<()> { - run_with_cstr(n.as_bytes(), |nbuf| { + run_with_cstr(n.as_bytes(), &|nbuf| { let _guard = ENV_LOCK.write(); cvt(unsafe { libc::unsetenv(nbuf.as_ptr()) }).map(drop) }) diff --git a/library/std/src/sys/pal/wasi/fs.rs b/library/std/src/sys/pal/wasi/fs.rs index e82386654522..529b82e01989 100644 --- a/library/std/src/sys/pal/wasi/fs.rs +++ b/library/std/src/sys/pal/wasi/fs.rs @@ -698,7 +698,7 @@ fn open_at(fd: &WasiFd, path: &Path, opts: &OpenOptions) -> io::Result { /// Note that this can fail if `p` doesn't look like it can be opened relative /// to any pre-opened file descriptor. 
fn open_parent(p: &Path) -> io::Result<(ManuallyDrop, PathBuf)> { - run_path_with_cstr(p, |p| { + run_path_with_cstr(p, &|p| { let mut buf = Vec::::with_capacity(512); loop { unsafe { diff --git a/library/std/src/sys/pal/wasi/os.rs b/library/std/src/sys/pal/wasi/os.rs index 530d36021721..d62ff8a2f182 100644 --- a/library/std/src/sys/pal/wasi/os.rs +++ b/library/std/src/sys/pal/wasi/os.rs @@ -95,7 +95,7 @@ pub fn getcwd() -> io::Result { } pub fn chdir(p: &path::Path) -> io::Result<()> { - let result = run_path_with_cstr(p, |p| unsafe { Ok(libc::chdir(p.as_ptr())) })?; + let result = run_path_with_cstr(p, &|p| unsafe { Ok(libc::chdir(p.as_ptr())) })?; match result == (0 as libc::c_int) { true => Ok(()), false => Err(io::Error::last_os_error()), @@ -227,7 +227,7 @@ pub fn env() -> Env { pub fn getenv(k: &OsStr) -> Option { // environment variables with a nul byte can't be set, so their value is // always None as well - run_with_cstr(k.as_bytes(), |k| { + run_with_cstr(k.as_bytes(), &|k| { let _guard = env_read_lock(); let v = unsafe { libc::getenv(k.as_ptr()) } as *const libc::c_char; @@ -245,8 +245,8 @@ pub fn getenv(k: &OsStr) -> Option { } pub fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> { - run_with_cstr(k.as_bytes(), |k| { - run_with_cstr(v.as_bytes(), |v| unsafe { + run_with_cstr(k.as_bytes(), &|k| { + run_with_cstr(v.as_bytes(), &|v| unsafe { let _guard = env_write_lock(); cvt(libc::setenv(k.as_ptr(), v.as_ptr(), 1)).map(drop) }) @@ -254,7 +254,7 @@ pub fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> { } pub fn unsetenv(n: &OsStr) -> io::Result<()> { - run_with_cstr(n.as_bytes(), |nbuf| unsafe { + run_with_cstr(n.as_bytes(), &|nbuf| unsafe { let _guard = env_write_lock(); cvt(libc::unsetenv(nbuf.as_ptr())).map(drop) }) diff --git a/library/std/src/sys_common/net.rs b/library/std/src/sys_common/net.rs index 8712bd2eca76..de7d31baaaf8 100644 --- a/library/std/src/sys_common/net.rs +++ b/library/std/src/sys_common/net.rs @@ -199,7 +199,7 @@ impl<'a> 
TryFrom<(&'a str, u16)> for LookupHost { fn try_from((host, port): (&'a str, u16)) -> io::Result { init(); - run_with_cstr(host.as_bytes(), |c_host| { + run_with_cstr(host.as_bytes(), &|c_host| { let mut hints: c::addrinfo = unsafe { mem::zeroed() }; hints.ai_socktype = c::SOCK_STREAM; let mut res = ptr::null_mut(); diff --git a/src/tools/miri/tests/fail/shims/fs/isolated_file.stderr b/src/tools/miri/tests/fail/shims/fs/isolated_file.stderr index 4596367488bc..1f08649428a4 100644 --- a/src/tools/miri/tests/fail/shims/fs/isolated_file.stderr +++ b/src/tools/miri/tests/fail/shims/fs/isolated_file.stderr @@ -12,8 +12,8 @@ LL | let fd = cvt_r(|| unsafe { open64(path.as_ptr(), flags, opts.mode a = note: inside `std::sys::pal::PLATFORM::fs::File::open_c` at RUSTLIB/std/src/sys/pal/PLATFORM/fs.rs:LL:CC = note: inside closure at RUSTLIB/std/src/sys/pal/PLATFORM/fs.rs:LL:CC = note: inside `std::sys::pal::PLATFORM::small_c_string::run_with_cstr_stack::` at RUSTLIB/std/src/sys/pal/PLATFORM/small_c_string.rs:LL:CC - = note: inside `std::sys::pal::PLATFORM::small_c_string::run_with_cstr::` at RUSTLIB/std/src/sys/pal/PLATFORM/small_c_string.rs:LL:CC - = note: inside `std::sys::pal::PLATFORM::small_c_string::run_path_with_cstr::` at RUSTLIB/std/src/sys/pal/PLATFORM/small_c_string.rs:LL:CC + = note: inside `std::sys::pal::PLATFORM::small_c_string::run_with_cstr::` at RUSTLIB/std/src/sys/pal/PLATFORM/small_c_string.rs:LL:CC + = note: inside `std::sys::pal::PLATFORM::small_c_string::run_path_with_cstr::` at RUSTLIB/std/src/sys/pal/PLATFORM/small_c_string.rs:LL:CC = note: inside `std::sys::pal::PLATFORM::fs::File::open` at RUSTLIB/std/src/sys/pal/PLATFORM/fs.rs:LL:CC = note: inside `std::fs::OpenOptions::_open` at RUSTLIB/std/src/fs.rs:LL:CC = note: inside `std::fs::OpenOptions::open::<&std::path::Path>` at RUSTLIB/std/src/fs.rs:LL:CC From 6b6da93e51851d098574b5e6f39b6d187e78f34a Mon Sep 17 00:00:00 2001 From: The Miri Conjob Bot Date: Mon, 19 Feb 2024 05:05:10 +0000 Subject: [PATCH 
127/127] Preparing for merge from rustc --- src/tools/miri/rust-version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/miri/rust-version b/src/tools/miri/rust-version index 921a55d48c6c..715917e0f2fb 100644 --- a/src/tools/miri/rust-version +++ b/src/tools/miri/rust-version @@ -1 +1 @@ -1f8e824f111c972c9df8dbb378d87c33f67bbad4 +d5735645753e990a72446094f703df9b5e421555