Auto merge of #152035 - Zalathar:rollup-Ur7QmrJ, r=Zalathar

Rollup of 7 pull requests

Successful merges:

 - rust-lang/rust#152008 (`rust-analyzer` subtree update)
 - rust-lang/rust#151109 (fN::BITS constants for feature float_bits_const)
 - rust-lang/rust#151976 (Rename `collect_active_jobs` to several distinct names)
 - rust-lang/rust#151691 (compiletest: Don't assume `aux-crate` becomes a `*.so` with `no-prefer-dynamic`)
 - rust-lang/rust#151919 (fix: Make `--color always` always print color with `--explain`)
 - rust-lang/rust#152017 (Remove `with_no_trimmed_paths` use in query macro)
 - rust-lang/rust#152028 (Convert to inline diagnostics in `rustc_driver_impl`)
This commit is contained in:
bors 2026-02-03 10:59:42 +00:00
commit 79a1e77fe3
234 changed files with 7693 additions and 2221 deletions

View file

@ -3789,7 +3789,6 @@ dependencies = [
"rustc_errors",
"rustc_expand",
"rustc_feature",
"rustc_fluent_macro",
"rustc_hir_analysis",
"rustc_hir_pretty",
"rustc_hir_typeck",

View file

@ -21,7 +21,6 @@ rustc_data_structures = { path = "../rustc_data_structures" }
rustc_errors = { path = "../rustc_errors" }
rustc_expand = { path = "../rustc_expand" }
rustc_feature = { path = "../rustc_feature" }
rustc_fluent_macro = { path = "../rustc_fluent_macro" }
rustc_hir_analysis = { path = "../rustc_hir_analysis" }
rustc_hir_pretty = { path = "../rustc_hir_pretty" }
rustc_hir_typeck = { path = "../rustc_hir_typeck" }

View file

@ -1,29 +0,0 @@
driver_impl_cant_emit_mir = could not emit MIR: {$error}
driver_impl_ice = the compiler unexpectedly panicked. this is a bug.
driver_impl_ice_bug_report = we would appreciate a bug report: {$bug_report_url}
driver_impl_ice_bug_report_internal_feature = using internal features is not supported and expected to cause internal compiler errors when used incorrectly
driver_impl_ice_bug_report_update_note = please make sure that you have updated to the latest nightly
driver_impl_ice_exclude_cargo_defaults = some of the compiler flags provided by cargo are hidden
driver_impl_ice_flags = compiler flags: {$flags}
driver_impl_ice_path = please attach the file at `{$path}` to your bug report
driver_impl_ice_path_error = the ICE couldn't be written to `{$path}`: {$error}
driver_impl_ice_path_error_env = the environment variable `RUSTC_ICE` is set to `{$env_var}`
driver_impl_ice_version = rustc {$version} running on {$triple}
driver_impl_rlink_corrupt_file = corrupt metadata encountered in `{$file}`
driver_impl_rlink_empty_version_number = the input does not contain version number
driver_impl_rlink_encoding_version_mismatch = .rlink file was produced with encoding version `{$version_array}`, but the current version is `{$rlink_version}`
driver_impl_rlink_no_a_file = rlink must be a file
driver_impl_rlink_rustc_version_mismatch = .rlink file was produced by rustc version `{$rustc_version}`, but the current version is `{$current_version}`
driver_impl_rlink_unable_to_read = failed to read rlink file: `{$err}`
driver_impl_rlink_wrong_file_type = the input does not look like a .rlink file
driver_impl_unstable_feature_usage = cannot dump feature usage metrics: {$error}

View file

@ -108,15 +108,12 @@ use crate::session_diagnostics::{
RLinkWrongFileType, RlinkCorruptFile, RlinkNotAFile, RlinkUnableToRead, UnstableFeatureUsage,
};
rustc_fluent_macro::fluent_messages! { "../messages.ftl" }
pub fn default_translator() -> Translator {
Translator::with_fallback_bundle(DEFAULT_LOCALE_RESOURCES.to_vec(), false)
}
pub static DEFAULT_LOCALE_RESOURCES: &[&str] = &[
// tidy-alphabetical-start
crate::DEFAULT_LOCALE_RESOURCE,
rustc_ast_lowering::DEFAULT_LOCALE_RESOURCE,
rustc_ast_passes::DEFAULT_LOCALE_RESOURCE,
rustc_attr_parsing::DEFAULT_LOCALE_RESOURCE,
@ -491,10 +488,18 @@ fn handle_explain(early_dcx: &EarlyDiagCtxt, registry: Registry, code: &str, col
}
text.push('\n');
}
// If output is a terminal, use a pager to display the content.
if io::stdout().is_terminal() {
show_md_content_with_pager(&text, color);
} else {
safe_print!("{text}");
// Otherwise, if the user has requested colored output
// print the content in color, else print the md content.
if color == ColorConfig::Always {
show_colored_md_content(&text);
} else {
safe_print!("{text}");
}
}
} else {
early_dcx.early_fatal(format!("{code} is not a valid error code"));
@ -564,6 +569,33 @@ fn show_md_content_with_pager(content: &str, color: ColorConfig) {
safe_print!("{content}");
}
/// Prints the markdown content with colored output.
///
/// This function is used when the output is not a terminal,
/// but the user has requested colored output with `--color=always`.
fn show_colored_md_content(content: &str) {
// Try to prettify the raw markdown text.
let mut pretty_data = {
let mdstream = markdown::MdStream::parse_str(content);
let bufwtr = markdown::create_stdout_bufwtr();
let mut mdbuf = Vec::new();
if mdstream.write_anstream_buf(&mut mdbuf, Some(&highlighter::highlight)).is_ok() {
Some((bufwtr, mdbuf))
} else {
None
}
};
if let Some((bufwtr, mdbuf)) = &mut pretty_data
&& bufwtr.write_all(&mdbuf).is_ok()
{
return;
}
// Everything failed. Print the raw markdown text.
safe_print!("{content}");
}
fn process_rlink(sess: &Session, compiler: &interface::Compiler) {
assert!(sess.opts.unstable_opts.link_only);
let dcx = sess.dcx();

View file

@ -3,82 +3,88 @@ use std::error::Error;
use rustc_macros::{Diagnostic, Subdiagnostic};
#[derive(Diagnostic)]
#[diag(driver_impl_cant_emit_mir)]
#[diag("could not emit MIR: {$error}")]
pub struct CantEmitMIR {
pub error: std::io::Error,
}
#[derive(Diagnostic)]
#[diag(driver_impl_rlink_unable_to_read)]
#[diag("failed to read rlink file: `{$err}`")]
pub(crate) struct RlinkUnableToRead {
pub err: std::io::Error,
}
#[derive(Diagnostic)]
#[diag(driver_impl_rlink_wrong_file_type)]
#[diag("the input does not look like a .rlink file")]
pub(crate) struct RLinkWrongFileType;
#[derive(Diagnostic)]
#[diag(driver_impl_rlink_empty_version_number)]
#[diag("the input does not contain version number")]
pub(crate) struct RLinkEmptyVersionNumber;
#[derive(Diagnostic)]
#[diag(driver_impl_rlink_encoding_version_mismatch)]
#[diag(
".rlink file was produced with encoding version `{$version_array}`, but the current version is `{$rlink_version}`"
)]
pub(crate) struct RLinkEncodingVersionMismatch {
pub version_array: String,
pub rlink_version: u32,
}
#[derive(Diagnostic)]
#[diag(driver_impl_rlink_rustc_version_mismatch)]
#[diag(
".rlink file was produced by rustc version `{$rustc_version}`, but the current version is `{$current_version}`"
)]
pub(crate) struct RLinkRustcVersionMismatch<'a> {
pub rustc_version: String,
pub current_version: &'a str,
}
#[derive(Diagnostic)]
#[diag(driver_impl_rlink_no_a_file)]
#[diag("rlink must be a file")]
pub(crate) struct RlinkNotAFile;
#[derive(Diagnostic)]
#[diag(driver_impl_rlink_corrupt_file)]
#[diag("corrupt metadata encountered in `{$file}`")]
pub(crate) struct RlinkCorruptFile<'a> {
pub file: &'a std::path::Path,
}
#[derive(Diagnostic)]
#[diag(driver_impl_ice)]
#[diag("the compiler unexpectedly panicked. this is a bug.")]
pub(crate) struct Ice;
#[derive(Diagnostic)]
#[diag(driver_impl_ice_bug_report)]
#[diag("we would appreciate a bug report: {$bug_report_url}")]
pub(crate) struct IceBugReport<'a> {
pub bug_report_url: &'a str,
}
#[derive(Diagnostic)]
#[diag(driver_impl_ice_bug_report_update_note)]
#[diag("please make sure that you have updated to the latest nightly")]
pub(crate) struct UpdateNightlyNote;
#[derive(Diagnostic)]
#[diag(driver_impl_ice_bug_report_internal_feature)]
#[diag(
"using internal features is not supported and expected to cause internal compiler errors when used incorrectly"
)]
pub(crate) struct IceBugReportInternalFeature;
#[derive(Diagnostic)]
#[diag(driver_impl_ice_version)]
#[diag("rustc {$version} running on {$triple}")]
pub(crate) struct IceVersion<'a> {
pub version: &'a str,
pub triple: &'a str,
}
#[derive(Diagnostic)]
#[diag(driver_impl_ice_path)]
#[diag("please attach the file at `{$path}` to your bug report")]
pub(crate) struct IcePath {
pub path: std::path::PathBuf,
}
#[derive(Diagnostic)]
#[diag(driver_impl_ice_path_error)]
#[diag("the ICE couldn't be written to `{$path}`: {$error}")]
pub(crate) struct IcePathError {
pub path: std::path::PathBuf,
pub error: String,
@ -87,23 +93,23 @@ pub(crate) struct IcePathError {
}
#[derive(Subdiagnostic)]
#[note(driver_impl_ice_path_error_env)]
#[note("the environment variable `RUSTC_ICE` is set to `{$env_var}`")]
pub(crate) struct IcePathErrorEnv {
pub env_var: std::path::PathBuf,
}
#[derive(Diagnostic)]
#[diag(driver_impl_ice_flags)]
#[diag("compiler flags: {$flags}")]
pub(crate) struct IceFlags {
pub flags: String,
}
#[derive(Diagnostic)]
#[diag(driver_impl_ice_exclude_cargo_defaults)]
#[diag("some of the compiler flags provided by cargo are hidden")]
pub(crate) struct IceExcludeCargoDefaults;
#[derive(Diagnostic)]
#[diag(driver_impl_unstable_feature_usage)]
#[diag("cannot dump feature usage metrics: {$error}")]
pub(crate) struct UnstableFeatureUsage {
pub error: Box<dyn Error>,
}

View file

@ -254,7 +254,7 @@ internal compiler error: query cycle handler thread panicked, aborting process";
|| {
// Ensure there were no errors collecting all active jobs.
// We need the complete map to ensure we find a cycle to break.
QueryCtxt::new(tcx).collect_active_jobs(false).expect(
QueryCtxt::new(tcx).collect_active_jobs_from_all_queries(false).expect(
"failed to collect active queries in deadlock handler",
)
},

View file

@ -303,9 +303,7 @@ fn add_query_desc_cached_impl(
#[allow(unused_variables)]
pub fn #name<'tcx>(tcx: TyCtxt<'tcx>, key: crate::query::queries::#name::Key<'tcx>) -> String {
let (#tcx, #key) = (tcx, key);
::rustc_middle::ty::print::with_no_trimmed_paths!(
format!(#desc)
)
format!(#desc)
}
};

View file

@ -50,7 +50,9 @@ impl<'tcx> QueryCtxt<'tcx> {
}
fn depth_limit_error(self, job: QueryJobId) {
let query_map = self.collect_active_jobs(true).expect("failed to collect active queries");
let query_map = self
.collect_active_jobs_from_all_queries(true)
.expect("failed to collect active queries");
let (info, depth) = job.find_dep_kind_root(query_map);
let suggested_limit = match self.tcx.recursion_limit() {
@ -98,7 +100,7 @@ impl<'tcx> QueryContext<'tcx> for QueryCtxt<'tcx> {
tls::with_related_context(self.tcx, |icx| icx.query)
}
/// Returns a map of currently active query jobs.
/// Returns a map of currently active query jobs, collected from all queries.
///
/// If `require_complete` is `true`, this function locks all shards of the
/// query results to produce a complete map, which always returns `Ok`.
@ -108,12 +110,15 @@ impl<'tcx> QueryContext<'tcx> for QueryCtxt<'tcx> {
/// Prefer passing `false` to `require_complete` to avoid potential deadlocks,
/// especially when called from within a deadlock handler, unless a
/// complete map is needed and no deadlock is possible at this call site.
fn collect_active_jobs(self, require_complete: bool) -> Result<QueryMap<'tcx>, QueryMap<'tcx>> {
fn collect_active_jobs_from_all_queries(
self,
require_complete: bool,
) -> Result<QueryMap<'tcx>, QueryMap<'tcx>> {
let mut jobs = QueryMap::default();
let mut complete = true;
for collect in super::COLLECT_ACTIVE_JOBS.iter() {
if collect(self.tcx, &mut jobs, require_complete).is_none() {
for gather_fn in crate::PER_QUERY_GATHER_ACTIVE_JOBS_FNS.iter() {
if gather_fn(self.tcx, &mut jobs, require_complete).is_none() {
complete = false;
}
}
@ -731,7 +736,10 @@ macro_rules! define_queries {
}
}
pub(crate) fn collect_active_jobs<'tcx>(
/// Internal per-query plumbing for collecting the set of active jobs for this query.
///
/// Should only be called through `PER_QUERY_GATHER_ACTIVE_JOBS_FNS`.
pub(crate) fn gather_active_jobs<'tcx>(
tcx: TyCtxt<'tcx>,
qmap: &mut QueryMap<'tcx>,
require_complete: bool,
@ -741,12 +749,15 @@ macro_rules! define_queries {
let name = stringify!($name);
$crate::plumbing::create_query_frame(tcx, rustc_middle::query::descs::$name, key, kind, name)
};
let res = tcx.query_system.states.$name.collect_active_jobs(
// Call `gather_active_jobs_inner` to do the actual work.
let res = tcx.query_system.states.$name.gather_active_jobs_inner(
tcx,
make_frame,
qmap,
require_complete,
);
// this can be called during unwinding, and the function has a `try_`-prefix, so
// don't `unwrap()` here, just manually check for `None` and do best-effort error
// reporting.
@ -816,10 +827,17 @@ macro_rules! define_queries {
// These arrays are used for iteration and can't be indexed by `DepKind`.
const COLLECT_ACTIVE_JOBS: &[
for<'tcx> fn(TyCtxt<'tcx>, &mut QueryMap<'tcx>, bool) -> Option<()>
] =
&[$(query_impl::$name::collect_active_jobs),*];
/// Used by `collect_active_jobs_from_all_queries` to iterate over all
/// queries, and gather the active jobs for each query.
///
/// (We arbitrarily use the word "gather" when collecting the jobs for
/// each individual query, so that we have distinct function names to
/// grep for.)
const PER_QUERY_GATHER_ACTIVE_JOBS_FNS: &[
for<'tcx> fn(TyCtxt<'tcx>, &mut QueryMap<'tcx>, require_complete: bool) -> Option<()>
] = &[
$(query_impl::$name::gather_active_jobs),*
];
const ALLOC_SELF_PROFILE_QUERY_STRINGS: &[
for<'tcx> fn(TyCtxt<'tcx>, &mut QueryKeyStringCache)

View file

@ -32,6 +32,8 @@ impl<'tcx> QueryInfo<QueryStackDeferred<'tcx>> {
}
}
/// Map from query job IDs to job information collected by
/// [`QueryContext::collect_active_jobs_from_all_queries`].
pub type QueryMap<'tcx> = FxHashMap<QueryJobId, QueryJobInfo<'tcx>>;
/// A value uniquely identifying an active query job.
@ -613,7 +615,7 @@ pub fn print_query_stack<'tcx, Qcx: QueryContext<'tcx>>(
let mut count_total = 0;
// Make use of a partial query map if we fail to take locks collecting active queries.
let query_map = match qcx.collect_active_jobs(false) {
let query_map = match qcx.collect_active_jobs_from_all_queries(false) {
Ok(query_map) => query_map,
Err(query_map) => query_map,
};

View file

@ -166,7 +166,10 @@ pub trait QueryContext<'tcx>: HasDepContext {
/// Get the query information from the TLS context.
fn current_query_job(self) -> Option<QueryJobId>;
fn collect_active_jobs(self, require_complete: bool) -> Result<QueryMap<'tcx>, QueryMap<'tcx>>;
fn collect_active_jobs_from_all_queries(
self,
require_complete: bool,
) -> Result<QueryMap<'tcx>, QueryMap<'tcx>>;
/// Load a side effect associated to the node in the previous session.
fn load_side_effect(

View file

@ -11,7 +11,6 @@ use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::hash_table::{self, Entry, HashTable};
use rustc_data_structures::sharded::{self, Sharded};
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_data_structures::sync::LockGuard;
use rustc_data_structures::{outline, sync};
use rustc_errors::{Diag, FatalError, StashKey};
use rustc_span::{DUMMY_SP, Span};
@ -79,7 +78,10 @@ where
self.active.lock_shards().all(|shard| shard.is_empty())
}
pub fn collect_active_jobs<Qcx: Copy>(
/// Internal plumbing for collecting the set of active jobs for this query.
///
/// Should only be called from `gather_active_jobs`.
pub fn gather_active_jobs_inner<Qcx: Copy>(
&self,
qcx: Qcx,
make_frame: fn(Qcx, K) -> QueryStackFrame<QueryStackDeferred<'tcx>>,
@ -88,23 +90,26 @@ where
) -> Option<()> {
let mut active = Vec::new();
let mut collect = |iter: LockGuard<'_, HashTable<(K, ActiveKeyStatus<'tcx>)>>| {
for (k, v) in iter.iter() {
// Helper to gather active jobs from a single shard.
let mut gather_shard_jobs = |shard: &HashTable<(K, ActiveKeyStatus<'tcx>)>| {
for (k, v) in shard.iter() {
if let ActiveKeyStatus::Started(ref job) = *v {
active.push((*k, job.clone()));
}
}
};
// Lock shards and gather jobs from each shard.
if require_complete {
for shard in self.active.lock_shards() {
collect(shard);
gather_shard_jobs(&shard);
}
} else {
// We use try_lock_shards here since we may be called from within the
// deadlock handler, where blocking on these locks could itself deadlock.
for shard in self.active.try_lock_shards() {
collect(shard?);
let shard = shard?;
gather_shard_jobs(&shard);
}
}
@ -294,7 +299,10 @@ where
{
// Ensure there were no errors collecting all active jobs.
// We need the complete map to ensure we find a cycle to break.
let query_map = qcx.collect_active_jobs(false).ok().expect("failed to collect active queries");
let query_map = qcx
.collect_active_jobs_from_all_queries(false)
.ok()
.expect("failed to collect active queries");
let error = try_execute.find_cycle_in_stack(query_map, &qcx.current_query_job(), span);
(mk_cycle(query, qcx, error.lift()), None)

View file

@ -11,6 +11,7 @@
#![feature(repr_simd)]
#![feature(macro_metavar_expr_concat)]
#![feature(rustc_attrs)]
#![feature(float_bits_const)]
#![cfg_attr(f16_enabled, feature(f16))]
#![cfg_attr(f128_enabled, feature(f128))]
#![no_builtins]

View file

@ -154,6 +154,11 @@ impl f128 {
#[unstable(feature = "f128", issue = "116909")]
pub const RADIX: u32 = 2;
/// The size of this float type in bits.
// #[unstable(feature = "f128", issue = "116909")]
#[unstable(feature = "float_bits_const", issue = "151073")]
pub const BITS: u32 = 128;
/// Number of significant digits in base 2.
///
/// Note that the size of the mantissa in the bitwise representation is one

View file

@ -148,6 +148,11 @@ impl f16 {
#[unstable(feature = "f16", issue = "116909")]
pub const RADIX: u32 = 2;
/// The size of this float type in bits.
// #[unstable(feature = "f16", issue = "116909")]
#[unstable(feature = "float_bits_const", issue = "151073")]
pub const BITS: u32 = 16;
/// Number of significant digits in base 2.
///
/// Note that the size of the mantissa in the bitwise representation is one

View file

@ -398,6 +398,10 @@ impl f32 {
#[stable(feature = "assoc_int_consts", since = "1.43.0")]
pub const RADIX: u32 = 2;
/// The size of this float type in bits.
#[unstable(feature = "float_bits_const", issue = "151073")]
pub const BITS: u32 = 32;
/// Number of significant digits in base 2.
///
/// Note that the size of the mantissa in the bitwise representation is one

View file

@ -398,6 +398,10 @@ impl f64 {
#[stable(feature = "assoc_int_consts", since = "1.43.0")]
pub const RADIX: u32 = 2;
/// The size of this float type in bits.
#[unstable(feature = "float_bits_const", issue = "151073")]
pub const BITS: u32 = 64;
/// Number of significant digits in base 2.
///
/// Note that the size of the mantissa in the bitwise representation is one

View file

@ -513,6 +513,7 @@ impl Step for RustAnalyzer {
// This builds a proc macro against the bootstrap libproc_macro, which is not ABI
// compatible with the ABI proc-macro-srv expects to load.
cargo.arg("--exclude=proc-macro-srv");
cargo.arg("--exclude=proc-macro-srv-cli");
}
let mut skip_tests = vec![];

View file

@ -1438,7 +1438,7 @@ impl<'test> TestCx<'test> {
} else if aux_type.is_some() {
panic!("aux_type {aux_type:?} not expected");
} else if aux_props.no_prefer_dynamic {
(AuxType::Dylib, None)
(AuxType::Lib, None)
} else if self.config.target.contains("emscripten")
|| (self.config.target.contains("musl")
&& !aux_props.force_host

View file

@ -353,7 +353,7 @@ macro_rules! test_ftoi_itof {
assert_itof(i, f, msg);
}
let fbits = <$fty>::BITS;
let fbits = <$fty as Float>::BITS;
let fsig_bits = <$fty>::SIGNIFICAND_BITS;
let ibits = <$ity>::BITS;
let imax: $ity = <$ity>::MAX;
@ -528,9 +528,9 @@ macro_rules! test_ftof {
assert!((<$f1>::NAN as $f2).is_nan(), "{} -> {} nan", stringify!($f1), stringify!($f2));
let min_sub_casted = <$f1>::from_bits(0x1) as $f2;
let min_neg_sub_casted = <$f1>::from_bits(0x1 | 1 << (<$f1>::BITS - 1)) as $f2;
let min_neg_sub_casted = <$f1>::from_bits(0x1 | 1 << (<$f1 as Float>::BITS - 1)) as $f2;
if <$f1>::BITS > <$f2>::BITS {
if <$f1 as Float>::BITS > <$f2 as Float>::BITS {
assert_feq(<$f1>::MAX as $f2, <$f2>::INFINITY, "max -> inf");
assert_feq(<$f1>::MIN as $f2, <$f2>::NEG_INFINITY, "max -> inf");
assert_biteq(min_sub_casted, f2zero, "min subnormal -> 0.0");

View file

@ -24,7 +24,7 @@ jobs:
run: rustup update --no-self-update stable
- name: Build Documentation
run: cargo doc --all --no-deps
run: cargo doc --all --no-deps --document-private-items
- name: Deploy Docs
uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0

View file

@ -178,9 +178,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]]
name = "camino"
version = "1.2.0"
version = "1.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1de8bc0aa9e9385ceb3bf0c152e3a9b9544f6c4a912c8ae504e80c1f0368603"
checksum = "e629a66d692cb9ff1a1c664e41771b3dcaf961985a9774c0eb0bd1b51cf60a48"
dependencies = [
"serde_core",
]
@ -1864,6 +1864,7 @@ dependencies = [
"intern",
"libc",
"libloading",
"line-index 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"memmap2",
"object",
"paths",
@ -1878,9 +1879,14 @@ name = "proc-macro-srv-cli"
version = "0.0.0"
dependencies = [
"clap",
"postcard",
"expect-test",
"intern",
"paths",
"proc-macro-api",
"proc-macro-srv",
"proc-macro-test",
"span",
"tt",
]
[[package]]
@ -2628,7 +2634,7 @@ dependencies = [
[[package]]
name = "smol_str"
version = "0.3.4"
version = "0.3.5"
dependencies = [
"arbitrary",
"borsh",

View file

@ -42,7 +42,7 @@ debug = 2
# lsp-server = { path = "lib/lsp-server" }
# ungrammar = { path = "lin/ungrammar" }
# ungrammar = { path = "lib/ungrammar" }
# salsa = { path = "../salsa" }
# salsa-macros = { path = "../salsa/components/salsa-macros" }
@ -107,7 +107,7 @@ anyhow = "1.0.98"
arrayvec = "0.7.6"
bitflags = "2.9.1"
cargo_metadata = "0.23.0"
camino = "1.1.10"
camino = "1.2.2"
crossbeam-channel = "0.5.15"
dissimilar = "1.0.10"
dot = "0.1.4"

View file

@ -221,6 +221,7 @@ pub enum LangCrateOrigin {
ProcMacro,
Std,
Test,
Dependency,
Other,
}
@ -245,7 +246,7 @@ impl fmt::Display for LangCrateOrigin {
LangCrateOrigin::ProcMacro => "proc_macro",
LangCrateOrigin::Std => "std",
LangCrateOrigin::Test => "test",
LangCrateOrigin::Other => "other",
LangCrateOrigin::Other | LangCrateOrigin::Dependency => "other",
};
f.write_str(text)
}

View file

@ -135,6 +135,7 @@ fn match_attr_flags(attr_flags: &mut AttrFlags, attr: Meta) -> ControlFlow<Infal
match attr {
Meta::NamedKeyValue { name: Some(name), value, .. } => match name.text() {
"deprecated" => attr_flags.insert(AttrFlags::IS_DEPRECATED),
"ignore" => attr_flags.insert(AttrFlags::IS_IGNORE),
"lang" => attr_flags.insert(AttrFlags::LANG_ITEM),
"path" => attr_flags.insert(AttrFlags::HAS_PATH),
"unstable" => attr_flags.insert(AttrFlags::IS_UNSTABLE),

View file

@ -8,7 +8,8 @@ use intern::{Symbol, sym};
use tt::TextRange;
use crate::{
AdtId, BuiltinDeriveImplId, BuiltinDeriveImplLoc, FunctionId, HasModule, db::DefDatabase,
AdtId, BuiltinDeriveImplId, BuiltinDeriveImplLoc, FunctionId, HasModule, MacroId,
db::DefDatabase, lang_item::LangItems,
};
macro_rules! declare_enum {
@ -86,6 +87,25 @@ declare_enum!(
DispatchFromDyn => [],
);
impl BuiltinDeriveImplTrait {
pub fn derive_macro(self, lang_items: &LangItems) -> Option<MacroId> {
match self {
BuiltinDeriveImplTrait::Copy => lang_items.CopyDerive,
BuiltinDeriveImplTrait::Clone => lang_items.CloneDerive,
BuiltinDeriveImplTrait::Default => lang_items.DefaultDerive,
BuiltinDeriveImplTrait::Debug => lang_items.DebugDerive,
BuiltinDeriveImplTrait::Hash => lang_items.HashDerive,
BuiltinDeriveImplTrait::Ord => lang_items.OrdDerive,
BuiltinDeriveImplTrait::PartialOrd => lang_items.PartialOrdDerive,
BuiltinDeriveImplTrait::Eq => lang_items.EqDerive,
BuiltinDeriveImplTrait::PartialEq => lang_items.PartialEqDerive,
BuiltinDeriveImplTrait::CoerceUnsized | BuiltinDeriveImplTrait::DispatchFromDyn => {
lang_items.CoercePointeeDerive
}
}
}
}
impl BuiltinDeriveImplMethod {
pub fn trait_method(
self,

View file

@ -27,14 +27,15 @@
pub mod keys {
use std::marker::PhantomData;
use either::Either;
use hir_expand::{MacroCallId, attrs::AttrId};
use rustc_hash::FxHashMap;
use syntax::{AstNode, AstPtr, ast};
use crate::{
BlockId, ConstId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, FieldId, FunctionId,
ImplId, LifetimeParamId, Macro2Id, MacroRulesId, ProcMacroId, StaticId, StructId, TraitId,
TypeAliasId, TypeOrConstParamId, UnionId, UseId,
BlockId, BuiltinDeriveImplId, ConstId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId,
FieldId, FunctionId, ImplId, LifetimeParamId, Macro2Id, MacroRulesId, ProcMacroId,
StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId, UnionId, UseId,
dyn_map::{DynMap, Policy},
};
@ -71,7 +72,8 @@ pub mod keys {
(
AttrId,
/* derive() */ MacroCallId,
/* actual derive macros */ Box<[Option<MacroCallId>]>,
/* actual derive macros */
Box<[Option<Either<MacroCallId, BuiltinDeriveImplId>>]>,
),
> = Key::new();

View file

@ -32,7 +32,7 @@ use crate::{
expr_store::path::Path,
hir::{
Array, AsmOperand, Binding, BindingId, Expr, ExprId, ExprOrPatId, Label, LabelId, Pat,
PatId, RecordFieldPat, Statement,
PatId, RecordFieldPat, RecordSpread, Statement,
},
nameres::{DefMap, block_def_map},
type_ref::{LifetimeRef, LifetimeRefId, PathId, TypeRef, TypeRefId},
@ -474,8 +474,8 @@ impl ExpressionStore {
match expr_only.binding_owners.get(&binding) {
Some(it) => {
// We assign expression ids in a way that outer closures will receive
// a lower id
it.into_raw() < relative_to.into_raw()
// a higher id (allocated after their body is collected)
it.into_raw() > relative_to.into_raw()
}
None => true,
}
@ -575,8 +575,8 @@ impl ExpressionStore {
for field in fields.iter() {
f(field.expr);
}
if let &Some(expr) = spread {
f(expr);
if let RecordSpread::Expr(expr) = spread {
f(*expr);
}
}
Expr::Closure { body, .. } => {
@ -706,8 +706,8 @@ impl ExpressionStore {
for field in fields.iter() {
f(field.expr);
}
if let &Some(expr) = spread {
f(expr);
if let RecordSpread::Expr(expr) = spread {
f(*expr);
}
}
Expr::Closure { body, .. } => {

View file

@ -47,7 +47,7 @@ use crate::{
hir::{
Array, Binding, BindingAnnotation, BindingId, BindingProblems, CaptureBy, ClosureKind,
Expr, ExprId, Item, Label, LabelId, Literal, MatchArm, Movability, OffsetOf, Pat, PatId,
RecordFieldPat, RecordLitField, Statement, generics::GenericParams,
RecordFieldPat, RecordLitField, RecordSpread, Statement, generics::GenericParams,
},
item_scope::BuiltinShadowMode,
item_tree::FieldsShape,
@ -150,6 +150,7 @@ pub(super) fn lower_body(
};
let body_expr = collector.collect(
&mut params,
body,
if is_async_fn {
Awaitable::Yes
@ -903,24 +904,57 @@ impl<'db> ExprCollector<'db> {
})
}
fn collect(&mut self, expr: Option<ast::Expr>, awaitable: Awaitable) -> ExprId {
/// An `async fn` needs to capture all parameters in the generated `async` block, even if they have
/// non-captured patterns such as wildcards (to ensure consistent drop order).
fn lower_async_fn(&mut self, params: &mut Vec<PatId>, body: ExprId) -> ExprId {
let mut statements = Vec::new();
for param in params {
let name = match self.store.pats[*param] {
Pat::Bind { id, .. }
if matches!(
self.store.bindings[id].mode,
BindingAnnotation::Unannotated | BindingAnnotation::Mutable
) =>
{
// If this is a direct binding, we can leave it as-is, as it'll always be captured anyway.
continue;
}
Pat::Bind { id, .. } => {
// If this is a `ref` binding, we can't leave it as is but we can at least reuse the name, for better display.
self.store.bindings[id].name.clone()
}
_ => self.generate_new_name(),
};
let binding_id =
self.alloc_binding(name.clone(), BindingAnnotation::Mutable, HygieneId::ROOT);
let pat_id = self.alloc_pat_desugared(Pat::Bind { id: binding_id, subpat: None });
let expr = self.alloc_expr_desugared(Expr::Path(name.into()));
statements.push(Statement::Let {
pat: *param,
type_ref: None,
initializer: Some(expr),
else_branch: None,
});
*param = pat_id;
}
self.alloc_expr_desugared(Expr::Async {
id: None,
statements: statements.into_boxed_slice(),
tail: Some(body),
})
}
fn collect(
&mut self,
params: &mut Vec<PatId>,
expr: Option<ast::Expr>,
awaitable: Awaitable,
) -> ExprId {
self.awaitable_context.replace(awaitable);
self.with_label_rib(RibKind::Closure, |this| {
if awaitable == Awaitable::Yes {
match expr {
Some(e) => {
let syntax_ptr = AstPtr::new(&e);
let expr = this.collect_expr(e);
this.alloc_expr_desugared_with_ptr(
Expr::Async { id: None, statements: Box::new([]), tail: Some(expr) },
syntax_ptr,
)
}
None => this.missing_expr(),
}
} else {
this.collect_expr_opt(expr)
}
let body = this.collect_expr_opt(expr);
if awaitable == Awaitable::Yes { this.lower_async_fn(params, body) } else { body }
})
}
@ -1232,10 +1266,16 @@ impl<'db> ExprCollector<'db> {
Some(RecordLitField { name, expr })
})
.collect();
let spread = nfl.spread().map(|s| self.collect_expr(s));
let spread_expr = nfl.spread().map(|s| self.collect_expr(s));
let has_spread_syntax = nfl.dotdot_token().is_some();
let spread = match (spread_expr, has_spread_syntax) {
(None, false) => RecordSpread::None,
(None, true) => RecordSpread::FieldDefaults,
(Some(expr), _) => RecordSpread::Expr(expr),
};
Expr::RecordLit { path, fields, spread }
} else {
Expr::RecordLit { path, fields: Box::default(), spread: None }
Expr::RecordLit { path, fields: Box::default(), spread: RecordSpread::None }
};
self.alloc_expr(record_lit, syntax_ptr)
@ -1961,7 +2001,7 @@ impl<'db> ExprCollector<'db> {
}
}
fn collect_expr_opt(&mut self, expr: Option<ast::Expr>) -> ExprId {
pub fn collect_expr_opt(&mut self, expr: Option<ast::Expr>) -> ExprId {
match expr {
Some(expr) => self.collect_expr(expr),
None => self.missing_expr(),

View file

@ -10,7 +10,8 @@ use crate::{
builtin_type::BuiltinUint,
expr_store::{HygieneId, lower::ExprCollector, path::Path},
hir::{
Array, BindingAnnotation, Expr, ExprId, Literal, Pat, RecordLitField, Statement,
Array, BindingAnnotation, Expr, ExprId, Literal, Pat, RecordLitField, RecordSpread,
Statement,
format_args::{
self, FormatAlignment, FormatArgs, FormatArgsPiece, FormatArgument, FormatArgumentKind,
FormatArgumentsCollector, FormatCount, FormatDebugHex, FormatOptions,
@ -869,7 +870,7 @@ impl<'db> ExprCollector<'db> {
self.alloc_expr_desugared(Expr::RecordLit {
path: self.lang_path(lang_items.FormatPlaceholder).map(Box::new),
fields: Box::new([position, flags, precision, width]),
spread: None,
spread: RecordSpread::None,
})
} else {
let format_placeholder_new =

View file

@ -16,7 +16,8 @@ use crate::{
attrs::AttrFlags,
expr_store::path::{GenericArg, GenericArgs},
hir::{
Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, Movability, Statement,
Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, Movability, RecordSpread,
Statement,
generics::{GenericParams, WherePredicate},
},
lang_item::LangItemTarget,
@ -139,7 +140,7 @@ pub fn print_variant_body_hir(db: &dyn DefDatabase, owner: VariantId, edition: E
}
for (_, data) in fields.fields().iter() {
let FieldData { name, type_ref, visibility, is_unsafe } = data;
let FieldData { name, type_ref, visibility, is_unsafe, default_value: _ } = data;
match visibility {
crate::item_tree::RawVisibility::Module(interned, _visibility_explicitness) => {
w!(p, "pub(in {})", interned.display(db, p.edition))
@ -679,10 +680,17 @@ impl Printer<'_> {
p.print_expr(field.expr);
wln!(p, ",");
}
if let Some(spread) = spread {
w!(p, "..");
p.print_expr(*spread);
wln!(p);
match spread {
RecordSpread::None => {}
RecordSpread::FieldDefaults => {
w!(p, "..");
wln!(p);
}
RecordSpread::Expr(spread_expr) => {
w!(p, "..");
p.print_expr(*spread_expr);
wln!(p);
}
}
});
w!(self, "}}");

View file

@ -659,3 +659,21 @@ fn main() {
}"#]]
.assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
}
#[test]
fn async_fn_weird_param_patterns() {
let (db, body, def) = lower(
r#"
async fn main(&self, param1: i32, ref mut param2: i32, _: i32, param4 @ _: i32, 123: i32) {}
"#,
);
expect![[r#"
fn main(self, param1, mut param2, mut <ra@gennew>0, param4 @ _, mut <ra@gennew>1) async {
let ref mut param2 = param2;
let _ = <ra@gennew>0;
let 123 = <ra@gennew>1;
{}
}"#]]
.assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
}

View file

@ -187,6 +187,13 @@ impl From<ast::LiteralKind> for Literal {
}
}
#[derive(Debug, Clone, Eq, PartialEq, Copy)]
pub enum RecordSpread {
None,
FieldDefaults,
Expr(ExprId),
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Expr {
/// This is produced if the syntax tree does not have a required expression piece.
@ -259,7 +266,7 @@ pub enum Expr {
RecordLit {
path: Option<Box<Path>>,
fields: Box<[RecordLitField]>,
spread: Option<ExprId>,
spread: RecordSpread,
},
Field {
expr: ExprId,

View file

@ -4,6 +4,7 @@
use std::{fmt, sync::LazyLock};
use base_db::Crate;
use either::Either;
use hir_expand::{AstId, MacroCallId, attrs::AttrId, name::Name};
use indexmap::map::Entry;
use itertools::Itertools;
@ -199,7 +200,7 @@ struct DeriveMacroInvocation {
attr_id: AttrId,
/// The `#[derive]` call
attr_call_id: MacroCallId,
derive_call_ids: SmallVec<[Option<MacroCallId>; 4]>,
derive_call_ids: SmallVec<[Option<Either<MacroCallId, BuiltinDeriveImplId>>; 4]>,
}
pub(crate) static BUILTIN_SCOPE: LazyLock<FxIndexMap<Name, PerNs>> = LazyLock::new(|| {
@ -345,7 +346,9 @@ impl ItemScope {
pub fn all_macro_calls(&self) -> impl Iterator<Item = MacroCallId> + '_ {
self.macro_invocations.values().copied().chain(self.attr_macros.values().copied()).chain(
self.derive_macros.values().flat_map(|it| {
it.iter().flat_map(|it| it.derive_call_ids.iter().copied().flatten())
it.iter().flat_map(|it| {
it.derive_call_ids.iter().copied().flatten().flat_map(|it| it.left())
})
}),
)
}
@ -379,6 +382,10 @@ impl ItemScope {
self.types.get(name).map(|item| (item.def, item.vis))
}
pub(crate) fn makro(&self, name: &Name) -> Option<MacroId> {
self.macros.get(name).map(|item| item.def)
}
/// XXX: this is O(N) rather than O(1), try to not introduce new usages.
pub(crate) fn name_of(&self, item: ItemInNs) -> Option<(&Name, Visibility, /*declared*/ bool)> {
match item {
@ -519,7 +526,7 @@ impl ItemScope {
pub(crate) fn set_derive_macro_invoc(
&mut self,
adt: AstId<ast::Adt>,
call: MacroCallId,
call: Either<MacroCallId, BuiltinDeriveImplId>,
id: AttrId,
idx: usize,
) {
@ -539,7 +546,7 @@ impl ItemScope {
adt: AstId<ast::Adt>,
attr_id: AttrId,
attr_call_id: MacroCallId,
mut derive_call_ids: SmallVec<[Option<MacroCallId>; 4]>,
mut derive_call_ids: SmallVec<[Option<Either<MacroCallId, BuiltinDeriveImplId>>; 4]>,
) {
derive_call_ids.shrink_to_fit();
self.derive_macros.entry(adt).or_default().push(DeriveMacroInvocation {
@ -554,7 +561,9 @@ impl ItemScope {
) -> impl Iterator<
Item = (
AstId<ast::Adt>,
impl Iterator<Item = (AttrId, MacroCallId, &[Option<MacroCallId>])>,
impl Iterator<
Item = (AttrId, MacroCallId, &[Option<Either<MacroCallId, BuiltinDeriveImplId>>]),
>,
),
> + '_ {
self.derive_macros.iter().map(|(k, v)| {

View file

@ -7,8 +7,8 @@ use intern::{Symbol, sym};
use stdx::impl_from;
use crate::{
AdtId, AssocItemId, AttrDefId, Crate, EnumId, EnumVariantId, FunctionId, ImplId, ModuleDefId,
StaticId, StructId, TraitId, TypeAliasId, UnionId,
AdtId, AssocItemId, AttrDefId, Crate, EnumId, EnumVariantId, FunctionId, ImplId, MacroId,
ModuleDefId, StaticId, StructId, TraitId, TypeAliasId, UnionId,
attrs::AttrFlags,
db::DefDatabase,
nameres::{DefMap, assoc::TraitItems, crate_def_map, crate_local_def_map},
@ -99,7 +99,7 @@ pub fn crate_lang_items(db: &dyn DefDatabase, krate: Crate) -> Option<Box<LangIt
}
if matches!(krate.data(db).origin, base_db::CrateOrigin::Lang(base_db::LangCrateOrigin::Core)) {
lang_items.fill_non_lang_core_traits(db, crate_def_map);
lang_items.fill_non_lang_core_items(db, crate_def_map);
}
if lang_items.is_empty() { None } else { Some(Box::new(lang_items)) }
@ -169,6 +169,27 @@ fn resolve_core_trait(
Some(trait_)
}
fn resolve_core_macro(
db: &dyn DefDatabase,
core_def_map: &DefMap,
modules: &[Symbol],
name: Symbol,
) -> Option<MacroId> {
let mut current = &core_def_map[core_def_map.root];
for module in modules {
let Some((ModuleDefId::ModuleId(cur), _)) =
current.scope.type_(&Name::new_symbol_root(module.clone()))
else {
return None;
};
if cur.krate(db) != core_def_map.krate() || cur.block(db) != core_def_map.block_id() {
return None;
}
current = &core_def_map[cur];
}
current.scope.makro(&Name::new_symbol_root(name))
}
#[salsa::tracked(returns(as_deref))]
pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: Crate) -> Option<Box<[TraitId]>> {
let mut traits = Vec::new();
@ -195,7 +216,11 @@ macro_rules! language_item_table {
@non_lang_core_traits:
$( core::$($non_lang_module:ident)::*, $non_lang_trait:ident; )*
$( core::$($non_lang_trait_module:ident)::*, $non_lang_trait:ident; )*
@non_lang_core_macros:
$( core::$($non_lang_macro_module:ident)::*, $non_lang_macro:ident, $non_lang_macro_field:ident; )*
) => {
#[allow(non_snake_case)] // FIXME: Should we remove this?
#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)]
@ -207,6 +232,9 @@ macro_rules! language_item_table {
$(
pub $non_lang_trait: Option<TraitId>,
)*
$(
pub $non_lang_macro_field: Option<MacroId>,
)*
}
impl LangItems {
@ -218,6 +246,7 @@ macro_rules! language_item_table {
fn merge_prefer_self(&mut self, other: &Self) {
$( self.$lang_item = self.$lang_item.or(other.$lang_item); )*
$( self.$non_lang_trait = self.$non_lang_trait.or(other.$non_lang_trait); )*
$( self.$non_lang_macro_field = self.$non_lang_macro_field.or(other.$non_lang_macro_field); )*
}
fn assign_lang_item(&mut self, name: Symbol, target: LangItemTarget) {
@ -233,8 +262,9 @@ macro_rules! language_item_table {
}
}
fn fill_non_lang_core_traits(&mut self, db: &dyn DefDatabase, core_def_map: &DefMap) {
$( self.$non_lang_trait = resolve_core_trait(db, core_def_map, &[ $(sym::$non_lang_module),* ], sym::$non_lang_trait); )*
fn fill_non_lang_core_items(&mut self, db: &dyn DefDatabase, core_def_map: &DefMap) {
$( self.$non_lang_trait = resolve_core_trait(db, core_def_map, &[ $(sym::$non_lang_trait_module),* ], sym::$non_lang_trait); )*
$( self.$non_lang_macro_field = resolve_core_macro(db, core_def_map, &[ $(sym::$non_lang_macro_module),* ], sym::$non_lang_macro); )*
}
}
@ -469,6 +499,11 @@ language_item_table! { LangItems =>
RangeToInclusive, sym::RangeToInclusive, StructId;
RangeTo, sym::RangeTo, StructId;
RangeFromCopy, sym::RangeFromCopy, StructId;
RangeInclusiveCopy, sym::RangeInclusiveCopy, StructId;
RangeCopy, sym::RangeCopy, StructId;
RangeToInclusiveCopy, sym::RangeToInclusiveCopy, StructId;
String, sym::String, StructId;
CStr, sym::CStr, StructId;
Ordering, sym::Ordering, EnumId;
@ -479,4 +514,16 @@ language_item_table! { LangItems =>
core::hash, Hash;
core::cmp, Ord;
core::cmp, Eq;
@non_lang_core_macros:
core::default, Default, DefaultDerive;
core::fmt, Debug, DebugDerive;
core::hash, Hash, HashDerive;
core::cmp, PartialOrd, PartialOrdDerive;
core::cmp, Ord, OrdDerive;
core::cmp, PartialEq, PartialEqDerive;
core::cmp, Eq, EqDerive;
core::marker, CoercePointee, CoercePointeeDerive;
core::marker, Copy, CopyDerive;
core::clone, Clone, CloneDerive;
}

View file

@ -237,3 +237,23 @@ fn test() {
"#]],
);
}
#[test]
fn meta_fat_arrow() {
check(
r#"
macro_rules! m {
( $m:meta => ) => {};
}
m! { foo => }
"#,
expect![[r#"
macro_rules! m {
( $m:meta => ) => {};
}
"#]],
);
}

View file

@ -61,6 +61,7 @@ mod tests;
use std::ops::{Deref, DerefMut, Index, IndexMut};
use base_db::Crate;
use either::Either;
use hir_expand::{
EditionedFileId, ErasedAstId, HirFileId, InFile, MacroCallId, mod_path::ModPath, name::Name,
proc_macro::ProcMacroKind,
@ -75,8 +76,8 @@ use triomphe::Arc;
use tt::TextRange;
use crate::{
AstId, BlockId, BlockLoc, ExternCrateId, FunctionId, FxIndexMap, Lookup, MacroCallStyles,
MacroExpander, MacroId, ModuleId, ModuleIdLt, ProcMacroId, UseId,
AstId, BlockId, BlockLoc, BuiltinDeriveImplId, ExternCrateId, FunctionId, FxIndexMap, Lookup,
MacroCallStyles, MacroExpander, MacroId, ModuleId, ModuleIdLt, ProcMacroId, UseId,
db::DefDatabase,
item_scope::{BuiltinShadowMode, ItemScope},
item_tree::TreeId,
@ -192,7 +193,8 @@ pub struct DefMap {
/// Tracks which custom derives are in scope for an item, to allow resolution of derive helper
/// attributes.
// FIXME: Figure out a better way for the IDE layer to resolve these?
derive_helpers_in_scope: FxHashMap<AstId<ast::Item>, Vec<(Name, MacroId, MacroCallId)>>,
derive_helpers_in_scope:
FxHashMap<AstId<ast::Item>, Vec<(Name, MacroId, Either<MacroCallId, BuiltinDeriveImplId>)>>,
/// A mapping from [`hir_expand::MacroDefId`] to [`crate::MacroId`].
pub macro_def_to_macro_id: FxHashMap<ErasedAstId, MacroId>,
@ -214,7 +216,7 @@ struct DefMapCrateData {
registered_tools: Vec<Symbol>,
/// Unstable features of Rust enabled with `#![feature(A, B)]`.
unstable_features: FxHashSet<Symbol>,
/// #[rustc_coherence_is_core]
/// `#[rustc_coherence_is_core]`
rustc_coherence_is_core: bool,
no_core: bool,
no_std: bool,
@ -540,7 +542,7 @@ impl DefMap {
pub fn derive_helpers_in_scope(
&self,
id: AstId<ast::Adt>,
) -> Option<&[(Name, MacroId, MacroCallId)]> {
) -> Option<&[(Name, MacroId, Either<MacroCallId, BuiltinDeriveImplId>)]> {
self.derive_helpers_in_scope.get(&id.map(|it| it.upcast())).map(Deref::deref)
}

View file

@ -3,7 +3,7 @@
//! `DefCollector::collect` contains the fixed-point iteration loop which
//! resolves imports and expands macros.
use std::{iter, mem};
use std::{iter, mem, ops::Range};
use base_db::{BuiltDependency, Crate, CrateOrigin, LangCrateOrigin};
use cfg::{CfgAtom, CfgExpr, CfgOptions};
@ -226,6 +226,7 @@ struct DeferredBuiltinDerive {
container: ItemContainerId,
derive_attr_id: AttrId,
derive_index: u32,
helpers_range: Range<usize>,
}
/// Walks the tree of module recursively
@ -1354,7 +1355,7 @@ impl<'db> DefCollector<'db> {
if let Ok((macro_id, def_id, call_id)) = id {
self.def_map.modules[directive.module_id].scope.set_derive_macro_invoc(
ast_id.ast_id,
call_id,
Either::Left(call_id),
*derive_attr,
*derive_pos,
);
@ -1369,7 +1370,7 @@ impl<'db> DefCollector<'db> {
.extend(izip!(
helpers.iter().cloned(),
iter::repeat(macro_id),
iter::repeat(call_id),
iter::repeat(Either::Left(call_id)),
));
}
}
@ -1492,6 +1493,8 @@ impl<'db> DefCollector<'db> {
Interned::new(path),
);
derive_call_ids.push(None);
// Try to resolve the derive immediately. If we succeed, we can also use the fast path
// for builtin derives. If not, we cannot use it, as it can cause the ADT to become
// interned while the derive is still unresolved, which will cause it to get forgotten.
@ -1506,23 +1509,42 @@ impl<'db> DefCollector<'db> {
call_id,
);
let ast_id_without_path = ast_id.ast_id;
let directive = MacroDirective {
module_id: directive.module_id,
depth: directive.depth + 1,
kind: MacroDirectiveKind::Derive {
ast_id,
derive_attr: *attr_id,
derive_pos: idx,
ctxt: call_site.ctx,
derive_macro_id: call_id,
},
container: directive.container,
};
if let Ok((macro_id, def_id, call_id)) = id {
derive_call_ids.push(Some(call_id));
let (mut helpers_start, mut helpers_end) = (0, 0);
// Record its helper attributes.
if def_id.krate != self.def_map.krate {
let def_map = crate_def_map(self.db, def_id.krate);
if let Some(helpers) =
def_map.data.exported_derives.get(&macro_id)
{
self.def_map
let derive_helpers = self
.def_map
.derive_helpers_in_scope
.entry(ast_id.ast_id.map(|it| it.upcast()))
.or_default()
.extend(izip!(
helpers.iter().cloned(),
iter::repeat(macro_id),
iter::repeat(call_id),
));
.entry(
ast_id_without_path.map(|it| it.upcast()),
)
.or_default();
helpers_start = derive_helpers.len();
derive_helpers.extend(izip!(
helpers.iter().cloned(),
iter::repeat(macro_id),
iter::repeat(Either::Left(call_id)),
));
helpers_end = derive_helpers.len();
}
}
@ -1531,7 +1553,7 @@ impl<'db> DefCollector<'db> {
def_id.kind
{
self.deferred_builtin_derives
.entry(ast_id.ast_id.upcast())
.entry(ast_id_without_path.upcast())
.or_default()
.push(DeferredBuiltinDerive {
call_id,
@ -1541,24 +1563,15 @@ impl<'db> DefCollector<'db> {
depth: directive.depth,
derive_attr_id: *attr_id,
derive_index: idx as u32,
helpers_range: helpers_start..helpers_end,
});
} else {
push_resolved(&mut resolved, directive, call_id);
push_resolved(&mut resolved, &directive, call_id);
*derive_call_ids.last_mut().unwrap() =
Some(Either::Left(call_id));
}
} else {
derive_call_ids.push(None);
self.unresolved_macros.push(MacroDirective {
module_id: directive.module_id,
depth: directive.depth + 1,
kind: MacroDirectiveKind::Derive {
ast_id,
derive_attr: *attr_id,
derive_pos: idx,
ctxt: call_site.ctx,
derive_macro_id: call_id,
},
container: directive.container,
});
self.unresolved_macros.push(directive);
}
}
@ -1858,9 +1871,8 @@ impl ModCollector<'_, '_> {
ast_id: FileAstId<ast::Adt>,
id: AdtId,
def_map: &mut DefMap| {
let Some(deferred_derives) =
deferred_derives.remove(&InFile::new(file_id, ast_id.upcast()))
else {
let ast_id = InFile::new(file_id, ast_id.upcast());
let Some(deferred_derives) = deferred_derives.remove(&ast_id.upcast()) else {
return;
};
let module = &mut def_map.modules[module_id];
@ -1876,6 +1888,22 @@ impl ModCollector<'_, '_> {
},
);
module.scope.define_builtin_derive_impl(impl_id);
module.scope.set_derive_macro_invoc(
ast_id,
Either::Right(impl_id),
deferred_derive.derive_attr_id,
deferred_derive.derive_index as usize,
);
// Change its helper attributes to the new id.
if let Some(derive_helpers) =
def_map.derive_helpers_in_scope.get_mut(&ast_id.map(|it| it.upcast()))
{
for (_, _, call_id) in
&mut derive_helpers[deferred_derive.helpers_range.clone()]
{
*call_id = Either::Right(impl_id);
}
}
});
}
};

View file

@ -1,8 +1,8 @@
mod globs;
mod imports;
mod incremental;
mod macros;
mod mod_resolution;
mod primitives;
use base_db::RootQueryDb;
use expect_test::{Expect, expect};

View file

@ -0,0 +1,63 @@
use super::*;
#[test]
fn kw_path_renames() {
check(
r#"
macro_rules! m {
() => {
pub use $crate as dollar_crate;
pub use $crate::{self as self_dollar_crate};
};
}
pub use self as this;
pub use crate as krate;
pub use crate::{self as self_krate};
m!();
mod foo {
pub use super as zuper;
pub use super::{self as self_zuper};
}
"#,
expect![[r#"
crate
- dollar_crate : type (import)
- foo : type
- krate : type (import)
- self_dollar_crate : type (import)
- self_krate : type (import)
- this : type (import)
- (legacy) m : macro!
crate::foo
- self_zuper : type (import)
- zuper : type (import)
- (legacy) m : macro!
"#]],
);
}
#[test]
fn primitive_reexport() {
check(
r#"
//- /lib.rs
mod foo;
use foo::int;
//- /foo.rs
pub use i32 as int;
"#,
expect![[r#"
crate
- foo : type
- int : type (import)
crate::foo
- int : type (import)
"#]],
);
}

View file

@ -1,23 +0,0 @@
use super::*;
#[test]
fn primitive_reexport() {
check(
r#"
//- /lib.rs
mod foo;
use foo::int;
//- /foo.rs
pub use i32 as int;
"#,
expect![[r#"
crate
- foo : type
- int : type (import)
crate::foo
- int : type (import)
"#]],
);
}

View file

@ -12,7 +12,7 @@ use intern::{Symbol, sym};
use la_arena::{Arena, Idx};
use rustc_abi::{IntegerType, ReprOptions};
use syntax::{
NodeOrToken, SyntaxNodePtr, T,
AstNode, NodeOrToken, SyntaxNodePtr, T,
ast::{self, HasGenericParams, HasName, HasVisibility, IsString},
};
use thin_vec::ThinVec;
@ -754,6 +754,7 @@ pub struct FieldData {
pub type_ref: TypeRefId,
pub visibility: RawVisibility,
pub is_unsafe: bool,
pub default_value: Option<ExprId>,
}
pub type LocalFieldId = Idx<FieldData>;
@ -903,7 +904,14 @@ fn lower_fields<Field: ast::HasAttrs + ast::HasVisibility>(
.filter_map(NodeOrToken::into_token)
.any(|token| token.kind() == T![unsafe]);
let name = field_name(idx, &field);
arena.alloc(FieldData { name, type_ref, visibility, is_unsafe });
// Check if field has default value (only for record fields)
let default_value = ast::RecordField::cast(field.syntax().clone())
.and_then(|rf| rf.eq_token().is_some().then_some(rf.expr()))
.flatten()
.map(|expr| col.collect_expr_opt(Some(expr)));
arena.alloc(FieldData { name, type_ref, visibility, is_unsafe, default_value });
idx += 1;
}
Err(cfg) => {

View file

@ -115,7 +115,7 @@ fn dummy_gate_test_expand(
/// wasting a lot of memory, and it would also require some way to use a path in a way that makes it
/// always resolve as a derive without nameres recollecting them.
/// So this hacky approach is a lot more friendly for us, though it does require a bit of support in
/// [`hir::Semantics`] to make this work.
/// hir::Semantics to make this work.
fn derive_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,

View file

@ -1,4 +1,4 @@
//! Processes out #[cfg] and #[cfg_attr] attributes from the input for the derive macro
//! Processes out `#[cfg]` and `#[cfg_attr]` attributes from the input for the derive macro
use std::{cell::OnceCell, ops::ControlFlow};
use ::tt::TextRange;

View file

@ -101,7 +101,9 @@ impl DeclarativeMacroExpander {
match &*value {
"transparent" => ControlFlow::Break(Transparency::Transparent),
// "semitransparent" is for old rustc versions.
"semiopaque" | "semitransparent" => ControlFlow::Break(Transparency::SemiOpaque),
"semiopaque" | "semitransparent" => {
ControlFlow::Break(Transparency::SemiOpaque)
}
"opaque" => ControlFlow::Break(Transparency::Opaque),
_ => ControlFlow::Continue(()),
}

View file

@ -423,6 +423,10 @@ macro_rules! __known_path {
(core::ops::RangeTo) => {};
(core::ops::RangeToInclusive) => {};
(core::ops::RangeInclusive) => {};
(core::range::Range) => {};
(core::range::RangeFrom) => {};
(core::range::RangeInclusive) => {};
(core::range::RangeToInclusive) => {};
(core::future::Future) => {};
(core::future::IntoFuture) => {};
(core::fmt::Debug) => {};

View file

@ -41,7 +41,7 @@ use crate::{
pub(crate) use hir_def::{
LocalFieldId, VariantId,
expr_store::Body,
hir::{Expr, ExprId, MatchArm, Pat, PatId, Statement},
hir::{Expr, ExprId, MatchArm, Pat, PatId, RecordSpread, Statement},
};
pub enum BodyValidationDiagnostic {
@ -123,7 +123,7 @@ impl<'db> ExprValidator<'db> {
}
for (id, expr) in body.exprs() {
if let Some((variant, missed_fields, true)) =
if let Some((variant, missed_fields)) =
record_literal_missing_fields(db, self.infer, id, expr)
{
self.diagnostics.push(BodyValidationDiagnostic::RecordMissingFields {
@ -154,7 +154,7 @@ impl<'db> ExprValidator<'db> {
}
for (id, pat) in body.pats() {
if let Some((variant, missed_fields, true)) =
if let Some((variant, missed_fields)) =
record_pattern_missing_fields(db, self.infer, id, pat)
{
self.diagnostics.push(BodyValidationDiagnostic::RecordMissingFields {
@ -557,9 +557,9 @@ pub fn record_literal_missing_fields(
infer: &InferenceResult,
id: ExprId,
expr: &Expr,
) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
let (fields, exhaustive) = match expr {
Expr::RecordLit { fields, spread, .. } => (fields, spread.is_none()),
) -> Option<(VariantId, Vec<LocalFieldId>)> {
let (fields, spread) = match expr {
Expr::RecordLit { fields, spread, .. } => (fields, spread),
_ => return None,
};
@ -571,15 +571,28 @@ pub fn record_literal_missing_fields(
let variant_data = variant_def.fields(db);
let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
// don't show missing fields if:
// - has ..expr
// - or has default value + ..
// - or already in code
let missed_fields: Vec<LocalFieldId> = variant_data
.fields()
.iter()
.filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
.filter_map(|(f, d)| {
if specified_fields.contains(&d.name)
|| matches!(spread, RecordSpread::Expr(_))
|| (d.default_value.is_some() && matches!(spread, RecordSpread::FieldDefaults))
{
None
} else {
Some(f)
}
})
.collect();
if missed_fields.is_empty() {
return None;
}
Some((variant_def, missed_fields, exhaustive))
Some((variant_def, missed_fields))
}
pub fn record_pattern_missing_fields(
@ -587,9 +600,9 @@ pub fn record_pattern_missing_fields(
infer: &InferenceResult,
id: PatId,
pat: &Pat,
) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
let (fields, exhaustive) = match pat {
Pat::Record { path: _, args, ellipsis } => (args, !ellipsis),
) -> Option<(VariantId, Vec<LocalFieldId>)> {
let (fields, ellipsis) = match pat {
Pat::Record { path: _, args, ellipsis } => (args, *ellipsis),
_ => return None,
};
@ -601,15 +614,22 @@ pub fn record_pattern_missing_fields(
let variant_data = variant_def.fields(db);
let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
// don't show missing fields if:
// - in code
// - or has ..
let missed_fields: Vec<LocalFieldId> = variant_data
.fields()
.iter()
.filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
.filter_map(
|(f, d)| {
if specified_fields.contains(&d.name) || ellipsis { None } else { Some(f) }
},
)
.collect();
if missed_fields.is_empty() {
return None;
}
Some((variant_def, missed_fields, exhaustive))
Some((variant_def, missed_fields))
}
fn types_of_subpatterns_do_match(pat: PatId, body: &Body, infer: &InferenceResult) -> bool {

View file

@ -1815,18 +1815,34 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
Some(struct_.into())
}
fn has_new_range_feature(&self) -> bool {
self.resolver.top_level_def_map().is_unstable_feature_enabled(&sym::new_range)
}
fn resolve_range(&self) -> Option<AdtId> {
let struct_ = self.lang_items.Range?;
let struct_ = if self.has_new_range_feature() {
self.lang_items.RangeCopy?
} else {
self.lang_items.Range?
};
Some(struct_.into())
}
fn resolve_range_inclusive(&self) -> Option<AdtId> {
let struct_ = self.lang_items.RangeInclusiveStruct?;
let struct_ = if self.has_new_range_feature() {
self.lang_items.RangeInclusiveCopy?
} else {
self.lang_items.RangeInclusiveStruct?
};
Some(struct_.into())
}
fn resolve_range_from(&self) -> Option<AdtId> {
let struct_ = self.lang_items.RangeFrom?;
let struct_ = if self.has_new_range_feature() {
self.lang_items.RangeFromCopy?
} else {
self.lang_items.RangeFrom?
};
Some(struct_.into())
}
@ -1836,7 +1852,11 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
}
fn resolve_range_to_inclusive(&self) -> Option<AdtId> {
let struct_ = self.lang_items.RangeToInclusive?;
let struct_ = if self.has_new_range_feature() {
self.lang_items.RangeToInclusiveCopy?
} else {
self.lang_items.RangeToInclusive?
};
Some(struct_.into())
}

View file

@ -2,8 +2,10 @@
use hir_def::{AdtId, hir::ExprId, signatures::TraitFlags};
use rustc_ast_ir::Mutability;
use rustc_hash::FxHashSet;
use rustc_type_ir::{
Flags, InferTy, TypeFlags, UintTy,
InferTy, TypeVisitableExt, UintTy, elaborate,
error::TypeError,
inherent::{AdtDef, BoundExistentialPredicates as _, IntoKind, Ty as _},
};
use stdx::never;
@ -12,7 +14,10 @@ use crate::{
InferenceDiagnostic,
db::HirDatabase,
infer::{AllowTwoPhase, InferenceContext, expr::ExprIsRead},
next_solver::{BoundExistentialPredicates, DbInterner, ParamTy, Ty, TyKind},
next_solver::{
BoundExistentialPredicates, ExistentialPredicate, ParamTy, Region, Ty, TyKind,
infer::traits::ObligationCause,
},
};
#[derive(Debug)]
@ -66,12 +71,13 @@ pub enum CastError {
DifferingKinds,
SizedUnsizedCast,
IllegalCast,
IntToFatCast,
IntToWideCast,
NeedDeref,
NeedViaPtr,
NeedViaThinPtr,
NeedViaInt,
NonScalar,
PtrPtrAddingAutoTraits,
// We don't want to report errors with unknown types currently.
// UnknownCastPtrKind,
// UnknownExprPtrKind,
@ -137,22 +143,13 @@ impl<'db> CastCheck<'db> {
return Ok(());
}
if !self.cast_ty.flags().contains(TypeFlags::HAS_TY_INFER)
&& !ctx.table.is_sized(self.cast_ty)
{
if !self.cast_ty.has_infer_types() && !ctx.table.is_sized(self.cast_ty) {
return Err(InferenceDiagnostic::CastToUnsized {
expr: self.expr,
cast_ty: self.cast_ty.store(),
});
}
// Chalk doesn't support trait upcasting and fails to solve some obvious goals
// when the trait environment contains some recursive traits (See issue #18047)
// We skip cast checks for such cases for now, until the next-gen solver.
if contains_dyn_trait(self.cast_ty) {
return Ok(());
}
self.do_check(ctx).map_err(|e| e.into_diagnostic(self.expr, self.expr_ty, self.cast_ty))
}
@ -162,22 +159,23 @@ impl<'db> CastCheck<'db> {
(Some(t_from), Some(t_cast)) => (t_from, t_cast),
(None, Some(t_cast)) => match self.expr_ty.kind() {
TyKind::FnDef(..) => {
let sig =
self.expr_ty.callable_sig(ctx.interner()).expect("FnDef had no sig");
let sig = ctx.table.normalize_associated_types_in(sig);
// rustc calls `FnCtxt::normalize` on this but it's a no-op in next-solver
let sig = self.expr_ty.fn_sig(ctx.interner());
let fn_ptr = Ty::new_fn_ptr(ctx.interner(), sig);
if ctx
.coerce(
self.source_expr.into(),
self.expr_ty,
fn_ptr,
AllowTwoPhase::No,
ExprIsRead::Yes,
)
.is_ok()
{
} else {
return Err(CastError::IllegalCast);
match ctx.coerce(
self.source_expr.into(),
self.expr_ty,
fn_ptr,
AllowTwoPhase::No,
ExprIsRead::Yes,
) {
Ok(_) => {}
Err(TypeError::IntrinsicCast) => {
return Err(CastError::IllegalCast);
}
Err(_) => {
return Err(CastError::NonScalar);
}
}
(CastTy::FnPtr, t_cast)
@ -213,23 +211,41 @@ impl<'db> CastCheck<'db> {
// rustc checks whether the `expr_ty` is foreign adt with `non_exhaustive` sym
match (t_from, t_cast) {
// These types have invariants! can't cast into them.
(_, CastTy::Int(Int::CEnum) | CastTy::FnPtr) => Err(CastError::NonScalar),
// * -> Bool
(_, CastTy::Int(Int::Bool)) => Err(CastError::CastToBool),
(CastTy::Int(Int::U(UintTy::U8)), CastTy::Int(Int::Char)) => Ok(()),
// * -> Char
(CastTy::Int(Int::U(UintTy::U8)), CastTy::Int(Int::Char)) => Ok(()), // u8-char-cast
(_, CastTy::Int(Int::Char)) => Err(CastError::CastToChar),
// prim -> float,ptr
(CastTy::Int(Int::Bool | Int::CEnum | Int::Char), CastTy::Float) => {
Err(CastError::NeedViaInt)
}
(CastTy::Int(Int::Bool | Int::CEnum | Int::Char) | CastTy::Float, CastTy::Ptr(..))
| (CastTy::Ptr(..) | CastTy::FnPtr, CastTy::Float) => Err(CastError::IllegalCast),
(CastTy::Ptr(src, _), CastTy::Ptr(dst, _)) => self.check_ptr_ptr_cast(ctx, src, dst),
// ptr -> ptr
(CastTy::Ptr(src, _), CastTy::Ptr(dst, _)) => self.check_ptr_ptr_cast(ctx, src, dst), // ptr-ptr-cast
// // ptr-addr-cast
(CastTy::Ptr(src, _), CastTy::Int(_)) => self.check_ptr_addr_cast(ctx, src),
(CastTy::FnPtr, CastTy::Int(_)) => Ok(()),
// addr-ptr-cast
(CastTy::Int(_), CastTy::Ptr(dst, _)) => self.check_addr_ptr_cast(ctx, dst),
// fn-ptr-cast
(CastTy::FnPtr, CastTy::Ptr(dst, _)) => self.check_fptr_ptr_cast(ctx, dst),
// prim -> prim
(CastTy::Int(Int::CEnum), CastTy::Int(_)) => Ok(()),
(CastTy::Int(Int::Char | Int::Bool), CastTy::Int(_)) => Ok(()),
(CastTy::Int(_) | CastTy::Float, CastTy::Int(_) | CastTy::Float) => Ok(()),
(CastTy::FnPtr, CastTy::Int(_)) => Ok(()),
}
}
@ -241,10 +257,16 @@ impl<'db> CastCheck<'db> {
t_cast: Ty<'db>,
m_cast: Mutability,
) -> Result<(), CastError> {
// Mutability order is opposite to rustc. `Mut < Not`
if m_expr <= m_cast
let t_expr = ctx.table.try_structurally_resolve_type(t_expr);
let t_cast = ctx.table.try_structurally_resolve_type(t_cast);
if m_expr >= m_cast
&& let TyKind::Array(ety, _) = t_expr.kind()
&& ctx.infcx().can_eq(ctx.table.param_env, ety, t_cast)
{
// Due to historical reasons we allow directly casting references of
// arrays into raw pointers of their element type.
// Coerce to a raw pointer so that we generate RawPtr in MIR.
let array_ptr_type = Ty::new_ptr(ctx.interner(), t_expr, m_expr);
if ctx
@ -265,14 +287,9 @@ impl<'db> CastCheck<'db> {
);
}
// This is a less strict condition than rustc's `demand_eqtype`,
// but false negative is better than false positive
if ctx
.coerce(self.source_expr.into(), ety, t_cast, AllowTwoPhase::No, ExprIsRead::Yes)
.is_ok()
{
return Ok(());
}
// this will report a type mismatch if needed
let _ = ctx.demand_eqtype(self.expr.into(), ety, t_cast);
return Ok(());
}
Err(CastError::IllegalCast)
@ -289,30 +306,147 @@ impl<'db> CastCheck<'db> {
match (src_kind, dst_kind) {
(Some(PointerKind::Error), _) | (_, Some(PointerKind::Error)) => Ok(()),
// (_, None) => Err(CastError::UnknownCastPtrKind),
// (None, _) => Err(CastError::UnknownExprPtrKind),
(_, None) | (None, _) => Ok(()),
// Cast to thin pointer is OK
(_, Some(PointerKind::Thin)) => Ok(()),
// thin -> fat? report invalid cast (don't complain about vtable kinds)
(Some(PointerKind::Thin), _) => Err(CastError::SizedUnsizedCast),
// trait object -> trait object? need to do additional checks
(Some(PointerKind::VTable(src_tty)), Some(PointerKind::VTable(dst_tty))) => {
match (src_tty.principal_def_id(), dst_tty.principal_def_id()) {
// A<dyn Src<...> + SrcAuto> -> B<dyn Dst<...> + DstAuto>. need to make sure
// - `Src` and `Dst` traits are the same
// - traits have the same generic arguments
// - projections are the same
// - `SrcAuto` (+auto traits implied by `Src`) is a superset of `DstAuto`
//
// Note that trait upcasting goes through a different mechanism (`coerce_unsized`)
// and is unaffected by this check.
(Some(src_principal), Some(dst_principal)) => {
if src_principal == dst_principal {
return Ok(());
}
let src_principal = ctx.db.trait_signature(src_principal.0);
let dst_principal = ctx.db.trait_signature(dst_principal.0);
if src_principal.flags.contains(TraitFlags::AUTO)
&& dst_principal.flags.contains(TraitFlags::AUTO)
// We need to reconstruct trait object types.
// `m_src` and `m_dst` won't work for us here because they will potentially
// contain wrappers, which we do not care about.
//
// e.g. we want to allow `dyn T -> (dyn T,)`, etc.
//
// We also need to skip auto traits to emit an FCW and not an error.
let src_obj = Ty::new_dynamic(
ctx.interner(),
BoundExistentialPredicates::new_from_iter(
ctx.interner(),
src_tty.iter().filter(|pred| {
!matches!(
pred.skip_binder(),
ExistentialPredicate::AutoTrait(_)
)
}),
),
Region::new_erased(ctx.interner()),
);
let dst_obj = Ty::new_dynamic(
ctx.interner(),
BoundExistentialPredicates::new_from_iter(
ctx.interner(),
dst_tty.iter().filter(|pred| {
!matches!(
pred.skip_binder(),
ExistentialPredicate::AutoTrait(_)
)
}),
),
Region::new_erased(ctx.interner()),
);
// `dyn Src = dyn Dst`, this checks for matching traits/generics/projections
// This is `fcx.demand_eqtype`, but inlined to give a better error.
if ctx
.table
.at(&ObligationCause::dummy())
.eq(src_obj, dst_obj)
.map(|infer_ok| ctx.table.register_infer_ok(infer_ok))
.is_err()
{
Ok(())
} else {
Err(CastError::DifferingKinds)
return Err(CastError::DifferingKinds);
}
// Check that `SrcAuto` (+auto traits implied by `Src`) is a superset of `DstAuto`.
// Emit an FCW otherwise.
let src_auto: FxHashSet<_> = src_tty
.auto_traits()
.into_iter()
.chain(
elaborate::supertrait_def_ids(ctx.interner(), src_principal)
.filter(|trait_| {
ctx.db
.trait_signature(trait_.0)
.flags
.contains(TraitFlags::AUTO)
}),
)
.collect();
let added = dst_tty
.auto_traits()
.into_iter()
.any(|trait_| !src_auto.contains(&trait_));
if added {
return Err(CastError::PtrPtrAddingAutoTraits);
}
Ok(())
}
_ => Err(CastError::Unknown),
// dyn Auto -> dyn Auto'? ok.
(None, None) => Ok(()),
// dyn Trait -> dyn Auto? not ok (for now).
//
// Although dropping the principal is already allowed for unsizing coercions
// (e.g. `*const (dyn Trait + Auto)` to `*const dyn Auto`), dropping it is
// currently **NOT** allowed for (non-coercion) ptr-to-ptr casts (e.g
// `*const Foo` to `*const Bar` where `Foo` has a `dyn Trait + Auto` tail
// and `Bar` has a `dyn Auto` tail), because the underlying MIR operations
// currently work very differently:
//
// * A MIR unsizing coercion on raw pointers to trait objects (`*const dyn Src`
// to `*const dyn Dst`) is currently equivalent to downcasting the source to
// the concrete sized type that it was originally unsized from first (via a
// ptr-to-ptr cast from `*const Src` to `*const T` with `T: Sized`) and then
// unsizing this thin pointer to the target type (unsizing `*const T` to
// `*const Dst`). In particular, this means that the pointer's metadata
// (vtable) will semantically change, e.g. for const eval and miri, even
// though the vtables will always be merged for codegen.
//
// * A MIR ptr-to-ptr cast is currently equivalent to a transmute and does not
// change the pointer metadata (vtable) at all.
//
// In addition to this potentially surprising difference between coercion and
// non-coercion casts, casting away the principal with a MIR ptr-to-ptr cast
// is currently considered undefined behavior:
//
// As a validity invariant of pointers to trait objects, we currently require
// that the principal of the vtable in the pointer metadata exactly matches
// the principal of the pointee type, where "no principal" is also considered
// a kind of principal.
(Some(_), None) => Err(CastError::DifferingKinds),
// dyn Auto -> dyn Trait? not ok.
(None, Some(_)) => Err(CastError::DifferingKinds),
}
}
// fat -> fat? metadata kinds must match
(Some(src_kind), Some(dst_kind)) if src_kind == dst_kind => Ok(()),
(_, _) => Err(CastError::DifferingKinds),
}
@ -342,9 +476,9 @@ impl<'db> CastCheck<'db> {
None => Ok(()),
Some(PointerKind::Error) => Ok(()),
Some(PointerKind::Thin) => Ok(()),
Some(PointerKind::VTable(_)) => Err(CastError::IntToFatCast),
Some(PointerKind::Length) => Err(CastError::IntToFatCast),
Some(PointerKind::OfAlias | PointerKind::OfParam(_)) => Err(CastError::IntToFatCast),
Some(PointerKind::VTable(_)) => Err(CastError::IntToWideCast),
Some(PointerKind::Length) => Err(CastError::IntToWideCast),
Some(PointerKind::OfAlias | PointerKind::OfParam(_)) => Err(CastError::IntToWideCast),
}
}
@ -363,15 +497,20 @@ impl<'db> CastCheck<'db> {
}
}
/// The kind of pointer and associated metadata (thin, length or vtable) - we
/// only allow casts between wide pointers if their metadata have the same
/// kind.
#[derive(Debug, PartialEq, Eq)]
enum PointerKind<'db> {
    /// No metadata attached, ie pointer to sized type or foreign type
    Thin,
    /// A trait object; carries the existential predicates describing its vtable.
    VTable(BoundExistentialPredicates<'db>),
    /// Slice; the metadata is the element count.
    Length,
    /// The unsize info of this projection or opaque type
    OfAlias,
    /// The unsize info of this parameter
    OfParam(ParamTy),
    /// Determining the pointer kind failed; casts involving it are accepted
    /// permissively rather than reporting follow-up errors.
    Error,
}
@ -439,24 +578,3 @@ fn pointer_kind<'db>(
}
}
}
/// Returns `true` if a `dyn Trait` type occurs anywhere inside `ty`.
fn contains_dyn_trait<'db>(ty: Ty<'db>) -> bool {
    use std::ops::ControlFlow;

    use rustc_type_ir::{TypeSuperVisitable, TypeVisitable, TypeVisitor};

    /// Visitor that stops (breaks) at the first trait-object type it sees.
    struct FindDyn;
    impl<'db> TypeVisitor<DbInterner<'db>> for FindDyn {
        type Result = ControlFlow<()>;
        fn visit_ty(&mut self, ty: Ty<'db>) -> ControlFlow<()> {
            if matches!(ty.kind(), TyKind::Dynamic(..)) {
                ControlFlow::Break(())
            } else {
                // Not a trait object itself; keep walking its component types.
                ty.super_visit_with(self)
            }
        }
    }

    ty.visit_with(&mut FindDyn).is_break()
}

View file

@ -466,7 +466,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
/// Given an `FnOnce::Output` or `AsyncFn::Output` projection, extract the args
/// and return type to infer a [`ty::PolyFnSig`] for the closure.
/// and return type to infer a `PolyFnSig` for the closure.
fn extract_sig_from_projection(
&self,
projection: PolyProjectionPredicate<'db>,

View file

@ -8,7 +8,7 @@ use hir_def::{
expr_store::path::Path,
hir::{
Array, AsmOperand, BinaryOp, BindingId, CaptureBy, Expr, ExprId, ExprOrPatId, Pat, PatId,
Statement, UnaryOp,
RecordSpread, Statement, UnaryOp,
},
item_tree::FieldsShape,
resolver::ValueNs,
@ -627,7 +627,7 @@ impl<'db> InferenceContext<'_, 'db> {
self.consume_expr(expr);
}
Expr::RecordLit { fields, spread, .. } => {
if let &Some(expr) = spread {
if let RecordSpread::Expr(expr) = *spread {
self.consume_expr(expr);
}
self.consume_exprs(fields.iter().map(|it| it.expr));

View file

@ -8,7 +8,7 @@ use hir_def::{
expr_store::path::{GenericArgs as HirGenericArgs, Path},
hir::{
Array, AsmOperand, AsmOptions, BinaryOp, BindingAnnotation, Expr, ExprId, ExprOrPatId,
LabelId, Literal, Pat, PatId, Statement, UnaryOp,
InlineAsmKind, LabelId, Literal, Pat, PatId, RecordSpread, Statement, UnaryOp,
},
resolver::ValueNs,
};
@ -657,8 +657,8 @@ impl<'db> InferenceContext<'_, 'db> {
}
}
}
if let Some(expr) = spread {
self.infer_expr(*expr, &Expectation::has_type(ty), ExprIsRead::Yes);
if let RecordSpread::Expr(expr) = *spread {
self.infer_expr(expr, &Expectation::has_type(ty), ExprIsRead::Yes);
}
ty
}
@ -1037,7 +1037,11 @@ impl<'db> InferenceContext<'_, 'db> {
// FIXME: `sym` should report for things that are not functions or statics.
AsmOperand::Sym(_) => (),
});
if diverge { self.types.types.never } else { self.types.types.unit }
if diverge || asm.kind == InlineAsmKind::NakedAsm {
self.types.types.never
} else {
self.types.types.unit
}
}
};
// use a new type variable if we got unknown here
@ -1704,7 +1708,7 @@ impl<'db> InferenceContext<'_, 'db> {
});
match resolved {
Ok((func, _is_visible)) => {
self.check_method_call(tgt_expr, &[], func.sig, receiver_ty, expected)
self.check_method_call(tgt_expr, &[], func.sig, expected)
}
Err(_) => self.err_ty(),
}
@ -1844,7 +1848,7 @@ impl<'db> InferenceContext<'_, 'db> {
item: func.def_id.into(),
})
}
self.check_method_call(tgt_expr, args, func.sig, receiver_ty, expected)
self.check_method_call(tgt_expr, args, func.sig, expected)
}
// Failed to resolve, report diagnostic and try to resolve as call to field access or
// assoc function
@ -1934,16 +1938,14 @@ impl<'db> InferenceContext<'_, 'db> {
tgt_expr: ExprId,
args: &[ExprId],
sig: FnSig<'db>,
receiver_ty: Ty<'db>,
expected: &Expectation<'db>,
) -> Ty<'db> {
let (formal_receiver_ty, param_tys) = if !sig.inputs_and_output.inputs().is_empty() {
(sig.inputs_and_output.as_slice()[0], &sig.inputs_and_output.inputs()[1..])
let param_tys = if !sig.inputs_and_output.inputs().is_empty() {
&sig.inputs_and_output.inputs()[1..]
} else {
(self.types.types.error, &[] as _)
&[]
};
let ret_ty = sig.output();
self.table.unify(formal_receiver_ty, receiver_ty);
self.check_call_arguments(tgt_expr, param_tys, ret_ty, expected, args, &[], sig.c_variadic);
ret_ty

View file

@ -2,7 +2,8 @@
//! between `Deref` and `DerefMut` or `Index` and `IndexMut` or similar.
use hir_def::hir::{
Array, AsmOperand, BinaryOp, BindingAnnotation, Expr, ExprId, Pat, PatId, Statement, UnaryOp,
Array, AsmOperand, BinaryOp, BindingAnnotation, Expr, ExprId, Pat, PatId, RecordSpread,
Statement, UnaryOp,
};
use rustc_ast_ir::Mutability;
@ -132,8 +133,11 @@ impl<'db> InferenceContext<'_, 'db> {
Expr::Become { expr } => {
self.infer_mut_expr(*expr, Mutability::Not);
}
Expr::RecordLit { path: _, fields, spread } => {
self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread))
Expr::RecordLit { path: _, fields, spread, .. } => {
self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr));
if let RecordSpread::Expr(expr) = *spread {
self.infer_mut_expr(expr, Mutability::Not);
}
}
&Expr::Index { base, index } => {
if mutability == Mutability::Mut {

View file

@ -93,6 +93,7 @@ impl<'db> InferenceContext<'_, 'db> {
if let GenericDefId::StaticId(_) = generic_def {
// `Static` is the kind of item that can never be generic currently. We can just skip the binders to get its type.
let ty = self.db.value_ty(value_def)?.skip_binder();
let ty = self.process_remote_user_written_ty(ty);
return Some(ValuePathResolution::NonGeneric(ty));
};

View file

@ -261,16 +261,6 @@ impl<'db> InferenceTable<'db> {
self.infer_ctxt.canonicalize_response(t)
}
// FIXME: We should get rid of this method. We cannot deeply normalize during inference, only when finishing.
// Inference should use shallow normalization (`try_structurally_resolve_type()`) only, when needed.
pub(crate) fn normalize_associated_types_in<T>(&mut self, ty: T) -> T
where
T: TypeFoldable<DbInterner<'db>> + Clone,
{
let ty = self.resolve_vars_with_obligations(ty);
self.at(&ObligationCause::new()).deeply_normalize(ty.clone()).unwrap_or(ty)
}
pub(crate) fn normalize_alias_ty(&mut self, alias: Ty<'db>) -> Ty<'db> {
self.infer_ctxt
.at(&ObligationCause::new(), self.param_env)

View file

@ -27,8 +27,8 @@ use hir_def::{
resolver::{HasResolver, LifetimeNs, Resolver, TypeNs, ValueNs},
signatures::{FunctionSignature, TraitFlags, TypeAliasFlags},
type_ref::{
ConstRef, LifetimeRefId, PathId, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound,
TypeRef, TypeRefId,
ConstRef, FnType, LifetimeRefId, PathId, TraitBoundModifier, TraitRef as HirTraitRef,
TypeBound, TypeRef, TypeRefId,
},
};
use hir_expand::name::Name;
@ -53,7 +53,7 @@ use tracing::debug;
use triomphe::{Arc, ThinArc};
use crate::{
FnAbi, ImplTraitId, TyLoweringDiagnostic, TyLoweringDiagnosticKind,
FnAbi, ImplTraitId, TyLoweringDiagnostic, TyLoweringDiagnosticKind, all_super_traits,
consteval::intern_const_ref,
db::{HirDatabase, InternedOpaqueTyId},
generics::{Generics, generics, trait_self_param_idx},
@ -77,6 +77,7 @@ pub struct ImplTraits {
#[derive(PartialEq, Eq, Debug, Hash)]
pub struct ImplTrait {
pub(crate) predicates: StoredClauses,
pub(crate) assoc_ty_bounds_start: u32,
}
pub type ImplTraitIdx = Idx<ImplTrait>;
@ -97,7 +98,7 @@ impl ImplTraitLoweringState {
}
}
#[derive(Debug, Clone)]
#[derive(Debug, Clone, Copy)]
pub enum LifetimeElisionKind<'db> {
/// Create a new anonymous lifetime parameter and reference it.
///
@ -166,6 +167,12 @@ impl<'db> LifetimeElisionKind<'db> {
}
}
/// Classifies where a lowered predicate originated, so callers can separate
/// predicates on the bounded item itself from associated-type bounds.
#[derive(Clone, Copy, PartialEq, Debug)]
pub(crate) enum GenericPredicateSource {
    /// A predicate whose subject is the bounded item itself (e.g. `T: Trait`).
    SelfOnly,
    /// A predicate produced by bounds written on an associated type inside a
    /// bound (e.g. the `Item: Clone` part of `T: Iterator<Item: Clone>`).
    AssocTyBound,
}
#[derive(Debug)]
pub struct TyLoweringContext<'db, 'a> {
pub db: &'db dyn HirDatabase,
@ -430,26 +437,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
Ty::new_ref(interner, lifetime, inner_ty, lower_mutability(ref_.mutability))
}
TypeRef::Placeholder => Ty::new_error(interner, ErrorGuaranteed),
TypeRef::Fn(fn_) => {
let substs = self.with_shifted_in(
DebruijnIndex::from_u32(1),
|ctx: &mut TyLoweringContext<'_, '_>| {
Tys::new_from_iter(
interner,
fn_.params.iter().map(|&(_, tr)| ctx.lower_ty(tr)),
)
},
);
Ty::new_fn_ptr(
interner,
Binder::dummy(FnSig {
abi: fn_.abi.as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol),
safety: if fn_.is_unsafe { Safety::Unsafe } else { Safety::Safe },
c_variadic: fn_.is_varargs,
inputs_and_output: substs,
}),
)
}
TypeRef::Fn(fn_) => self.lower_fn_ptr(fn_),
TypeRef::DynTrait(bounds) => self.lower_dyn_trait(bounds),
TypeRef::ImplTrait(bounds) => {
match self.impl_trait_mode.mode {
@ -465,10 +453,10 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
// this dance is to make sure the data is in the right
// place even if we encounter more opaque types while
// lowering the bounds
let idx = self
.impl_trait_mode
.opaque_type_data
.alloc(ImplTrait { predicates: Clauses::empty(interner).store() });
let idx = self.impl_trait_mode.opaque_type_data.alloc(ImplTrait {
predicates: Clauses::empty(interner).store(),
assoc_ty_bounds_start: 0,
});
let impl_trait_id = origin.either(
|f| ImplTraitId::ReturnTypeImplTrait(f, idx),
@ -510,6 +498,30 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
(ty, res)
}
/// Lowers a HIR function-pointer type (`fn(..) -> ..`) to a `Ty`.
///
/// Parameter and return types are lowered with the debruijn index shifted in
/// by one, matching the binder wrapped around the resulting `FnSig`.
fn lower_fn_ptr(&mut self, fn_: &FnType) -> Ty<'db> {
    let interner = self.interner;
    let (params, ret_ty) = fn_.split_params_and_ret();
    // Save the caller's lifetime-elision mode; it is overridden while lowering
    // the signature and restored afterwards.
    let old_lifetime_elision = self.lifetime_elision;
    let mut args = Vec::with_capacity(fn_.params.len());
    self.with_shifted_in(DebruijnIndex::from_u32(1), |ctx: &mut TyLoweringContext<'_, '_>| {
        // Elided lifetimes in parameter position become fresh anonymous
        // parameters (not reported as path errors here).
        ctx.lifetime_elision =
            LifetimeElisionKind::AnonymousCreateParameter { report_in_path: false };
        args.extend(params.iter().map(|&(_, tr)| ctx.lower_ty(tr)));
        // The return type follows the usual fn-return elision rules.
        ctx.lifetime_elision = LifetimeElisionKind::for_fn_ret(interner);
        args.push(ctx.lower_ty(ret_ty));
    });
    self.lifetime_elision = old_lifetime_elision;
    Ty::new_fn_ptr(
        interner,
        Binder::dummy(FnSig {
            // No explicit `extern "..."` means the default Rust ABI.
            abi: fn_.abi.as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol),
            safety: if fn_.is_unsafe { Safety::Unsafe } else { Safety::Safe },
            c_variadic: fn_.is_varargs,
            // `args` holds all parameter types followed by the return type.
            inputs_and_output: Tys::new_from_slice(&args),
        }),
    )
}
/// This is only for `generic_predicates_for_param`, where we can't just
/// lower the self types of the predicates since that could lead to cycles.
/// So we just check here if the `type_ref` resolves to a generic param, and which.
@ -608,7 +620,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
ignore_bindings: bool,
generics: &Generics,
predicate_filter: PredicateFilter,
) -> impl Iterator<Item = Clause<'db>> + use<'a, 'b, 'db> {
) -> impl Iterator<Item = (Clause<'db>, GenericPredicateSource)> + use<'a, 'b, 'db> {
match where_predicate {
WherePredicate::ForLifetime { target, bound, .. }
| WherePredicate::TypeBound { target, bound } => {
@ -634,8 +646,8 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
let self_ty = self.lower_ty(*target);
Either::Left(Either::Right(self.lower_type_bound(bound, self_ty, ignore_bindings)))
}
&WherePredicate::Lifetime { bound, target } => {
Either::Right(iter::once(Clause(Predicate::new(
&WherePredicate::Lifetime { bound, target } => Either::Right(iter::once((
Clause(Predicate::new(
self.interner,
Binder::dummy(rustc_type_ir::PredicateKind::Clause(
rustc_type_ir::ClauseKind::RegionOutlives(OutlivesPredicate(
@ -643,8 +655,9 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
self.lower_lifetime(target),
)),
)),
))))
}
)),
GenericPredicateSource::SelfOnly,
))),
}
.into_iter()
}
@ -654,7 +667,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
bound: &'b TypeBound,
self_ty: Ty<'db>,
ignore_bindings: bool,
) -> impl Iterator<Item = Clause<'db>> + use<'b, 'a, 'db> {
) -> impl Iterator<Item = (Clause<'db>, GenericPredicateSource)> + use<'b, 'a, 'db> {
let interner = self.interner;
let meta_sized = self.lang_items.MetaSized;
let pointee_sized = self.lang_items.PointeeSized;
@ -712,7 +725,10 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
}
TypeBound::Use(_) | TypeBound::Error => {}
}
clause.into_iter().chain(assoc_bounds.into_iter().flatten())
clause
.into_iter()
.map(|pred| (pred, GenericPredicateSource::SelfOnly))
.chain(assoc_bounds.into_iter().flatten())
}
fn lower_dyn_trait(&mut self, bounds: &[TypeBound]) -> Ty<'db> {
@ -732,7 +748,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
for b in bounds {
let db = ctx.db;
ctx.lower_type_bound(b, dummy_self_ty, false).for_each(|b| {
ctx.lower_type_bound(b, dummy_self_ty, false).for_each(|(b, _)| {
match b.kind().skip_binder() {
rustc_type_ir::ClauseKind::Trait(t) => {
let id = t.def_id();
@ -990,35 +1006,49 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
rustc_type_ir::AliasTyKind::Opaque,
AliasTy::new_from_args(interner, def_id, args),
);
let predicates = self.with_shifted_in(DebruijnIndex::from_u32(1), |ctx| {
let mut predicates = Vec::new();
for b in bounds {
predicates.extend(ctx.lower_type_bound(b, self_ty, false));
}
let (predicates, assoc_ty_bounds_start) =
self.with_shifted_in(DebruijnIndex::from_u32(1), |ctx| {
let mut predicates = Vec::new();
let mut assoc_ty_bounds = Vec::new();
for b in bounds {
for (pred, source) in ctx.lower_type_bound(b, self_ty, false) {
match source {
GenericPredicateSource::SelfOnly => predicates.push(pred),
GenericPredicateSource::AssocTyBound => assoc_ty_bounds.push(pred),
}
}
}
if !ctx.unsized_types.contains(&self_ty) {
let sized_trait = self.lang_items.Sized;
let sized_clause = sized_trait.map(|trait_id| {
let trait_ref = TraitRef::new_from_args(
interner,
trait_id.into(),
GenericArgs::new_from_slice(&[self_ty.into()]),
);
Clause(Predicate::new(
interner,
Binder::dummy(rustc_type_ir::PredicateKind::Clause(
rustc_type_ir::ClauseKind::Trait(TraitPredicate {
trait_ref,
polarity: rustc_type_ir::PredicatePolarity::Positive,
}),
)),
))
});
predicates.extend(sized_clause);
}
predicates
});
ImplTrait { predicates: Clauses::new_from_slice(&predicates).store() }
if !ctx.unsized_types.contains(&self_ty) {
let sized_trait = self.lang_items.Sized;
let sized_clause = sized_trait.map(|trait_id| {
let trait_ref = TraitRef::new_from_args(
interner,
trait_id.into(),
GenericArgs::new_from_slice(&[self_ty.into()]),
);
Clause(Predicate::new(
interner,
Binder::dummy(rustc_type_ir::PredicateKind::Clause(
rustc_type_ir::ClauseKind::Trait(TraitPredicate {
trait_ref,
polarity: rustc_type_ir::PredicatePolarity::Positive,
}),
)),
))
});
predicates.extend(sized_clause);
}
let assoc_ty_bounds_start = predicates.len() as u32;
predicates.extend(assoc_ty_bounds);
(predicates, assoc_ty_bounds_start)
});
ImplTrait {
predicates: Clauses::new_from_slice(&predicates).store(),
assoc_ty_bounds_start,
}
}
pub(crate) fn lower_lifetime(&mut self, lifetime: LifetimeRefId) -> Region<'db> {
@ -1139,6 +1169,31 @@ impl ImplTraitId {
.expect("owner should have opaque type")
.get_with(|it| it.impl_traits[idx].predicates.as_ref().as_slice())
}
/// Returns only the predicates that constrain the opaque type itself,
/// excluding the trailing associated-type bounds (everything from
/// `assoc_ty_bounds_start` onwards).
#[inline]
pub fn self_predicates<'db>(
    self,
    db: &'db dyn HirDatabase,
) -> EarlyBinder<'db, &'db [Clause<'db>]> {
    // Locate the stored opaque-type data for the owner, plus the index of
    // this particular impl-trait within it.
    let (opaque_data, index) = match self {
        ImplTraitId::ReturnTypeImplTrait(owner, index) => {
            (ImplTraits::return_type_impl_traits(db, owner), index)
        }
        ImplTraitId::TypeAliasImplTrait(owner, index) => {
            (ImplTraits::type_alias_impl_traits(db, owner), index)
        }
    };
    let bounds = opaque_data.as_deref().expect("owner should have opaque type").get_with(
        |data| {
            let impl_trait = &data.impl_traits[index];
            let self_only_len = impl_trait.assoc_ty_bounds_start as usize;
            (impl_trait.predicates.as_ref().as_slice(), self_only_len)
        },
    );
    // Drop the associated-type bounds, keeping the self-only prefix.
    bounds.map_bound(|(clauses, self_only_len)| &clauses[..self_only_len])
}
}
impl InternedOpaqueTyId {
@ -1146,6 +1201,14 @@ impl InternedOpaqueTyId {
pub fn predicates<'db>(self, db: &'db dyn HirDatabase) -> EarlyBinder<'db, &'db [Clause<'db>]> {
self.loc(db).predicates(db)
}
/// Returns only the self-predicates of this opaque type (bounds on the opaque
/// type itself, excluding associated-type bounds).
///
/// Thin delegation to [`ImplTraitId::self_predicates`] via the interned
/// location.
#[inline]
pub fn self_predicates<'db>(
    self,
    db: &'db dyn HirDatabase,
) -> EarlyBinder<'db, &'db [Clause<'db>]> {
    self.loc(db).self_predicates(db)
}
}
#[salsa::tracked]
@ -1561,11 +1624,16 @@ pub(crate) fn field_types_with_diagnostics_query<'db>(
(res, create_diagnostics(ctx.diagnostics))
}
/// Predicates for `param_id` of the form `P: SomeTrait`. If
/// `assoc_name` is provided, only return predicates referencing traits
/// that have an associated type of that name.
///
/// This query exists only to be used when resolving short-hand associated types
/// like `T::Item`.
///
/// See the analogous query in rustc and its comment:
/// <https://github.com/rust-lang/rust/blob/9150f844e2624eb013ec78ca08c1d416e6644026/src/librustc_typeck/astconv.rs#L46>
///
/// This is a query mostly to handle cycles somewhat gracefully; e.g. the
/// following bounds are disallowed: `T: Foo<U::Item>, U: Foo<T::Item>`, but
/// these are fine: `T: Foo<U::Item>, U: Foo<()>`.
@ -1589,7 +1657,7 @@ pub(crate) fn generic_predicates_for_param<'db>(
);
// we have to filter out all other predicates *first*, before attempting to lower them
let predicate = |pred: &_, ctx: &mut TyLoweringContext<'_, '_>| match pred {
let has_relevant_bound = |pred: &_, ctx: &mut TyLoweringContext<'_, '_>| match pred {
WherePredicate::ForLifetime { target, bound, .. }
| WherePredicate::TypeBound { target, bound, .. } => {
let invalid_target = { ctx.lower_ty_only_param(*target) != Some(param_id) };
@ -1637,11 +1705,7 @@ pub(crate) fn generic_predicates_for_param<'db>(
return false;
};
rustc_type_ir::elaborate::supertrait_def_ids(interner, tr.into()).any(|tr| {
tr.0.trait_items(db).items.iter().any(|(name, item)| {
matches!(item, AssocItemId::TypeAliasId(_)) && name == assoc_name
})
})
trait_or_supertrait_has_assoc_type(db, tr, assoc_name)
}
TypeBound::Use(_) | TypeBound::Lifetime(_) | TypeBound::Error => false,
}
@ -1654,13 +1718,16 @@ pub(crate) fn generic_predicates_for_param<'db>(
{
ctx.store = maybe_parent_generics.store();
for pred in maybe_parent_generics.where_predicates() {
if predicate(pred, &mut ctx) {
predicates.extend(ctx.lower_where_predicate(
pred,
true,
maybe_parent_generics,
PredicateFilter::All,
));
if has_relevant_bound(pred, &mut ctx) {
predicates.extend(
ctx.lower_where_predicate(
pred,
true,
maybe_parent_generics,
PredicateFilter::All,
)
.map(|(pred, _)| pred),
);
}
}
}
@ -1691,26 +1758,70 @@ pub(crate) fn generic_predicates_for_param_cycle_result(
StoredEarlyBinder::bind(Clauses::empty(DbInterner::new_no_crate(db)).store())
}
/// Check if this trait or any of its supertraits define an associated
/// type with the given name.
fn trait_or_supertrait_has_assoc_type(
    db: &dyn HirDatabase,
    tr: TraitId,
    assoc_name: &Name,
) -> bool {
    // Short-circuits on the first trait in the supertrait closure that
    // declares a type alias with the requested name.
    all_super_traits(db, tr).into_iter().any(|trait_id| {
        trait_id.trait_items(db).items.iter().any(|(name, item)| {
            name == assoc_name && matches!(item, AssocItemId::TypeAliasId(_))
        })
    })
}
#[inline]
pub(crate) fn type_alias_bounds<'db>(
db: &'db dyn HirDatabase,
type_alias: TypeAliasId,
) -> EarlyBinder<'db, &'db [Clause<'db>]> {
type_alias_bounds_with_diagnostics(db, type_alias).0.map_bound(|it| it.as_slice())
type_alias_bounds_with_diagnostics(db, type_alias).0.predicates.map_bound(|it| it.as_slice())
}
pub(crate) fn type_alias_bounds_with_diagnostics<'db>(
#[inline]
pub(crate) fn type_alias_self_bounds<'db>(
db: &'db dyn HirDatabase,
type_alias: TypeAliasId,
) -> (EarlyBinder<'db, Clauses<'db>>, Diagnostics) {
let (bounds, diags) = type_alias_bounds_with_diagnostics_query(db, type_alias);
return (bounds.get(), diags.clone());
) -> EarlyBinder<'db, &'db [Clause<'db>]> {
let (TypeAliasBounds { predicates, assoc_ty_bounds_start }, _) =
type_alias_bounds_with_diagnostics(db, type_alias);
predicates.map_bound(|it| &it.as_slice()[..assoc_ty_bounds_start as usize])
}
/// Lowered bounds of a type alias, laid out so the self-only predicates can
/// be sliced off without extra allocation.
#[derive(PartialEq, Eq, Debug, Hash)]
struct TypeAliasBounds<T> {
    // All lowered predicates: self-only predicates first, then the
    // associated-type bounds appended at the end.
    predicates: T,
    // Index into `predicates` where the associated-type bounds begin;
    // everything before it is a self-only predicate.
    assoc_ty_bounds_start: u32,
}
fn type_alias_bounds_with_diagnostics<'db>(
db: &'db dyn HirDatabase,
type_alias: TypeAliasId,
) -> (TypeAliasBounds<EarlyBinder<'db, Clauses<'db>>>, Diagnostics) {
let (TypeAliasBounds { predicates, assoc_ty_bounds_start }, diags) =
type_alias_bounds_with_diagnostics_query(db, type_alias);
return (
TypeAliasBounds {
predicates: predicates.get(),
assoc_ty_bounds_start: *assoc_ty_bounds_start,
},
diags.clone(),
);
#[salsa::tracked(returns(ref))]
pub fn type_alias_bounds_with_diagnostics_query<'db>(
db: &'db dyn HirDatabase,
type_alias: TypeAliasId,
) -> (StoredEarlyBinder<StoredClauses>, Diagnostics) {
) -> (TypeAliasBounds<StoredEarlyBinder<StoredClauses>>, Diagnostics) {
let type_alias_data = db.type_alias_signature(type_alias);
let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db);
let mut ctx = TyLoweringContext::new(
@ -1727,10 +1838,18 @@ pub(crate) fn type_alias_bounds_with_diagnostics<'db>(
let interner_ty = Ty::new_projection_from_args(interner, def_id, item_args);
let mut bounds = Vec::new();
let mut assoc_ty_bounds = Vec::new();
for bound in &type_alias_data.bounds {
ctx.lower_type_bound(bound, interner_ty, false).for_each(|pred| {
bounds.push(pred);
});
ctx.lower_type_bound(bound, interner_ty, false).for_each(
|(pred, source)| match source {
GenericPredicateSource::SelfOnly => {
bounds.push(pred);
}
GenericPredicateSource::AssocTyBound => {
assoc_ty_bounds.push(pred);
}
},
);
}
if !ctx.unsized_types.contains(&interner_ty) {
@ -1745,8 +1864,14 @@ pub(crate) fn type_alias_bounds_with_diagnostics<'db>(
};
}
let assoc_ty_bounds_start = bounds.len() as u32;
bounds.extend(assoc_ty_bounds);
(
StoredEarlyBinder::bind(Clauses::new_from_slice(&bounds).store()),
TypeAliasBounds {
predicates: StoredEarlyBinder::bind(Clauses::new_from_slice(&bounds).store()),
assoc_ty_bounds_start,
},
create_diagnostics(ctx.diagnostics),
)
}
@ -1754,11 +1879,15 @@ pub(crate) fn type_alias_bounds_with_diagnostics<'db>(
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct GenericPredicates {
// The order is the following: first, if `parent_is_trait == true`, comes the implicit trait predicate for the
// parent. Then come the explicit predicates for the parent, then the explicit trait predicate for the child,
// The order is the following: first, if `parent_is_trait == true`, comes the implicit trait
// predicate for the parent. Then come the bounds of the associated types of the parents,
// then the explicit, self-only predicates for the parent, then the explicit, self-only trait
// predicate for the child, then the bounds of the associated types of the child,
// then the implicit trait predicate for the child, if `is_trait` is `true`.
predicates: StoredEarlyBinder<StoredClauses>,
parent_explicit_self_predicates_start: u32,
own_predicates_start: u32,
own_assoc_ty_bounds_start: u32,
is_trait: bool,
parent_is_trait: bool,
}
@ -1782,7 +1911,15 @@ impl GenericPredicates {
pub(crate) fn from_explicit_own_predicates(
predicates: StoredEarlyBinder<StoredClauses>,
) -> Self {
Self { predicates, own_predicates_start: 0, is_trait: false, parent_is_trait: false }
let len = predicates.get().skip_binder().len() as u32;
Self {
predicates,
parent_explicit_self_predicates_start: 0,
own_predicates_start: 0,
own_assoc_ty_bounds_start: len,
is_trait: false,
parent_is_trait: false,
}
}
#[inline]
@ -1814,6 +1951,14 @@ impl GenericPredicates {
Self::query(db, def).explicit_predicates()
}
/// Convenience wrapper: runs the generic-predicates query for `def` and
/// returns its explicit implied predicates (explicit predicates plus
/// associated-type bounds, without the implicit `Self: Trait` predicates).
#[inline]
pub fn query_explicit_implied<'db>(
    db: &'db dyn HirDatabase,
    def: GenericDefId,
) -> EarlyBinder<'db, &'db [Clause<'db>]> {
    Self::query(db, def).explicit_implied_predicates()
}
#[inline]
pub fn all_predicates(&self) -> EarlyBinder<'_, &[Clause<'_>]> {
self.predicates.get().map_bound(|it| it.as_slice())
@ -1824,9 +1969,18 @@ impl GenericPredicates {
self.predicates.get().map_bound(|it| &it.as_slice()[self.own_predicates_start as usize..])
}
/// Returns the predicates, minus the implicit `Self: Trait` predicate and bounds of the
/// associated types for a trait.
///
/// Concretely, slices the stored predicate list from the start of the
/// parent's explicit self-only predicates up to (but excluding) this item's
/// associated-type bounds.
#[inline]
pub fn explicit_predicates(&self) -> EarlyBinder<'_, &[Clause<'_>]> {
    self.predicates.get().map_bound(|it| {
        &it.as_slice()[self.parent_explicit_self_predicates_start as usize
            ..self.own_assoc_ty_bounds_start as usize]
    })
}
#[inline]
pub fn explicit_implied_predicates(&self) -> EarlyBinder<'_, &[Clause<'_>]> {
self.predicates.get().map_bound(|it| {
&it.as_slice()[usize::from(self.parent_is_trait)..it.len() - usize::from(self.is_trait)]
})
@ -1902,26 +2056,22 @@ where
);
let sized_trait = ctx.lang_items.Sized;
let mut predicates = Vec::new();
// We need to lower parents and self separately - see the comment below lowering of implicit
// `Sized` predicates for why.
let mut own_predicates = Vec::new();
let mut parent_predicates = Vec::new();
let mut own_assoc_ty_bounds = Vec::new();
let mut parent_assoc_ty_bounds = Vec::new();
let all_generics =
std::iter::successors(Some(&generics), |generics| generics.parent_generics())
.collect::<ArrayVec<_, 2>>();
let mut is_trait = false;
let mut parent_is_trait = false;
if all_generics.len() > 1 {
add_implicit_trait_predicate(
interner,
all_generics.last().unwrap().def(),
predicate_filter,
&mut predicates,
&mut parent_is_trait,
);
}
// We need to lower parent predicates first - see the comment below lowering of implicit `Sized` predicates
// for why.
let mut own_predicates_start = 0;
let own_implicit_trait_predicate = implicit_trait_predicate(interner, def, predicate_filter);
let parent_implicit_trait_predicate = if all_generics.len() > 1 {
implicit_trait_predicate(interner, all_generics.last().unwrap().def(), predicate_filter)
} else {
None
};
for &maybe_parent_generics in all_generics.iter().rev() {
let current_def_predicates_start = predicates.len();
// Collect only diagnostics from the child, not including parents.
ctx.diagnostics.clear();
@ -1929,15 +2079,37 @@ where
ctx.store = maybe_parent_generics.store();
for pred in maybe_parent_generics.where_predicates() {
tracing::debug!(?pred);
predicates.extend(ctx.lower_where_predicate(
pred,
false,
maybe_parent_generics,
predicate_filter,
));
for (pred, source) in
ctx.lower_where_predicate(pred, false, maybe_parent_generics, predicate_filter)
{
match source {
GenericPredicateSource::SelfOnly => {
if maybe_parent_generics.def() == def {
own_predicates.push(pred);
} else {
parent_predicates.push(pred);
}
}
GenericPredicateSource::AssocTyBound => {
if maybe_parent_generics.def() == def {
own_assoc_ty_bounds.push(pred);
} else {
parent_assoc_ty_bounds.push(pred);
}
}
}
}
}
push_const_arg_has_type_predicates(db, &mut predicates, maybe_parent_generics);
if maybe_parent_generics.def() == def {
push_const_arg_has_type_predicates(db, &mut own_predicates, maybe_parent_generics);
} else {
push_const_arg_has_type_predicates(
db,
&mut parent_predicates,
maybe_parent_generics,
);
}
if let Some(sized_trait) = sized_trait {
let mut add_sized_clause = |param_idx, param_id, param_data| {
@ -1971,7 +2143,11 @@ where
}),
)),
));
predicates.push(clause);
if maybe_parent_generics.def() == def {
own_predicates.push(clause);
} else {
parent_predicates.push(clause);
}
};
let parent_params_len = maybe_parent_generics.len_parent();
maybe_parent_generics.iter_self().enumerate().for_each(
@ -1990,30 +2166,55 @@ where
// predicates before lowering the child, as a child cannot define a `?Sized` predicate for its parent.
// But we do have to lower the parent first.
}
if maybe_parent_generics.def() == def {
own_predicates_start = current_def_predicates_start as u32;
}
}
add_implicit_trait_predicate(interner, def, predicate_filter, &mut predicates, &mut is_trait);
let diagnostics = create_diagnostics(ctx.diagnostics);
// The order is:
//
// 1. parent implicit trait pred
// 2. parent assoc bounds
// 3. parent self only preds
// 4. own self only preds
// 5. own assoc ty bounds
// 6. own implicit trait pred
//
// The purpose of this is to index the slice of the followings, without making extra `Vec`s or
// iterators:
// - explicit self only predicates, of own or own + self
// - explicit predicates, of own or own + self
let predicates = parent_implicit_trait_predicate
.iter()
.chain(parent_assoc_ty_bounds.iter())
.chain(parent_predicates.iter())
.chain(own_predicates.iter())
.chain(own_assoc_ty_bounds.iter())
.chain(own_implicit_trait_predicate.iter())
.copied()
.collect::<Vec<_>>();
let parent_is_trait = parent_implicit_trait_predicate.is_some();
let is_trait = own_implicit_trait_predicate.is_some();
let parent_explicit_self_predicates_start =
parent_is_trait as u32 + parent_assoc_ty_bounds.len() as u32;
let own_predicates_start =
parent_explicit_self_predicates_start + parent_predicates.len() as u32;
let own_assoc_ty_bounds_start = own_predicates_start + own_predicates.len() as u32;
let predicates = GenericPredicates {
parent_explicit_self_predicates_start,
own_predicates_start,
own_assoc_ty_bounds_start,
is_trait,
parent_is_trait,
predicates: StoredEarlyBinder::bind(Clauses::new_from_slice(&predicates).store()),
};
return (predicates, diagnostics);
fn add_implicit_trait_predicate<'db>(
fn implicit_trait_predicate<'db>(
interner: DbInterner<'db>,
def: GenericDefId,
predicate_filter: PredicateFilter,
predicates: &mut Vec<Clause<'db>>,
set_is_trait: &mut bool,
) {
) -> Option<Clause<'db>> {
// For traits, add `Self: Trait` predicate. This is
// not part of the predicates that a user writes, but it
// is something that one must prove in order to invoke a
@ -2029,8 +2230,9 @@ where
if let GenericDefId::TraitId(def_id) = def
&& predicate_filter == PredicateFilter::All
{
*set_is_trait = true;
predicates.push(TraitRef::identity(interner, def_id.into()).upcast(interner));
Some(TraitRef::identity(interner, def_id.into()).upcast(interner))
} else {
None
}
}
}
@ -2327,7 +2529,7 @@ pub(crate) fn associated_ty_item_bounds<'db>(
let mut bounds = Vec::new();
for bound in &type_alias_data.bounds {
ctx.lower_type_bound(bound, self_ty, false).for_each(|pred| {
ctx.lower_type_bound(bound, self_ty, false).for_each(|(pred, _)| {
if let Some(bound) = pred
.kind()
.map_bound(|c| match c {

View file

@ -32,7 +32,8 @@ use crate::{
db::HirDatabase,
generics::{Generics, generics},
lower::{
LifetimeElisionKind, PathDiagnosticCallbackData, named_associated_type_shorthand_candidates,
GenericPredicateSource, LifetimeElisionKind, PathDiagnosticCallbackData,
named_associated_type_shorthand_candidates,
},
next_solver::{
Binder, Clause, Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, Predicate,
@ -598,7 +599,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
explicit_self_ty: Option<Ty<'db>>,
lowering_assoc_type_generics: bool,
) -> GenericArgs<'db> {
let old_lifetime_elision = self.ctx.lifetime_elision.clone();
let old_lifetime_elision = self.ctx.lifetime_elision;
if let Some(args) = self.current_or_prev_segment.args_and_bindings
&& args.parenthesized != GenericArgsParentheses::No
@ -639,7 +640,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
explicit_self_ty,
PathGenericsSource::Segment(self.current_segment_u32()),
lowering_assoc_type_generics,
self.ctx.lifetime_elision.clone(),
self.ctx.lifetime_elision,
);
self.ctx.lifetime_elision = old_lifetime_elision;
result
@ -853,7 +854,8 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
pub(super) fn assoc_type_bindings_from_type_bound<'c>(
mut self,
trait_ref: TraitRef<'db>,
) -> Option<impl Iterator<Item = Clause<'db>> + use<'a, 'b, 'c, 'db>> {
) -> Option<impl Iterator<Item = (Clause<'db>, GenericPredicateSource)> + use<'a, 'b, 'c, 'db>>
{
let interner = self.ctx.interner;
self.current_or_prev_segment.args_and_bindings.map(|args_and_bindings| {
args_and_bindings.bindings.iter().enumerate().flat_map(move |(binding_idx, binding)| {
@ -882,7 +884,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
assoc_type: binding_idx as u32,
},
false,
this.ctx.lifetime_elision.clone(),
this.ctx.lifetime_elision,
)
});
let args = GenericArgs::new_from_iter(
@ -900,7 +902,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
// `Fn()`-style generics are elided like functions. This is `Output` (we lower to it in hir-def).
LifetimeElisionKind::for_fn_ret(self.ctx.interner)
} else {
self.ctx.lifetime_elision.clone()
self.ctx.lifetime_elision
};
self.with_lifetime_elision(lifetime_elision, |this| {
match (&this.ctx.store[type_ref], this.ctx.impl_trait_mode.mode) {
@ -921,21 +923,29 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
),
)),
));
predicates.push(pred);
predicates.push((pred, GenericPredicateSource::SelfOnly));
}
}
})
}
for bound in binding.bounds.iter() {
predicates.extend(self.ctx.lower_type_bound(
bound,
Ty::new_alias(
self.ctx.interner,
AliasTyKind::Projection,
AliasTy::new_from_args(self.ctx.interner, associated_ty.into(), args),
),
false,
));
predicates.extend(
self.ctx
.lower_type_bound(
bound,
Ty::new_alias(
self.ctx.interner,
AliasTyKind::Projection,
AliasTy::new_from_args(
self.ctx.interner,
associated_ty.into(),
args,
),
),
false,
)
.map(|(pred, _)| (pred, GenericPredicateSource::AssocTyBound)),
);
}
predicates
})

View file

@ -206,11 +206,11 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
}
}
/// Used by [FnCtxt::lookup_method_for_operator] with `-Znext-solver`.
/// Used by `FnCtxt::lookup_method_for_operator` with `-Znext-solver`.
///
/// With `AsRigid` we error on `impl Opaque: NotInItemBounds` while
/// `AsInfer` just treats it as ambiguous and succeeds. This is necessary
/// as we want [FnCtxt::check_expr_call] to treat not-yet-defined opaque
/// as we want `FnCtxt::check_expr_call` to treat not-yet-defined opaque
/// types as rigid to support `impl Deref<Target = impl FnOnce()>` and
/// `Box<impl FnOnce()>`.
///

View file

@ -285,11 +285,15 @@ impl<'a, 'db> MethodResolutionContext<'a, 'db> {
let infcx = self.infcx;
let (self_ty, var_values) = infcx.instantiate_canonical(&query_input);
debug!(?self_ty, ?query_input, "probe_op: Mode::Path");
let prev_opaque_entries =
self.infcx.inner.borrow_mut().opaque_types().num_entries();
MethodAutoderefStepsResult {
steps: smallvec![CandidateStep {
self_ty: self
.infcx
.make_query_response_ignoring_pending_obligations(var_values, self_ty),
self_ty: self.infcx.make_query_response_ignoring_pending_obligations(
var_values,
self_ty,
prev_opaque_entries
),
self_ty_is_opaque: false,
autoderefs: 0,
from_unsafe_deref: false,
@ -376,6 +380,8 @@ impl<'a, 'db> MethodResolutionContext<'a, 'db> {
// infer var is not an opaque.
let infcx = self.infcx;
let (self_ty, inference_vars) = infcx.instantiate_canonical(self_ty);
let prev_opaque_entries = infcx.inner.borrow_mut().opaque_types().num_entries();
let self_ty_is_opaque = |ty: Ty<'_>| {
if let TyKind::Infer(InferTy::TyVar(vid)) = ty.kind() {
infcx.has_opaques_with_sub_unified_hidden_type(vid)
@ -414,6 +420,7 @@ impl<'a, 'db> MethodResolutionContext<'a, 'db> {
self_ty: infcx.make_query_response_ignoring_pending_obligations(
inference_vars,
ty,
prev_opaque_entries,
),
self_ty_is_opaque: self_ty_is_opaque(ty),
autoderefs: d,
@ -437,6 +444,7 @@ impl<'a, 'db> MethodResolutionContext<'a, 'db> {
self_ty: infcx.make_query_response_ignoring_pending_obligations(
inference_vars,
ty,
prev_opaque_entries,
),
self_ty_is_opaque: self_ty_is_opaque(ty),
autoderefs: d,
@ -461,13 +469,17 @@ impl<'a, 'db> MethodResolutionContext<'a, 'db> {
ty: infcx.make_query_response_ignoring_pending_obligations(
inference_vars,
final_ty,
prev_opaque_entries,
),
})
}
TyKind::Error(_) => Some(MethodAutoderefBadTy {
reached_raw_pointer,
ty: infcx
.make_query_response_ignoring_pending_obligations(inference_vars, final_ty),
ty: infcx.make_query_response_ignoring_pending_obligations(
inference_vars,
final_ty,
prev_opaque_entries,
),
}),
TyKind::Array(elem_ty, _) => {
let autoderefs = steps.iter().filter(|s| s.reachable_via_deref).count() - 1;
@ -475,6 +487,7 @@ impl<'a, 'db> MethodResolutionContext<'a, 'db> {
self_ty: infcx.make_query_response_ignoring_pending_obligations(
inference_vars,
Ty::new_slice(infcx.interner, elem_ty),
prev_opaque_entries,
),
self_ty_is_opaque: false,
autoderefs,
@ -1246,9 +1259,9 @@ impl<'a, 'db, Choice: ProbeChoice<'db>> ProbeContext<'a, 'db, Choice> {
.filter(|step| step.reachable_via_deref)
.filter(|step| {
debug!("pick_all_method: step={:?}", step);
// skip types that are from a type error or that would require dereferencing
// a raw pointer
!step.self_ty.value.value.references_non_lt_error() && !step.from_unsafe_deref
// Skip types with type errors (but not const/lifetime errors, which are
// often spurious due to incomplete const evaluation) and raw pointer derefs.
!step.self_ty.value.value.references_only_ty_error() && !step.from_unsafe_deref
})
.try_for_each(|step| {
let InferOk { value: self_ty, obligations: instantiate_self_ty_obligations } = self
@ -1740,7 +1753,7 @@ impl<'a, 'db, Choice: ProbeChoice<'db>> ProbeContext<'a, 'db, Choice> {
/// We want to only accept trait methods if they were hold even if the
/// opaque types were rigid. To handle this, we both check that for trait
/// candidates the goal were to hold even when treating opaques as rigid,
/// see [OpaqueTypesJank](rustc_trait_selection::solve::OpaqueTypesJank).
/// see `rustc_trait_selection::solve::OpaqueTypesJank`.
///
/// We also check that all opaque types encountered as self types in the
/// autoderef chain don't get constrained when applying the candidate.

View file

@ -9,7 +9,7 @@ use hir_def::{
expr_store::{Body, ExpressionStore, HygieneId, path::Path},
hir::{
ArithOp, Array, BinaryOp, BindingAnnotation, BindingId, ExprId, LabelId, Literal, MatchArm,
Pat, PatId, RecordFieldPat, RecordLitField,
Pat, PatId, RecordFieldPat, RecordLitField, RecordSpread,
},
item_tree::FieldsShape,
lang_item::LangItems,
@ -867,16 +867,17 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
}
Expr::Become { .. } => not_supported!("tail-calls"),
Expr::Yield { .. } => not_supported!("yield"),
Expr::RecordLit { fields, path, spread } => {
let spread_place = match spread {
&Some(it) => {
Expr::RecordLit { fields, path, spread, .. } => {
let spread_place = match *spread {
RecordSpread::Expr(it) => {
let Some((p, c)) = self.lower_expr_as_place(current, it, true)? else {
return Ok(None);
};
current = c;
Some(p)
}
None => None,
RecordSpread::None => None,
RecordSpread::FieldDefaults => not_supported!("empty record spread"),
};
let variant_id =
self.infer.variant_resolution_for_expr(expr_id).ok_or_else(|| match path {

View file

@ -48,6 +48,7 @@ pub struct FulfillmentCtxt<'db> {
/// use the context in exactly this snapshot.
#[expect(unused)]
usable_in_snapshot: usize,
try_evaluate_obligations_scratch: PendingObligations<'db>,
}
#[derive(Default, Debug, Clone)]
@ -115,6 +116,7 @@ impl<'db> FulfillmentCtxt<'db> {
FulfillmentCtxt {
obligations: Default::default(),
usable_in_snapshot: infcx.num_open_snapshots(),
try_evaluate_obligations_scratch: Default::default(),
}
}
}
@ -162,12 +164,12 @@ impl<'db> FulfillmentCtxt<'db> {
// and select. They should use a different `ObligationCtxt` instead. Then we'll be also able
// to not put the obligations queue in `InferenceTable`'s snapshots.
// assert_eq!(self.usable_in_snapshot, infcx.num_open_snapshots());
self.try_evaluate_obligations_scratch.clear();
let mut errors = Vec::new();
let mut obligations = Vec::new();
loop {
let mut any_changed = false;
obligations.extend(self.obligations.drain_pending(|_| true));
for (mut obligation, stalled_on) in obligations.drain(..) {
self.try_evaluate_obligations_scratch.extend(self.obligations.drain_pending(|_| true));
for (mut obligation, stalled_on) in self.try_evaluate_obligations_scratch.drain(..) {
if obligation.recursion_depth >= infcx.interner.recursion_limit() {
self.obligations.on_fulfillment_overflow(infcx);
// Only return true errors that we have accumulated while processing.

View file

@ -15,6 +15,7 @@ use crate::next_solver::{
infer::{
InferCtxt, InferOk, InferResult,
canonical::{QueryRegionConstraints, QueryResponse, canonicalizer::OriginalQueryValues},
opaque_types::table::OpaqueTypeStorageEntries,
traits::{ObligationCause, PredicateObligations},
},
};
@ -194,6 +195,7 @@ impl<'db> InferCtxt<'db> {
&self,
inference_vars: CanonicalVarValues<'db>,
answer: T,
prev_entries: OpaqueTypeStorageEntries,
) -> Canonical<'db, QueryResponse<'db, T>>
where
T: TypeFoldable<DbInterner<'db>>,
@ -209,7 +211,7 @@ impl<'db> InferCtxt<'db> {
.inner
.borrow_mut()
.opaque_type_storage
.iter_opaque_types()
.opaque_types_added_since(prev_entries)
.map(|(k, v)| (k, v.ty))
.collect();

View file

@ -140,7 +140,7 @@ pub struct InferCtxtInner<'db> {
///
/// Before running `resolve_regions_and_report_errors`, the creator
/// of the inference context is expected to invoke
/// [`InferCtxt::process_registered_region_obligations`]
/// `InferCtxt::process_registered_region_obligations`
/// for each body-id in this map, which will process the
/// obligations within. This is expected to be done 'late enough'
/// that all type inference variables have been bound and so forth.

View file

@ -55,6 +55,13 @@ impl ObligationCause {
}
}
impl Default for ObligationCause {
#[inline]
fn default() -> Self {
Self::new()
}
}
/// An `Obligation` represents some trait reference (e.g., `i32: Eq`) for
/// which the "impl_source" must be found. The process of finding an "impl_source" is
/// called "resolving" the `Obligation`. This process consists of

View file

@ -41,7 +41,8 @@ use crate::{
AdtIdWrapper, AnyImplId, BoundConst, CallableIdWrapper, CanonicalVarKind, ClosureIdWrapper,
CoroutineIdWrapper, Ctor, FnSig, FxIndexMap, GeneralConstIdWrapper, OpaqueTypeKey,
RegionAssumptions, SimplifiedType, SolverContext, SolverDefIds, TraitIdWrapper,
TypeAliasIdWrapper, UnevaluatedConst, util::explicit_item_bounds,
TypeAliasIdWrapper, UnevaluatedConst,
util::{explicit_item_bounds, explicit_item_self_bounds},
},
};
@ -1421,7 +1422,7 @@ impl<'db> Interner for DbInterner<'db> {
self,
def_id: Self::DefId,
) -> EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>> {
explicit_item_bounds(self, def_id)
explicit_item_self_bounds(self, def_id)
.map_bound(|bounds| elaborate(self, bounds).filter_only_self())
}
@ -1500,7 +1501,7 @@ impl<'db> Interner for DbInterner<'db> {
}
}
predicates_of(self.db, def_id).explicit_predicates().map_bound(|predicates| {
predicates_of(self.db, def_id).explicit_implied_predicates().map_bound(|predicates| {
predicates
.iter()
.copied()

View file

@ -273,9 +273,8 @@ impl<'db> std::fmt::Debug for Clauses<'db> {
impl<'db> Clauses<'db> {
#[inline]
pub fn empty(_interner: DbInterner<'db>) -> Self {
// FIXME: Get from a static.
Self::new_from_slice(&[])
pub fn empty(interner: DbInterner<'db>) -> Self {
interner.default_types().empty.clauses
}
#[inline]

View file

@ -508,6 +508,11 @@ impl<'db> Ty<'db> {
references_non_lt_error(&self)
}
/// Whether the type contains a type error (ignoring const and lifetime errors).
pub fn references_only_ty_error(self) -> bool {
references_only_ty_error(&self)
}
pub fn callable_sig(self, interner: DbInterner<'db>) -> Option<Binder<'db, FnSig<'db>>> {
match self.kind() {
TyKind::FnDef(callable, args) => {
@ -777,6 +782,20 @@ impl<'db> TypeVisitor<DbInterner<'db>> for ReferencesNonLifetimeError {
}
}
pub fn references_only_ty_error<'db, T: TypeVisitableExt<DbInterner<'db>>>(t: &T) -> bool {
t.references_error() && t.visit_with(&mut ReferencesOnlyTyError).is_break()
}
struct ReferencesOnlyTyError;
impl<'db> TypeVisitor<DbInterner<'db>> for ReferencesOnlyTyError {
type Result = ControlFlow<()>;
fn visit_ty(&mut self, ty: Ty<'db>) -> Self::Result {
if ty.is_ty_error() { ControlFlow::Break(()) } else { ty.super_visit_with(self) }
}
}
impl<'db> std::fmt::Debug for Ty<'db> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.inner().internee.fmt(f)

View file

@ -455,6 +455,21 @@ pub fn explicit_item_bounds<'db>(
clauses.map_bound(|clauses| clauses.iter().copied())
}
pub fn explicit_item_self_bounds<'db>(
interner: DbInterner<'db>,
def_id: SolverDefId,
) -> EarlyBinder<'db, impl DoubleEndedIterator<Item = Clause<'db>> + ExactSizeIterator> {
let db = interner.db();
let clauses = match def_id {
SolverDefId::TypeAliasId(type_alias) => {
crate::lower::type_alias_self_bounds(db, type_alias)
}
SolverDefId::InternedOpaqueTyId(id) => id.self_predicates(db),
_ => panic!("Unexpected GenericDefId"),
};
clauses.map_bound(|clauses| clauses.iter().copied())
}
pub struct ContainsTypeErrors;
impl<'db> TypeVisitor<DbInterner<'db>> for ContainsTypeErrors {

View file

@ -135,7 +135,7 @@ fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expec
fn deref_in_let() {
check_closure_captures(
r#"
//- minicore:copy
//- minicore:copy, fn
fn main() {
let a = &mut true;
let closure = || { let b = *a; };
@ -149,7 +149,7 @@ fn main() {
fn deref_then_ref_pattern() {
check_closure_captures(
r#"
//- minicore:copy
//- minicore:copy, fn
fn main() {
let a = &mut true;
let closure = || { let &mut ref b = a; };
@ -159,7 +159,7 @@ fn main() {
);
check_closure_captures(
r#"
//- minicore:copy
//- minicore:copy, fn
fn main() {
let a = &mut true;
let closure = || { let &mut ref mut b = a; };
@ -173,7 +173,7 @@ fn main() {
fn unique_borrow() {
check_closure_captures(
r#"
//- minicore:copy
//- minicore:copy, fn
fn main() {
let a = &mut true;
let closure = || { *a = false; };
@ -187,7 +187,7 @@ fn main() {
fn deref_ref_mut() {
check_closure_captures(
r#"
//- minicore:copy
//- minicore:copy, fn
fn main() {
let a = &mut true;
let closure = || { let ref mut b = *a; };
@ -201,7 +201,7 @@ fn main() {
fn let_else_not_consuming() {
check_closure_captures(
r#"
//- minicore:copy
//- minicore:copy, fn
fn main() {
let a = &mut true;
let closure = || { let _ = *a else { return; }; };
@ -215,7 +215,7 @@ fn main() {
fn consume() {
check_closure_captures(
r#"
//- minicore:copy
//- minicore:copy, fn
struct NonCopy;
fn main() {
let a = NonCopy;
@ -230,7 +230,7 @@ fn main() {
fn ref_to_upvar() {
check_closure_captures(
r#"
//- minicore:copy
//- minicore:copy, fn
struct NonCopy;
fn main() {
let mut a = NonCopy;
@ -248,7 +248,7 @@ fn main() {
fn field() {
check_closure_captures(
r#"
//- minicore:copy
//- minicore:copy, fn
struct Foo { a: i32, b: i32 }
fn main() {
let a = Foo { a: 0, b: 0 };
@ -263,7 +263,7 @@ fn main() {
fn fields_different_mode() {
check_closure_captures(
r#"
//- minicore:copy
//- minicore:copy, fn
struct NonCopy;
struct Foo { a: i32, b: i32, c: NonCopy, d: bool }
fn main() {
@ -286,7 +286,7 @@ fn main() {
fn autoref() {
check_closure_captures(
r#"
//- minicore:copy
//- minicore:copy, fn
struct Foo;
impl Foo {
fn imm(&self) {}
@ -308,7 +308,7 @@ fn main() {
fn captures_priority() {
check_closure_captures(
r#"
//- minicore:copy
//- minicore:copy, fn
struct NonCopy;
fn main() {
let mut a = &mut true;
@ -336,7 +336,7 @@ fn main() {
fn let_underscore() {
check_closure_captures(
r#"
//- minicore:copy
//- minicore:copy, fn
fn main() {
let mut a = true;
let closure = || { let _ = a; };
@ -350,7 +350,7 @@ fn main() {
fn match_wildcard() {
check_closure_captures(
r#"
//- minicore:copy
//- minicore:copy, fn
struct NonCopy;
fn main() {
let mut a = NonCopy;
@ -375,7 +375,7 @@ fn main() {
fn multiple_bindings() {
check_closure_captures(
r#"
//- minicore:copy
//- minicore:copy, fn
fn main() {
let mut a = false;
let mut closure = || { let (b | b) = a; };
@ -389,7 +389,7 @@ fn main() {
fn multiple_usages() {
check_closure_captures(
r#"
//- minicore:copy
//- minicore:copy, fn
fn main() {
let mut a = false;
let mut closure = || {
@ -410,7 +410,7 @@ fn main() {
fn ref_then_deref() {
check_closure_captures(
r#"
//- minicore:copy
//- minicore:copy, fn
fn main() {
let mut a = false;
let mut closure = || { let b = *&mut a; };
@ -424,7 +424,7 @@ fn main() {
fn ref_of_ref() {
check_closure_captures(
r#"
//- minicore:copy
//- minicore:copy, fn
fn main() {
let mut a = &false;
let closure = || { let b = &a; };
@ -446,7 +446,7 @@ fn main() {
fn multiple_capture_usages() {
check_closure_captures(
r#"
//- minicore:copy
//- minicore:copy, fn
struct A { a: i32, b: bool }
fn main() {
let mut a = A { a: 123, b: false };
@ -465,7 +465,7 @@ fn main() {
fn let_binding_is_a_ref_capture_in_ref_binding() {
check_closure_captures(
r#"
//- minicore:copy
//- minicore:copy, fn
struct S;
fn main() {
let mut s = S;
@ -489,7 +489,7 @@ fn main() {
fn let_binding_is_a_value_capture_in_binding() {
check_closure_captures(
r#"
//- minicore:copy, option
//- minicore:copy, fn, option
struct Box(i32);
fn main() {
let b = Some(Box(0));
@ -508,7 +508,7 @@ fn main() {
fn alias_needs_to_be_normalized() {
check_closure_captures(
r#"
//- minicore:copy
//- minicore:copy, fn
trait Trait {
type Associated;
}
@ -528,3 +528,41 @@ fn main() {
expect!["220..257;174..175;245..250 ByRef(Shared) c.b.x &'? i32"],
);
}
#[test]
fn nested_ref_captures_from_outer() {
check_closure_captures(
r#"
//- minicore:copy, fn
fn f() {
let a = 1;
let a_closure = || {
let b_closure = || {
{ a };
};
};
}
"#,
expect![[r#"
44..113;17..18;92..93 ByRef(Shared) a &'? i32
73..106;17..18;92..93 ByRef(Shared) a &'? i32"#]],
);
}
#[test]
fn nested_ref_captures() {
check_closure_captures(
r#"
//- minicore:copy, fn
fn f() {
let a_closure = || {
let b = 2;
let b_closure = || {
{ b };
};
};
}
"#,
expect!["77..110;46..47;96..97 ByRef(Shared) b &'? i32"],
);
}

View file

@ -1,5 +1,7 @@
use expect_test::expect;
use crate::tests::check_infer;
use super::{check_infer_with_mismatches, check_no_mismatches, check_types};
#[test]
@ -176,3 +178,37 @@ fn main() {
"#,
);
}
#[test]
fn regression_21455() {
check_infer(
r#"
//- minicore: copy
struct Vec<T>(T);
impl<T> Vec<T> {
pub fn new() -> Self { loop {} }
}
pub struct Miku {}
impl Miku {
pub fn all_paths_to(&self) -> impl Copy {
Miku {
full_paths: Vec::new(),
}
}
}
"#,
expect![[r#"
61..72 '{ loop {} }': Vec<T>
63..70 'loop {}': !
68..70 '{}': ()
133..137 'self': &'? Miku
152..220 '{ ... }': Miku
162..214 'Miku {... }': Miku
193..201 'Vec::new': fn new<{unknown}>() -> Vec<{unknown}>
193..203 'Vec::new()': Vec<{unknown}>
"#]],
);
}

View file

@ -13,11 +13,11 @@ fn infer_pattern() {
let a = z;
let (c, d) = (1, "hello");
for (e, f) in some_iter {
for (e, f) in [(0, 1)] {
let g = e;
}
if let [val] = opt {
if let [val] = [y] {
let h = val;
}
@ -33,7 +33,7 @@ fn infer_pattern() {
"#,
expect![[r#"
8..9 'x': &'? i32
17..400 '{ ...o_x; }': ()
17..399 '{ ...o_x; }': ()
27..28 'y': &'? i32
31..32 'x': &'? i32
42..44 '&z': &'? i32
@ -47,58 +47,62 @@ fn infer_pattern() {
82..94 '(1, "hello")': (i32, &'? str)
83..84 '1': i32
86..93 '"hello"': &'static str
101..151 'for (e... }': fn into_iter<{unknown}>({unknown}) -> <{unknown} as IntoIterator>::IntoIter
101..151 'for (e... }': <{unknown} as IntoIterator>::IntoIter
101..151 'for (e... }': !
101..151 'for (e... }': {unknown}
101..151 'for (e... }': &'? mut {unknown}
101..151 'for (e... }': fn next<{unknown}>(&'? mut {unknown}) -> Option<<{unknown} as Iterator>::Item>
101..151 'for (e... }': Option<<{unknown} as Iterator>::Item>
101..151 'for (e... }': ()
101..151 'for (e... }': ()
101..151 'for (e... }': ()
101..151 'for (e... }': ()
105..111 '(e, f)': ({unknown}, {unknown})
106..107 'e': {unknown}
109..110 'f': {unknown}
115..124 'some_iter': {unknown}
125..151 '{ ... }': ()
139..140 'g': {unknown}
143..144 'e': {unknown}
157..204 'if let... }': ()
160..175 'let [val] = opt': bool
164..169 '[val]': [{unknown}]
165..168 'val': {unknown}
172..175 'opt': [{unknown}]
176..204 '{ ... }': ()
190..191 'h': {unknown}
194..197 'val': {unknown}
210..236 'if let...rue {}': ()
213..233 'let x ... &true': bool
217..225 'x @ true': &'? bool
221..225 'true': bool
221..225 'true': bool
228..233 '&true': &'? bool
229..233 'true': bool
234..236 '{}': ()
246..252 'lambda': impl Fn(u64, u64, i32) -> i32
255..287 '|a: u6...b; c }': impl Fn(u64, u64, i32) -> i32
256..257 'a': u64
264..265 'b': u64
267..268 'c': i32
275..287 '{ a + b; c }': i32
277..278 'a': u64
277..282 'a + b': u64
281..282 'b': u64
284..285 'c': i32
298..310 'ref ref_to_x': &'? &'? i32
313..314 'x': &'? i32
324..333 'mut mut_x': &'? i32
336..337 'x': &'? i32
347..367 'ref mu...f_to_x': &'? mut &'? i32
370..371 'x': &'? i32
381..382 'k': &'? mut &'? i32
385..397 'mut_ref_to_x': &'? mut &'? i32
101..150 'for (e... }': fn into_iter<[(i32, i32); 1]>([(i32, i32); 1]) -> <[(i32, i32); 1] as IntoIterator>::IntoIter
101..150 'for (e... }': IntoIter<(i32, i32), 1>
101..150 'for (e... }': !
101..150 'for (e... }': IntoIter<(i32, i32), 1>
101..150 'for (e... }': &'? mut IntoIter<(i32, i32), 1>
101..150 'for (e... }': fn next<IntoIter<(i32, i32), 1>>(&'? mut IntoIter<(i32, i32), 1>) -> Option<<IntoIter<(i32, i32), 1> as Iterator>::Item>
101..150 'for (e... }': Option<(i32, i32)>
101..150 'for (e... }': ()
101..150 'for (e... }': ()
101..150 'for (e... }': ()
101..150 'for (e... }': ()
105..111 '(e, f)': (i32, i32)
106..107 'e': i32
109..110 'f': i32
115..123 '[(0, 1)]': [(i32, i32); 1]
116..122 '(0, 1)': (i32, i32)
117..118 '0': i32
120..121 '1': i32
124..150 '{ ... }': ()
138..139 'g': i32
142..143 'e': i32
156..203 'if let... }': ()
159..174 'let [val] = [y]': bool
163..168 '[val]': [&'? i32; 1]
164..167 'val': &'? i32
171..174 '[y]': [&'? i32; 1]
172..173 'y': &'? i32
175..203 '{ ... }': ()
189..190 'h': &'? i32
193..196 'val': &'? i32
209..235 'if let...rue {}': ()
212..232 'let x ... &true': bool
216..224 'x @ true': &'? bool
220..224 'true': bool
220..224 'true': bool
227..232 '&true': &'? bool
228..232 'true': bool
233..235 '{}': ()
245..251 'lambda': impl Fn(u64, u64, i32) -> i32
254..286 '|a: u6...b; c }': impl Fn(u64, u64, i32) -> i32
255..256 'a': u64
263..264 'b': u64
266..267 'c': i32
274..286 '{ a + b; c }': i32
276..277 'a': u64
276..281 'a + b': u64
280..281 'b': u64
283..284 'c': i32
297..309 'ref ref_to_x': &'? &'? i32
312..313 'x': &'? i32
323..332 'mut mut_x': &'? i32
335..336 'x': &'? i32
346..366 'ref mu...f_to_x': &'? mut &'? i32
369..370 'x': &'? i32
380..381 'k': &'? mut &'? i32
384..396 'mut_ref_to_x': &'? mut &'? i32
"#]],
);
}
@ -380,7 +384,7 @@ fn infer_pattern_match_string_literal() {
fn infer_pattern_match_byte_string_literal() {
check_infer_with_mismatches(
r#"
//- minicore: index
//- minicore: index, range
struct S;
impl<T, const N: usize> core::ops::Index<S> for [T; N] {
type Output = [u8];
@ -395,7 +399,7 @@ fn infer_pattern_match_byte_string_literal() {
"#,
expect![[r#"
105..109 'self': &'? [T; N]
111..116 'index': {unknown}
111..116 'index': RangeFull
157..180 '{ ... }': &'? [u8]
167..174 'loop {}': !
172..174 '{}': ()

View file

@ -891,13 +891,14 @@ use core::ops::Deref;
struct BufWriter {}
struct Mutex<T> {}
struct MutexGuard<'a, T> {}
struct Mutex<T>(T);
struct MutexGuard<'a, T>(&'a T);
impl<T> Mutex<T> {
fn lock(&self) -> MutexGuard<'_, T> {}
}
impl<'a, T: 'a> Deref for MutexGuard<'a, T> {
type Target = T;
fn deref(&self) -> &Self::Target { loop {} }
}
fn flush(&self) {
let w: &Mutex<BufWriter>;
@ -905,14 +906,18 @@ fn flush(&self) {
}
"#,
expect![[r#"
123..127 'self': &'? Mutex<T>
150..152 '{}': MutexGuard<'?, T>
234..238 'self': &'? {unknown}
240..290 '{ ...()); }': ()
250..251 'w': &'? Mutex<BufWriter>
276..287 '*(w.lock())': BufWriter
278..279 'w': &'? Mutex<BufWriter>
278..286 'w.lock()': MutexGuard<'?, BufWriter>
129..133 'self': &'? Mutex<T>
156..158 '{}': MutexGuard<'?, T>
242..246 'self': &'? MutexGuard<'a, T>
265..276 '{ loop {} }': &'? T
267..274 'loop {}': !
272..274 '{}': ()
289..293 'self': &'? {unknown}
295..345 '{ ...()); }': ()
305..306 'w': &'? Mutex<BufWriter>
331..342 '*(w.lock())': BufWriter
333..334 'w': &'? Mutex<BufWriter>
333..341 'w.lock()': MutexGuard<'?, BufWriter>
"#]],
);
}
@ -2230,7 +2235,6 @@ async fn f<A, B, C>() -> Bar {}
"#,
expect![[r#"
64..66 '{}': ()
64..66 '{}': impl Future<Output = ()>
"#]],
);
}
@ -2563,3 +2567,81 @@ fn main() {
"#,
);
}
#[test]
fn regression_21429() {
check_no_mismatches(
r#"
trait DatabaseLike {
type ForeignKey: ForeignKeyLike<DB = Self>;
}
trait ForeignKeyLike {
type DB: DatabaseLike;
fn host_columns(&self, database: &Self::DB);
}
trait ColumnLike {
type DB: DatabaseLike;
fn foo() -> &&<<Self as ColumnLike>::DB as DatabaseLike>::ForeignKey {
loop {}
}
fn foreign_keys(&self, database: &Self::DB) {
let fk = Self::foo();
fk.host_columns(database);
}
}
"#,
);
}
#[test]
fn issue_21006_generic_predicates_for_param_supertrait_cycle() {
check_no_mismatches(
r#"
trait VCipherSuite {}
trait CipherSuite
where
OprfHash<Self>: Hash,
{
}
type Bar<CS: CipherSuite> = <CS::Baz as VCipherSuite>::Hash;
type OprfHash<CS: CipherSuite> = <CS::Baz as VCipherSuite>::Hash;
impl<CS: CipherSuite> Foo<CS> {
fn seal() {}
}
"#,
);
}
#[test]
fn issue_21006_self_assoc_trait() {
check_types(
r#"
trait Baz {
fn baz(&self);
}
trait Foo {
type Assoc;
}
trait Bar: Foo
where
Self::Assoc: Baz,
{
fn bar(v: Self::Assoc) {
let _ = v.baz();
// ^ ()
}
}
"#,
);
}

View file

@ -471,7 +471,76 @@ fn foo() {
244..246 '_x': {unknown}
249..257 'to_bytes': fn to_bytes() -> [u8; _]
249..259 'to_bytes()': [u8; _]
249..268 'to_byt..._vec()': {unknown}
249..268 'to_byt..._vec()': Vec<<[u8; _] as Foo>::Item>
"#]],
);
}
#[test]
fn regression_21315() {
check_infer(
r#"
struct Consts;
impl Consts { const MAX: usize = 0; }
struct Between<const M: usize, const N: usize, T>(T);
impl<const M: usize, T> Between<M, { Consts::MAX }, T> {
fn sep_once(self, _sep: &str, _other: Self) -> Self {
self
}
}
trait Parser: Sized {
fn at_least<const M: usize>(self) -> Between<M, { Consts::MAX }, Self> {
Between(self)
}
fn at_most<const N: usize>(self) -> Between<0, N, Self> {
Between(self)
}
}
impl Parser for char {}
fn test_at_least() {
let num = '9'.at_least::<1>();
let _ver = num.sep_once(".", num);
}
fn test_at_most() {
let num = '9'.at_most::<1>();
}
"#,
expect![[r#"
48..49 '0': usize
182..186 'self': Between<M, _, T>
188..192 '_sep': &'? str
200..206 '_other': Between<M, _, T>
222..242 '{ ... }': Between<M, _, T>
232..236 'self': Between<M, _, T>
300..304 'self': Self
343..372 '{ ... }': Between<M, _, Self>
353..360 'Between': fn Between<M, _, Self>(Self) -> Between<M, _, Self>
353..366 'Between(self)': Between<M, _, Self>
361..365 'self': Self
404..408 'self': Self
433..462 '{ ... }': Between<0, N, Self>
443..450 'Between': fn Between<0, N, Self>(Self) -> Between<0, N, Self>
443..456 'Between(self)': Between<0, N, Self>
451..455 'self': Self
510..587 '{ ...um); }': ()
520..523 'num': Between<1, _, char>
526..529 ''9'': char
526..545 ''9'.at...:<1>()': Between<1, _, char>
555..559 '_ver': Between<1, _, char>
562..565 'num': Between<1, _, char>
562..584 'num.se..., num)': Between<1, _, char>
575..578 '"."': &'static str
580..583 'num': Between<1, _, char>
607..644 '{ ...>(); }': ()
617..620 'num': Between<0, 1, char>
623..626 ''9'': char
623..641 ''9'.at...:<1>()': Between<0, 1, char>
"#]],
);
}
@ -750,3 +819,63 @@ fn main() {
"#]],
);
}
#[test]
fn regression_19339() {
check_infer(
r#"
trait Bar {
type Baz;
fn baz(&self) -> Self::Baz;
}
trait Foo {
type Bar;
fn bar(&self) -> Self::Bar;
}
trait FooFactory {
type Output: Foo<Bar: Bar<Baz = u8>>;
fn foo(&self) -> Self::Output;
fn foo_rpit(&self) -> impl Foo<Bar: Bar<Baz = u8>>;
}
fn test1(foo: impl Foo<Bar: Bar<Baz = u8>>) {
let baz = foo.bar().baz();
}
fn test2<T: FooFactory>(factory: T) {
let baz = factory.foo().bar().baz();
let baz = factory.foo_rpit().bar().baz();
}
"#,
expect![[r#"
39..43 'self': &'? Self
101..105 'self': &'? Self
198..202 'self': &'? Self
239..243 'self': &'? Self
290..293 'foo': impl Foo + ?Sized
325..359 '{ ...z(); }': ()
335..338 'baz': u8
341..344 'foo': impl Foo + ?Sized
341..350 'foo.bar()': impl Bar
341..356 'foo.bar().baz()': u8
385..392 'factory': T
397..487 '{ ...z(); }': ()
407..410 'baz': u8
413..420 'factory': T
413..426 'factory.foo()': <T as FooFactory>::Output
413..432 'factor....bar()': <<T as FooFactory>::Output as Foo>::Bar
413..438 'factor....baz()': u8
448..451 'baz': u8
454..461 'factory': T
454..472 'factor...rpit()': impl Foo + Bar<Baz = u8> + ?Sized
454..478 'factor....bar()': <impl Foo + Bar<Baz = u8> + ?Sized as Foo>::Bar
454..484 'factor....baz()': u8
"#]],
);
}

View file

@ -64,20 +64,37 @@ fn type_alias_in_struct_lit() {
#[test]
fn infer_ranges() {
check_types(
check_no_mismatches(
r#"
//- minicore: range
fn test() {
let a = ..;
let b = 1..;
let c = ..2u32;
let d = 1..2usize;
let e = ..=10;
let f = 'a'..='z';
//- minicore: range, new_range
let t = (a, b, c, d, e, f);
t;
} //^ (RangeFull, RangeFrom<i32>, RangeTo<u32>, Range<usize>, RangeToInclusive<i32>, RangeInclusive<char>)
fn test() {
let _: core::ops::RangeFull = ..;
let _: core::ops::RangeFrom<i32> = 1..;
let _: core::ops::RangeTo<u32> = ..2u32;
let _: core::ops::Range<usize> = 1..2usize;
let _: core::ops::RangeToInclusive<i32> = ..=10;
let _: core::ops::RangeInclusive<char> = 'a'..='z';
}
"#,
);
}
#[test]
fn infer_ranges_new_range() {
check_no_mismatches(
r#"
//- minicore: range, new_range
#![feature(new_range)]
fn test() {
let _: core::ops::RangeFull = ..;
let _: core::range::RangeFrom<i32> = 1..;
let _: core::ops::RangeTo<u32> = ..2u32;
let _: core::range::Range<usize> = 1..2usize;
let _: core::range::RangeToInclusive<i32> = ..=10;
let _: core::range::RangeInclusive<char> = 'a'..='z';
}
"#,
);
}
@ -2139,7 +2156,6 @@ async fn main() {
"#,
expect![[r#"
16..193 '{ ...2 }; }': ()
16..193 '{ ...2 }; }': impl Future<Output = ()>
26..27 'x': i32
30..43 'unsafe { 92 }': i32
39..41 '92': i32
@ -3983,3 +3999,60 @@ fn foo() {
"#]],
);
}
#[test]
fn naked_asm_returns_never() {
check_no_mismatches(
r#"
//- minicore: asm
#[unsafe(naked)]
extern "C" fn foo() -> ! {
core::arch::naked_asm!("");
}
"#,
);
}
#[test]
fn regression_21478() {
check_infer(
r#"
//- minicore: unsize, coerce_unsized
struct LazyLock<T>(T);
impl<T> LazyLock<T> {
const fn new() -> Self {
loop {}
}
fn force(this: &Self) -> &T {
loop {}
}
}
static VALUES_LAZY_LOCK: LazyLock<[u32; { 0 }]> = LazyLock::new();
fn foo() {
let _ = LazyLock::force(&VALUES_LAZY_LOCK);
}
"#,
expect![[r#"
73..96 '{ ... }': LazyLock<T>
83..90 'loop {}': !
88..90 '{}': ()
111..115 'this': &'? LazyLock<T>
130..153 '{ ... }': &'? T
140..147 'loop {}': !
145..147 '{}': ()
207..220 'LazyLock::new': fn new<[u32; _]>() -> LazyLock<[u32; _]>
207..222 'LazyLock::new()': LazyLock<[u32; _]>
234..285 '{ ...CK); }': ()
244..245 '_': &'? [u32; _]
248..263 'LazyLock::force': fn force<[u32; _]>(&'? LazyLock<[u32; _]>) -> &'? [u32; _]
248..282 'LazyLo..._LOCK)': &'? [u32; _]
264..281 '&VALUE...Y_LOCK': &'? LazyLock<[u32; _]>
265..281 'VALUES...Y_LOCK': LazyLock<[u32; _]>
"#]],
);
}

View file

@ -429,7 +429,7 @@ fn associated_type_shorthand_from_method_bound() {
trait Iterable {
type Item;
}
struct S<T>;
struct S<T>(T);
impl<T> S<T> {
fn foo(self) -> T::Item where T: Iterable { loop {} }
}
@ -1103,40 +1103,50 @@ fn test() {
fn argument_impl_trait_type_args_2() {
check_infer_with_mismatches(
r#"
//- minicore: sized
//- minicore: sized, phantom_data
use core::marker::PhantomData;
trait Trait {}
struct S;
impl Trait for S {}
struct F<T>;
struct F<T>(PhantomData<T>);
impl<T> F<T> {
fn foo<U>(self, x: impl Trait) -> (T, U) { loop {} }
}
fn test() {
F.foo(S);
F::<u32>.foo(S);
F::<u32>.foo::<i32>(S);
F::<u32>.foo::<i32, u32>(S); // extraneous argument should be ignored
F(PhantomData).foo(S);
F::<u32>(PhantomData).foo(S);
F::<u32>(PhantomData).foo::<i32>(S);
F::<u32>(PhantomData).foo::<i32, u32>(S); // extraneous argument should be ignored
}"#,
expect![[r#"
87..91 'self': F<T>
93..94 'x': impl Trait
118..129 '{ loop {} }': (T, U)
120..127 'loop {}': !
125..127 '{}': ()
143..283 '{ ...ored }': ()
149..150 'F': F<{unknown}>
149..157 'F.foo(S)': ({unknown}, {unknown})
155..156 'S': S
163..171 'F::<u32>': F<u32>
163..178 'F::<u32>.foo(S)': (u32, {unknown})
176..177 'S': S
184..192 'F::<u32>': F<u32>
184..206 'F::<u3...32>(S)': (u32, i32)
204..205 'S': S
212..220 'F::<u32>': F<u32>
212..239 'F::<u3...32>(S)': (u32, i32)
237..238 'S': S
135..139 'self': F<T>
141..142 'x': impl Trait
166..177 '{ loop {} }': (T, U)
168..175 'loop {}': !
173..175 '{}': ()
191..383 '{ ...ored }': ()
197..198 'F': fn F<{unknown}>(PhantomData<{unknown}>) -> F<{unknown}>
197..211 'F(PhantomData)': F<{unknown}>
197..218 'F(Phan...foo(S)': ({unknown}, {unknown})
199..210 'PhantomData': PhantomData<{unknown}>
216..217 'S': S
224..232 'F::<u32>': fn F<u32>(PhantomData<u32>) -> F<u32>
224..245 'F::<u3...mData)': F<u32>
224..252 'F::<u3...foo(S)': (u32, {unknown})
233..244 'PhantomData': PhantomData<u32>
250..251 'S': S
258..266 'F::<u32>': fn F<u32>(PhantomData<u32>) -> F<u32>
258..279 'F::<u3...mData)': F<u32>
258..293 'F::<u3...32>(S)': (u32, i32)
267..278 'PhantomData': PhantomData<u32>
291..292 'S': S
299..307 'F::<u32>': fn F<u32>(PhantomData<u32>) -> F<u32>
299..320 'F::<u3...mData)': F<u32>
299..339 'F::<u3...32>(S)': (u32, i32)
308..319 'PhantomData': PhantomData<u32>
337..338 'S': S
"#]],
);
}
@ -4012,7 +4022,7 @@ fn f<F: Foo>() {
fn dyn_map() {
check_types(
r#"
pub struct Key<K, V, P = (K, V)> {}
pub struct Key<K, V, P = (K, V)>(K, V, P);
pub trait Policy {
type K;
@ -4024,7 +4034,7 @@ impl<K, V> Policy for (K, V) {
type V = V;
}
pub struct KeyMap<KEY> {}
pub struct KeyMap<KEY>(KEY);
impl<P: Policy> KeyMap<Key<P::K, P::V, P>> {
pub fn get(&self, key: &P::K) -> P::V {
@ -4859,7 +4869,6 @@ async fn baz<T: AsyncFnOnce(u32) -> i32>(c: T) {
expect![[r#"
37..38 'a': T
43..83 '{ ...ait; }': ()
43..83 '{ ...ait; }': impl Future<Output = ()>
53..57 'fut1': <T as AsyncFnMut<(u32,)>>::CallRefFuture<'?>
60..61 'a': T
60..64 'a(0)': <T as AsyncFnMut<(u32,)>>::CallRefFuture<'?>
@ -4868,7 +4877,6 @@ async fn baz<T: AsyncFnOnce(u32) -> i32>(c: T) {
70..80 'fut1.await': i32
124..129 'mut b': T
134..174 '{ ...ait; }': ()
134..174 '{ ...ait; }': impl Future<Output = ()>
144..148 'fut2': <T as AsyncFnMut<(u32,)>>::CallRefFuture<'?>
151..152 'b': T
151..155 'b(0)': <T as AsyncFnMut<(u32,)>>::CallRefFuture<'?>
@ -4877,7 +4885,6 @@ async fn baz<T: AsyncFnOnce(u32) -> i32>(c: T) {
161..171 'fut2.await': i32
216..217 'c': T
222..262 '{ ...ait; }': ()
222..262 '{ ...ait; }': impl Future<Output = ()>
232..236 'fut3': <T as AsyncFnOnce<(u32,)>>::CallOnceFuture
239..240 'c': T
239..243 'c(0)': <T as AsyncFnOnce<(u32,)>>::CallOnceFuture
@ -5023,7 +5030,7 @@ fn main() {
278..280 '{}': ()
290..291 '_': Box<dyn Iterator<Item = &'? [u8]> + '?>
294..298 'iter': Box<dyn Iterator<Item = &'? [u8]> + 'static>
294..310 'iter.i...iter()': Box<dyn Iterator<Item = &'? [u8]> + 'static>
294..310 'iter.i...iter()': Box<dyn Iterator<Item = &'? [u8]> + '?>
152..156 'self': &'? mut Box<I>
177..208 '{ ... }': Option<<I as Iterator>::Item>
191..198 'loop {}': !

View file

@ -3,7 +3,8 @@
use cfg::CfgExpr;
use either::Either;
use hir_def::{
AssocItemId, AttrDefId, FieldId, LifetimeParamId, ModuleDefId, TypeOrConstParamId,
AssocItemId, AttrDefId, FieldId, GenericDefId, ItemContainerId, LifetimeParamId, ModuleDefId,
TraitId, TypeOrConstParamId,
attrs::{AttrFlags, Docs, IsInnerDoc},
expr_store::path::Path,
item_scope::ItemInNs,
@ -22,6 +23,7 @@ use hir_ty::{
next_solver::{DbInterner, TypingMode, infer::DbInternerInferExt},
};
use intern::Symbol;
use stdx::never;
use crate::{
Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl,
@ -357,13 +359,46 @@ fn resolve_assoc_or_field(
ns: Option<Namespace>,
) -> Option<DocLinkDef> {
let path = Path::from_known_path_with_no_generic(path);
// FIXME: This does not handle `Self` on trait definitions, which we should resolve to the
// trait itself.
let base_def = resolver.resolve_path_in_type_ns_fully(db, &path)?;
let handle_trait = |id: TraitId| {
// Doc paths in this context may only resolve to an item of this trait
// (i.e. no items of its supertraits), so we need to handle them here
// independently of others.
id.trait_items(db).items.iter().find(|it| it.0 == name).map(|(_, assoc_id)| {
let def = match *assoc_id {
AssocItemId::FunctionId(it) => ModuleDef::Function(it.into()),
AssocItemId::ConstId(it) => ModuleDef::Const(it.into()),
AssocItemId::TypeAliasId(it) => ModuleDef::TypeAlias(it.into()),
};
DocLinkDef::ModuleDef(def)
})
};
let ty = match base_def {
TypeNs::SelfType(id) => Impl::from(id).self_ty(db),
TypeNs::GenericParam(_) => {
TypeNs::GenericParam(param) => {
let generic_params = db.generic_params(param.parent());
if generic_params[param.local_id()].is_trait_self() {
// `Self::assoc` in traits should refer to the trait itself.
let parent_trait = |container| match container {
ItemContainerId::TraitId(trait_) => handle_trait(trait_),
_ => {
never!("container {container:?} should be a trait");
None
}
};
return match param.parent() {
GenericDefId::TraitId(trait_) => handle_trait(trait_),
GenericDefId::ConstId(it) => parent_trait(it.loc(db).container),
GenericDefId::FunctionId(it) => parent_trait(it.loc(db).container),
GenericDefId::TypeAliasId(it) => parent_trait(it.loc(db).container),
_ => {
never!("type param {param:?} should belong to a trait");
None
}
};
}
// Even if this generic parameter has some trait bounds, rustdoc doesn't
// resolve `name` to trait items.
return None;
@ -384,19 +419,7 @@ fn resolve_assoc_or_field(
alias.ty(db)
}
TypeNs::BuiltinType(id) => BuiltinType::from(id).ty(db),
TypeNs::TraitId(id) => {
// Doc paths in this context may only resolve to an item of this trait
// (i.e. no items of its supertraits), so we need to handle them here
// independently of others.
return id.trait_items(db).items.iter().find(|it| it.0 == name).map(|(_, assoc_id)| {
let def = match *assoc_id {
AssocItemId::FunctionId(it) => ModuleDef::Function(it.into()),
AssocItemId::ConstId(it) => ModuleDef::Const(it.into()),
AssocItemId::TypeAliasId(it) => ModuleDef::TypeAlias(it.into()),
};
DocLinkDef::ModuleDef(def)
});
}
TypeNs::TraitId(id) => return handle_trait(id),
TypeNs::ModuleId(_) => {
return None;
}
@ -414,7 +437,14 @@ fn resolve_assoc_or_field(
let variant_def = match ty.as_adt()? {
Adt::Struct(it) => it.into(),
Adt::Union(it) => it.into(),
Adt::Enum(_) => return None,
Adt::Enum(enum_) => {
// Can happen on `Self::Variant` (otherwise would be fully resolved by the resolver).
return enum_
.id
.enum_variants(db)
.variant(&name)
.map(|variant| DocLinkDef::ModuleDef(ModuleDef::Variant(variant.into())));
}
};
resolve_field(db, variant_def, name, ns)
}

View file

@ -610,6 +610,23 @@ impl Module {
res
}
/// Collects the child modules visible in this module's scope, paired with the
/// name each is bound to. With `pub_only` set, only `pub` bindings are kept.
pub fn modules_in_scope(&self, db: &dyn HirDatabase, pub_only: bool) -> Vec<(Name, Module)> {
    let def_map = self.id.def_map(db);
    def_map[self.id]
        .scope
        .types()
        .filter_map(|(name, item)| {
            let visible = !pub_only || item.vis == Visibility::Public;
            match item.def {
                ModuleDefId::ModuleId(id) if visible => Some((name.clone(), Module { id })),
                _ => None,
            }
        })
        .collect()
}
/// Returns a `ModuleScope`: a set of items, visible in this module.
pub fn scope(
self,

View file

@ -13,7 +13,7 @@ use std::{
use base_db::FxIndexSet;
use either::Either;
use hir_def::{
DefWithBodyId, MacroId, StructId, TraitId, VariantId,
BuiltinDeriveImplId, DefWithBodyId, HasModule, MacroId, StructId, TraitId, VariantId,
attrs::parse_extra_crate_attrs,
expr_store::{Body, ExprOrPatSource, HygieneId, path::Path},
hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat},
@ -622,18 +622,34 @@ impl<'db> SemanticsImpl<'db> {
Some(
calls
.into_iter()
.map(|call| macro_call_to_macro_id(ctx, call?).map(|id| Macro { id }))
.map(|call| {
let call = call?;
match call {
Either::Left(call) => {
macro_call_to_macro_id(ctx, call).map(|id| Macro { id })
}
Either::Right(call) => {
let call = call.loc(self.db);
let krate = call.krate(self.db);
let lang_items = hir_def::lang_item::lang_items(self.db, krate);
call.trait_.derive_macro(lang_items).map(|id| Macro { id })
}
}
})
.collect(),
)
})
}
pub fn expand_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<ExpandResult<SyntaxNode>>> {
pub fn expand_derive_macro(
&self,
attr: &ast::Attr,
) -> Option<Vec<Option<ExpandResult<SyntaxNode>>>> {
let res: Vec<_> = self
.derive_macro_calls(attr)?
.into_iter()
.flat_map(|call| {
let file_id = call?;
.map(|call| {
let file_id = call?.left()?;
let ExpandResult { value, err } = self.db.parse_macro_expansion(file_id);
let root_node = value.0.syntax_node();
self.cache(root_node.clone(), file_id.into());
@ -643,7 +659,10 @@ impl<'db> SemanticsImpl<'db> {
Some(res)
}
fn derive_macro_calls(&self, attr: &ast::Attr) -> Option<Vec<Option<MacroCallId>>> {
fn derive_macro_calls(
&self,
attr: &ast::Attr,
) -> Option<Vec<Option<Either<MacroCallId, BuiltinDeriveImplId>>>> {
let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
let file_id = self.find_file(adt.syntax()).file_id;
let adt = InFile::new(file_id, &adt);
@ -690,8 +709,9 @@ impl<'db> SemanticsImpl<'db> {
.derive_helpers_in_scope(InFile::new(sa.file_id, id))?
.iter()
.filter(|&(name, _, _)| *name == attr_name)
.map(|&(_, macro_, call)| (macro_.into(), call))
.filter_map(|&(_, macro_, call)| Some((macro_.into(), call.left()?)))
.collect();
// FIXME: We filter our builtin derive "fake" expansions, is this correct? Should we still expose them somehow?
res.is_empty().not().then_some(res)
}
@ -1338,6 +1358,7 @@ impl<'db> SemanticsImpl<'db> {
// FIXME: We need to call `f` for all of them as well though!
process_expansion_for_token(ctx, &mut stack, derive_attr);
for derive in derives.into_iter().flatten() {
let Either::Left(derive) = derive else { continue };
process_expansion_for_token(ctx, &mut stack, derive);
}
}
@ -1467,11 +1488,12 @@ impl<'db> SemanticsImpl<'db> {
for (.., derive) in
helpers.iter().filter(|(helper, ..)| *helper == attr_name)
{
let Either::Left(derive) = *derive else { continue };
// as there may be multiple derives registering the same helper
// name, we gotta make sure to call this for all of them!
// FIXME: We need to call `f` for all of them as well though!
res = res
.or(process_expansion_for_token(ctx, &mut stack, *derive));
.or(process_expansion_for_token(ctx, &mut stack, derive));
}
res
})
@ -1981,6 +2003,15 @@ impl<'db> SemanticsImpl<'db> {
.unwrap_or_default()
}
/// Delegates to `SourceAnalyzer::record_literal_matched_fields` for this
/// record literal; returns an empty list when the literal cannot be analyzed.
pub fn record_literal_matched_fields(
    &self,
    literal: &ast::RecordExpr,
) -> Vec<(Field, Type<'db>)> {
    let Some(analyzer) = self.analyze(literal.syntax()) else {
        return Vec::new();
    };
    analyzer.record_literal_matched_fields(self.db, literal).unwrap_or_default()
}
pub fn record_pattern_missing_fields(
&self,
pattern: &ast::RecordPat,
@ -1990,6 +2021,15 @@ impl<'db> SemanticsImpl<'db> {
.unwrap_or_default()
}
/// Delegates to `SourceAnalyzer::record_pattern_matched_fields` for this
/// record pattern; returns an empty list when the pattern cannot be analyzed.
pub fn record_pattern_matched_fields(
    &self,
    pattern: &ast::RecordPat,
) -> Vec<(Field, Type<'db>)> {
    self.analyze(pattern.syntax()).map_or_else(Vec::new, |analyzer| {
        analyzer.record_pattern_matched_fields(self.db, pattern).unwrap_or_default()
    })
}
fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
let mut ctx = SourceToDefCtx { db: self.db, cache: &mut self.s2d_cache.borrow_mut() };
f(&mut ctx)

View file

@ -87,10 +87,10 @@
use either::Either;
use hir_def::{
AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId,
ExternCrateId, FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId,
Lookup, MacroId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, TypeParamId, UnionId,
UseId, VariantId,
AdtId, BlockId, BuiltinDeriveImplId, ConstId, ConstParamId, DefWithBodyId, EnumId,
EnumVariantId, ExternBlockId, ExternCrateId, FieldId, FunctionId, GenericDefId, GenericParamId,
ImplId, LifetimeParamId, Lookup, MacroId, ModuleId, StaticId, StructId, TraitId, TypeAliasId,
TypeParamId, UnionId, UseId, VariantId,
dyn_map::{
DynMap,
keys::{self, Key},
@ -394,7 +394,7 @@ impl SourceToDefCtx<'_, '_> {
&mut self,
item: InFile<&ast::Adt>,
src: InFile<ast::Attr>,
) -> Option<(AttrId, MacroCallId, &[Option<MacroCallId>])> {
) -> Option<(AttrId, MacroCallId, &[Option<Either<MacroCallId, BuiltinDeriveImplId>>])> {
let map = self.dyn_map(item)?;
map[keys::DERIVE_MACRO_CALL]
.get(&AstPtr::new(&src.value))
@ -409,8 +409,11 @@ impl SourceToDefCtx<'_, '_> {
pub(super) fn derive_macro_calls<'slf>(
&'slf mut self,
adt: InFile<&ast::Adt>,
) -> Option<impl Iterator<Item = (AttrId, MacroCallId, &'slf [Option<MacroCallId>])> + use<'slf>>
{
) -> Option<
impl Iterator<
Item = (AttrId, MacroCallId, &'slf [Option<Either<MacroCallId, BuiltinDeriveImplId>>]),
> + use<'slf>,
> {
self.dyn_map(adt).as_ref().map(|&map| {
let dyn_map = &map[keys::DERIVE_MACRO_CALL];
adt.value

View file

@ -17,7 +17,7 @@ use hir_def::{
path::Path,
scope::{ExprScopes, ScopeId},
},
hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat},
hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat, PatId},
lang_item::LangItems,
nameres::MacroSubNs,
resolver::{HasResolver, Resolver, TypeNs, ValueNs, resolver_for_scope},
@ -44,6 +44,7 @@ use hir_ty::{
};
use intern::sym;
use itertools::Itertools;
use rustc_hash::FxHashSet;
use rustc_type_ir::{
AliasTyKind,
inherent::{AdtDef, IntoKind, Ty as _},
@ -531,18 +532,12 @@ impl<'db> SourceAnalyzer<'db> {
db: &'db dyn HirDatabase,
range_pat: &ast::RangePat,
) -> Option<StructId> {
let path: ModPath = match (range_pat.op_kind()?, range_pat.start(), range_pat.end()) {
(RangeOp::Exclusive, None, Some(_)) => path![core::ops::RangeTo],
(RangeOp::Exclusive, Some(_), None) => path![core::ops::RangeFrom],
(RangeOp::Exclusive, Some(_), Some(_)) => path![core::ops::Range],
(RangeOp::Inclusive, None, Some(_)) => path![core::ops::RangeToInclusive],
(RangeOp::Inclusive, Some(_), Some(_)) => path![core::ops::RangeInclusive],
(RangeOp::Exclusive, None, None) => return None,
(RangeOp::Inclusive, None, None) => return None,
(RangeOp::Inclusive, Some(_), None) => return None,
};
self.resolver.resolve_known_struct(db, &path)
self.resolve_range_struct(
db,
range_pat.op_kind()?,
range_pat.start().is_some(),
range_pat.end().is_some(),
)
}
pub(crate) fn resolve_range_expr(
@ -550,19 +545,59 @@ impl<'db> SourceAnalyzer<'db> {
db: &'db dyn HirDatabase,
range_expr: &ast::RangeExpr,
) -> Option<StructId> {
let path: ModPath = match (range_expr.op_kind()?, range_expr.start(), range_expr.end()) {
(RangeOp::Exclusive, None, None) => path![core::ops::RangeFull],
(RangeOp::Exclusive, None, Some(_)) => path![core::ops::RangeTo],
(RangeOp::Exclusive, Some(_), None) => path![core::ops::RangeFrom],
(RangeOp::Exclusive, Some(_), Some(_)) => path![core::ops::Range],
(RangeOp::Inclusive, None, Some(_)) => path![core::ops::RangeToInclusive],
(RangeOp::Inclusive, Some(_), Some(_)) => path![core::ops::RangeInclusive],
self.resolve_range_struct(
db,
range_expr.op_kind()?,
range_expr.start().is_some(),
range_expr.end().is_some(),
)
}
fn resolve_range_struct(
&self,
db: &'db dyn HirDatabase,
op_kind: RangeOp,
has_start: bool,
has_end: bool,
) -> Option<StructId> {
let has_new_range =
self.resolver.top_level_def_map().is_unstable_feature_enabled(&sym::new_range);
let lang_items = self.lang_items(db);
match (op_kind, has_start, has_end) {
(RangeOp::Exclusive, false, false) => lang_items.RangeFull,
(RangeOp::Exclusive, false, true) => lang_items.RangeTo,
(RangeOp::Exclusive, true, false) => {
if has_new_range {
lang_items.RangeFromCopy
} else {
lang_items.RangeFrom
}
}
(RangeOp::Exclusive, true, true) => {
if has_new_range {
lang_items.RangeCopy
} else {
lang_items.Range
}
}
(RangeOp::Inclusive, false, true) => {
if has_new_range {
lang_items.RangeToInclusiveCopy
} else {
lang_items.RangeToInclusive
}
}
(RangeOp::Inclusive, true, true) => {
if has_new_range {
lang_items.RangeInclusiveCopy
} else {
lang_items.RangeInclusiveStruct
}
}
// [E0586] inclusive ranges must be bounded at the end
(RangeOp::Inclusive, None, None) => return None,
(RangeOp::Inclusive, Some(_), None) => return None,
};
self.resolver.resolve_known_struct(db, &path)
(RangeOp::Inclusive, false, false) => None,
(RangeOp::Inclusive, true, false) => None,
}
}
pub(crate) fn resolve_await_to_poll(
@ -1241,21 +1276,31 @@ impl<'db> SourceAnalyzer<'db> {
let body = self.store()?;
let infer = self.infer()?;
let expr_id = self.expr_id(literal.clone().into())?;
let substs = infer.expr_or_pat_ty(expr_id).as_adt()?.1;
let (variant, missing_fields, _exhaustive) = match expr_id {
ExprOrPatId::ExprId(expr_id) => {
record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?
}
ExprOrPatId::PatId(pat_id) => {
record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?
}
};
let expr_id = self.expr_id(literal.clone().into())?.as_expr()?;
let substs = infer.expr_ty(expr_id).as_adt()?.1;
let (variant, missing_fields) =
record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?;
let res = self.missing_fields(db, substs, variant, missing_fields);
Some(res)
}
/// Resolves this record literal's variant and returns the fields selected by
/// the free `record_literal_matched_fields` helper, each paired with its
/// concrete (substituted) type.
// NOTE(review): `missing_fields` is reused here purely as the
// id -> (Field, Type) mapping step; the field selection itself happens in the
// free helper below.
pub(crate) fn record_literal_matched_fields(
&self,
db: &'db dyn HirDatabase,
literal: &ast::RecordExpr,
) -> Option<Vec<(Field, Type<'db>)>> {
let body = self.store()?;
let infer = self.infer()?;
// Only expression ids are meaningful for a record literal.
let expr_id = self.expr_id(literal.clone().into())?.as_expr()?;
let substs = infer.expr_ty(expr_id).as_adt()?.1;
let (variant, matched_fields) =
record_literal_matched_fields(db, infer, expr_id, &body[expr_id])?;
let res = self.missing_fields(db, substs, variant, matched_fields);
Some(res)
}
pub(crate) fn record_pattern_missing_fields(
&self,
db: &'db dyn HirDatabase,
@ -1267,12 +1312,29 @@ impl<'db> SourceAnalyzer<'db> {
let pat_id = self.pat_id(&pattern.clone().into())?.as_pat()?;
let substs = infer.pat_ty(pat_id).as_adt()?.1;
let (variant, missing_fields, _exhaustive) =
let (variant, missing_fields) =
record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?;
let res = self.missing_fields(db, substs, variant, missing_fields);
Some(res)
}
/// Resolves this record pattern's variant and returns the fields selected by
/// the free `record_pattern_matched_fields` helper, each paired with its
/// concrete (substituted) type.
// NOTE(review): `missing_fields` is reused here purely as the
// id -> (Field, Type) mapping step; the field selection itself happens in the
// free helper below.
pub(crate) fn record_pattern_matched_fields(
&self,
db: &'db dyn HirDatabase,
pattern: &ast::RecordPat,
) -> Option<Vec<(Field, Type<'db>)>> {
let body = self.store()?;
let infer = self.infer()?;
let pat_id = self.pat_id(&pattern.clone().into())?.as_pat()?;
let substs = infer.pat_ty(pat_id).as_adt()?.1;
let (variant, matched_fields) =
record_pattern_matched_fields(db, infer, pat_id, &body[pat_id])?;
let res = self.missing_fields(db, substs, variant, matched_fields);
Some(res)
}
fn missing_fields(
&self,
db: &'db dyn HirDatabase,
@ -1810,3 +1872,67 @@ pub(crate) fn name_hygiene(db: &dyn HirDatabase, name: InFile<&SyntaxNode>) -> H
let ctx = span_map.span_at(name.value.text_range().start()).ctx;
HygieneId::new(ctx.opaque_and_semiopaque(db))
}
/// Resolves the variant of a record literal and collects the variant's fields
/// that are *not* written out in the literal's field list.
///
/// Returns `None` for non-record expressions, union literals, or when every
/// declared field already appears in the literal.
// NOTE(review): despite the `matched` in the name, the filter below keeps the
// fields that are absent from the literal — the same predicate used by the
// `record_literal_missing_fields` helper — so confirm with the callers that
// this is the intended selection.
fn record_literal_matched_fields(
db: &dyn HirDatabase,
infer: &InferenceResult,
id: ExprId,
expr: &Expr,
) -> Option<(VariantId, Vec<LocalFieldId>)> {
// Only record literals (`Path { field: value, .. }`) carry named fields.
let (fields, _spread) = match expr {
Expr::RecordLit { fields, spread, .. } => (fields, spread),
_ => return None,
};
let variant_def = infer.variant_resolution_for_expr(id)?;
// Unions are excluded from this analysis.
if let VariantId::UnionId(_) = variant_def {
return None;
}
let variant_data = variant_def.fields(db);
let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
// Fields declared on the variant but not spelled out in the literal.
let matched_fields: Vec<LocalFieldId> = variant_data
.fields()
.iter()
.filter_map(|(f, d)| (!specified_fields.contains(&d.name)).then_some(f))
.collect();
if matched_fields.is_empty() {
return None;
}
Some((variant_def, matched_fields))
}
/// Resolves the variant of a record pattern and collects the variant's fields
/// that do *not* appear in the pattern's field list.
///
/// Returns `None` for non-record patterns, union patterns, or when every
/// declared field is spelled out in the pattern.
// NOTE(review): despite the `matched` in the name, the filter below keeps the
// fields that are absent from the pattern — the same predicate used by the
// `record_pattern_missing_fields` helper — so confirm with the callers that
// this is the intended selection.
fn record_pattern_matched_fields(
    db: &dyn HirDatabase,
    infer: &InferenceResult,
    id: PatId,
    pat: &Pat,
) -> Option<(VariantId, Vec<LocalFieldId>)> {
    // Only record patterns (`Path { field, .. }`) carry named fields.
    let (fields, _ellipsis) = match pat {
        Pat::Record { path: _, args, ellipsis } => (args, *ellipsis),
        _ => return None,
    };
    let variant_def = infer.variant_resolution_for_pat(id)?;
    // Unions are excluded from this analysis.
    if let VariantId::UnionId(_) = variant_def {
        return None;
    }
    let variant_data = variant_def.fields(db);
    let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
    // Fields declared on the variant but not spelled out in the pattern.
    // `bool::then_some` keeps this consistent with the sibling
    // `record_literal_matched_fields` (the original used an equivalent
    // `if .. { Some(f) } else { None }`).
    let matched_fields: Vec<LocalFieldId> = variant_data
        .fields()
        .iter()
        .filter_map(|(f, d)| (!specified_fields.contains(&d.name)).then_some(f))
        .collect();
    if matched_fields.is_empty() {
        return None;
    }
    Some((variant_def, matched_fields))
}

View file

@ -5,10 +5,11 @@ use std::marker::PhantomData;
use base_db::FxIndexSet;
use either::Either;
use hir_def::{
AdtId, AssocItemId, Complete, DefWithBodyId, ExternCrateId, HasModule, ImplId, Lookup, MacroId,
ModuleDefId, ModuleId, TraitId,
AdtId, AssocItemId, AstIdLoc, Complete, DefWithBodyId, ExternCrateId, HasModule, ImplId,
Lookup, MacroId, ModuleDefId, ModuleId, TraitId,
db::DefDatabase,
item_scope::{ImportId, ImportOrExternCrate, ImportOrGlob},
nameres::crate_def_map,
per_ns::Item,
src::{HasChildSource, HasSource},
visibility::{Visibility, VisibilityExplicitness},
@ -22,7 +23,7 @@ use intern::Symbol;
use rustc_hash::FxHashMap;
use syntax::{AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, ToSmolStr, ast::HasName};
use crate::{HasCrate, Module, ModuleDef, Semantics};
use crate::{Crate, HasCrate, Module, ModuleDef, Semantics};
/// The actual data that is stored in the index. It should be as compact as
/// possible.
@ -40,14 +41,14 @@ pub struct FileSymbol<'db> {
_marker: PhantomData<&'db ()>,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct DeclarationLocation {
/// The file id for both the `ptr` and `name_ptr`.
pub hir_file_id: HirFileId,
/// This points to the whole syntax node of the declaration.
pub ptr: SyntaxNodePtr,
/// This points to the [`syntax::ast::Name`] identifier of the declaration.
pub name_ptr: AstPtr<Either<syntax::ast::Name, syntax::ast::NameRef>>,
pub name_ptr: Option<AstPtr<Either<syntax::ast::Name, syntax::ast::NameRef>>>,
}
impl DeclarationLocation {
@ -108,6 +109,51 @@ impl<'a> SymbolCollector<'a> {
}
}
/// Push a symbol for a crate's root module.
/// This allows crate roots to appear in the symbol index for queries like `::` or `::foo`.
pub fn push_crate_root(&mut self, krate: Crate) {
let Some(display_name) = krate.display_name(self.db) else { return };
let crate_name = display_name.crate_name();
let canonical_name = display_name.canonical_name();
let def_map = crate_def_map(self.db, krate.into());
let module_data = &def_map[def_map.crate_root(self.db)];
let definition = module_data.origin.definition_source(self.db);
let hir_file_id = definition.file_id;
let syntax_node = definition.value.node();
let ptr = SyntaxNodePtr::new(&syntax_node);
let loc = DeclarationLocation { hir_file_id, ptr, name_ptr: None };
let root_module = krate.root_module(self.db);
self.symbols.insert(FileSymbol {
name: crate_name.symbol().clone(),
def: ModuleDef::Module(root_module),
loc,
container_name: None,
is_alias: false,
is_assoc: false,
is_import: false,
do_not_complete: Complete::Yes,
_marker: PhantomData,
});
if canonical_name != crate_name.symbol() {
self.symbols.insert(FileSymbol {
name: canonical_name.clone(),
def: ModuleDef::Module(root_module),
loc,
container_name: None,
is_alias: false,
is_assoc: false,
is_import: false,
do_not_complete: Complete::Yes,
_marker: PhantomData,
});
}
}
/// Consumes the collector and returns the accumulated symbols as a boxed slice.
pub fn finish(self) -> Box<[FileSymbol<'a>]> {
self.symbols.into_iter().collect()
}
@ -123,6 +169,7 @@ impl<'a> SymbolCollector<'a> {
fn collect_from_module(&mut self, module_id: ModuleId) {
let collect_pub_only = self.collect_pub_only;
let is_block_module = module_id.is_block_module(self.db);
let push_decl = |this: &mut Self, def: ModuleDefId, name, vis| {
if collect_pub_only && vis != Visibility::Public {
return;
@ -194,6 +241,10 @@ impl<'a> SymbolCollector<'a> {
let source = import_child_source_cache
.entry(i.use_)
.or_insert_with(|| i.use_.child_source(this.db));
if is_block_module && source.file_id.is_macro() {
// Macros tend to generate a lot of imports; the user really won't care about them
return;
}
let Some(use_tree_src) = source.value.get(i.idx) else { return };
let rename = use_tree_src.rename().and_then(|rename| rename.name());
let name_syntax = match rename {
@ -209,7 +260,7 @@ impl<'a> SymbolCollector<'a> {
let dec_loc = DeclarationLocation {
hir_file_id: source.file_id,
ptr: SyntaxNodePtr::new(use_tree_src.syntax()),
name_ptr: AstPtr::new(&name_syntax),
name_ptr: Some(AstPtr::new(&name_syntax)),
};
this.symbols.insert(FileSymbol {
name: name.symbol().clone(),
@ -230,6 +281,12 @@ impl<'a> SymbolCollector<'a> {
return;
}
let loc = i.lookup(this.db);
if is_block_module && loc.ast_id().file_id.is_macro() {
// Macros (especially derives) tend to generate renamed extern crate items,
// the user really won't care about them
return;
}
let source = loc.source(this.db);
let rename = source.value.rename().and_then(|rename| rename.name());
@ -244,7 +301,7 @@ impl<'a> SymbolCollector<'a> {
let dec_loc = DeclarationLocation {
hir_file_id: source.file_id,
ptr: SyntaxNodePtr::new(source.value.syntax()),
name_ptr: AstPtr::new(&name_syntax),
name_ptr: Some(AstPtr::new(&name_syntax)),
};
this.symbols.insert(FileSymbol {
name: name.symbol().clone(),
@ -409,10 +466,10 @@ impl<'a> SymbolCollector<'a> {
let source = loc.source(self.db);
let Some(name_node) = source.value.name() else { return Complete::Yes };
let def = ModuleDef::from(id.into());
let dec_loc = DeclarationLocation {
let loc = DeclarationLocation {
hir_file_id: source.file_id,
ptr: SyntaxNodePtr::new(source.value.syntax()),
name_ptr: AstPtr::new(&name_node).wrap_left(),
name_ptr: Some(AstPtr::new(&name_node).wrap_left()),
};
let mut do_not_complete = Complete::Yes;
@ -427,7 +484,7 @@ impl<'a> SymbolCollector<'a> {
self.symbols.insert(FileSymbol {
name: alias.clone(),
def,
loc: dec_loc.clone(),
loc,
container_name: self.current_container_name.clone(),
is_alias: true,
is_assoc,
@ -442,7 +499,7 @@ impl<'a> SymbolCollector<'a> {
name: name.symbol().clone(),
def,
container_name: self.current_container_name.clone(),
loc: dec_loc,
loc,
is_alias: false,
is_assoc,
is_import: false,
@ -459,10 +516,10 @@ impl<'a> SymbolCollector<'a> {
let Some(declaration) = module_data.origin.declaration() else { return };
let module = declaration.to_node(self.db);
let Some(name_node) = module.name() else { return };
let dec_loc = DeclarationLocation {
let loc = DeclarationLocation {
hir_file_id: declaration.file_id,
ptr: SyntaxNodePtr::new(module.syntax()),
name_ptr: AstPtr::new(&name_node).wrap_left(),
name_ptr: Some(AstPtr::new(&name_node).wrap_left()),
};
let def = ModuleDef::Module(module_id.into());
@ -475,7 +532,7 @@ impl<'a> SymbolCollector<'a> {
self.symbols.insert(FileSymbol {
name: alias.clone(),
def,
loc: dec_loc.clone(),
loc,
container_name: self.current_container_name.clone(),
is_alias: true,
is_assoc: false,
@ -490,7 +547,7 @@ impl<'a> SymbolCollector<'a> {
name: name.symbol().clone(),
def: ModuleDef::Module(module_id.into()),
container_name: self.current_container_name.clone(),
loc: dec_loc,
loc,
is_alias: false,
is_assoc: false,
is_import: false,

View file

@ -172,7 +172,7 @@ impl<'db> LookupTable<'db> {
/// Insert new type trees for type
///
/// Note that the types have to be the same, unification is not enough as unification is not
/// transitive. For example Vec<i32> and FxHashSet<i32> both unify with Iterator<Item = i32>,
/// transitive. For example `Vec<i32>` and `FxHashSet<i32>` both unify with `Iterator<Item = i32>`,
/// but they clearly do not unify themselves.
fn insert(&mut self, ty: Type<'db>, exprs: impl Iterator<Item = Expr<'db>>) {
match self.data.get_mut(&ty) {

View file

@ -3,7 +3,7 @@ use std::collections::VecDeque;
use ide_db::{
assists::GroupLabel,
famous_defs::FamousDefs,
syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
syntax_helpers::node_ext::{for_each_tail_expr, is_pattern_cond, walk_expr},
};
use syntax::{
NodeOrToken, SyntaxKind, T,
@ -69,6 +69,10 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
}
}
if is_pattern_cond(bin_expr.clone().into()) {
return None;
}
let op = bin_expr.op_kind()?;
let (inv_token, prec) = match op {
ast::BinaryOp::LogicOp(ast::LogicOp::And) => (SyntaxKind::PIPE2, ExprPrecedence::LOr),
@ -375,6 +379,16 @@ fn f() { !(S <= S || S < S) }
)
}
#[test]
fn demorgan_doesnt_handles_pattern() {
// The De Morgan assist must not be offered on an `&&` that is part of a
// let-chain condition (negating a `let` pattern is not valid syntax).
check_assist_not_applicable(
apply_demorgan,
r#"
fn f() { if let 1 = 1 &&$0 true { } }
"#,
);
}
#[test]
fn demorgan_on_not() {
check_assist(

View file

@ -1,13 +1,15 @@
use ide_db::assists::AssistId;
use itertools::Itertools;
use syntax::{
AstNode, T,
AstNode, SyntaxElement,
SyntaxKind::WHITESPACE,
T,
algo::previous_non_trivia_token,
ast::{
self, HasArgList, HasLoopBody, HasName, RangeItem, edit::AstNodeEdit, make,
syntax_factory::SyntaxFactory,
},
syntax_editor::{Element, Position},
syntax_editor::{Element, Position, SyntaxEditor},
};
use crate::assist_context::{AssistContext, Assists};
@ -40,8 +42,8 @@ pub(crate) fn convert_range_for_to_while(acc: &mut Assists, ctx: &AssistContext<
let iterable = for_.iterable()?;
let (start, end, step, inclusive) = extract_range(&iterable)?;
let name = pat.name()?;
let body = for_.loop_body()?;
let last = previous_non_trivia_token(body.stmt_list()?.r_curly_token()?)?;
let body = for_.loop_body()?.stmt_list()?;
let label = for_.label();
let description = if end.is_some() {
"Replace with while expression"
@ -90,8 +92,10 @@ pub(crate) fn convert_range_for_to_while(acc: &mut Assists, ctx: &AssistContext<
);
let op = ast::BinaryOp::Assignment { op: Some(ast::ArithOp::Add) };
edit.insert_all(
Position::after(last),
process_loop_body(
body,
label,
&mut edit,
vec![
make.whitespace(&format!("\n{}", indent + 1)).syntax_element(),
make.expr_bin(var_expr, op, step).syntax().syntax_element(),
@ -121,6 +125,86 @@ fn extract_range(iterable: &ast::Expr) -> Option<(ast::Expr, Option<ast::Expr>,
})
}
/// Appends `incrementer` (the counter-advance statements built by the caller)
/// to the end of the loop body, rewriting `continue`s first when needed.
///
/// If the body contains no `continue` targeting this loop, the incrementer is
/// simply inserted before the closing brace. Otherwise the body's contents are
/// wrapped in a labeled block `'cont: { .. }` and each matching `continue` is
/// replaced with `break 'cont`, so the incrementer still runs after what used
/// to be a `continue`.
fn process_loop_body(
body: ast::StmtList,
label: Option<ast::Label>,
edit: &mut SyntaxEditor,
incrementer: Vec<SyntaxElement>,
) -> Option<()> {
let last = previous_non_trivia_token(body.r_curly_token()?)?.syntax_element();
let new_body = body.indent(1.into()).clone_subtree();
// Find every `continue` that targets this loop (unlabeled ones directly in
// this body, plus explicitly labeled ones anywhere below).
let mut continues = vec![];
collect_continue_to(
&mut continues,
&label.and_then(|it| it.lifetime()),
new_body.syntax(),
false,
);
// Fast path: nothing to rewrite, just append the incrementer.
if continues.is_empty() {
edit.insert_all(Position::after(last), incrementer);
return Some(());
}
// Everything between the block's braces, skipping whitespace.
let mut children = body
.syntax()
.children_with_tokens()
.filter(|it| !matches!(it.kind(), WHITESPACE | T!['{'] | T!['}']));
let first = children.next()?;
let block_content = first.clone()..=children.last().unwrap_or(first);
let continue_label = make::lifetime("'cont");
let break_expr = make::expr_break(Some(continue_label.clone()), None).clone_for_update();
// Rewrite each collected `continue` into `break 'cont` on the indented copy.
let mut new_edit = SyntaxEditor::new(new_body.syntax().clone());
for continue_expr in &continues {
new_edit.replace(continue_expr.syntax(), break_expr.syntax());
}
let new_body = new_edit.finish().new_root().clone();
// Emit `'cont: <rewritten body>` followed by the incrementer.
let elements = itertools::chain(
[
continue_label.syntax().clone_for_update().syntax_element(),
make::token(T![:]).syntax_element(),
make::tokens::single_space().syntax_element(),
new_body.syntax_element(),
],
incrementer,
);
edit.replace_all(block_content, elements.collect());
Some(())
}
/// Recursively collects the `continue` expressions under `node` that target
/// the loop identified by `label`.
///
/// `only_label` is `false` for the top-level call; once the walk has entered a
/// nested loop, an *unlabeled* `continue` belongs to that inner loop, so only
/// explicitly labeled continues may still match (`only_label == true`).
fn collect_continue_to(
acc: &mut Vec<ast::ContinueExpr>,
label: &Option<ast::Lifetime>,
node: &syntax::SyntaxNode,
only_label: bool,
) {
// A continue matches when it names the same label, or carries no label
// while unlabeled continues are still allowed to match (`!only_label`).
let match_label = |it: &Option<ast::Lifetime>, label: &Option<ast::Lifetime>| match (it, label)
{
(None, _) => !only_label,
(Some(a), Some(b)) if a.text() == b.text() => true,
_ => false,
};
if let Some(expr) = ast::ContinueExpr::cast(node.clone())
&& match_label(&expr.lifetime(), label)
{
acc.push(expr);
} else if let Some(any_loop) = ast::AnyHasLoopBody::cast(node.clone()) {
// Stop descending when continues below this nested loop can no longer
// reach the outer loop (unlabeled target, or the inner loop reuses the
// same label and shadows it).
if match_label(label, &any_loop.label().and_then(|it| it.lifetime())) {
return;
}
// Inside a nested loop, unlabeled continues belong to it, not to us.
for children in node.children() {
collect_continue_to(acc, label, &children, true);
}
} else {
for children in node.children() {
collect_continue_to(acc, label, &children, only_label);
}
}
}
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
@ -219,6 +303,67 @@ fn foo() {
);
}
#[test]
fn test_convert_range_for_to_while_with_continue() {
check_assist(
convert_range_for_to_while,
"
fn foo() {
$0for mut i in 3..7 {
foo(i);
continue;
loop { break; continue }
bar(i);
}
}
",
"
fn foo() {
let mut i = 3;
while i < 7 {
'cont: {
foo(i);
break 'cont;
loop { break; continue }
bar(i);
}
i += 1;
}
}
",
);
check_assist(
convert_range_for_to_while,
"
fn foo() {
'x: $0for mut i in 3..7 {
foo(i);
continue 'x;
loop { break; continue 'x }
'x: loop { continue 'x }
bar(i);
}
}
",
"
fn foo() {
let mut i = 3;
'x: while i < 7 {
'cont: {
foo(i);
break 'cont;
loop { break; break 'cont }
'x: loop { continue 'x }
bar(i);
}
i += 1;
}
}
",
);
}
#[test]
fn test_convert_range_for_to_while_step_by() {
check_assist(

View file

@ -95,7 +95,9 @@ fn if_expr_to_guarded_return(
let parent_block = if_expr.syntax().parent()?.ancestors().find_map(ast::BlockExpr::cast)?;
if parent_block.tail_expr()? != if_expr.clone().into() {
if parent_block.tail_expr() != Some(if_expr.clone().into())
&& !(else_block.is_some() && ast::ExprStmt::can_cast(if_expr.syntax().parent()?.kind()))
{
return None;
}
@ -502,6 +504,36 @@ fn main() {
);
}
#[test]
fn convert_if_let_has_else_block_in_statement() {
check_assist(
convert_to_guarded_return,
r#"
fn main() {
some_statements();
if$0 let Ok(x) = Err(92) {
foo(x);
} else {
// needless comment
return;
}
some_statements();
}
"#,
r#"
fn main() {
some_statements();
let Ok(x) = Err(92) else {
// needless comment
return;
};
foo(x);
some_statements();
}
"#,
);
}
#[test]
fn convert_if_let_result_inside_let() {
check_assist(
@ -1136,6 +1168,44 @@ fn main() {
);
}
#[test]
fn ignore_else_if() {
check_assist_not_applicable(
convert_to_guarded_return,
r#"
fn main() {
some_statements();
if cond {
()
} else if$0 let Ok(x) = Err(92) {
foo(x);
} else {
return;
}
some_statements();
}
"#,
);
}
#[test]
fn ignore_if_inside_let() {
check_assist_not_applicable(
convert_to_guarded_return,
r#"
fn main() {
some_statements();
let _ = if$0 let Ok(x) = Err(92) {
foo(x);
} else {
return;
}
some_statements();
}
"#,
);
}
#[test]
fn ignore_let_else_branch() {
check_assist_not_applicable(

View file

@ -33,8 +33,8 @@ fn expand_record_rest_pattern(
record_pat: ast::RecordPat,
rest_pat: ast::RestPat,
) -> Option<()> {
let missing_fields = ctx.sema.record_pattern_missing_fields(&record_pat);
if missing_fields.is_empty() {
let matched_fields = ctx.sema.record_pattern_matched_fields(&record_pat);
if matched_fields.is_empty() {
cov_mark::hit!(no_missing_fields);
return None;
}
@ -53,7 +53,7 @@ fn expand_record_rest_pattern(
|builder| {
let make = SyntaxFactory::with_mappings();
let mut editor = builder.make_editor(rest_pat.syntax());
let new_fields = old_field_list.fields().chain(missing_fields.iter().map(|(f, _)| {
let new_fields = old_field_list.fields().chain(matched_fields.iter().map(|(f, _)| {
make.record_pat_field_shorthand(
make.ident_pat(
false,

View file

@ -25,7 +25,7 @@ use syntax::{
SyntaxKind::{self, COMMENT},
SyntaxNode, SyntaxToken, T, TextRange, TextSize, TokenAtOffset, WalkEvent,
ast::{
self, AstNode, AstToken, HasGenericParams, HasName, edit::IndentLevel,
self, AstNode, AstToken, HasAttrs, HasGenericParams, HasName, edit::IndentLevel,
edit_in_place::Indent,
},
match_ast, ted,
@ -120,7 +120,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
let params = body.extracted_function_params(ctx, &container_info, locals_used);
let name = make_function_name(&semantics_scope);
let name = make_function_name(&semantics_scope, &body);
let fun = Function {
name,
@ -241,7 +241,10 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
)
}
fn make_function_name(semantics_scope: &hir::SemanticsScope<'_>) -> ast::NameRef {
fn make_function_name(
semantics_scope: &hir::SemanticsScope<'_>,
body: &FunctionBody,
) -> ast::NameRef {
let mut names_in_scope = vec![];
semantics_scope.process_all_names(&mut |name, _| {
names_in_scope.push(
@ -252,7 +255,10 @@ fn make_function_name(semantics_scope: &hir::SemanticsScope<'_>) -> ast::NameRef
let default_name = "fun_name";
let mut name = default_name.to_owned();
let mut name = body
.suggest_name()
.filter(|name| name.len() > 2)
.unwrap_or_else(|| default_name.to_owned());
let mut counter = 0;
while names_in_scope.contains(&name) {
counter += 1;
@ -375,6 +381,7 @@ struct ContainerInfo<'db> {
ret_type: Option<hir::Type<'db>>,
generic_param_lists: Vec<ast::GenericParamList>,
where_clauses: Vec<ast::WhereClause>,
attrs: Vec<ast::Attr>,
edition: Edition,
}
@ -778,6 +785,16 @@ impl FunctionBody {
fn contains_node(&self, node: &SyntaxNode) -> bool {
self.contains_range(node.text_range())
}
fn suggest_name(&self) -> Option<String> {
if let Some(ast::Pat::IdentPat(pat)) = self.parent().and_then(ast::LetStmt::cast)?.pat()
&& let Some(name) = pat.name().and_then(|it| it.ident_token())
{
Some(name.text().to_owned())
} else {
None
}
}
}
impl FunctionBody {
@ -911,6 +928,7 @@ impl FunctionBody {
let parents = generic_parents(&parent);
let generic_param_lists = parents.iter().filter_map(|it| it.generic_param_list()).collect();
let where_clauses = parents.iter().filter_map(|it| it.where_clause()).collect();
let attrs = parents.iter().flat_map(|it| it.attrs()).filter(is_inherit_attr).collect();
Some((
ContainerInfo {
@ -919,6 +937,7 @@ impl FunctionBody {
ret_type: ty,
generic_param_lists,
where_clauses,
attrs,
edition,
},
contains_tail_expr,
@ -1103,6 +1122,14 @@ impl GenericParent {
GenericParent::Trait(trait_) => trait_.where_clause(),
}
}
fn attrs(&self) -> impl Iterator<Item = ast::Attr> {
match self {
GenericParent::Fn(fn_) => fn_.attrs(),
GenericParent::Impl(impl_) => impl_.attrs(),
GenericParent::Trait(trait_) => trait_.attrs(),
}
}
}
/// Search `parent`'s ancestors for items with potentially applicable generic parameters
@ -1578,7 +1605,7 @@ fn format_function(
let (generic_params, where_clause) = make_generic_params_and_where_clause(ctx, fun);
make::fn_(
None,
fun.mods.attrs.clone(),
None,
fun_name,
generic_params,
@ -1958,6 +1985,11 @@ fn format_type(ty: &hir::Type<'_>, ctx: &AssistContext<'_>, module: hir::Module)
ty.display_source_code(ctx.db(), module.into(), true).ok().unwrap_or_else(|| "_".to_owned())
}
fn is_inherit_attr(attr: &ast::Attr) -> bool {
let Some(name) = attr.simple_name() else { return false };
matches!(name.as_str(), "track_caller" | "cfg")
}
fn make_ty(ty: &hir::Type<'_>, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Type {
let ty_str = format_type(ty, ctx, module);
make::ty(&ty_str)
@ -5414,12 +5446,12 @@ impl Struct {
impl Trait for Struct {
fn bar(&self) -> i32 {
let three_squared = fun_name();
let three_squared = three_squared();
self.0 + three_squared
}
}
fn $0fun_name() -> i32 {
fn $0three_squared() -> i32 {
3 * 3
}
"#,
@ -6372,6 +6404,55 @@ fn foo() {
fn $0fun_name(mut a: i32, mut b: i32) {
(a, b) = (b, a);
}
"#,
);
}
#[test]
fn with_cfg_attr() {
check_assist(
extract_function,
r#"
//- /main.rs crate:main cfg:test
#[cfg(test)]
fn foo() {
foo($01 + 1$0);
}
"#,
r#"
#[cfg(test)]
fn foo() {
foo(fun_name());
}
#[cfg(test)]
fn $0fun_name() -> i32 {
1 + 1
}
"#,
);
}
#[test]
fn with_track_caller() {
check_assist(
extract_function,
r#"
#[track_caller]
fn foo() {
foo($01 + 1$0);
}
"#,
r#"
#[track_caller]
fn foo() {
foo(fun_name());
}
#[track_caller]
fn $0fun_name() -> i32 {
1 + 1
}
"#,
);
}

View file

@ -1,8 +1,8 @@
use ide_db::{famous_defs::FamousDefs, traits::resolve_target_trait};
use syntax::{
AstNode, T,
ast::{self, edit_in_place::Indent, make},
ted,
AstNode, SyntaxElement, SyntaxNode, T,
ast::{self, edit::AstNodeEdit, edit_in_place::Indent, syntax_factory::SyntaxFactory},
syntax_editor::{Element, Position, SyntaxEditor},
};
use crate::{AssistContext, AssistId, Assists};
@ -45,12 +45,13 @@ use crate::{AssistContext, AssistId, Assists};
// }
// ```
pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let impl_def = ctx.find_node_at_offset::<ast::Impl>()?.clone_for_update();
let indent = impl_def.indent_level();
let impl_def = ctx.find_node_at_offset::<ast::Impl>()?;
let indent = Indent::indent_level(&impl_def);
let ast::Type::PathType(path) = impl_def.trait_()? else {
return None;
};
let trait_name = path.path()?.segment()?.name_ref()?;
let scope = ctx.sema.scope(impl_def.trait_()?.syntax())?;
@ -59,75 +60,133 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>
let trait_ = resolve_target_trait(&ctx.sema, &impl_def)?;
let trait_new = get_trait_mut(&trait_, famous)?;
// Index -> IndexMut
ted::replace(trait_name.syntax(), make::name_ref(trait_new).clone_for_update().syntax());
// index -> index_mut
let (trait_method_name, new_trait_method_name) = impl_def
.syntax()
.descendants()
.filter_map(ast::Name::cast)
.find_map(process_method_name)?;
ted::replace(
trait_method_name.syntax(),
make::name(new_trait_method_name).clone_for_update().syntax(),
);
if let Some(type_alias) = impl_def.syntax().descendants().find_map(ast::TypeAlias::cast) {
ted::remove(type_alias.syntax());
}
// &self -> &mut self
let mut_self_param = make::mut_self_param();
let self_param: ast::SelfParam =
impl_def.syntax().descendants().find_map(ast::SelfParam::cast)?;
ted::replace(self_param.syntax(), mut_self_param.clone_for_update().syntax());
// &Self::Output -> &mut Self::Output
let ret_type = impl_def.syntax().descendants().find_map(ast::RetType::cast)?;
let new_ret_type = process_ret_type(&ret_type)?;
ted::replace(ret_type.syntax(), make::ret_type(new_ret_type).clone_for_update().syntax());
let fn_ = impl_def.assoc_item_list()?.assoc_items().find_map(|it| match it {
ast::AssocItem::Fn(f) => Some(f),
_ => None,
})?;
let _ = process_ref_mut(&fn_);
let assoc_list = make::assoc_item_list(None).clone_for_update();
ted::replace(impl_def.assoc_item_list()?.syntax(), assoc_list.syntax());
impl_def.get_or_create_assoc_item_list().add_item(syntax::ast::AssocItem::Fn(fn_));
let target = impl_def.syntax().text_range();
acc.add(
AssistId::generate("generate_mut_trait_impl"),
format!("Generate `{trait_new}` impl from this `{trait_name}` trait"),
target,
|edit| {
edit.insert(
target.start(),
if ctx.config.snippet_cap.is_some() {
format!("$0{impl_def}\n\n{indent}")
} else {
format!("{impl_def}\n\n{indent}")
},
let impl_clone = impl_def.reset_indent().clone_subtree();
let mut editor = SyntaxEditor::new(impl_clone.syntax().clone());
let factory = SyntaxFactory::without_mappings();
apply_generate_mut_impl(&mut editor, &factory, &impl_clone, trait_new);
let new_root = editor.finish();
let new_root = new_root.new_root();
let new_impl = ast::Impl::cast(new_root.clone()).unwrap();
Indent::indent(&new_impl, indent);
let mut editor = edit.make_editor(impl_def.syntax());
editor.insert_all(
Position::before(impl_def.syntax()),
vec![
new_impl.syntax().syntax_element(),
factory.whitespace(&format!("\n\n{indent}")).syntax_element(),
],
);
if let Some(cap) = ctx.config.snippet_cap {
let tabstop_before = edit.make_tabstop_before(cap);
editor.add_annotation(new_impl.syntax(), tabstop_before);
}
edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
fn process_ref_mut(fn_: &ast::Fn) -> Option<()> {
let expr = fn_.body()?.tail_expr()?;
match &expr {
ast::Expr::RefExpr(ref_expr) if ref_expr.mut_token().is_none() => {
ted::insert_all_raw(
ted::Position::after(ref_expr.amp_token()?),
vec![make::token(T![mut]).into(), make::tokens::whitespace(" ").into()],
);
}
_ => {}
fn delete_with_trivia(editor: &mut SyntaxEditor, node: &SyntaxNode) {
let mut end: SyntaxElement = node.clone().into();
if let Some(next) = node.next_sibling_or_token()
&& let SyntaxElement::Token(tok) = &next
&& tok.kind().is_trivia()
{
end = next.clone();
}
None
editor.delete_all(node.clone().into()..=end);
}
fn apply_generate_mut_impl(
editor: &mut SyntaxEditor,
factory: &SyntaxFactory,
impl_def: &ast::Impl,
trait_new: &str,
) -> Option<()> {
let path =
impl_def.trait_().and_then(|t| t.syntax().descendants().find_map(ast::Path::cast))?;
let seg = path.segment()?;
let name_ref = seg.name_ref()?;
let new_name_ref = factory.name_ref(trait_new);
editor.replace(name_ref.syntax(), new_name_ref.syntax());
if let Some((name, new_name)) =
impl_def.syntax().descendants().filter_map(ast::Name::cast).find_map(process_method_name)
{
let new_name_node = factory.name(new_name);
editor.replace(name.syntax(), new_name_node.syntax());
}
if let Some(type_alias) = impl_def.syntax().descendants().find_map(ast::TypeAlias::cast) {
delete_with_trivia(editor, type_alias.syntax());
}
if let Some(self_param) = impl_def.syntax().descendants().find_map(ast::SelfParam::cast) {
let mut_self = factory.mut_self_param();
editor.replace(self_param.syntax(), mut_self.syntax());
}
if let Some(ret_type) = impl_def.syntax().descendants().find_map(ast::RetType::cast)
&& let Some(new_ty) = process_ret_type(factory, &ret_type)
{
let new_ret = factory.ret_type(new_ty);
editor.replace(ret_type.syntax(), new_ret.syntax())
}
if let Some(fn_) = impl_def.assoc_item_list().and_then(|l| {
l.assoc_items().find_map(|it| match it {
ast::AssocItem::Fn(f) => Some(f),
_ => None,
})
}) {
process_ref_mut(editor, factory, &fn_);
}
Some(())
}
fn process_ref_mut(editor: &mut SyntaxEditor, factory: &SyntaxFactory, fn_: &ast::Fn) {
let Some(expr) = fn_.body().and_then(|b| b.tail_expr()) else { return };
let ast::Expr::RefExpr(ref_expr) = expr else { return };
if ref_expr.mut_token().is_some() {
return;
}
let Some(amp) = ref_expr.amp_token() else { return };
let mut_kw = factory.token(T![mut]);
let space = factory.whitespace(" ");
editor.insert(Position::after(amp.clone()), space.syntax_element());
editor.insert(Position::after(amp), mut_kw.syntax_element());
}
fn process_ret_type(factory: &SyntaxFactory, ref_ty: &ast::RetType) -> Option<ast::Type> {
let ty = ref_ty.ty()?;
let ast::Type::RefType(ref_type) = ty else {
return None;
};
let inner = ref_type.ty()?;
Some(factory.ty_ref(inner, true))
}
fn get_trait_mut(apply_trait: &hir::Trait, famous: FamousDefs<'_, '_>) -> Option<&'static str> {
@ -158,14 +217,6 @@ fn process_method_name(name: ast::Name) -> Option<(ast::Name, &'static str)> {
Some((name, new_name))
}
fn process_ret_type(ref_ty: &ast::RetType) -> Option<ast::Type> {
let ty = ref_ty.ty()?;
let ast::Type::RefType(ref_type) = ty else {
return None;
};
Some(make::ty_ref(ref_type.ty()?, true))
}
#[cfg(test)]
mod tests {
use crate::{

View file

@ -290,19 +290,23 @@ impl ConstAndTypeMap {
/// ^ alias generic params
/// let a: A<100>;
/// ^ instance generic args
/// ```
///
/// generic['a] = '_ due to omission
/// generic[N] = 100 due to the instance arg
/// generic[T] = u64 due to the default param
/// ```
///
/// 2. Copy the concrete type and substitute in each found mapping:
///
/// ```ignore
/// &'_ [u64; 100]
/// ```
///
/// 3. Remove wildcard lifetimes entirely:
///
/// ```ignore
/// &[u64; 100]
/// ```
fn create_replacement(
lifetime_map: &LifetimeMap,
const_and_type_map: &ConstAndTypeMap,

View file

@ -3,7 +3,7 @@ use syntax::{
SyntaxKind::WHITESPACE,
ast::{
AstNode, BlockExpr, ElseBranch, Expr, IfExpr, MatchArm, Pat, edit::AstNodeEdit, make,
syntax_factory::SyntaxFactory,
prec::ExprPrecedence, syntax_factory::SyntaxFactory,
},
syntax_editor::Element,
};
@ -49,7 +49,7 @@ pub(crate) fn move_guard_to_arm_body(acc: &mut Assists, ctx: &AssistContext<'_>)
let guard_condition = guard.condition()?.reset_indent();
let arm_expr = match_arm.expr()?;
let then_branch = make::block_expr(None, Some(arm_expr.reset_indent().indent(1.into())));
let then_branch = crate::utils::wrap_block(&arm_expr);
let if_expr = make::expr_if(guard_condition, then_branch, None).indent(arm_expr.indent_level());
let target = guard.syntax().text_range();
@ -109,6 +109,7 @@ pub(crate) fn move_arm_cond_to_match_guard(
let match_arm: MatchArm = ctx.find_node_at_offset::<MatchArm>()?;
let match_pat = match_arm.pat()?;
let arm_body = match_arm.expr()?;
let arm_guard = match_arm.guard().and_then(|it| it.condition());
let mut replace_node = None;
let if_expr: IfExpr = IfExpr::cast(arm_body.syntax().clone()).or_else(|| {
@ -149,6 +150,25 @@ pub(crate) fn move_arm_cond_to_match_guard(
0
};
let indent_level = match_arm.indent_level();
let make_guard = |cond: Option<Expr>| {
let condition = match (arm_guard.clone(), cond) {
(None, None) => return None,
(None, Some(it)) | (Some(it), None) => it,
(Some(lhs), Some(rhs)) => {
let op_expr = |expr: Expr| {
if expr.precedence().needs_parentheses_in(ExprPrecedence::LAnd) {
make.expr_paren(expr).into()
} else {
expr
}
};
let op = syntax::ast::BinaryOp::LogicOp(syntax::ast::LogicOp::And);
let expr_bin = make.expr_bin(op_expr(lhs), op, op_expr(rhs));
expr_bin.into()
}
};
Some(make.match_guard(condition))
};
for (cond, block) in conds_blocks {
let only_expr = block.statements().next().is_none();
@ -156,8 +176,7 @@ pub(crate) fn move_arm_cond_to_match_guard(
Some(then_expr) if only_expr => then_expr,
_ => block.dedent(dedent.into()).into(),
};
let guard = make.match_guard(cond);
let new_arm = make.match_arm(match_pat.clone(), Some(guard), expr);
let new_arm = make.match_arm(match_pat.clone(), make_guard(Some(cond)), expr);
replace_arms.push(new_arm);
}
if let Some(block) = tail {
@ -170,7 +189,7 @@ pub(crate) fn move_arm_cond_to_match_guard(
}
_ => block.dedent(dedent.into()).into(),
};
let new_arm = make.match_arm(match_pat, None, expr);
let new_arm = make.match_arm(match_pat, make_guard(None), expr);
replace_arms.push(new_arm);
} else {
// There's no else branch. Add a pattern without guard, unless the following match
@ -185,7 +204,7 @@ pub(crate) fn move_arm_cond_to_match_guard(
}
_ => {
let block_expr = make.expr_empty_block().into();
replace_arms.push(make.match_arm(match_pat, None, block_expr));
replace_arms.push(make.match_arm(match_pat, make_guard(None), block_expr));
}
}
}
@ -325,6 +344,35 @@ fn main() {
);
}
#[test]
fn move_guard_to_block_arm_body_works() {
check_assist(
move_guard_to_arm_body,
r#"
fn main() {
match 92 {
x $0if x > 10 => {
let _ = true;
false
},
_ => true
}
}
"#,
r#"
fn main() {
match 92 {
x => if x > 10 {
let _ = true;
false
},
_ => true
}
}
"#,
);
}
#[test]
fn move_let_guard_to_arm_body_works() {
check_assist(
@ -376,9 +424,7 @@ fn main() {
&& true
&& true {
{
{
false
}
false
}
},
_ => true
@ -1081,6 +1127,42 @@ fn main() {
x => {}
}
}
"#,
)
}
#[test]
fn move_arm_cond_to_match_guard_elseif_exist_guard() {
check_assist(
move_arm_cond_to_match_guard,
r#"
fn main() {
let cond = true;
match 92 {
3 => true,
x if cond => if x $0> 10 {
false
} else if x > 5 {
true
} else if x > 4 || x < -2 {
false
} else {
true
},
}
}
"#,
r#"
fn main() {
let cond = true;
match 92 {
3 => true,
x if cond && x > 10 => false,
x if cond && x > 5 => true,
x if cond && (x > 4 || x < -2) => false,
x if cond => true,
}
}
"#,
)
}

View file

@ -321,6 +321,12 @@ mod tests {
);
}
#[test]
fn remove_parens_conflict_cast_before_l_angle() {
check_assist_not_applicable(remove_parentheses, r#"fn f() { _ = $0(1 as u32) << 10; }"#);
check_assist_not_applicable(remove_parentheses, r#"fn f() { _ = $0(1 as u32) < 10; }"#);
}
#[test]
fn remove_parens_double_paren_stmt() {
check_assist(

View file

@ -86,7 +86,14 @@ pub(crate) fn toggle_macro_delimiter(acc: &mut Assists, ctx: &AssistContext<'_>)
}
MacroDelims::LCur | MacroDelims::RCur => {
editor.replace(ltoken, make.token(T!['[']));
editor.replace(rtoken, make.token(T![']']));
if semicolon.is_some() || !needs_semicolon(token_tree) {
editor.replace(rtoken, make.token(T![']']));
} else {
editor.replace_with_many(
rtoken,
vec![make.token(T![']']).into(), make.token(T![;]).into()],
);
}
}
}
editor.add_mappings(make.finish_with_mappings());
@ -103,6 +110,30 @@ fn macro_semicolon(makro: &ast::MacroCall) -> Option<SyntaxToken> {
})
}
fn needs_semicolon(tt: ast::TokenTree) -> bool {
(|| {
let call = ast::MacroCall::cast(tt.syntax().parent()?)?;
let container = call.syntax().parent()?;
let kind = container.kind();
if call.semicolon_token().is_some() {
return Some(false);
}
Some(
ast::ItemList::can_cast(kind)
|| ast::SourceFile::can_cast(kind)
|| ast::AssocItemList::can_cast(kind)
|| ast::ExternItemList::can_cast(kind)
|| ast::MacroItems::can_cast(kind)
|| ast::MacroExpr::can_cast(kind)
&& ast::ExprStmt::cast(container.parent()?)
.is_some_and(|it| it.semicolon_token().is_none()),
)
})()
.unwrap_or(false)
}
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
@ -161,7 +192,7 @@ macro_rules! sth {
() => {};
}
sth!$0{ };
sth!$0{ }
"#,
r#"
macro_rules! sth {
@ -170,7 +201,117 @@ macro_rules! sth {
sth![ ];
"#,
)
);
check_assist(
toggle_macro_delimiter,
r#"
macro_rules! sth {
() => {};
}
fn foo() -> i32 {
sth!$0{ }
2
}
"#,
r#"
macro_rules! sth {
() => {};
}
fn foo() -> i32 {
sth![ ];
2
}
"#,
);
check_assist(
toggle_macro_delimiter,
r#"
macro_rules! sth {
() => {2};
}
fn foo() {
sth!$0{ };
}
"#,
r#"
macro_rules! sth {
() => {2};
}
fn foo() {
sth![ ];
}
"#,
);
check_assist(
toggle_macro_delimiter,
r#"
macro_rules! sth {
() => {2};
}
fn foo() -> i32 {
sth!$0{ }
}
"#,
r#"
macro_rules! sth {
() => {2};
}
fn foo() -> i32 {
sth![ ]
}
"#,
);
check_assist(
toggle_macro_delimiter,
r#"
macro_rules! sth {
() => {};
}
impl () {
sth!$0{}
}
"#,
r#"
macro_rules! sth {
() => {};
}
impl () {
sth![];
}
"#,
);
check_assist(
toggle_macro_delimiter,
r#"
macro_rules! sth {
() => {2};
}
fn foo() -> i32 {
bar(sth!$0{ })
}
"#,
r#"
macro_rules! sth {
() => {2};
}
fn foo() -> i32 {
bar(sth![ ])
}
"#,
);
}
#[test]
@ -204,7 +345,7 @@ mod abc {
() => {};
}
sth!$0{ };
sth!$0{ }
}
"#,
r#"

View file

@ -1,10 +1,12 @@
use syntax::{
AstNode, SyntaxKind, T, TextRange,
AstNode, SyntaxElement, SyntaxKind, SyntaxNode, T,
ast::{
self,
edit::{AstNodeEdit, IndentLevel},
make,
},
match_ast,
syntax_editor::{Element, Position, SyntaxEditor},
};
use crate::{AssistContext, AssistId, Assists};
@ -27,123 +29,108 @@ use crate::{AssistContext, AssistId, Assists};
// }
// ```
pub(crate) fn unwrap_block(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let assist_id = AssistId::refactor_rewrite("unwrap_block");
let assist_label = "Unwrap block";
let l_curly_token = ctx.find_token_syntax_at_offset(T!['{'])?;
let mut block = ast::BlockExpr::cast(l_curly_token.parent_ancestors().nth(1)?)?;
let block = l_curly_token.parent_ancestors().nth(1).and_then(ast::BlockExpr::cast)?;
let target = block.syntax().text_range();
let mut parent = block.syntax().parent()?;
if ast::MatchArm::can_cast(parent.kind()) {
parent = parent.ancestors().find(|it| ast::MatchExpr::can_cast(it.kind()))?
}
let mut container = block.syntax().clone();
let mut replacement = block.clone();
let mut prefer_container = None;
let kind = parent.kind();
if matches!(kind, SyntaxKind::STMT_LIST | SyntaxKind::EXPR_STMT) {
acc.add(assist_id, assist_label, target, |builder| {
builder.replace(block.syntax().text_range(), update_expr_string(block.to_string()));
})
} else if matches!(kind, SyntaxKind::LET_STMT) {
let parent = ast::LetStmt::cast(parent)?;
let pattern = ast::Pat::cast(parent.syntax().first_child()?)?;
let ty = parent.ty();
let list = block.stmt_list()?;
let replaced = match list.syntax().last_child() {
Some(last) => {
let stmts: Vec<ast::Stmt> = list.statements().collect();
let initializer = ast::Expr::cast(last)?;
let let_stmt = make::let_stmt(pattern, ty, Some(initializer));
if !stmts.is_empty() {
let block = make::block_expr(stmts, None);
format!("{}\n {}", update_expr_string(block.to_string()), let_stmt)
} else {
let_stmt.to_string()
}
}
None => {
let empty_tuple = make::ext::expr_unit();
make::let_stmt(pattern, ty, Some(empty_tuple)).to_string()
}
};
acc.add(assist_id, assist_label, target, |builder| {
builder.replace(parent.syntax().text_range(), replaced);
})
} else {
let parent = ast::Expr::cast(parent)?;
match parent.clone() {
ast::Expr::ForExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::LoopExpr(_) => (),
ast::Expr::MatchExpr(_) => block = block.dedent(IndentLevel(1)),
ast::Expr::IfExpr(if_expr) => {
let then_branch = if_expr.then_branch()?;
if then_branch == block {
if let Some(ancestor) = if_expr.syntax().parent().and_then(ast::IfExpr::cast) {
// For `else if` blocks
let ancestor_then_branch = ancestor.then_branch()?;
return acc.add(assist_id, assist_label, target, |edit| {
let range_to_del_else_if = TextRange::new(
ancestor_then_branch.syntax().text_range().end(),
l_curly_token.text_range().start(),
);
let range_to_del_rest = TextRange::new(
then_branch.syntax().text_range().end(),
if_expr.syntax().text_range().end(),
);
edit.delete(range_to_del_rest);
edit.delete(range_to_del_else_if);
edit.replace(
target,
update_expr_string_without_newline(then_branch.to_string()),
);
});
}
} else {
return acc.add(assist_id, assist_label, target, |edit| {
let range_to_del = TextRange::new(
then_branch.syntax().text_range().end(),
l_curly_token.text_range().start(),
);
edit.delete(range_to_del);
edit.replace(target, update_expr_string_without_newline(block.to_string()));
let from_indent = block.indent_level();
let into_indent = loop {
let parent = container.parent()?;
container = match_ast! {
match parent {
ast::ForExpr(it) => it.syntax().clone(),
ast::LoopExpr(it) => it.syntax().clone(),
ast::WhileExpr(it) => it.syntax().clone(),
ast::MatchArm(it) => it.parent_match().syntax().clone(),
ast::LetStmt(it) => {
replacement = wrap_let(&it, replacement);
prefer_container = Some(it.syntax().clone());
it.syntax().clone()
},
ast::IfExpr(it) => {
prefer_container.get_or_insert_with(|| {
if let Some(else_branch) = it.else_branch()
&& *else_branch.syntax() == container
{
else_branch.syntax().clone()
} else {
it.syntax().clone()
}
});
}
it.syntax().clone()
},
ast::ExprStmt(it) => it.syntax().clone(),
ast::StmtList(it) => break it.indent_level(),
_ => return None,
}
_ => return None,
};
};
let replacement = replacement.stmt_list()?;
acc.add(assist_id, assist_label, target, |builder| {
builder.replace(parent.syntax().text_range(), update_expr_string(block.to_string()));
})
}
acc.add(AssistId::refactor_rewrite("unwrap_block"), "Unwrap block", target, |builder| {
let mut edit = builder.make_editor(block.syntax());
let replacement = replacement.dedent(from_indent).indent(into_indent);
let container = prefer_container.unwrap_or(container);
edit.replace_with_many(&container, extract_statements(replacement));
delete_else_before(container, &mut edit);
builder.add_file_edits(ctx.vfs_file_id(), edit);
})
}
fn update_expr_string(expr_string: String) -> String {
update_expr_string_with_pat(expr_string, &[' ', '\n'])
fn delete_else_before(container: SyntaxNode, edit: &mut SyntaxEditor) {
let Some(else_token) = container
.siblings_with_tokens(syntax::Direction::Prev)
.skip(1)
.map_while(|it| it.into_token())
.find(|it| it.kind() == T![else])
else {
return;
};
itertools::chain(else_token.prev_token(), else_token.next_token())
.filter(|it| it.kind() == SyntaxKind::WHITESPACE)
.for_each(|it| edit.delete(it));
let indent = IndentLevel::from_node(&container);
let newline = make::tokens::whitespace(&format!("\n{indent}"));
edit.replace(else_token, newline);
}
fn update_expr_string_without_newline(expr_string: String) -> String {
update_expr_string_with_pat(expr_string, &[' '])
fn wrap_let(assign: &ast::LetStmt, replacement: ast::BlockExpr) -> ast::BlockExpr {
let try_wrap_assign = || {
let initializer = assign.initializer()?.syntax().syntax_element();
let replacement = replacement.clone_subtree();
let assign = assign.clone_for_update();
let tail_expr = replacement.tail_expr()?;
let before =
assign.syntax().children_with_tokens().take_while(|it| *it != initializer).collect();
let after = assign
.syntax()
.children_with_tokens()
.skip_while(|it| *it != initializer)
.skip(1)
.collect();
let mut edit = SyntaxEditor::new(replacement.syntax().clone());
edit.insert_all(Position::before(tail_expr.syntax()), before);
edit.insert_all(Position::after(tail_expr.syntax()), after);
ast::BlockExpr::cast(edit.finish().new_root().clone())
};
try_wrap_assign().unwrap_or(replacement)
}
fn update_expr_string_with_pat(expr_str: String, whitespace_pat: &[char]) -> String {
// Remove leading whitespace, index to remove the leading '{',
// then continue to remove leading whitespace.
// We cannot assume the `{` is the first character because there are block modifiers
// (`unsafe`, `async` etc.).
let after_open_brace_index = expr_str.find('{').map_or(0, |it| it + 1);
let expr_str = expr_str[after_open_brace_index..].trim_start_matches(whitespace_pat);
// Remove trailing whitespace, index [..expr_str.len() - 1] to remove the trailing '}',
// then continue to remove trailing whitespace.
let expr_str = expr_str.trim_end_matches(whitespace_pat);
let expr_str = expr_str[..expr_str.len() - 1].trim_end_matches(whitespace_pat);
expr_str
.lines()
.map(|line| line.replacen(" ", "", 1)) // Delete indentation
.collect::<Vec<String>>()
.join("\n")
fn extract_statements(stmt_list: ast::StmtList) -> Vec<SyntaxElement> {
let mut elements = stmt_list
.syntax()
.children_with_tokens()
.filter(|it| !matches!(it.kind(), T!['{'] | T!['}']))
.skip_while(|it| it.kind() == SyntaxKind::WHITESPACE)
.collect::<Vec<_>>();
while elements.pop_if(|it| it.kind() == SyntaxKind::WHITESPACE).is_some() {}
elements
}
#[cfg(test)]
@ -593,6 +580,30 @@ fn main() {
);
}
#[test]
fn unwrap_match_arm_in_let() {
check_assist(
unwrap_block,
r#"
fn main() {
let value = match rel_path {
Ok(rel_path) => {$0
let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
Some((*id, rel_path))
}
Err(_) => None,
};
}
"#,
r#"
fn main() {
let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
let value = Some((*id, rel_path));
}
"#,
);
}
#[test]
fn simple_if_in_while_bad_cursor_position() {
check_assist_not_applicable(
@ -750,19 +761,6 @@ fn main() -> i32 {
check_assist(
unwrap_block,
r#"
fn main() {
let x = {$0};
}
"#,
r#"
fn main() {
let x = ();
}
"#,
);
check_assist(
unwrap_block,
r#"
fn main() {
let x = {$0
bar
@ -784,8 +782,7 @@ fn main() -> i32 {
"#,
r#"
fn main() -> i32 {
1;
let _ = 2;
1; let _ = 2;
}
"#,
);
@ -795,11 +792,29 @@ fn main() -> i32 {
fn main() -> i32 {
let mut a = {$01; 2};
}
"#,
r#"
fn main() -> i32 {
1; let mut a = 2;
}
"#,
);
check_assist(
unwrap_block,
r#"
fn main() -> i32 {
let mut a = {$0
1;
2;
3
};
}
"#,
r#"
fn main() -> i32 {
1;
let mut a = 2;
2;
let mut a = 3;
}
"#,
);

View file

@ -86,6 +86,17 @@ pub fn extract_trivial_expression(block_expr: &ast::BlockExpr) -> Option<ast::Ex
None
}
pub(crate) fn wrap_block(expr: &ast::Expr) -> ast::BlockExpr {
if let ast::Expr::BlockExpr(block) = expr
&& let Some(first) = block.syntax().first_token()
&& first.kind() == T!['{']
{
block.reset_indent()
} else {
make::block_expr(None, Some(expr.reset_indent().indent(1.into())))
}
}
/// This is a method with a heuristics to support test methods annotated with custom test annotations, such as
/// `#[test_case(...)]`, `#[tokio::test]` and similar.
/// Also a regular `#[test]` annotation is supported.

View file

@ -340,7 +340,7 @@ pub(crate) fn complete_expr_path(
let missing_fields =
ctx.sema.record_literal_missing_fields(record_expr);
if !missing_fields.is_empty() {
add_default_update(acc, ctx, ty);
add_default_update(acc, ctx, ty.as_ref());
}
}
};

View file

@ -17,7 +17,7 @@ pub(crate) fn complete_extern_crate(acc: &mut Completions, ctx: &CompletionConte
}
let mut item = CompletionItem::new(
CompletionItemKind::SymbolKind(SymbolKind::Module),
CompletionItemKind::SymbolKind(SymbolKind::CrateRoot),
ctx.source_range(),
name.display_no_db(ctx.edition).to_smolstr(),
ctx.edition,
@ -48,7 +48,7 @@ mod other_mod {}
let completion_list = completion_list_no_kw(case);
assert_eq!("md other_crate_a\n".to_owned(), completion_list);
assert_eq!("cr other_crate_a\n".to_owned(), completion_list);
}
#[test]
@ -68,6 +68,6 @@ mod other_mod {}
let completion_list = completion_list_no_kw(case);
assert_eq!("md other_crate_a\n".to_owned(), completion_list);
assert_eq!("cr other_crate_a\n".to_owned(), completion_list);
}
}

View file

@ -36,7 +36,7 @@ pub(crate) fn complete_record_pattern_fields(
true => return,
}
}
_ => ctx.sema.record_pattern_missing_fields(record_pat),
_ => ctx.sema.record_pattern_matched_fields(record_pat),
};
complete_fields(acc, ctx, missing_fields);
}
@ -69,14 +69,14 @@ pub(crate) fn complete_record_expr_fields(
}
}
_ => {
let missing_fields = ctx.sema.record_literal_missing_fields(record_expr);
let suggest_fields = ctx.sema.record_literal_matched_fields(record_expr);
let update_exists = record_expr
.record_expr_field_list()
.is_some_and(|list| list.dotdot_token().is_some());
if !missing_fields.is_empty() && !update_exists {
if !suggest_fields.is_empty() && !update_exists {
cov_mark::hit!(functional_update_field);
add_default_update(acc, ctx, ty);
add_default_update(acc, ctx, ty.as_ref());
}
if dot_prefix {
cov_mark::hit!(functional_update_one_dot);
@ -90,7 +90,7 @@ pub(crate) fn complete_record_expr_fields(
item.add_to(acc, ctx.db);
return;
}
missing_fields
suggest_fields
}
};
complete_fields(acc, ctx, missing_fields);
@ -99,11 +99,11 @@ pub(crate) fn complete_record_expr_fields(
pub(crate) fn add_default_update(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
ty: Option<hir::TypeInfo<'_>>,
ty: Option<&hir::TypeInfo<'_>>,
) {
let default_trait = ctx.famous_defs().core_default_Default();
let impls_default_trait = default_trait
.zip(ty.as_ref())
.zip(ty)
.is_some_and(|(default_trait, ty)| ty.original.impls_trait(ctx.db, default_trait, &[]));
if impls_default_trait {
// FIXME: This should make use of scope_def like completions so we get all the other goodies

View file

@ -628,7 +628,7 @@ impl CompletionContext<'_> {
}
/// A version of [`SemanticsScope::process_all_names`] that filters out `#[doc(hidden)]` items and
/// passes all doc-aliases along, to funnel it into [`Completions::add_path_resolution`].
/// passes all doc-aliases along, to funnel it into `Completions::add_path_resolution`.
pub(crate) fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef, Vec<SmolStr>)) {
let _p = tracing::info_span!("CompletionContext::process_all_names").entered();
self.scope.process_all_names(&mut |name, def| {

View file

@ -1250,6 +1250,11 @@ fn classify_name_ref<'db>(
let original = ast::Const::cast(name.syntax().parent()?)?;
TypeLocation::TypeAscription(TypeAscriptionTarget::Const(original.body()))
},
ast::Static(it) => {
let name = find_opt_node_in_file(original_file, it.name())?;
let original = ast::Static::cast(name.syntax().parent()?)?;
TypeLocation::TypeAscription(TypeAscriptionTarget::Const(original.body()))
},
ast::RetType(it) => {
it.thin_arrow_token()?;
let parent = match ast::Fn::cast(parent.parent()?) {
@ -1305,14 +1310,14 @@ fn classify_name_ref<'db>(
let make_path_kind_expr = |expr: ast::Expr| {
let it = expr.syntax();
let prev_token = iter::successors(it.first_token(), |it| it.prev_token())
.skip(1)
.find(|it| !it.kind().is_trivia());
let in_block_expr = is_in_block(it);
let (in_loop_body, innermost_breakable) = is_in_breakable(it).unzip();
let after_if_expr = is_after_if_expr(it.clone());
let ref_expr_parent =
path.as_single_name_ref().and_then(|_| it.parent()).and_then(ast::RefExpr::cast);
let after_amp = non_trivia_sibling(it.clone().into(), Direction::Prev)
.map(|it| it.kind() == SyntaxKind::AMP)
.unwrap_or(false);
let after_amp = prev_token.as_ref().is_some_and(|it| it.kind() == SyntaxKind::AMP);
let ref_expr_parent = prev_token.and_then(|it| it.parent()).and_then(ast::RefExpr::cast);
let (innermost_ret_ty, self_param) = {
let find_ret_ty = |it: SyntaxNode| {
if let Some(item) = ast::Item::cast(it.clone()) {
@ -2030,9 +2035,10 @@ fn is_after_if_expr(node: SyntaxNode) -> bool {
Some(stmt) => stmt.syntax().clone(),
None => node,
};
let prev_sibling =
non_trivia_sibling(node.into(), Direction::Prev).and_then(NodeOrToken::into_node);
iter::successors(prev_sibling, |it| it.last_child_or_token()?.into_node())
let Some(prev_token) = previous_non_trivia_token(node) else { return false };
prev_token
.parent_ancestors()
.take_while(|it| it.text_range().end() == prev_token.text_range().end())
.find_map(ast::IfExpr::cast)
.is_some()
}

View file

@ -381,6 +381,7 @@ impl CompletionItemKind {
SymbolKind::BuiltinAttr => "ba",
SymbolKind::Const => "ct",
SymbolKind::ConstParam => "cp",
SymbolKind::CrateRoot => "cr",
SymbolKind::Derive => "de",
SymbolKind::DeriveHelper => "dh",
SymbolKind::Enum => "en",

View file

@ -706,7 +706,30 @@ fn completes_after_ref_expr() {
kw while
kw while let
"#]],
)
);
check(
r#"fn main() { let _ = &$0x.foo() }"#,
expect![[r#"
fn main() fn()
bt u32 u32
kw const
kw crate::
kw false
kw for
kw if
kw if let
kw loop
kw match
kw mut
kw raw
kw return
kw self::
kw true
kw unsafe
kw while
kw while let
"#]],
);
}
#[test]
@ -2159,6 +2182,32 @@ fn foo() { match () { () => if foo {} $0, _ => (), } }
kw ref
"#]],
);
check(
r#"
fn foo() -> (i32, i32) { if foo {} el$0 (2, 3) }
"#,
expect![[r#"
fn foo fn() -> (i32, i32)
bt u32 u32
kw const
kw crate::
kw else
kw else if
kw false
kw for
kw if
kw if let
kw loop
kw match
kw return
kw self::
kw true
kw unsafe
kw while
kw while let
ex foo()
"#]],
);
// FIXME: support else completion after ast::RecordExprField
}

Some files were not shown because too many files have changed in this diff Show more