internal: Include private definitions in generated rustdoc
rust-analyzer has handy prebuilt `cargo doc` output at https://rust-lang.github.io/rust-analyzer/ide/. However, it doesn't include private definitions, which makes it less useful when trying to learn unfamiliar parts of the codebase. Instead, pass `--document-private-items` so the HTML includes information on private types and modules too. rustdoc renders these with a padlock icon, so it's still clear that they're private. This change also exposes some more rustdoc warnings, which I've fixed.
This commit is contained in:
parent
d87c468b42
commit
e52695c3fc
21 changed files with 32 additions and 28 deletions
|
|
@ -24,7 +24,7 @@ jobs:
|
|||
run: rustup update --no-self-update stable
|
||||
|
||||
- name: Build Documentation
|
||||
run: cargo doc --all --no-deps
|
||||
run: cargo doc --all --no-deps --document-private-items
|
||||
|
||||
- name: Deploy Docs
|
||||
uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
|
||||
|
|
|
|||
|
|
@ -216,7 +216,7 @@ struct DefMapCrateData {
|
|||
registered_tools: Vec<Symbol>,
|
||||
/// Unstable features of Rust enabled with `#![feature(A, B)]`.
|
||||
unstable_features: FxHashSet<Symbol>,
|
||||
/// #[rustc_coherence_is_core]
|
||||
/// `#[rustc_coherence_is_core]`
|
||||
rustc_coherence_is_core: bool,
|
||||
no_core: bool,
|
||||
no_std: bool,
|
||||
|
|
|
|||
|
|
@ -115,7 +115,7 @@ fn dummy_gate_test_expand(
|
|||
/// wasting a lot of memory, and it would also require some way to use a path in a way that makes it
|
||||
/// always resolve as a derive without nameres recollecting them.
|
||||
/// So this hacky approach is a lot more friendly for us, though it does require a bit of support in
|
||||
/// [`hir::Semantics`] to make this work.
|
||||
/// hir::Semantics to make this work.
|
||||
fn derive_expand(
|
||||
db: &dyn ExpandDatabase,
|
||||
id: MacroCallId,
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
//! Processes out #[cfg] and #[cfg_attr] attributes from the input for the derive macro
|
||||
//! Processes out `#[cfg]` and `#[cfg_attr]` attributes from the input for the derive macro
|
||||
use std::{cell::OnceCell, ops::ControlFlow};
|
||||
|
||||
use ::tt::TextRange;
|
||||
|
|
|
|||
|
|
@ -466,7 +466,7 @@ impl<'db> InferenceContext<'_, 'db> {
|
|||
}
|
||||
|
||||
/// Given an `FnOnce::Output` or `AsyncFn::Output` projection, extract the args
|
||||
/// and return type to infer a [`ty::PolyFnSig`] for the closure.
|
||||
/// and return type to infer a `PolyFnSig` for the closure.
|
||||
fn extract_sig_from_projection(
|
||||
&self,
|
||||
projection: PolyProjectionPredicate<'db>,
|
||||
|
|
|
|||
|
|
@ -206,11 +206,11 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Used by [FnCtxt::lookup_method_for_operator] with `-Znext-solver`.
|
||||
/// Used by `FnCtxt::lookup_method_for_operator` with `-Znext-solver`.
|
||||
///
|
||||
/// With `AsRigid` we error on `impl Opaque: NotInItemBounds` while
|
||||
/// `AsInfer` just treats it as ambiguous and succeeds. This is necessary
|
||||
/// as we want [FnCtxt::check_expr_call] to treat not-yet-defined opaque
|
||||
/// as we want `FnCtxt::check_expr_call` to treat not-yet-defined opaque
|
||||
/// types as rigid to support `impl Deref<Target = impl FnOnce()>` and
|
||||
/// `Box<impl FnOnce()>`.
|
||||
///
|
||||
|
|
|
|||
|
|
@ -1740,7 +1740,7 @@ impl<'a, 'db, Choice: ProbeChoice<'db>> ProbeContext<'a, 'db, Choice> {
|
|||
/// We want to only accept trait methods if they were hold even if the
|
||||
/// opaque types were rigid. To handle this, we both check that for trait
|
||||
/// candidates the goal were to hold even when treating opaques as rigid,
|
||||
/// see [OpaqueTypesJank](rustc_trait_selection::solve::OpaqueTypesJank).
|
||||
/// see `rustc_trait_selection::solve::OpaqueTypesJank`.
|
||||
///
|
||||
/// We also check that all opaque types encountered as self types in the
|
||||
/// autoderef chain don't get constrained when applying the candidate.
|
||||
|
|
|
|||
|
|
@ -140,7 +140,7 @@ pub struct InferCtxtInner<'db> {
|
|||
///
|
||||
/// Before running `resolve_regions_and_report_errors`, the creator
|
||||
/// of the inference context is expected to invoke
|
||||
/// [`InferCtxt::process_registered_region_obligations`]
|
||||
/// `InferCtxt::process_registered_region_obligations`
|
||||
/// for each body-id in this map, which will process the
|
||||
/// obligations within. This is expected to be done 'late enough'
|
||||
/// that all type inference variables have been bound and so forth.
|
||||
|
|
|
|||
|
|
@ -172,7 +172,7 @@ impl<'db> LookupTable<'db> {
|
|||
/// Insert new type trees for type
|
||||
///
|
||||
/// Note that the types have to be the same, unification is not enough as unification is not
|
||||
/// transitive. For example Vec<i32> and FxHashSet<i32> both unify with Iterator<Item = i32>,
|
||||
/// transitive. For example `Vec<i32>` and `FxHashSet<i32>` both unify with `Iterator<Item = i32>`,
|
||||
/// but they clearly do not unify themselves.
|
||||
fn insert(&mut self, ty: Type<'db>, exprs: impl Iterator<Item = Expr<'db>>) {
|
||||
match self.data.get_mut(&ty) {
|
||||
|
|
|
|||
|
|
@ -290,19 +290,23 @@ impl ConstAndTypeMap {
|
|||
/// ^ alias generic params
|
||||
/// let a: A<100>;
|
||||
/// ^ instance generic args
|
||||
/// ```
|
||||
///
|
||||
/// generic['a] = '_ due to omission
|
||||
/// generic[N] = 100 due to the instance arg
|
||||
/// generic[T] = u64 due to the default param
|
||||
/// ```
|
||||
///
|
||||
/// 2. Copy the concrete type and substitute in each found mapping:
|
||||
///
|
||||
/// ```ignore
|
||||
/// &'_ [u64; 100]
|
||||
/// ```
|
||||
///
|
||||
/// 3. Remove wildcard lifetimes entirely:
|
||||
///
|
||||
/// ```ignore
|
||||
/// &[u64; 100]
|
||||
/// ```
|
||||
fn create_replacement(
|
||||
lifetime_map: &LifetimeMap,
|
||||
const_and_type_map: &ConstAndTypeMap,
|
||||
|
|
|
|||
|
|
@ -628,7 +628,7 @@ impl CompletionContext<'_> {
|
|||
}
|
||||
|
||||
/// A version of [`SemanticsScope::process_all_names`] that filters out `#[doc(hidden)]` items and
|
||||
/// passes all doc-aliases along, to funnel it into [`Completions::add_path_resolution`].
|
||||
/// passes all doc-aliases along, to funnel it into `Completions::add_path_resolution`.
|
||||
pub(crate) fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef, Vec<SmolStr>)) {
|
||||
let _p = tracing::info_span!("CompletionContext::process_all_names").entered();
|
||||
self.scope.process_all_names(&mut |name, def| {
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@
|
|||
//! each submodule starts with `use super::*` import and exports
|
||||
//! "public" productions via `pub(super)`.
|
||||
//!
|
||||
//! See docs for [`Parser`](super::parser::Parser) to learn about API,
|
||||
//! See docs for [`Parser`] to learn about API,
|
||||
//! available to the grammar, and see docs for [`Event`](super::event::Event)
|
||||
//! to learn how this actually manages to produce parse trees.
|
||||
//!
|
||||
|
|
|
|||
|
|
@ -640,7 +640,7 @@ impl FetchMetadata {
|
|||
/// Builds a command to fetch metadata for the given `cargo_toml` manifest.
|
||||
///
|
||||
/// Performs a lightweight pre-fetch using the `--no-deps` option,
|
||||
/// available via [`FetchMetadata::no_deps_metadata`], to gather basic
|
||||
/// available via `FetchMetadata::no_deps_metadata`, to gather basic
|
||||
/// information such as the `target-dir`.
|
||||
///
|
||||
/// The provided sysroot is used to set the `RUSTUP_TOOLCHAIN`
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ use serde_json::{Value, json};
|
|||
|
||||
/// This function patches the json config to the new expected keys.
|
||||
/// That is we try to load old known config keys here and convert them to the new ones.
|
||||
/// See https://github.com/rust-lang/rust-analyzer/pull/12010
|
||||
/// See <https://github.com/rust-lang/rust-analyzer/pull/12010>
|
||||
///
|
||||
/// We already have an alias system for simple cases, but if we make structural changes
|
||||
/// the alias infra fails down.
|
||||
|
|
|
|||
|
|
@ -42,7 +42,7 @@ impl DiscoverCommand {
|
|||
Self { sender, command }
|
||||
}
|
||||
|
||||
/// Spawn the command inside [Discover] and report progress, if any.
|
||||
/// Spawn the command inside `DiscoverCommand` and report progress, if any.
|
||||
pub(crate) fn spawn(
|
||||
&self,
|
||||
discover_arg: DiscoverArgument,
|
||||
|
|
@ -73,7 +73,7 @@ impl DiscoverCommand {
|
|||
}
|
||||
}
|
||||
|
||||
/// A handle to a spawned [Discover].
|
||||
/// A handle to a spawned `DiscoverCommand`.
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct DiscoverHandle {
|
||||
pub(crate) handle: CommandHandle<DiscoverProjectMessage>,
|
||||
|
|
|
|||
|
|
@ -188,7 +188,7 @@ pub(crate) struct GlobalState {
|
|||
/// been called.
|
||||
pub(crate) deferred_task_queue: DeferredTaskQueue,
|
||||
|
||||
/// HACK: Workaround for https://github.com/rust-lang/rust-analyzer/issues/19709
|
||||
/// HACK: Workaround for <https://github.com/rust-lang/rust-analyzer/issues/19709>
|
||||
/// This is marked true if we failed to load a crate root file at crate graph creation,
|
||||
/// which will usually end up causing a bunch of incorrect diagnostics on startup.
|
||||
pub(crate) incomplete_crate_graph: bool,
|
||||
|
|
|
|||
|
|
@ -101,7 +101,7 @@ impl RequestDispatcher<'_> {
|
|||
}
|
||||
|
||||
/// Dispatches a non-latency-sensitive request onto the thread pool. When the VFS is marked not
|
||||
/// ready this will return a default constructed [`R::Result`].
|
||||
/// ready this will return a default constructed `R::Result`.
|
||||
pub(crate) fn on<const ALLOW_RETRYING: bool, R>(
|
||||
&mut self,
|
||||
f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>,
|
||||
|
|
@ -128,7 +128,7 @@ impl RequestDispatcher<'_> {
|
|||
}
|
||||
|
||||
/// Dispatches a non-latency-sensitive request onto the thread pool. When the VFS is marked not
|
||||
/// ready this will return a `default` constructed [`R::Result`].
|
||||
/// ready this will return a `default` constructed `R::Result`.
|
||||
pub(crate) fn on_with_vfs_default<R>(
|
||||
&mut self,
|
||||
f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>,
|
||||
|
|
@ -176,7 +176,7 @@ impl RequestDispatcher<'_> {
|
|||
}
|
||||
|
||||
/// Dispatches a latency-sensitive request onto the thread pool. When the VFS is marked not
|
||||
/// ready this will return a default constructed [`R::Result`].
|
||||
/// ready this will return a default constructed `R::Result`.
|
||||
pub(crate) fn on_latency_sensitive<const ALLOW_RETRYING: bool, R>(
|
||||
&mut self,
|
||||
f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>,
|
||||
|
|
|
|||
|
|
@ -52,7 +52,7 @@ impl<T> TaskPool<T> {
|
|||
/// `DeferredTaskQueue` holds deferred tasks.
|
||||
///
|
||||
/// These are tasks that must be run after
|
||||
/// [`GlobalState::process_changes`] has been called.
|
||||
/// `GlobalState::process_changes` has been called.
|
||||
pub(crate) struct DeferredTaskQueue {
|
||||
pub(crate) sender: crossbeam_channel::Sender<DeferredTask>,
|
||||
pub(crate) receiver: crossbeam_channel::Receiver<DeferredTask>,
|
||||
|
|
|
|||
|
|
@ -8,9 +8,9 @@
|
|||
//!
|
||||
//! # The Expansion Order Hierarchy
|
||||
//!
|
||||
//! `ExpnData` in rustc, rust-analyzer's version is [`MacroCallLoc`]. Traversing the hierarchy
|
||||
//! upwards can be achieved by walking up [`MacroCallLoc::kind`]'s contained file id, as
|
||||
//! [`MacroFile`]s are interned [`MacroCallLoc`]s.
|
||||
//! `ExpnData` in rustc, rust-analyzer's version is `MacroCallLoc`. Traversing the hierarchy
|
||||
//! upwards can be achieved by walking up `MacroCallLoc::kind`'s contained file id, as
|
||||
//! `MacroFile`s are interned `MacroCallLoc`s.
|
||||
//!
|
||||
//! # The Macro Definition Hierarchy
|
||||
//!
|
||||
|
|
@ -18,7 +18,7 @@
|
|||
//!
|
||||
//! # The Call-site Hierarchy
|
||||
//!
|
||||
//! `ExpnData::call_site` in rustc, [`MacroCallLoc::call_site`] in rust-analyzer.
|
||||
//! `ExpnData::call_site` in rustc, `MacroCallLoc::call_site` in rust-analyzer.
|
||||
use crate::Edition;
|
||||
use std::fmt;
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
//! Maps syntax elements through disjoint syntax nodes.
|
||||
//!
|
||||
//! [`SyntaxMappingBuilder`] should be used to create mappings to add to a [`SyntaxEditor`]
|
||||
//! [`SyntaxMappingBuilder`] should be used to create mappings to add to a `SyntaxEditor`
|
||||
|
||||
use itertools::Itertools;
|
||||
use rustc_hash::FxHashMap;
|
||||
|
|
|
|||
|
|
@ -207,7 +207,7 @@ impl LineIndex {
|
|||
}
|
||||
}
|
||||
|
||||
/// This is adapted from the rustc_span crate, https://github.com/rust-lang/rust/blob/de59844c98f7925242a798a72c59dc3610dd0e2c/compiler/rustc_span/src/analyze_source_file.rs
|
||||
/// This is adapted from the rustc_span crate, <https://github.com/rust-lang/rust/blob/de59844c98f7925242a798a72c59dc3610dd0e2c/compiler/rustc_span/src/analyze_source_file.rs>
|
||||
fn analyze_source_file(src: &str) -> (Vec<TextSize>, IntMap<u32, Box<[WideChar]>>) {
|
||||
assert!(src.len() < !0u32 as usize);
|
||||
let mut lines = vec![];
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue