Merge branch 'master' into compute-lazy-assits

# Conflicts:
#	crates/rust-analyzer/src/main_loop/handlers.rs
#	crates/rust-analyzer/src/to_proto.rs
This commit is contained in:
Mikhail Rakhmanov 2020-06-03 19:26:01 +02:00
commit 6a0083a519
50 changed files with 1191 additions and 1829 deletions

View file

@ -637,6 +637,10 @@ impl Function {
db.function_data(self.id).params.clone()
}
pub fn is_unsafe(self, db: &dyn HirDatabase) -> bool {
db.function_data(self.id).is_unsafe
}
pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink) {
let _p = profile("Function::diagnostics");
let infer = db.infer(self.id.into());
@ -1190,6 +1194,10 @@ impl Type {
)
}
pub fn is_raw_ptr(&self) -> bool {
matches!(&self.ty.value, Ty::Apply(ApplicationTy { ctor: TypeCtor::RawPtr(..), .. }))
}
pub fn contains_unknown(&self) -> bool {
return go(&self.ty.value);

View file

@ -87,12 +87,18 @@ impl Attrs {
}
pub(crate) fn new(owner: &dyn AttrsOwner, hygiene: &Hygiene) -> Attrs {
let docs = ast::CommentIter::from_syntax_node(owner.syntax()).doc_comment_text().map(
|docs_text| Attr {
input: Some(AttrInput::Literal(SmolStr::new(docs_text))),
path: ModPath::from(hir_expand::name!(doc)),
},
);
let mut attrs = owner.attrs().peekable();
let entries = if attrs.peek().is_none() {
// Avoid heap allocation
None
} else {
Some(attrs.flat_map(|ast| Attr::from_src(ast, hygiene)).collect())
Some(attrs.flat_map(|ast| Attr::from_src(ast, hygiene)).chain(docs).collect())
};
Attrs { entries }
}

View file

@ -34,6 +34,7 @@ pub struct FunctionData {
/// True if the first param is `self`. This is relevant to decide whether this
/// can be called as a method.
pub has_self_param: bool,
pub is_unsafe: bool,
pub visibility: RawVisibility,
}
@ -85,11 +86,14 @@ impl FunctionData {
ret_type
};
let is_unsafe = src.value.unsafe_token().is_some();
let vis_default = RawVisibility::default_for_container(loc.container);
let visibility =
RawVisibility::from_ast_with_default(db, vis_default, src.map(|s| s.visibility()));
let sig = FunctionData { name, params, ret_type, has_self_param, visibility, attrs };
let sig =
FunctionData { name, params, ret_type, has_self_param, is_unsafe, visibility, attrs };
Arc::new(sig)
}
}

View file

@ -29,6 +29,13 @@ impl Documentation {
Documentation(s.into())
}
pub fn from_ast<N>(node: &N) -> Option<Documentation>
where
N: ast::DocCommentsOwner + ast::AttrsOwner,
{
docs_from_ast(node)
}
pub fn as_str(&self) -> &str {
&*self.0
}
@ -70,6 +77,45 @@ impl Documentation {
}
}
pub(crate) fn docs_from_ast(node: &impl ast::DocCommentsOwner) -> Option<Documentation> {
node.doc_comment_text().map(|it| Documentation::new(&it))
/// Collects documentation for `node` from both `///` doc comments and
/// `#[doc = "..."]` attributes, merging the two sources into a single
/// `Documentation` value (comments first, then attribute text).
pub(crate) fn docs_from_ast<N>(node: &N) -> Option<Documentation>
where
    N: ast::DocCommentsOwner + ast::AttrsOwner,
{
    let comments = node.doc_comment_text();
    let attrs = expand_doc_attrs(node);
    merge_doc_comments_and_attrs(comments, attrs).map(|text| Documentation::new(&text))
}
/// Merges doc-comment text (`///`) with `#[doc = "..."]` attribute text.
///
/// When both sources are present the comment text comes first, separated
/// from the attribute text by one blank line. When only one source exists
/// it is returned unchanged; `None` means there is no documentation at all.
fn merge_doc_comments_and_attrs(
    doc_comment_text: Option<String>,
    doc_attr_text: Option<String>,
) -> Option<String> {
    match (doc_comment_text, doc_attr_text) {
        (Some(mut text), Some(attrs)) => {
            text.push_str("\n\n");
            text.push_str(&attrs);
            Some(text)
        }
        // At most one side is `Some` here, so `or` picks whichever exists.
        (comment_only, attr_only) => comment_only.or(attr_only),
    }
}
/// Concatenates the values of every `#[doc = "..."]` attribute on `owner`,
/// separating them with blank lines. Returns `None` when the node carries
/// no doc attributes at all.
fn expand_doc_attrs(owner: &dyn ast::AttrsOwner) -> Option<String> {
    let mut buf = String::new();
    for attr in owner.attrs() {
        let key_value = attr.as_simple_key_value();
        let doc_text = match key_value.as_ref() {
            Some((key, value)) if key.as_str() == "doc" => Some(value.as_str()),
            _ => None,
        };
        if let Some(text) = doc_text {
            buf.push_str(text);
            buf.push_str("\n\n");
        }
    }
    if buf.is_empty() {
        None
    } else {
        // Drop the separator appended after the final attribute value.
        Some(buf.trim_end_matches("\n\n").to_owned())
    }
}

View file

@ -153,6 +153,7 @@ pub mod known {
str,
// Special names
macro_rules,
doc,
// Components of known path (value or mod name)
std,
core,

View file

@ -125,3 +125,81 @@ pub(crate) fn completions(
Some(acc)
}
// Tests that completion items for macro-generated methods carry the correct
// detail string and documentation, whether the docs come from a
// `#[doc = "..."]` attribute or an ordinary `///` comment.
#[cfg(test)]
mod tests {
use crate::completion::completion_config::CompletionConfig;
use crate::mock_analysis::analysis_and_position;
// Expected `detail` / `documentation` pair for a single completion item.
struct DetailAndDocumentation<'a> {
detail: &'a str,
documentation: &'a str,
}
// Runs completion at the `<|>` marker in `fixture` and asserts that the
// item whose detail equals `expected.detail` has the expected docs.
// Panics if no item with that detail is produced at all.
fn check_detail_and_documentation(fixture: &str, expected: DetailAndDocumentation) {
let (analysis, position) = analysis_and_position(fixture);
let config = CompletionConfig::default();
let completions = analysis.completions(&config, position).unwrap().unwrap();
for item in completions {
if item.detail() == Some(expected.detail) {
let opt = item.documentation();
let doc = opt.as_ref().map(|it| it.as_str());
assert_eq!(doc, Some(expected.documentation));
return;
}
}
panic!("completion detail not found: {}", expected.detail)
}
#[test]
fn test_completion_detail_from_macro_generated_struct_fn_doc_attr() {
check_detail_and_documentation(
r#"
//- /lib.rs
macro_rules! bar {
() => {
struct Bar;
impl Bar {
#[doc = "Do the foo"]
fn foo(&self) {}
}
}
}
bar!();
fn foo() {
let bar = Bar;
bar.fo<|>;
}
"#,
DetailAndDocumentation { detail: "fn foo(&self)", documentation: "Do the foo" },
);
}
#[test]
fn test_completion_detail_from_macro_generated_struct_fn_doc_comment() {
check_detail_and_documentation(
r#"
//- /lib.rs
macro_rules! bar {
() => {
struct Bar;
impl Bar {
/// Do the foo
fn foo(&self) {}
}
}
}
bar!();
fn foo() {
let bar = Bar;
bar.fo<|>;
}
"#,
// NOTE(review): the expected docs keep a leading space here, unlike the
// attr-based case above — presumably an artifact of how `///` comment
// text is extracted; confirm against CommentIter::doc_comment_text.
DetailAndDocumentation { detail: "fn foo(&self)", documentation: " Do the foo" },
);
}
}

View file

@ -10,7 +10,7 @@ use std::{
use hir::{Docs, Documentation, HasSource, HirDisplay};
use ra_ide_db::RootDatabase;
use ra_syntax::ast::{self, AstNode, NameOwner, VisibilityOwner};
use stdx::SepBy;
use stdx::{split1, SepBy};
use crate::display::{generic_parameters, where_predicates};
@ -207,7 +207,16 @@ impl From<&'_ ast::FnDef> for FunctionSignature {
res.push(raw_param);
}
res.extend(param_list.params().map(|param| param.syntax().text().to_string()));
// macro-generated functions are missing whitespace
fn fmt_param(param: ast::Param) -> String {
let text = param.syntax().text().to_string();
match split1(&text, ':') {
Some((left, right)) => format!("{}: {}", left.trim(), right.trim()),
_ => text,
}
}
res.extend(param_list.params().map(fmt_param));
res_types.extend(param_list.params().map(|param| {
let param_text = param.syntax().text().to_string();
match param_text.split(':').nth(1).and_then(|it| it.get(1..)) {

View file

@ -1,8 +1,8 @@
use std::iter::once;
use hir::{
Adt, AsAssocItem, AssocItemContainer, FieldSource, HasSource, HirDisplay, ModuleDef,
ModuleSource, Semantics,
Adt, AsAssocItem, AssocItemContainer, Documentation, FieldSource, HasSource, HirDisplay,
ModuleDef, ModuleSource, Semantics,
};
use itertools::Itertools;
use ra_db::SourceDatabase;
@ -10,12 +10,7 @@ use ra_ide_db::{
defs::{classify_name, classify_name_ref, Definition},
RootDatabase,
};
use ra_syntax::{
ast::{self, DocCommentsOwner},
match_ast, AstNode,
SyntaxKind::*,
SyntaxToken, TokenAtOffset,
};
use ra_syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset};
use crate::{
display::{macro_label, rust_code_markup, rust_code_markup_with_doc, ShortLabel},
@ -169,13 +164,15 @@ fn hover_text_from_name_kind(db: &RootDatabase, def: Definition) -> Option<Strin
return match def {
Definition::Macro(it) => {
let src = it.source(db);
hover_text(src.value.doc_comment_text(), Some(macro_label(&src.value)), mod_path)
let docs = Documentation::from_ast(&src.value).map(Into::into);
hover_text(docs, Some(macro_label(&src.value)), mod_path)
}
Definition::Field(it) => {
let src = it.source(db);
match src.value {
FieldSource::Named(it) => {
hover_text(it.doc_comment_text(), it.short_label(), mod_path)
let docs = Documentation::from_ast(&it).map(Into::into);
hover_text(docs, it.short_label(), mod_path)
}
_ => None,
}
@ -183,7 +180,8 @@ fn hover_text_from_name_kind(db: &RootDatabase, def: Definition) -> Option<Strin
Definition::ModuleDef(it) => match it {
ModuleDef::Module(it) => match it.definition_source(db).value {
ModuleSource::Module(it) => {
hover_text(it.doc_comment_text(), it.short_label(), mod_path)
let docs = Documentation::from_ast(&it).map(Into::into);
hover_text(docs, it.short_label(), mod_path)
}
_ => None,
},
@ -208,10 +206,11 @@ fn hover_text_from_name_kind(db: &RootDatabase, def: Definition) -> Option<Strin
fn from_def_source<A, D>(db: &RootDatabase, def: D, mod_path: Option<String>) -> Option<String>
where
D: HasSource<Ast = A>,
A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel,
A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel + ast::AttrsOwner,
{
let src = def.source(db);
hover_text(src.value.doc_comment_text(), src.value.short_label(), mod_path)
let docs = Documentation::from_ast(&src.value).map(Into::into);
hover_text(docs, src.value.short_label(), mod_path)
}
}
@ -951,4 +950,106 @@ fn func(foo: i32) { if true { <|>foo; }; }
&["mod my"],
);
}
#[test]
fn test_hover_struct_doc_comment() {
check_hover_result(
r#"
//- /lib.rs
/// bar docs
struct Bar;
fn foo() {
let bar = Ba<|>r;
}
"#,
&["struct Bar\n```\n___\n\nbar docs"],
);
}
#[test]
fn test_hover_struct_doc_attr() {
check_hover_result(
r#"
//- /lib.rs
#[doc = "bar docs"]
struct Bar;
fn foo() {
let bar = Ba<|>r;
}
"#,
&["struct Bar\n```\n___\n\nbar docs"],
);
}
#[test]
fn test_hover_struct_doc_attr_multiple_and_mixed() {
check_hover_result(
r#"
//- /lib.rs
/// bar docs 0
#[doc = "bar docs 1"]
#[doc = "bar docs 2"]
struct Bar;
fn foo() {
let bar = Ba<|>r;
}
"#,
&["struct Bar\n```\n___\n\nbar docs 0\n\nbar docs 1\n\nbar docs 2"],
);
}
#[test]
fn test_hover_macro_generated_struct_fn_doc_comment() {
check_hover_result(
r#"
//- /lib.rs
macro_rules! bar {
() => {
struct Bar;
impl Bar {
/// Do the foo
fn foo(&self) {}
}
}
}
bar!();
fn foo() {
let bar = Bar;
bar.fo<|>o();
}
"#,
&["Bar\n```\n\n```rust\nfn foo(&self)\n```\n___\n\n Do the foo"],
);
}
#[test]
fn test_hover_macro_generated_struct_fn_doc_attr() {
check_hover_result(
r#"
//- /lib.rs
macro_rules! bar {
() => {
struct Bar;
impl Bar {
#[doc = "Do the foo"]
fn foo(&self) {}
}
}
}
bar!();
fn foo() {
let bar = Bar;
bar.fo<|>o();
}
"#,
&["Bar\n```\n\n```rust\nfn foo(&self)\n```\n___\n\nDo the foo"],
);
}
}

View file

@ -10,6 +10,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.string_literal { color: #CC9393; }
.field { color: #94BFF3; }
.function { color: #93E0E3; }
.operator.unsafe { color: #E28C14; }
.parameter { color: #94BFF3; }
.text { color: #DCDCCC; }
.type { color: #7CB8BB; }

View file

@ -10,6 +10,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.string_literal { color: #CC9393; }
.field { color: #94BFF3; }
.function { color: #93E0E3; }
.operator.unsafe { color: #E28C14; }
.parameter { color: #94BFF3; }
.text { color: #DCDCCC; }
.type { color: #7CB8BB; }

View file

@ -0,0 +1,48 @@
<style>
body { margin: 0; }
pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
.lifetime { color: #DFAF8F; font-style: italic; }
.comment { color: #7F9F7F; }
.struct, .enum { color: #7CB8BB; }
.enum_variant { color: #BDE0F3; }
.string_literal { color: #CC9393; }
.field { color: #94BFF3; }
.function { color: #93E0E3; }
.operator.unsafe { color: #E28C14; }
.parameter { color: #94BFF3; }
.text { color: #DCDCCC; }
.type { color: #7CB8BB; }
.builtin_type { color: #8CD0D3; }
.type_param { color: #DFAF8F; }
.attribute { color: #94BFF3; }
.numeric_literal { color: #BFEBBF; }
.bool_literal { color: #BFE6EB; }
.macro { color: #94BFF3; }
.module { color: #AFD8AF; }
.variable { color: #DCDCCC; }
.format_specifier { color: #CC696B; }
.mutable { text-decoration: underline; }
.keyword { color: #F0DFAF; font-weight: bold; }
.keyword.unsafe { color: #BC8383; font-weight: bold; }
.control { font-style: italic; }
</style>
<pre><code><span class="keyword unsafe">unsafe</span> <span class="keyword">fn</span> <span class="function declaration unsafe">unsafe_fn</span>() {}
<span class="keyword">struct</span> <span class="struct declaration">HasUnsafeFn</span>;
<span class="keyword">impl</span> <span class="struct">HasUnsafeFn</span> {
<span class="keyword unsafe">unsafe</span> <span class="keyword">fn</span> <span class="function declaration unsafe">unsafe_method</span>(&<span class="self_keyword">self</span>) {}
}
<span class="keyword">fn</span> <span class="function declaration">main</span>() {
<span class="keyword">let</span> <span class="variable declaration">x</span> = &<span class="numeric_literal">5</span> <span class="keyword">as</span> *<span class="keyword">const</span> <span class="builtin_type">usize</span>;
<span class="keyword unsafe">unsafe</span> {
<span class="function unsafe">unsafe_fn</span>();
<span class="struct">HasUnsafeFn</span>.<span class="function unsafe">unsafe_method</span>();
<span class="keyword">let</span> <span class="variable declaration">y</span> = <span class="operator unsafe">*</span><span class="variable">x</span>;
<span class="keyword">let</span> <span class="variable declaration">z</span> = -<span class="variable">x</span>;
}
}</code></pre>

View file

@ -10,6 +10,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.string_literal { color: #CC9393; }
.field { color: #94BFF3; }
.function { color: #93E0E3; }
.operator.unsafe { color: #E28C14; }
.parameter { color: #94BFF3; }
.text { color: #DCDCCC; }
.type { color: #7CB8BB; }

View file

@ -10,6 +10,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.string_literal { color: #CC9393; }
.field { color: #94BFF3; }
.function { color: #93E0E3; }
.operator.unsafe { color: #E28C14; }
.parameter { color: #94BFF3; }
.text { color: #DCDCCC; }
.type { color: #7CB8BB; }

View file

@ -406,6 +406,23 @@ fn highlight_element(
_ => h,
}
}
PREFIX_EXPR => {
let prefix_expr = element.into_node().and_then(ast::PrefixExpr::cast)?;
match prefix_expr.op_kind() {
Some(ast::PrefixOp::Deref) => {}
_ => return None,
}
let expr = prefix_expr.expr()?;
let ty = sema.type_of_expr(&expr)?;
if !ty.is_raw_ptr() {
return None;
}
let mut h = Highlight::new(HighlightTag::Operator);
h |= HighlightModifier::Unsafe;
h
}
k if k.is_keyword() => {
let h = Highlight::new(HighlightTag::Keyword);
@ -458,7 +475,13 @@ fn highlight_name(db: &RootDatabase, def: Definition) -> Highlight {
Definition::Field(_) => HighlightTag::Field,
Definition::ModuleDef(def) => match def {
hir::ModuleDef::Module(_) => HighlightTag::Module,
hir::ModuleDef::Function(_) => HighlightTag::Function,
hir::ModuleDef::Function(func) => {
let mut h = HighlightTag::Function.into();
if func.is_unsafe(db) {
h |= HighlightModifier::Unsafe;
}
return h;
}
hir::ModuleDef::Adt(hir::Adt::Struct(_)) => HighlightTag::Struct,
hir::ModuleDef::Adt(hir::Adt::Enum(_)) => HighlightTag::Enum,
hir::ModuleDef::Adt(hir::Adt::Union(_)) => HighlightTag::Union,

View file

@ -69,6 +69,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.string_literal { color: #CC9393; }
.field { color: #94BFF3; }
.function { color: #93E0E3; }
.operator.unsafe { color: #E28C14; }
.parameter { color: #94BFF3; }
.text { color: #DCDCCC; }
.type { color: #7CB8BB; }

View file

@ -24,12 +24,14 @@ pub enum HighlightTag {
Enum,
EnumVariant,
Field,
FormatSpecifier,
Function,
Keyword,
Lifetime,
Macro,
Module,
NumericLiteral,
Operator,
SelfKeyword,
SelfType,
Static,
@ -41,8 +43,6 @@ pub enum HighlightTag {
Union,
Local,
UnresolvedReference,
FormatSpecifier,
Operator,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
@ -72,12 +72,14 @@ impl HighlightTag {
HighlightTag::Enum => "enum",
HighlightTag::EnumVariant => "enum_variant",
HighlightTag::Field => "field",
HighlightTag::FormatSpecifier => "format_specifier",
HighlightTag::Function => "function",
HighlightTag::Keyword => "keyword",
HighlightTag::Lifetime => "lifetime",
HighlightTag::Macro => "macro",
HighlightTag::Module => "module",
HighlightTag::NumericLiteral => "numeric_literal",
HighlightTag::Operator => "operator",
HighlightTag::SelfKeyword => "self_keyword",
HighlightTag::SelfType => "self_type",
HighlightTag::Static => "static",
@ -89,8 +91,6 @@ impl HighlightTag {
HighlightTag::Union => "union",
HighlightTag::Local => "variable",
HighlightTag::UnresolvedReference => "unresolved_reference",
HighlightTag::FormatSpecifier => "format_specifier",
HighlightTag::Operator => "operator",
}
}
}

View file

@ -258,3 +258,34 @@ fn main() {
fs::write(dst_file, &actual_html).unwrap();
assert_eq_text!(expected_html, actual_html);
}
// Snapshot test for `unsafe` highlighting: unsafe fns/methods, raw-pointer
// dereferences and `unsafe` keywords should carry the unsafe highlight
// class in the generated HTML (see highlight_unsafe.html).
#[test]
fn test_unsafe_highlighting() {
let (analysis, file_id) = single_file(
r#"
unsafe fn unsafe_fn() {}
struct HasUnsafeFn;
impl HasUnsafeFn {
unsafe fn unsafe_method(&self) {}
}
fn main() {
let x = &5 as *const usize;
unsafe {
unsafe_fn();
HasUnsafeFn.unsafe_method();
let y = *x;
let z = -x;
}
}
"#
.trim(),
);
let dst_file = project_dir().join("crates/ra_ide/src/snapshots/highlight_unsafe.html");
let actual_html = &analysis.highlight_as_html(file_id, false).unwrap();
let expected_html = &read_text(&dst_file);
// The actual output is written back before asserting, so a failing run
// leaves the regenerated snapshot on disk for inspection.
fs::write(dst_file, &actual_html).unwrap();
assert_eq_text!(expected_html, actual_html);
}

View file

@ -18,9 +18,10 @@
//! // fn foo() {}
//! ```
//!
//! After adding a new inline-test, run `cargo collect-tests` to extract
//! it as a standalone text-fixture into `tests/data/parser/inline`, and
//! run `cargo test` once to create the "gold" value.
//! After adding a new inline-test, run `cargo xtask codegen` to
//! extract it as a standalone text-fixture into
//! `crates/ra_syntax/test_data/parser/`, and run `cargo test` once to
//! create the "gold" value.
//!
//! Coding convention: rules like `where_clause` always produce either a
//! node or an error, rules like `opt_where_clause` may produce nothing.

View file

@ -5,6 +5,13 @@ use std::path::PathBuf;
use rustc_hash::{FxHashMap, FxHashSet};
use serde::Deserialize;
/// Roots and crates that compose this Rust project.
#[derive(Clone, Debug, Deserialize)]
pub struct JsonProject {
pub(crate) roots: Vec<Root>,
pub(crate) crates: Vec<Crate>,
}
/// A root points to the directory which contains Rust crates. rust-analyzer watches all files in
/// all roots. Roots might be nested.
#[derive(Clone, Debug, Deserialize)]
@ -20,8 +27,17 @@ pub struct Crate {
pub(crate) root_module: PathBuf,
pub(crate) edition: Edition,
pub(crate) deps: Vec<Dep>,
// This is the preferred method of providing cfg options.
#[serde(default)]
pub(crate) cfg: FxHashSet<String>,
// These two are here for transition only.
#[serde(default)]
pub(crate) atom_cfgs: FxHashSet<String>,
#[serde(default)]
pub(crate) key_value_cfgs: FxHashMap<String, String>,
pub(crate) out_dir: Option<PathBuf>,
pub(crate) proc_macro_dylib_path: Option<PathBuf>,
}
@ -48,9 +64,72 @@ pub struct Dep {
pub(crate) name: String,
}
/// Roots and crates that compose this Rust project.
#[derive(Clone, Debug, Deserialize)]
pub struct JsonProject {
pub(crate) roots: Vec<Root>,
pub(crate) crates: Vec<Crate>,
// Deserialization tests for `Crate` from rust-project.json: the preferred
// flat `cfg` list, and the legacy `atom_cfgs` / `key_value_cfgs` fields
// kept for transition.
#[cfg(test)]
mod tests {
use super::*;
use serde_json::json;
#[test]
fn test_crate_deserialization() {
let raw_json = json!( {
"crate_id": 2,
"root_module": "this/is/a/file/path.rs",
"deps": [
{
"crate": 1,
"name": "some_dep_crate"
},
],
"edition": "2015",
"cfg": [
"atom_1",
"atom_2",
"feature=feature_1",
"feature=feature_2",
"other=value",
],
});
let krate: Crate = serde_json::from_value(raw_json).unwrap();
// Atoms and key=value entries all land verbatim in the `cfg` set.
assert!(krate.cfg.contains(&"atom_1".to_string()));
assert!(krate.cfg.contains(&"atom_2".to_string()));
assert!(krate.cfg.contains(&"feature=feature_1".to_string()));
assert!(krate.cfg.contains(&"feature=feature_2".to_string()));
assert!(krate.cfg.contains(&"other=value".to_string()));
}
#[test]
fn test_crate_deserialization_old_json() {
let raw_json = json!( {
"crate_id": 2,
"root_module": "this/is/a/file/path.rs",
"deps": [
{
"crate": 1,
"name": "some_dep_crate"
},
],
"edition": "2015",
"atom_cfgs": [
"atom_1",
"atom_2",
],
// NOTE: "feature" appears twice; the json! map keeps the later value,
// which is why only feature_2 is asserted below.
"key_value_cfgs": {
"feature": "feature_1",
"feature": "feature_2",
"other": "value",
},
});
let krate: Crate = serde_json::from_value(raw_json).unwrap();
assert!(krate.atom_cfgs.contains(&"atom_1".to_string()));
assert!(krate.atom_cfgs.contains(&"atom_2".to_string()));
assert!(krate.key_value_cfgs.contains_key(&"feature".to_string()));
assert_eq!(krate.key_value_cfgs.get("feature"), Some(&"feature_2".to_string()));
assert!(krate.key_value_cfgs.contains_key(&"other".to_string()));
assert_eq!(krate.key_value_cfgs.get("other"), Some(&"value".to_string()));
}
}

View file

@ -14,7 +14,7 @@ use std::{
use anyhow::{bail, Context, Result};
use ra_cfg::CfgOptions;
use ra_db::{CrateGraph, CrateName, Edition, Env, ExternSource, ExternSourceId, FileId};
use rustc_hash::FxHashMap;
use rustc_hash::{FxHashMap, FxHashSet};
use serde_json::from_reader;
pub use crate::{
@ -32,6 +32,12 @@ pub enum ProjectWorkspace {
Json { project: JsonProject },
}
/// Wraps a bare `JsonProject` (a parsed rust-project.json) as a full
/// `ProjectWorkspace::Json` workspace.
impl From<JsonProject> for ProjectWorkspace {
fn from(project: JsonProject) -> ProjectWorkspace {
ProjectWorkspace::Json { project }
}
}
/// `PackageRoot` describes a package root folder.
/// Which may be an external dependency, or a member of
/// the current workspace.
@ -57,25 +63,25 @@ impl PackageRoot {
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ProjectRoot {
#[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)]
pub enum ProjectManifest {
ProjectJson(PathBuf),
CargoToml(PathBuf),
}
impl ProjectRoot {
pub fn from_manifest_file(path: PathBuf) -> Result<ProjectRoot> {
impl ProjectManifest {
pub fn from_manifest_file(path: PathBuf) -> Result<ProjectManifest> {
if path.ends_with("rust-project.json") {
return Ok(ProjectRoot::ProjectJson(path));
return Ok(ProjectManifest::ProjectJson(path));
}
if path.ends_with("Cargo.toml") {
return Ok(ProjectRoot::CargoToml(path));
return Ok(ProjectManifest::CargoToml(path));
}
bail!("project root must point to Cargo.toml or rust-project.json: {}", path.display())
}
pub fn discover_single(path: &Path) -> Result<ProjectRoot> {
let mut candidates = ProjectRoot::discover(path)?;
pub fn discover_single(path: &Path) -> Result<ProjectManifest> {
let mut candidates = ProjectManifest::discover(path)?;
let res = match candidates.pop() {
None => bail!("no projects"),
Some(it) => it,
@ -87,12 +93,12 @@ impl ProjectRoot {
Ok(res)
}
pub fn discover(path: &Path) -> io::Result<Vec<ProjectRoot>> {
pub fn discover(path: &Path) -> io::Result<Vec<ProjectManifest>> {
if let Some(project_json) = find_in_parent_dirs(path, "rust-project.json") {
return Ok(vec![ProjectRoot::ProjectJson(project_json)]);
return Ok(vec![ProjectManifest::ProjectJson(project_json)]);
}
return find_cargo_toml(path)
.map(|paths| paths.into_iter().map(ProjectRoot::CargoToml).collect());
.map(|paths| paths.into_iter().map(ProjectManifest::CargoToml).collect());
fn find_cargo_toml(path: &Path) -> io::Result<Vec<PathBuf>> {
match find_in_parent_dirs(path, "Cargo.toml") {
@ -128,16 +134,28 @@ impl ProjectRoot {
.collect()
}
}
pub fn discover_all(paths: &[impl AsRef<Path>]) -> Vec<ProjectManifest> {
let mut res = paths
.iter()
.filter_map(|it| ProjectManifest::discover(it.as_ref()).ok())
.flatten()
.collect::<FxHashSet<_>>()
.into_iter()
.collect::<Vec<_>>();
res.sort();
res
}
}
impl ProjectWorkspace {
pub fn load(
root: ProjectRoot,
manifest: ProjectManifest,
cargo_features: &CargoConfig,
with_sysroot: bool,
) -> Result<ProjectWorkspace> {
let res = match root {
ProjectRoot::ProjectJson(project_json) => {
let res = match manifest {
ProjectManifest::ProjectJson(project_json) => {
let file = File::open(&project_json).with_context(|| {
format!("Failed to open json file {}", project_json.display())
})?;
@ -148,7 +166,7 @@ impl ProjectWorkspace {
})?,
}
}
ProjectRoot::CargoToml(cargo_toml) => {
ProjectManifest::CargoToml(cargo_toml) => {
let cargo = CargoWorkspace::from_cargo_metadata(&cargo_toml, cargo_features)
.with_context(|| {
format!(
@ -252,6 +270,16 @@ impl ProjectWorkspace {
};
let cfg_options = {
let mut opts = default_cfg_options.clone();
for cfg in &krate.cfg {
match cfg.find('=') {
None => opts.insert_atom(cfg.into()),
Some(pos) => {
let key = &cfg[..pos];
let value = cfg[pos + 1..].trim_matches('"');
opts.insert_key_value(key.into(), value.into());
}
}
}
for name in &krate.atom_cfgs {
opts.insert_atom(name.into());
}

View file

@ -83,13 +83,22 @@ pub trait DocCommentsOwner: AstNode {
CommentIter { iter: self.syntax().children_with_tokens() }
}
fn doc_comment_text(&self) -> Option<String> {
self.doc_comments().doc_comment_text()
}
}
impl CommentIter {
pub fn from_syntax_node(syntax_node: &ast::SyntaxNode) -> CommentIter {
CommentIter { iter: syntax_node.children_with_tokens() }
}
/// Returns the textual content of a doc comment block as a single string.
/// That is, strips leading `///` (+ optional 1 character of whitespace),
/// trailing `*/`, trailing whitespace and then joins the lines.
fn doc_comment_text(&self) -> Option<String> {
pub fn doc_comment_text(self) -> Option<String> {
let mut has_comments = false;
let docs = self
.doc_comments()
.filter(|comment| comment.kind().doc.is_some())
.map(|comment| {
has_comments = true;

View file

@ -4,9 +4,14 @@
mod args;
use lsp_server::Connection;
use rust_analyzer::{cli, config::Config, from_json, Result};
use rust_analyzer::{
cli,
config::{Config, LinkedProject},
from_json, Result,
};
use crate::args::HelpPrinted;
use ra_project_model::ProjectManifest;
fn main() -> Result<()> {
setup_logging()?;
@ -97,17 +102,6 @@ fn run_server() -> Result<()> {
log::info!("Client '{}' {}", client_info.name, client_info.version.unwrap_or_default());
}
let cwd = std::env::current_dir()?;
let root = initialize_params.root_uri.and_then(|it| it.to_file_path().ok()).unwrap_or(cwd);
let workspace_roots = initialize_params
.workspace_folders
.map(|workspaces| {
workspaces.into_iter().filter_map(|it| it.uri.to_file_path().ok()).collect::<Vec<_>>()
})
.filter(|workspaces| !workspaces.is_empty())
.unwrap_or_else(|| vec![root]);
let config = {
let mut config = Config::default();
if let Some(value) = &initialize_params.initialization_options {
@ -115,10 +109,31 @@ fn run_server() -> Result<()> {
}
config.update_caps(&initialize_params.capabilities);
if config.linked_projects.is_empty() {
let cwd = std::env::current_dir()?;
let root =
initialize_params.root_uri.and_then(|it| it.to_file_path().ok()).unwrap_or(cwd);
let workspace_roots = initialize_params
.workspace_folders
.map(|workspaces| {
workspaces
.into_iter()
.filter_map(|it| it.uri.to_file_path().ok())
.collect::<Vec<_>>()
})
.filter(|workspaces| !workspaces.is_empty())
.unwrap_or_else(|| vec![root]);
config.linked_projects = ProjectManifest::discover_all(&workspace_roots)
.into_iter()
.map(LinkedProject::from)
.collect();
}
config
};
rust_analyzer::main_loop(workspace_roots, config, connection)?;
rust_analyzer::main_loop(config, connection)?;
log::info!("shutting down IO...");
io_threads.join()?;

View file

@ -4,7 +4,7 @@ use ra_cfg::CfgExpr;
use ra_ide::{FileId, RunnableKind, TestId};
use ra_project_model::{self, ProjectWorkspace, TargetKind};
use crate::{world::WorldSnapshot, Result};
use crate::{global_state::GlobalStateSnapshot, Result};
/// Abstract representation of Cargo target.
///
@ -89,7 +89,7 @@ impl CargoTargetSpec {
}
pub(crate) fn for_file(
world: &WorldSnapshot,
world: &GlobalStateSnapshot,
file_id: FileId,
) -> Result<Option<CargoTargetSpec>> {
let &crate_id = match world.analysis().crate_for(file_id)?.first() {

View file

@ -8,7 +8,8 @@ use crossbeam_channel::{unbounded, Receiver};
use ra_db::{ExternSourceId, FileId, SourceRootId};
use ra_ide::{AnalysisChange, AnalysisHost};
use ra_project_model::{
get_rustc_cfg_options, CargoConfig, PackageRoot, ProcMacroClient, ProjectRoot, ProjectWorkspace,
get_rustc_cfg_options, CargoConfig, PackageRoot, ProcMacroClient, ProjectManifest,
ProjectWorkspace,
};
use ra_vfs::{RootEntry, Vfs, VfsChange, VfsTask, Watch};
use rustc_hash::{FxHashMap, FxHashSet};
@ -28,7 +29,7 @@ pub fn load_cargo(
with_proc_macro: bool,
) -> Result<(AnalysisHost, FxHashMap<SourceRootId, PackageRoot>)> {
let root = std::env::current_dir()?.join(root);
let root = ProjectRoot::discover_single(&root)?;
let root = ProjectManifest::discover_single(&root)?;
let ws = ProjectWorkspace::load(
root,
&CargoConfig { load_out_dirs_from_check, ..Default::default() },

View file

@ -12,14 +12,13 @@ use std::{ffi::OsString, path::PathBuf};
use lsp_types::ClientCapabilities;
use ra_flycheck::FlycheckConfig;
use ra_ide::{AssistConfig, CompletionConfig, InlayHintsConfig};
use ra_project_model::CargoConfig;
use ra_project_model::{CargoConfig, JsonProject, ProjectManifest};
use serde::Deserialize;
#[derive(Debug, Clone)]
pub struct Config {
pub client_caps: ClientCapsConfig,
pub with_sysroot: bool,
pub publish_diagnostics: bool,
pub lru_capacity: Option<usize>,
pub proc_macro_srv: Option<(PathBuf, Vec<OsString>)>,
@ -35,6 +34,27 @@ pub struct Config {
pub assist: AssistConfig,
pub call_info_full: bool,
pub lens: LensConfig,
pub with_sysroot: bool,
pub linked_projects: Vec<LinkedProject>,
}
/// A project the server should load: either a manifest file located on disk
/// (a `Cargo.toml` or `rust-project.json` path, as `ProjectManifest`) or an
/// inline JSON project description supplied directly via configuration.
#[derive(Debug, Clone)]
pub enum LinkedProject {
ProjectManifest(ProjectManifest),
JsonProject(JsonProject),
}
impl From<ProjectManifest> for LinkedProject {
fn from(v: ProjectManifest) -> Self {
LinkedProject::ProjectManifest(v)
}
}
impl From<JsonProject> for LinkedProject {
fn from(v: JsonProject) -> Self {
LinkedProject::JsonProject(v)
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
@ -142,6 +162,7 @@ impl Default for Config {
assist: AssistConfig::default(),
call_info_full: true,
lens: LensConfig::default(),
linked_projects: Vec::new(),
}
}
}
@ -241,6 +262,22 @@ impl Config {
self.lens = LensConfig::NO_LENS;
}
if let Some(linked_projects) = get::<Vec<ManifestOrJsonProject>>(value, "/linkedProjects") {
if !linked_projects.is_empty() {
self.linked_projects.clear();
for linked_project in linked_projects {
let linked_project = match linked_project {
ManifestOrJsonProject::Manifest(it) => match ProjectManifest::from_manifest_file(it) {
Ok(it) => it.into(),
Err(_) => continue,
}
ManifestOrJsonProject::JsonProject(it) => it.into(),
};
self.linked_projects.push(linked_project);
}
}
}
log::info!("Config::update() = {:#?}", self);
fn get<'a, T: Deserialize<'a>>(value: &'a serde_json::Value, pointer: &str) -> Option<T> {
@ -308,3 +345,10 @@ impl Config {
}
}
}
/// Wire format of one entry in the `linkedProjects` client setting: either a
/// path to a manifest file or a full inline `rust-project.json` object.
/// `#[serde(untagged)]` makes serde pick the variant from the JSON shape.
#[derive(Deserialize)]
#[serde(untagged)]
enum ManifestOrJsonProject {
Manifest(PathBuf),
JsonProject(JsonProject),
}

View file

@ -29,7 +29,7 @@ expression: diag
},
},
severity: Some(
Warning,
Hint,
),
code: Some(
String(

View file

@ -184,7 +184,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp(
return Vec::new();
}
let severity = map_level_to_severity(rd.level);
let mut severity = map_level_to_severity(rd.level);
let mut source = String::from("rustc");
let mut code = rd.code.as_ref().map(|c| c.code.clone());
@ -226,6 +226,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp(
}
if is_unused_or_unnecessary(rd) {
severity = Some(DiagnosticSeverity::Hint);
tags.push(DiagnosticTag::Unnecessary);
}

View file

@ -3,7 +3,7 @@ use ra_db::{FileId, FilePosition, FileRange};
use ra_ide::{LineCol, LineIndex};
use ra_syntax::{TextRange, TextSize};
use crate::{world::WorldSnapshot, Result};
use crate::{global_state::GlobalStateSnapshot, Result};
pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> TextSize {
let line_col = LineCol { line: position.line as u32, col_utf16: position.character as u32 };
@ -16,12 +16,12 @@ pub(crate) fn text_range(line_index: &LineIndex, range: lsp_types::Range) -> Tex
TextRange::new(start, end)
}
pub(crate) fn file_id(world: &WorldSnapshot, url: &lsp_types::Url) -> Result<FileId> {
pub(crate) fn file_id(world: &GlobalStateSnapshot, url: &lsp_types::Url) -> Result<FileId> {
world.uri_to_file_id(url)
}
pub(crate) fn file_position(
world: &WorldSnapshot,
world: &GlobalStateSnapshot,
tdpp: lsp_types::TextDocumentPositionParams,
) -> Result<FilePosition> {
let file_id = file_id(world, &tdpp.text_document.uri)?;
@ -31,7 +31,7 @@ pub(crate) fn file_position(
}
pub(crate) fn file_range(
world: &WorldSnapshot,
world: &GlobalStateSnapshot,
text_document_identifier: lsp_types::TextDocumentIdentifier,
range: lsp_types::Range,
) -> Result<FileRange> {

View file

@ -50,15 +50,15 @@ fn create_flycheck(workspaces: &[ProjectWorkspace], config: &FlycheckConfig) ->
})
}
/// `WorldState` is the primary mutable state of the language server
/// `GlobalState` is the primary mutable state of the language server
///
/// The most interesting components are `vfs`, which stores a consistent
/// snapshot of the file systems, and `analysis_host`, which stores our
/// incremental salsa database.
#[derive(Debug)]
pub struct WorldState {
pub struct GlobalState {
pub config: Config,
pub roots: Vec<PathBuf>,
pub local_roots: Vec<PathBuf>,
pub workspaces: Arc<Vec<ProjectWorkspace>>,
pub analysis_host: AnalysisHost,
pub vfs: Arc<RwLock<Vfs>>,
@ -70,7 +70,7 @@ pub struct WorldState {
}
/// An immutable snapshot of the world's state at a point in time.
pub struct WorldSnapshot {
pub struct GlobalStateSnapshot {
pub config: Config,
pub workspaces: Arc<Vec<ProjectWorkspace>>,
pub analysis: Analysis,
@ -79,20 +79,20 @@ pub struct WorldSnapshot {
vfs: Arc<RwLock<Vfs>>,
}
impl WorldState {
impl GlobalState {
pub fn new(
folder_roots: Vec<PathBuf>,
workspaces: Vec<ProjectWorkspace>,
lru_capacity: Option<usize>,
exclude_globs: &[Glob],
watch: Watch,
config: Config,
) -> WorldState {
) -> GlobalState {
let mut change = AnalysisChange::new();
let extern_dirs: FxHashSet<_> =
workspaces.iter().flat_map(ProjectWorkspace::out_dirs).collect();
let mut local_roots = Vec::new();
let roots: Vec<_> = {
let create_filter = |is_member| {
RustPackageFilterBuilder::default()
@ -100,12 +100,16 @@ impl WorldState {
.exclude(exclude_globs.iter().cloned())
.into_vfs_filter()
};
folder_roots
workspaces
.iter()
.map(|path| RootEntry::new(path.clone(), create_filter(true)))
.chain(workspaces.iter().flat_map(ProjectWorkspace::to_roots).map(|pkg_root| {
RootEntry::new(pkg_root.path().to_owned(), create_filter(pkg_root.is_member()))
}))
.flat_map(ProjectWorkspace::to_roots)
.map(|pkg_root| {
let path = pkg_root.path().to_owned();
if pkg_root.is_member() {
local_roots.push(path.clone());
}
RootEntry::new(path, create_filter(pkg_root.is_member()))
})
.chain(
extern_dirs
.iter()
@ -121,7 +125,7 @@ impl WorldState {
let mut extern_source_roots = FxHashMap::default();
for r in vfs_roots {
let vfs_root_path = vfs.root2path(r);
let is_local = folder_roots.iter().any(|it| vfs_root_path.starts_with(it));
let is_local = local_roots.iter().any(|it| vfs_root_path.starts_with(it));
change.add_root(SourceRootId(r.0), is_local);
change.set_debug_root_path(SourceRootId(r.0), vfs_root_path.display().to_string());
@ -176,9 +180,9 @@ impl WorldState {
let mut analysis_host = AnalysisHost::new(lru_capacity);
analysis_host.apply_change(change);
WorldState {
GlobalState {
config,
roots: folder_roots,
local_roots,
workspaces: Arc::new(workspaces),
analysis_host,
vfs: Arc::new(RwLock::new(vfs)),
@ -216,7 +220,7 @@ impl WorldState {
match c {
VfsChange::AddRoot { root, files } => {
let root_path = self.vfs.read().root2path(root);
let is_local = self.roots.iter().any(|r| root_path.starts_with(r));
let is_local = self.local_roots.iter().any(|r| root_path.starts_with(r));
if is_local {
*roots_scanned += 1;
for (file, path, text) in files {
@ -251,8 +255,8 @@ impl WorldState {
self.analysis_host.apply_change(change);
}
pub fn snapshot(&self) -> WorldSnapshot {
WorldSnapshot {
pub fn snapshot(&self) -> GlobalStateSnapshot {
GlobalStateSnapshot {
config: self.config.clone(),
workspaces: Arc::clone(&self.workspaces),
analysis: self.analysis_host.analysis(),
@ -275,7 +279,7 @@ impl WorldState {
}
}
impl WorldSnapshot {
impl GlobalStateSnapshot {
pub fn analysis(&self) -> &Analysis {
&self.analysis
}

View file

@ -26,7 +26,7 @@ mod main_loop;
mod markdown;
pub mod lsp_ext;
pub mod config;
mod world;
mod global_state;
mod diagnostics;
mod semantic_tokens;

View file

@ -12,13 +12,11 @@ use std::{
fmt,
ops::Range,
panic,
path::PathBuf,
sync::Arc,
time::{Duration, Instant},
};
use crossbeam_channel::{never, select, unbounded, RecvError, Sender};
use itertools::Itertools;
use lsp_server::{Connection, ErrorCode, Message, Notification, Request, RequestId, Response};
use lsp_types::{
DidChangeTextDocumentParams, NumberOrString, TextDocumentContentChangeEvent, WorkDoneProgress,
@ -36,14 +34,15 @@ use serde::{de::DeserializeOwned, Serialize};
use threadpool::ThreadPool;
use crate::{
config::{Config, FilesWatcher},
config::{Config, FilesWatcher, LinkedProject},
diagnostics::{to_proto::url_from_path_with_drive_lowercasing, DiagnosticTask},
from_proto, lsp_ext,
from_proto,
global_state::{GlobalState, GlobalStateSnapshot},
lsp_ext,
main_loop::{
pending_requests::{PendingRequest, PendingRequests},
subscriptions::Subscriptions,
},
world::{WorldSnapshot, WorldState},
Result,
};
@ -69,7 +68,7 @@ impl fmt::Display for LspError {
impl Error for LspError {}
pub fn main_loop(ws_roots: Vec<PathBuf>, config: Config, connection: Connection) -> Result<()> {
pub fn main_loop(config: Config, connection: Connection) -> Result<()> {
log::info!("initial config: {:#?}", config);
// Windows scheduler implements priority boosts: if thread waits for an
@ -92,43 +91,37 @@ pub fn main_loop(ws_roots: Vec<PathBuf>, config: Config, connection: Connection)
}
let mut loop_state = LoopState::default();
let mut world_state = {
let mut global_state = {
let workspaces = {
// FIXME: support dynamic workspace loading.
let project_roots: FxHashSet<_> = ws_roots
.iter()
.filter_map(|it| ra_project_model::ProjectRoot::discover(it).ok())
.flatten()
.collect();
if project_roots.is_empty() && config.notifications.cargo_toml_not_found {
if config.linked_projects.is_empty() && config.notifications.cargo_toml_not_found {
show_message(
lsp_types::MessageType::Error,
format!(
"rust-analyzer failed to discover workspace, no Cargo.toml found, dirs searched: {}",
ws_roots.iter().format_with(", ", |it, f| f(&it.display()))
),
"rust-analyzer failed to discover workspace".to_string(),
&connection.sender,
);
};
project_roots
.into_iter()
.filter_map(|root| {
ra_project_model::ProjectWorkspace::load(
root,
&config.cargo,
config.with_sysroot,
)
.map_err(|err| {
log::error!("failed to load workspace: {:#}", err);
show_message(
lsp_types::MessageType::Error,
format!("rust-analyzer failed to load workspace: {:#}", err),
&connection.sender,
);
})
.ok()
config
.linked_projects
.iter()
.filter_map(|project| match project {
LinkedProject::ProjectManifest(manifest) => {
ra_project_model::ProjectWorkspace::load(
manifest.clone(),
&config.cargo,
config.with_sysroot,
)
.map_err(|err| {
log::error!("failed to load workspace: {:#}", err);
show_message(
lsp_types::MessageType::Error,
format!("rust-analyzer failed to load workspace: {:#}", err),
&connection.sender,
);
})
.ok()
}
LinkedProject::JsonProject(it) => Some(it.clone().into()),
})
.collect::<Vec<_>>()
};
@ -163,8 +156,7 @@ pub fn main_loop(ws_roots: Vec<PathBuf>, config: Config, connection: Connection)
connection.sender.send(request.into()).unwrap();
}
WorldState::new(
ws_roots,
GlobalState::new(
workspaces,
config.lru_capacity,
&globs,
@ -173,7 +165,7 @@ pub fn main_loop(ws_roots: Vec<PathBuf>, config: Config, connection: Connection)
)
};
loop_state.roots_total = world_state.vfs.read().n_roots();
loop_state.roots_total = global_state.vfs.read().n_roots();
let pool = ThreadPool::default();
let (task_sender, task_receiver) = unbounded::<Task>();
@ -191,12 +183,12 @@ pub fn main_loop(ws_roots: Vec<PathBuf>, config: Config, connection: Connection)
Err(RecvError) => return Err("client exited without shutdown".into()),
},
recv(task_receiver) -> task => Event::Task(task.unwrap()),
recv(world_state.task_receiver) -> task => match task {
recv(global_state.task_receiver) -> task => match task {
Ok(task) => Event::Vfs(task),
Err(RecvError) => return Err("vfs died".into()),
},
recv(libdata_receiver) -> data => Event::Lib(data.unwrap()),
recv(world_state.flycheck.as_ref().map_or(&never(), |it| &it.task_recv)) -> task => match task {
recv(global_state.flycheck.as_ref().map_or(&never(), |it| &it.task_recv)) -> task => match task {
Ok(task) => Event::CheckWatcher(task),
Err(RecvError) => return Err("check watcher died".into()),
}
@ -211,16 +203,16 @@ pub fn main_loop(ws_roots: Vec<PathBuf>, config: Config, connection: Connection)
&task_sender,
&libdata_sender,
&connection,
&mut world_state,
&mut global_state,
&mut loop_state,
event,
)?;
}
}
world_state.analysis_host.request_cancellation();
global_state.analysis_host.request_cancellation();
log::info!("waiting for tasks to finish...");
task_receiver.into_iter().for_each(|task| {
on_task(task, &connection.sender, &mut loop_state.pending_requests, &mut world_state)
on_task(task, &connection.sender, &mut loop_state.pending_requests, &mut global_state)
});
libdata_receiver.into_iter().for_each(drop);
log::info!("...tasks have finished");
@ -229,7 +221,7 @@ pub fn main_loop(ws_roots: Vec<PathBuf>, config: Config, connection: Connection)
drop(pool);
log::info!("...threadpool has finished");
let vfs = Arc::try_unwrap(world_state.vfs).expect("all snapshots should be dead");
let vfs = Arc::try_unwrap(global_state.vfs).expect("all snapshots should be dead");
drop(vfs);
Ok(())
@ -320,7 +312,7 @@ fn loop_turn(
task_sender: &Sender<Task>,
libdata_sender: &Sender<LibraryData>,
connection: &Connection,
world_state: &mut WorldState,
global_state: &mut GlobalState,
loop_state: &mut LoopState,
event: Event,
) -> Result<()> {
@ -336,22 +328,22 @@ fn loop_turn(
match event {
Event::Task(task) => {
on_task(task, &connection.sender, &mut loop_state.pending_requests, world_state);
world_state.maybe_collect_garbage();
on_task(task, &connection.sender, &mut loop_state.pending_requests, global_state);
global_state.maybe_collect_garbage();
}
Event::Vfs(task) => {
world_state.vfs.write().handle_task(task);
global_state.vfs.write().handle_task(task);
}
Event::Lib(lib) => {
world_state.add_lib(lib);
world_state.maybe_collect_garbage();
global_state.add_lib(lib);
global_state.maybe_collect_garbage();
loop_state.in_flight_libraries -= 1;
loop_state.roots_scanned += 1;
}
Event::CheckWatcher(task) => on_check_task(task, world_state, task_sender)?,
Event::CheckWatcher(task) => on_check_task(task, global_state, task_sender)?,
Event::Msg(msg) => match msg {
Message::Request(req) => on_request(
world_state,
global_state,
&mut loop_state.pending_requests,
pool,
task_sender,
@ -360,7 +352,7 @@ fn loop_turn(
req,
)?,
Message::Notification(not) => {
on_notification(&connection.sender, world_state, loop_state, not)?;
on_notification(&connection.sender, global_state, loop_state, not)?;
}
Message::Response(resp) => {
let removed = loop_state.pending_responses.remove(&resp.id);
@ -379,9 +371,9 @@ fn loop_turn(
}
(None, Some(configs)) => {
if let Some(new_config) = configs.get(0) {
let mut config = world_state.config.clone();
let mut config = global_state.config.clone();
config.update(&new_config);
world_state.update_configuration(config);
global_state.update_configuration(config);
}
}
(None, None) => {
@ -394,7 +386,7 @@ fn loop_turn(
};
let mut state_changed = false;
if let Some(changes) = world_state.process_changes(&mut loop_state.roots_scanned) {
if let Some(changes) = global_state.process_changes(&mut loop_state.roots_scanned) {
state_changed = true;
loop_state.pending_libraries.extend(changes);
}
@ -416,7 +408,7 @@ fn loop_turn(
}
let show_progress =
!loop_state.workspace_loaded && world_state.config.client_caps.work_done_progress;
!loop_state.workspace_loaded && global_state.config.client_caps.work_done_progress;
if !loop_state.workspace_loaded
&& loop_state.roots_scanned == loop_state.roots_total
@ -425,7 +417,7 @@ fn loop_turn(
{
state_changed = true;
loop_state.workspace_loaded = true;
if let Some(flycheck) = &world_state.flycheck {
if let Some(flycheck) = &global_state.flycheck {
flycheck.update();
}
}
@ -437,13 +429,13 @@ fn loop_turn(
if state_changed && loop_state.workspace_loaded {
update_file_notifications_on_threadpool(
pool,
world_state.snapshot(),
global_state.snapshot(),
task_sender.clone(),
loop_state.subscriptions.subscriptions(),
);
pool.execute({
let subs = loop_state.subscriptions.subscriptions();
let snap = world_state.snapshot();
let snap = global_state.snapshot();
move || snap.analysis().prime_caches(subs).unwrap_or_else(|_: Canceled| ())
});
}
@ -467,7 +459,7 @@ fn on_task(
task: Task,
msg_sender: &Sender<Message>,
pending_requests: &mut PendingRequests,
state: &mut WorldState,
state: &mut GlobalState,
) {
match task {
Task::Respond(response) => {
@ -485,7 +477,7 @@ fn on_task(
}
fn on_request(
world: &mut WorldState,
global_state: &mut GlobalState,
pending_requests: &mut PendingRequests,
pool: &ThreadPool,
task_sender: &Sender<Task>,
@ -496,7 +488,7 @@ fn on_request(
let mut pool_dispatcher = PoolDispatcher {
req: Some(req),
pool,
world,
global_state,
task_sender,
msg_sender,
pending_requests,
@ -553,7 +545,7 @@ fn on_request(
fn on_notification(
msg_sender: &Sender<Message>,
state: &mut WorldState,
state: &mut GlobalState,
loop_state: &mut LoopState,
not: Notification,
) -> Result<()> {
@ -727,7 +719,7 @@ fn apply_document_changes(
fn on_check_task(
task: CheckTask,
world_state: &mut WorldState,
global_state: &mut GlobalState,
task_sender: &Sender<Task>,
) -> Result<()> {
match task {
@ -746,7 +738,7 @@ fn on_check_task(
.uri
.to_file_path()
.map_err(|()| format!("invalid uri: {}", diag.location.uri))?;
let file_id = match world_state.vfs.read().path2file(&path) {
let file_id = match global_state.vfs.read().path2file(&path) {
Some(file) => FileId(file.0),
None => {
log::error!(
@ -766,7 +758,7 @@ fn on_check_task(
}
CheckTask::Status(status) => {
if world_state.config.client_caps.work_done_progress {
if global_state.config.client_caps.work_done_progress {
let progress = match status {
Status::Being => {
lsp_types::WorkDoneProgress::Begin(lsp_types::WorkDoneProgressBegin {
@ -805,7 +797,7 @@ fn on_check_task(
Ok(())
}
fn on_diagnostic_task(task: DiagnosticTask, msg_sender: &Sender<Message>, state: &mut WorldState) {
fn on_diagnostic_task(task: DiagnosticTask, msg_sender: &Sender<Message>, state: &mut GlobalState) {
let subscriptions = state.diagnostics.handle_task(task);
for file_id in subscriptions {
@ -880,7 +872,7 @@ fn send_startup_progress(sender: &Sender<Message>, loop_state: &mut LoopState) {
struct PoolDispatcher<'a> {
req: Option<Request>,
pool: &'a ThreadPool,
world: &'a mut WorldState,
global_state: &'a mut GlobalState,
pending_requests: &'a mut PendingRequests,
msg_sender: &'a Sender<Message>,
task_sender: &'a Sender<Task>,
@ -891,7 +883,7 @@ impl<'a> PoolDispatcher<'a> {
/// Dispatches the request onto the current thread
fn on_sync<R>(
&mut self,
f: fn(&mut WorldState, R::Params) -> Result<R::Result>,
f: fn(&mut GlobalState, R::Params) -> Result<R::Result>,
) -> Result<&mut Self>
where
R: lsp_types::request::Request + 'static,
@ -904,18 +896,21 @@ impl<'a> PoolDispatcher<'a> {
return Ok(self);
}
};
let world = panic::AssertUnwindSafe(&mut *self.world);
let world = panic::AssertUnwindSafe(&mut *self.global_state);
let task = panic::catch_unwind(move || {
let result = f(world.0, params);
result_to_task::<R>(id, result)
})
.map_err(|_| format!("sync task {:?} panicked", R::METHOD))?;
on_task(task, self.msg_sender, self.pending_requests, self.world);
on_task(task, self.msg_sender, self.pending_requests, self.global_state);
Ok(self)
}
/// Dispatches the request onto thread pool
fn on<R>(&mut self, f: fn(WorldSnapshot, R::Params) -> Result<R::Result>) -> Result<&mut Self>
fn on<R>(
&mut self,
f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
) -> Result<&mut Self>
where
R: lsp_types::request::Request + 'static,
R::Params: DeserializeOwned + Send + 'static,
@ -929,7 +924,7 @@ impl<'a> PoolDispatcher<'a> {
};
self.pool.execute({
let world = self.world.snapshot();
let world = self.global_state.snapshot();
let sender = self.task_sender.clone();
move || {
let result = f(world, params);
@ -1013,7 +1008,7 @@ where
fn update_file_notifications_on_threadpool(
pool: &ThreadPool,
world: WorldSnapshot,
world: GlobalStateSnapshot,
task_sender: Sender<Task>,
subscriptions: Vec<FileId>,
) {

View file

@ -32,17 +32,16 @@ use crate::{
config::RustfmtConfig,
diagnostics::DiagnosticTask,
from_json, from_proto,
global_state::GlobalStateSnapshot,
lsp_ext::{self, InlayHint, InlayHintsParams},
to_proto,
world::WorldSnapshot,
LspError, Result,
to_proto, LspError, Result,
};
pub fn handle_analyzer_status(world: WorldSnapshot, _: ()) -> Result<String> {
pub fn handle_analyzer_status(snap: GlobalStateSnapshot, _: ()) -> Result<String> {
let _p = profile("handle_analyzer_status");
let mut buf = world.status();
let mut buf = snap.status();
format_to!(buf, "\n\nrequests:\n");
let requests = world.latest_requests.read();
let requests = snap.latest_requests.read();
for (is_last, r) in requests.iter() {
let mark = if is_last { "*" } else { " " };
format_to!(buf, "{}{:4} {:<36}{}ms\n", mark, r.id, r.method, r.duration.as_millis());
@ -51,37 +50,37 @@ pub fn handle_analyzer_status(world: WorldSnapshot, _: ()) -> Result<String> {
}
pub fn handle_syntax_tree(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: lsp_ext::SyntaxTreeParams,
) -> Result<String> {
let _p = profile("handle_syntax_tree");
let id = from_proto::file_id(&world, &params.text_document.uri)?;
let line_index = world.analysis().file_line_index(id)?;
let id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis().file_line_index(id)?;
let text_range = params.range.map(|r| from_proto::text_range(&line_index, r));
let res = world.analysis().syntax_tree(id, text_range)?;
let res = snap.analysis().syntax_tree(id, text_range)?;
Ok(res)
}
pub fn handle_expand_macro(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: lsp_ext::ExpandMacroParams,
) -> Result<Option<lsp_ext::ExpandedMacro>> {
let _p = profile("handle_expand_macro");
let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let line_index = world.analysis().file_line_index(file_id)?;
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis().file_line_index(file_id)?;
let offset = from_proto::offset(&line_index, params.position);
let res = world.analysis().expand_macro(FilePosition { file_id, offset })?;
let res = snap.analysis().expand_macro(FilePosition { file_id, offset })?;
Ok(res.map(|it| lsp_ext::ExpandedMacro { name: it.name, expansion: it.expansion }))
}
pub fn handle_selection_range(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: lsp_types::SelectionRangeParams,
) -> Result<Option<Vec<lsp_types::SelectionRange>>> {
let _p = profile("handle_selection_range");
let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let line_index = world.analysis().file_line_index(file_id)?;
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis().file_line_index(file_id)?;
let res: Result<Vec<lsp_types::SelectionRange>> = params
.positions
.into_iter()
@ -93,7 +92,7 @@ pub fn handle_selection_range(
loop {
ranges.push(range);
let frange = FileRange { file_id, range };
let next = world.analysis().extend_selection(frange)?;
let next = snap.analysis().extend_selection(frange)?;
if next == range {
break;
} else {
@ -119,18 +118,18 @@ pub fn handle_selection_range(
}
pub fn handle_matching_brace(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: lsp_ext::MatchingBraceParams,
) -> Result<Vec<Position>> {
let _p = profile("handle_matching_brace");
let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let line_index = world.analysis().file_line_index(file_id)?;
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis().file_line_index(file_id)?;
let res = params
.positions
.into_iter()
.map(|position| {
let offset = from_proto::offset(&line_index, position);
let offset = match world.analysis().matching_brace(FilePosition { file_id, offset }) {
let offset = match snap.analysis().matching_brace(FilePosition { file_id, offset }) {
Ok(Some(matching_brace_offset)) => matching_brace_offset,
Err(_) | Ok(None) => offset,
};
@ -141,17 +140,17 @@ pub fn handle_matching_brace(
}
pub fn handle_join_lines(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: lsp_ext::JoinLinesParams,
) -> Result<Vec<lsp_types::TextEdit>> {
let _p = profile("handle_join_lines");
let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let line_index = world.analysis().file_line_index(file_id)?;
let line_endings = world.file_line_endings(file_id);
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis().file_line_index(file_id)?;
let line_endings = snap.file_line_endings(file_id);
let mut res = TextEdit::default();
for range in params.ranges {
let range = from_proto::text_range(&line_index, range);
let edit = world.analysis().join_lines(FileRange { file_id, range })?;
let edit = snap.analysis().join_lines(FileRange { file_id, range })?;
match res.union(edit) {
Ok(()) => (),
Err(_edit) => {
@ -164,37 +163,37 @@ pub fn handle_join_lines(
}
pub fn handle_on_enter(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
let _p = profile("handle_on_enter");
let position = from_proto::file_position(&world, params)?;
let edit = match world.analysis().on_enter(position)? {
let position = from_proto::file_position(&snap, params)?;
let edit = match snap.analysis().on_enter(position)? {
None => return Ok(None),
Some(it) => it,
};
let line_index = world.analysis().file_line_index(position.file_id)?;
let line_endings = world.file_line_endings(position.file_id);
let line_index = snap.analysis().file_line_index(position.file_id)?;
let line_endings = snap.file_line_endings(position.file_id);
let edit = to_proto::snippet_text_edit_vec(&line_index, line_endings, true, edit);
Ok(Some(edit))
}
// Don't forget to add new trigger characters to `ServerCapabilities` in `caps.rs`.
pub fn handle_on_type_formatting(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: lsp_types::DocumentOnTypeFormattingParams,
) -> Result<Option<Vec<lsp_types::TextEdit>>> {
let _p = profile("handle_on_type_formatting");
let mut position = from_proto::file_position(&world, params.text_document_position)?;
let line_index = world.analysis().file_line_index(position.file_id)?;
let line_endings = world.file_line_endings(position.file_id);
let mut position = from_proto::file_position(&snap, params.text_document_position)?;
let line_index = snap.analysis().file_line_index(position.file_id)?;
let line_endings = snap.file_line_endings(position.file_id);
// in `ra_ide`, the `on_type` invariant is that
// `text.char_at(position) == typed_char`.
position.offset -= TextSize::of('.');
let char_typed = params.ch.chars().next().unwrap_or('\0');
assert!({
let text = world.analysis().file_text(position.file_id)?;
let text = snap.analysis().file_text(position.file_id)?;
text[usize::from(position.offset)..].starts_with(char_typed)
});
@ -206,7 +205,7 @@ pub fn handle_on_type_formatting(
return Ok(None);
}
let edit = world.analysis().on_char_typed(position, char_typed)?;
let edit = snap.analysis().on_char_typed(position, char_typed)?;
let mut edit = match edit {
Some(it) => it,
None => return Ok(None),
@ -220,16 +219,16 @@ pub fn handle_on_type_formatting(
}
pub fn handle_document_symbol(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: lsp_types::DocumentSymbolParams,
) -> Result<Option<lsp_types::DocumentSymbolResponse>> {
let _p = profile("handle_document_symbol");
let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let line_index = world.analysis().file_line_index(file_id)?;
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis().file_line_index(file_id)?;
let mut parents: Vec<(DocumentSymbol, Option<usize>)> = Vec::new();
for symbol in world.analysis().file_structure(file_id)? {
for symbol in snap.analysis().file_structure(file_id)? {
let doc_symbol = DocumentSymbol {
name: symbol.label,
detail: symbol.detail,
@ -255,10 +254,10 @@ pub fn handle_document_symbol(
}
}
let res = if world.config.client_caps.hierarchical_symbols {
let res = if snap.config.client_caps.hierarchical_symbols {
document_symbols.into()
} else {
let url = to_proto::url(&world, file_id)?;
let url = to_proto::url(&snap, file_id)?;
let mut symbol_information = Vec::<SymbolInformation>::new();
for symbol in document_symbols {
flatten_document_symbol(&symbol, None, &url, &mut symbol_information);
@ -288,7 +287,7 @@ pub fn handle_document_symbol(
}
pub fn handle_workspace_symbol(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: lsp_types::WorkspaceSymbolParams,
) -> Result<Option<Vec<SymbolInformation>>> {
let _p = profile("handle_workspace_symbol");
@ -306,22 +305,22 @@ pub fn handle_workspace_symbol(
q.limit(128);
q
};
let mut res = exec_query(&world, query)?;
let mut res = exec_query(&snap, query)?;
if res.is_empty() && !all_symbols {
let mut query = Query::new(params.query);
query.limit(128);
res = exec_query(&world, query)?;
res = exec_query(&snap, query)?;
}
return Ok(Some(res));
fn exec_query(world: &WorldSnapshot, query: Query) -> Result<Vec<SymbolInformation>> {
fn exec_query(snap: &GlobalStateSnapshot, query: Query) -> Result<Vec<SymbolInformation>> {
let mut res = Vec::new();
for nav in world.analysis().symbol_search(query)? {
for nav in snap.analysis().symbol_search(query)? {
let info = SymbolInformation {
name: nav.name().to_string(),
kind: to_proto::symbol_kind(nav.kind()),
location: to_proto::location(world, nav.file_range())?,
location: to_proto::location(snap, nav.file_range())?,
container_name: nav.container_name().map(|v| v.to_string()),
deprecated: None,
};
@ -332,73 +331,73 @@ pub fn handle_workspace_symbol(
}
pub fn handle_goto_definition(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: lsp_types::GotoDefinitionParams,
) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
let _p = profile("handle_goto_definition");
let position = from_proto::file_position(&world, params.text_document_position_params)?;
let nav_info = match world.analysis().goto_definition(position)? {
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let nav_info = match snap.analysis().goto_definition(position)? {
None => return Ok(None),
Some(it) => it,
};
let src = FileRange { file_id: position.file_id, range: nav_info.range };
let res = to_proto::goto_definition_response(&world, Some(src), nav_info.info)?;
let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?;
Ok(Some(res))
}
pub fn handle_goto_implementation(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: lsp_types::request::GotoImplementationParams,
) -> Result<Option<lsp_types::request::GotoImplementationResponse>> {
let _p = profile("handle_goto_implementation");
let position = from_proto::file_position(&world, params.text_document_position_params)?;
let nav_info = match world.analysis().goto_implementation(position)? {
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let nav_info = match snap.analysis().goto_implementation(position)? {
None => return Ok(None),
Some(it) => it,
};
let src = FileRange { file_id: position.file_id, range: nav_info.range };
let res = to_proto::goto_definition_response(&world, Some(src), nav_info.info)?;
let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?;
Ok(Some(res))
}
pub fn handle_goto_type_definition(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: lsp_types::request::GotoTypeDefinitionParams,
) -> Result<Option<lsp_types::request::GotoTypeDefinitionResponse>> {
let _p = profile("handle_goto_type_definition");
let position = from_proto::file_position(&world, params.text_document_position_params)?;
let nav_info = match world.analysis().goto_type_definition(position)? {
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let nav_info = match snap.analysis().goto_type_definition(position)? {
None => return Ok(None),
Some(it) => it,
};
let src = FileRange { file_id: position.file_id, range: nav_info.range };
let res = to_proto::goto_definition_response(&world, Some(src), nav_info.info)?;
let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?;
Ok(Some(res))
}
pub fn handle_parent_module(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
let _p = profile("handle_parent_module");
let position = from_proto::file_position(&world, params)?;
let navs = world.analysis().parent_module(position)?;
let res = to_proto::goto_definition_response(&world, None, navs)?;
let position = from_proto::file_position(&snap, params)?;
let navs = snap.analysis().parent_module(position)?;
let res = to_proto::goto_definition_response(&snap, None, navs)?;
Ok(Some(res))
}
pub fn handle_runnables(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: lsp_ext::RunnablesParams,
) -> Result<Vec<lsp_ext::Runnable>> {
let _p = profile("handle_runnables");
let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let line_index = world.analysis().file_line_index(file_id)?;
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis().file_line_index(file_id)?;
let offset = params.position.map(|it| from_proto::offset(&line_index, it));
let mut res = Vec::new();
let workspace_root = world.workspace_root_for(file_id);
let cargo_spec = CargoTargetSpec::for_file(&world, file_id)?;
for runnable in world.analysis().runnables(file_id)? {
let workspace_root = snap.workspace_root_for(file_id);
let cargo_spec = CargoTargetSpec::for_file(&snap, file_id)?;
for runnable in snap.analysis().runnables(file_id)? {
if let Some(offset) = offset {
if !runnable.nav.full_range().contains_inclusive(offset) {
continue;
@ -413,7 +412,7 @@ pub fn handle_runnables(
}
}
}
res.push(to_proto::runnable(&world, file_id, runnable)?);
res.push(to_proto::runnable(&snap, file_id, runnable)?);
}
// Add `cargo check` and `cargo test` for the whole package
@ -453,16 +452,16 @@ pub fn handle_runnables(
}
pub fn handle_completion(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: lsp_types::CompletionParams,
) -> Result<Option<lsp_types::CompletionResponse>> {
let _p = profile("handle_completion");
let position = from_proto::file_position(&world, params.text_document_position)?;
let position = from_proto::file_position(&snap, params.text_document_position)?;
let completion_triggered_after_single_colon = {
let mut res = false;
if let Some(ctx) = params.context {
if ctx.trigger_character.unwrap_or_default() == ":" {
let source_file = world.analysis().parse(position.file_id)?;
let source_file = snap.analysis().parse(position.file_id)?;
let syntax = source_file.syntax();
let text = syntax.text();
if let Some(next_char) = text.char_at(position.offset) {
@ -480,12 +479,12 @@ pub fn handle_completion(
return Ok(None);
}
let items = match world.analysis().completions(&world.config.completion, position)? {
let items = match snap.analysis().completions(&snap.config.completion, position)? {
None => return Ok(None),
Some(items) => items,
};
let line_index = world.analysis().file_line_index(position.file_id)?;
let line_endings = world.file_line_endings(position.file_id);
let line_index = snap.analysis().file_line_index(position.file_id)?;
let line_endings = snap.file_line_endings(position.file_id);
let items: Vec<CompletionItem> = items
.into_iter()
.map(|item| to_proto::completion_item(&line_index, line_endings, item))
@ -495,15 +494,15 @@ pub fn handle_completion(
}
pub fn handle_folding_range(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: FoldingRangeParams,
) -> Result<Option<Vec<FoldingRange>>> {
let _p = profile("handle_folding_range");
let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let folds = world.analysis().folding_ranges(file_id)?;
let text = world.analysis().file_text(file_id)?;
let line_index = world.analysis().file_line_index(file_id)?;
let line_folding_only = world.config.client_caps.line_folding_only;
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let folds = snap.analysis().folding_ranges(file_id)?;
let text = snap.analysis().file_text(file_id)?;
let line_index = snap.analysis().file_line_index(file_id)?;
let line_folding_only = snap.config.client_caps.line_folding_only;
let res = folds
.into_iter()
.map(|it| to_proto::folding_range(&*text, &line_index, line_folding_only, it))
@ -512,16 +511,16 @@ pub fn handle_folding_range(
}
pub fn handle_signature_help(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: lsp_types::SignatureHelpParams,
) -> Result<Option<lsp_types::SignatureHelp>> {
let _p = profile("handle_signature_help");
let position = from_proto::file_position(&world, params.text_document_position_params)?;
let call_info = match world.analysis().call_info(position)? {
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let call_info = match snap.analysis().call_info(position)? {
None => return Ok(None),
Some(it) => it,
};
let concise = !world.config.call_info_full;
let concise = !snap.config.call_info_full;
let mut active_parameter = call_info.active_parameter.map(|it| it as i64);
if concise && call_info.signature.has_self_param {
active_parameter = active_parameter.map(|it| it.saturating_sub(1));
@ -535,14 +534,17 @@ pub fn handle_signature_help(
}))
}
pub fn handle_hover(world: WorldSnapshot, params: lsp_types::HoverParams) -> Result<Option<Hover>> {
pub fn handle_hover(
snap: GlobalStateSnapshot,
params: lsp_types::HoverParams,
) -> Result<Option<Hover>> {
let _p = profile("handle_hover");
let position = from_proto::file_position(&world, params.text_document_position_params)?;
let info = match world.analysis().hover(position)? {
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let info = match snap.analysis().hover(position)? {
None => return Ok(None),
Some(info) => info,
};
let line_index = world.analysis.file_line_index(position.file_id)?;
let line_index = snap.analysis.file_line_index(position.file_id)?;
let range = to_proto::range(&line_index, info.range);
let res = Hover {
contents: HoverContents::Markup(MarkupContent {
@ -555,26 +557,29 @@ pub fn handle_hover(world: WorldSnapshot, params: lsp_types::HoverParams) -> Res
}
pub fn handle_prepare_rename(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> Result<Option<PrepareRenameResponse>> {
let _p = profile("handle_prepare_rename");
let position = from_proto::file_position(&world, params)?;
let position = from_proto::file_position(&snap, params)?;
let optional_change = world.analysis().rename(position, "dummy")?;
let optional_change = snap.analysis().rename(position, "dummy")?;
let range = match optional_change {
None => return Ok(None),
Some(it) => it.range,
};
let line_index = world.analysis().file_line_index(position.file_id)?;
let line_index = snap.analysis().file_line_index(position.file_id)?;
let range = to_proto::range(&line_index, range);
Ok(Some(PrepareRenameResponse::Range(range)))
}
pub fn handle_rename(world: WorldSnapshot, params: RenameParams) -> Result<Option<WorkspaceEdit>> {
pub fn handle_rename(
snap: GlobalStateSnapshot,
params: RenameParams,
) -> Result<Option<WorkspaceEdit>> {
let _p = profile("handle_rename");
let position = from_proto::file_position(&world, params.text_document_position)?;
let position = from_proto::file_position(&snap, params.text_document_position)?;
if params.new_name.is_empty() {
return Err(LspError::new(
@ -584,36 +589,36 @@ pub fn handle_rename(world: WorldSnapshot, params: RenameParams) -> Result<Optio
.into());
}
let optional_change = world.analysis().rename(position, &*params.new_name)?;
let optional_change = snap.analysis().rename(position, &*params.new_name)?;
let source_change = match optional_change {
None => return Ok(None),
Some(it) => it.info,
};
let workspace_edit = to_proto::workspace_edit(&world, source_change)?;
let workspace_edit = to_proto::workspace_edit(&snap, source_change)?;
Ok(Some(workspace_edit))
}
pub fn handle_references(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: lsp_types::ReferenceParams,
) -> Result<Option<Vec<Location>>> {
let _p = profile("handle_references");
let position = from_proto::file_position(&world, params.text_document_position)?;
let position = from_proto::file_position(&snap, params.text_document_position)?;
let refs = match world.analysis().find_all_refs(position, None)? {
let refs = match snap.analysis().find_all_refs(position, None)? {
None => return Ok(None),
Some(refs) => refs,
};
let locations = if params.context.include_declaration {
refs.into_iter()
.filter_map(|reference| to_proto::location(&world, reference.file_range).ok())
.filter_map(|reference| to_proto::location(&snap, reference.file_range).ok())
.collect()
} else {
// Only iterate over the references if include_declaration was false
refs.references()
.iter()
.filter_map(|reference| to_proto::location(&world, reference.file_range).ok())
.filter_map(|reference| to_proto::location(&snap, reference.file_range).ok())
.collect()
};
@ -621,24 +626,24 @@ pub fn handle_references(
}
pub fn handle_formatting(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: DocumentFormattingParams,
) -> Result<Option<Vec<lsp_types::TextEdit>>> {
let _p = profile("handle_formatting");
let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let file = world.analysis().file_text(file_id)?;
let crate_ids = world.analysis().crate_for(file_id)?;
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let file = snap.analysis().file_text(file_id)?;
let crate_ids = snap.analysis().crate_for(file_id)?;
let file_line_index = world.analysis().file_line_index(file_id)?;
let file_line_index = snap.analysis().file_line_index(file_id)?;
let end_position = to_proto::position(&file_line_index, TextSize::of(file.as_str()));
let mut rustfmt = match &world.config.rustfmt {
let mut rustfmt = match &snap.config.rustfmt {
RustfmtConfig::Rustfmt { extra_args } => {
let mut cmd = process::Command::new("rustfmt");
cmd.args(extra_args);
if let Some(&crate_id) = crate_ids.first() {
// Assume all crates are in the same edition
let edition = world.analysis().crate_edition(crate_id)?;
let edition = snap.analysis().crate_edition(crate_id)?;
cmd.arg("--edition");
cmd.arg(edition.to_string());
}
@ -697,15 +702,14 @@ pub fn handle_formatting(
}
fn handle_fixes(
world: &WorldSnapshot,
snap: &GlobalStateSnapshot,
params: &lsp_types::CodeActionParams,
res: &mut Vec<lsp_ext::CodeAction>,
) -> Result<()> {
let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let line_index = world.analysis().file_line_index(file_id)?;
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis().file_line_index(file_id)?;
let range = from_proto::text_range(&line_index, params.range);
let diagnostics = world.analysis().diagnostics(file_id)?;
let diagnostics = snap.analysis().diagnostics(file_id)?;
let fixes_from_diagnostics = diagnostics
.into_iter()
@ -714,18 +718,19 @@ fn handle_fixes(
.map(|(_range, fix)| fix);
for fix in fixes_from_diagnostics {
let title = fix.label;
let edit = to_proto::snippet_workspace_edit(&world, fix.source_change)?;
let edit = to_proto::snippet_workspace_edit(&snap, fix.source_change)?;
let action = lsp_ext::CodeAction {
title,
id: None,
group: None,
kind: None,
kind: Some(lsp_types::code_action_kind::QUICKFIX.into()),
edit: Some(edit),
command: None,
};
res.push(action);
}
for fix in world.check_fixes.get(&file_id).into_iter().flatten() {
for fix in snap.check_fixes.get(&file_id).into_iter().flatten() {
let fix_range = from_proto::text_range(&line_index, fix.range);
if fix_range.intersect(range).is_none() {
continue;
@ -736,37 +741,34 @@ fn handle_fixes(
}
pub fn handle_code_action(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: lsp_types::CodeActionParams,
) -> Result<Option<Vec<lsp_ext::CodeAction>>> {
let _p = profile("handle_code_action");
// We intentionally don't support command-based actions, as those either
// requires custom client-code anyway, or requires server-initiated edits.
// Server initiated edits break causality, so we avoid those as well.
if !world.config.client_caps.code_action_literals {
if !snap.config.client_caps.code_action_literals {
return Ok(None);
}
let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let line_index = world.analysis().file_line_index(file_id)?;
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis().file_line_index(file_id)?;
let range = from_proto::text_range(&line_index, params.range);
let frange = FileRange { file_id, range };
let mut res: Vec<lsp_ext::CodeAction> = Vec::new();
handle_fixes(&world, &params, &mut res)?;
handle_fixes(&snap, &params, &mut res)?;
if world.config.client_caps.resolve_code_action {
for (index, assist) in world
.analysis()
.unresolved_assists(&world.config.assist, frange)?
.into_iter()
.enumerate()
if snap.config.client_caps.resolve_code_action {
for (index, assist) in
snap.analysis().unresolved_assists(&snap.config.assist, frange)?.into_iter().enumerate()
{
res.push(to_proto::unresolved_code_action(&world, assist, index)?);
res.push(to_proto::unresolved_code_action(&snap, assist, index)?);
}
} else {
for assist in world.analysis().resolved_assists(&world.config.assist, frange)?.into_iter() {
res.push(to_proto::resolved_code_action(&world, assist)?);
for assist in snap.analysis().resolved_assists(&snap.config.assist, frange)?.into_iter() {
res.push(to_proto::resolved_code_action(&snap, assist)?);
}
}
@ -774,43 +776,43 @@ pub fn handle_code_action(
}
pub fn handle_resolve_code_action(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: lsp_ext::ResolveCodeActionParams,
) -> Result<Option<lsp_ext::SnippetWorkspaceEdit>> {
let _p = profile("handle_resolve_code_action");
let file_id = from_proto::file_id(&world, &params.code_action_params.text_document.uri)?;
let line_index = world.analysis().file_line_index(file_id)?;
let file_id = from_proto::file_id(&snap, &params.code_action_params.text_document.uri)?;
let line_index = snap.analysis().file_line_index(file_id)?;
let range = from_proto::text_range(&line_index, params.code_action_params.range);
let frange = FileRange { file_id, range };
let assists = world.analysis().resolved_assists(&world.config.assist, frange)?;
let assists = snap.analysis().resolved_assists(&snap.config.assist, frange)?;
let id_components = params.id.split(":").collect::<Vec<&str>>();
let index = id_components.last().unwrap().parse::<usize>().unwrap();
let id_string = id_components.first().unwrap();
let assist = &assists[index];
assert!(assist.assist.id.0 == *id_string);
Ok(to_proto::resolved_code_action(&world, assist.clone())?.edit)
Ok(to_proto::resolved_code_action(&snap, assist.clone())?.edit)
}
pub fn handle_code_lens(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: lsp_types::CodeLensParams,
) -> Result<Option<Vec<CodeLens>>> {
let _p = profile("handle_code_lens");
let mut lenses: Vec<CodeLens> = Default::default();
if world.config.lens.none() {
if snap.config.lens.none() {
// early return before any db query!
return Ok(Some(lenses));
}
let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let line_index = world.analysis().file_line_index(file_id)?;
let cargo_spec = CargoTargetSpec::for_file(&world, file_id)?;
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.analysis().file_line_index(file_id)?;
let cargo_spec = CargoTargetSpec::for_file(&snap, file_id)?;
if world.config.lens.runnable() {
if snap.config.lens.runnable() {
// Gather runnables
for runnable in world.analysis().runnables(file_id)? {
for runnable in snap.analysis().runnables(file_id)? {
let (run_title, debugee) = match &runnable.kind {
RunnableKind::Test { .. } | RunnableKind::TestMod { .. } => {
("\u{fe0e} Run Test", true)
@ -836,8 +838,8 @@ pub fn handle_code_lens(
};
let range = to_proto::range(&line_index, runnable.nav.range());
let r = to_proto::runnable(&world, file_id, runnable)?;
if world.config.lens.run {
let r = to_proto::runnable(&snap, file_id, runnable)?;
if snap.config.lens.run {
let lens = CodeLens {
range,
command: Some(Command {
@ -850,7 +852,7 @@ pub fn handle_code_lens(
lenses.push(lens);
}
if debugee && world.config.lens.debug {
if debugee && snap.config.lens.debug {
let debug_lens = CodeLens {
range,
command: Some(Command {
@ -865,11 +867,10 @@ pub fn handle_code_lens(
}
}
if world.config.lens.impementations {
if snap.config.lens.impementations {
// Handle impls
lenses.extend(
world
.analysis()
snap.analysis()
.file_structure(file_id)?
.into_iter()
.filter(|it| match it.kind {
@ -904,14 +905,17 @@ enum CodeLensResolveData {
Impls(lsp_types::request::GotoImplementationParams),
}
pub fn handle_code_lens_resolve(world: WorldSnapshot, code_lens: CodeLens) -> Result<CodeLens> {
pub fn handle_code_lens_resolve(
snap: GlobalStateSnapshot,
code_lens: CodeLens,
) -> Result<CodeLens> {
let _p = profile("handle_code_lens_resolve");
let data = code_lens.data.unwrap();
let resolve = from_json::<Option<CodeLensResolveData>>("CodeLensResolveData", data)?;
match resolve {
Some(CodeLensResolveData::Impls(lens_params)) => {
let locations: Vec<Location> =
match handle_goto_implementation(world, lens_params.clone())? {
match handle_goto_implementation(snap, lens_params.clone())? {
Some(lsp_types::GotoDefinitionResponse::Scalar(loc)) => vec![loc],
Some(lsp_types::GotoDefinitionResponse::Array(locs)) => locs,
Some(lsp_types::GotoDefinitionResponse::Link(links)) => links
@ -950,14 +954,14 @@ pub fn handle_code_lens_resolve(world: WorldSnapshot, code_lens: CodeLens) -> Re
}
pub fn handle_document_highlight(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: lsp_types::DocumentHighlightParams,
) -> Result<Option<Vec<DocumentHighlight>>> {
let _p = profile("handle_document_highlight");
let position = from_proto::file_position(&world, params.text_document_position_params)?;
let line_index = world.analysis().file_line_index(position.file_id)?;
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let line_index = snap.analysis().file_line_index(position.file_id)?;
let refs = match world
let refs = match snap
.analysis()
.find_all_refs(position, Some(SearchScope::single_file(position.file_id)))?
{
@ -977,19 +981,19 @@ pub fn handle_document_highlight(
}
pub fn handle_ssr(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: lsp_ext::SsrParams,
) -> Result<lsp_types::WorkspaceEdit> {
let _p = profile("handle_ssr");
let source_change =
world.analysis().structural_search_replace(&params.query, params.parse_only)??;
to_proto::workspace_edit(&world, source_change)
snap.analysis().structural_search_replace(&params.query, params.parse_only)??;
to_proto::workspace_edit(&snap, source_change)
}
pub fn publish_diagnostics(world: &WorldSnapshot, file_id: FileId) -> Result<DiagnosticTask> {
pub fn publish_diagnostics(snap: &GlobalStateSnapshot, file_id: FileId) -> Result<DiagnosticTask> {
let _p = profile("publish_diagnostics");
let line_index = world.analysis().file_line_index(file_id)?;
let diagnostics: Vec<Diagnostic> = world
let line_index = snap.analysis().file_line_index(file_id)?;
let diagnostics: Vec<Diagnostic> = snap
.analysis()
.diagnostics(file_id)?
.into_iter()
@ -1007,28 +1011,28 @@ pub fn publish_diagnostics(world: &WorldSnapshot, file_id: FileId) -> Result<Dia
}
pub fn handle_inlay_hints(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: InlayHintsParams,
) -> Result<Vec<InlayHint>> {
let _p = profile("handle_inlay_hints");
let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let analysis = world.analysis();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let analysis = snap.analysis();
let line_index = analysis.file_line_index(file_id)?;
Ok(analysis
.inlay_hints(file_id, &world.config.inlay_hints)?
.inlay_hints(file_id, &snap.config.inlay_hints)?
.into_iter()
.map(|it| to_proto::inlay_int(&line_index, it))
.collect())
}
pub fn handle_call_hierarchy_prepare(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: CallHierarchyPrepareParams,
) -> Result<Option<Vec<CallHierarchyItem>>> {
let _p = profile("handle_call_hierarchy_prepare");
let position = from_proto::file_position(&world, params.text_document_position_params)?;
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let nav_info = match world.analysis().call_hierarchy(position)? {
let nav_info = match snap.analysis().call_hierarchy(position)? {
None => return Ok(None),
Some(it) => it,
};
@ -1037,24 +1041,24 @@ pub fn handle_call_hierarchy_prepare(
let res = navs
.into_iter()
.filter(|it| it.kind() == SyntaxKind::FN_DEF)
.map(|it| to_proto::call_hierarchy_item(&world, it))
.map(|it| to_proto::call_hierarchy_item(&snap, it))
.collect::<Result<Vec<_>>>()?;
Ok(Some(res))
}
pub fn handle_call_hierarchy_incoming(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: CallHierarchyIncomingCallsParams,
) -> Result<Option<Vec<CallHierarchyIncomingCall>>> {
let _p = profile("handle_call_hierarchy_incoming");
let item = params.item;
let doc = TextDocumentIdentifier::new(item.uri);
let frange = from_proto::file_range(&world, doc, item.range)?;
let frange = from_proto::file_range(&snap, doc, item.range)?;
let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() };
let call_items = match world.analysis().incoming_calls(fpos)? {
let call_items = match snap.analysis().incoming_calls(fpos)? {
None => return Ok(None),
Some(it) => it,
};
@ -1063,8 +1067,8 @@ pub fn handle_call_hierarchy_incoming(
for call_item in call_items.into_iter() {
let file_id = call_item.target.file_id();
let line_index = world.analysis().file_line_index(file_id)?;
let item = to_proto::call_hierarchy_item(&world, call_item.target)?;
let line_index = snap.analysis().file_line_index(file_id)?;
let item = to_proto::call_hierarchy_item(&snap, call_item.target)?;
res.push(CallHierarchyIncomingCall {
from: item,
from_ranges: call_item
@ -1079,17 +1083,17 @@ pub fn handle_call_hierarchy_incoming(
}
pub fn handle_call_hierarchy_outgoing(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: CallHierarchyOutgoingCallsParams,
) -> Result<Option<Vec<CallHierarchyOutgoingCall>>> {
let _p = profile("handle_call_hierarchy_outgoing");
let item = params.item;
let doc = TextDocumentIdentifier::new(item.uri);
let frange = from_proto::file_range(&world, doc, item.range)?;
let frange = from_proto::file_range(&snap, doc, item.range)?;
let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() };
let call_items = match world.analysis().outgoing_calls(fpos)? {
let call_items = match snap.analysis().outgoing_calls(fpos)? {
None => return Ok(None),
Some(it) => it,
};
@ -1098,8 +1102,8 @@ pub fn handle_call_hierarchy_outgoing(
for call_item in call_items.into_iter() {
let file_id = call_item.target.file_id();
let line_index = world.analysis().file_line_index(file_id)?;
let item = to_proto::call_hierarchy_item(&world, call_item.target)?;
let line_index = snap.analysis().file_line_index(file_id)?;
let item = to_proto::call_hierarchy_item(&snap, call_item.target)?;
res.push(CallHierarchyOutgoingCall {
to: item,
from_ranges: call_item
@ -1114,31 +1118,31 @@ pub fn handle_call_hierarchy_outgoing(
}
pub fn handle_semantic_tokens(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: SemanticTokensParams,
) -> Result<Option<SemanticTokensResult>> {
let _p = profile("handle_semantic_tokens");
let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let text = world.analysis().file_text(file_id)?;
let line_index = world.analysis().file_line_index(file_id)?;
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let text = snap.analysis().file_text(file_id)?;
let line_index = snap.analysis().file_line_index(file_id)?;
let highlights = world.analysis().highlight(file_id)?;
let highlights = snap.analysis().highlight(file_id)?;
let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
Ok(Some(semantic_tokens.into()))
}
pub fn handle_semantic_tokens_range(
world: WorldSnapshot,
snap: GlobalStateSnapshot,
params: SemanticTokensRangeParams,
) -> Result<Option<SemanticTokensRangeResult>> {
let _p = profile("handle_semantic_tokens_range");
let frange = from_proto::file_range(&world, params.text_document, params.range)?;
let text = world.analysis().file_text(frange.file_id)?;
let line_index = world.analysis().file_line_index(frange.file_id)?;
let frange = from_proto::file_range(&snap, params.text_document, params.range)?;
let text = snap.analysis().file_text(frange.file_id)?;
let line_index = snap.analysis().file_line_index(frange.file_id)?;
let highlights = world.analysis().highlight_range(frange)?;
let highlights = snap.analysis().highlight_range(frange)?;
let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
Ok(Some(semantic_tokens.into()))
}

View file

@ -10,7 +10,8 @@ use ra_syntax::{SyntaxKind, TextRange, TextSize};
use ra_vfs::LineEndings;
use crate::{
cargo_target_spec::CargoTargetSpec, lsp_ext, semantic_tokens, world::WorldSnapshot, Result,
cargo_target_spec::CargoTargetSpec, global_state::GlobalStateSnapshot, lsp_ext,
semantic_tokens, Result,
};
pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position {
@ -384,41 +385,44 @@ pub(crate) fn folding_range(
}
}
pub(crate) fn url(world: &WorldSnapshot, file_id: FileId) -> Result<lsp_types::Url> {
world.file_id_to_uri(file_id)
pub(crate) fn url(snap: &GlobalStateSnapshot, file_id: FileId) -> Result<lsp_types::Url> {
snap.file_id_to_uri(file_id)
}
pub(crate) fn versioned_text_document_identifier(
world: &WorldSnapshot,
snap: &GlobalStateSnapshot,
file_id: FileId,
version: Option<i64>,
) -> Result<lsp_types::VersionedTextDocumentIdentifier> {
let res = lsp_types::VersionedTextDocumentIdentifier { uri: url(world, file_id)?, version };
let res = lsp_types::VersionedTextDocumentIdentifier { uri: url(snap, file_id)?, version };
Ok(res)
}
pub(crate) fn location(world: &WorldSnapshot, frange: FileRange) -> Result<lsp_types::Location> {
let url = url(world, frange.file_id)?;
let line_index = world.analysis().file_line_index(frange.file_id)?;
pub(crate) fn location(
snap: &GlobalStateSnapshot,
frange: FileRange,
) -> Result<lsp_types::Location> {
let url = url(snap, frange.file_id)?;
let line_index = snap.analysis().file_line_index(frange.file_id)?;
let range = range(&line_index, frange.range);
let loc = lsp_types::Location::new(url, range);
Ok(loc)
}
pub(crate) fn location_link(
world: &WorldSnapshot,
snap: &GlobalStateSnapshot,
src: Option<FileRange>,
target: NavigationTarget,
) -> Result<lsp_types::LocationLink> {
let origin_selection_range = match src {
Some(src) => {
let line_index = world.analysis().file_line_index(src.file_id)?;
let line_index = snap.analysis().file_line_index(src.file_id)?;
let range = range(&line_index, src.range);
Some(range)
}
None => None,
};
let (target_uri, target_range, target_selection_range) = location_info(world, target)?;
let (target_uri, target_range, target_selection_range) = location_info(snap, target)?;
let res = lsp_types::LocationLink {
origin_selection_range,
target_uri,
@ -429,12 +433,12 @@ pub(crate) fn location_link(
}
fn location_info(
world: &WorldSnapshot,
snap: &GlobalStateSnapshot,
target: NavigationTarget,
) -> Result<(lsp_types::Url, lsp_types::Range, lsp_types::Range)> {
let line_index = world.analysis().file_line_index(target.file_id())?;
let line_index = snap.analysis().file_line_index(target.file_id())?;
let target_uri = url(world, target.file_id())?;
let target_uri = url(snap, target.file_id())?;
let target_range = range(&line_index, target.full_range());
let target_selection_range =
target.focus_range().map(|it| range(&line_index, it)).unwrap_or(target_range);
@ -442,14 +446,14 @@ fn location_info(
}
pub(crate) fn goto_definition_response(
world: &WorldSnapshot,
snap: &GlobalStateSnapshot,
src: Option<FileRange>,
targets: Vec<NavigationTarget>,
) -> Result<lsp_types::GotoDefinitionResponse> {
if world.config.client_caps.location_link {
if snap.config.client_caps.location_link {
let links = targets
.into_iter()
.map(|nav| location_link(world, src, nav))
.map(|nav| location_link(snap, src, nav))
.collect::<Result<Vec<_>>>()?;
Ok(links.into())
} else {
@ -457,7 +461,7 @@ pub(crate) fn goto_definition_response(
.into_iter()
.map(|nav| {
location(
world,
snap,
FileRange {
file_id: nav.file_id(),
range: nav.focus_range().unwrap_or(nav.range()),
@ -470,13 +474,13 @@ pub(crate) fn goto_definition_response(
}
pub(crate) fn snippet_text_document_edit(
world: &WorldSnapshot,
snap: &GlobalStateSnapshot,
is_snippet: bool,
source_file_edit: SourceFileEdit,
) -> Result<lsp_ext::SnippetTextDocumentEdit> {
let text_document = versioned_text_document_identifier(world, source_file_edit.file_id, None)?;
let line_index = world.analysis().file_line_index(source_file_edit.file_id)?;
let line_endings = world.file_line_endings(source_file_edit.file_id);
let text_document = versioned_text_document_identifier(snap, source_file_edit.file_id, None)?;
let line_index = snap.analysis().file_line_index(source_file_edit.file_id)?;
let line_endings = snap.file_line_endings(source_file_edit.file_id);
let edits = source_file_edit
.edit
.into_iter()
@ -486,17 +490,17 @@ pub(crate) fn snippet_text_document_edit(
}
pub(crate) fn resource_op(
world: &WorldSnapshot,
snap: &GlobalStateSnapshot,
file_system_edit: FileSystemEdit,
) -> Result<lsp_types::ResourceOp> {
let res = match file_system_edit {
FileSystemEdit::CreateFile { source_root, path } => {
let uri = world.path_to_uri(source_root, &path)?;
let uri = snap.path_to_uri(source_root, &path)?;
lsp_types::ResourceOp::Create(lsp_types::CreateFile { uri, options: None })
}
FileSystemEdit::MoveFile { src, dst_source_root, dst_path } => {
let old_uri = world.file_id_to_uri(src)?;
let new_uri = world.path_to_uri(dst_source_root, &dst_path)?;
let old_uri = snap.file_id_to_uri(src)?;
let new_uri = snap.path_to_uri(dst_source_root, &dst_path)?;
lsp_types::ResourceOp::Rename(lsp_types::RenameFile { old_uri, new_uri, options: None })
}
};
@ -504,16 +508,16 @@ pub(crate) fn resource_op(
}
pub(crate) fn snippet_workspace_edit(
world: &WorldSnapshot,
snap: &GlobalStateSnapshot,
source_change: SourceChange,
) -> Result<lsp_ext::SnippetWorkspaceEdit> {
let mut document_changes: Vec<lsp_ext::SnippetDocumentChangeOperation> = Vec::new();
for op in source_change.file_system_edits {
let op = resource_op(&world, op)?;
let op = resource_op(&snap, op)?;
document_changes.push(lsp_ext::SnippetDocumentChangeOperation::Op(op));
}
for edit in source_change.source_file_edits {
let edit = snippet_text_document_edit(&world, source_change.is_snippet, edit)?;
let edit = snippet_text_document_edit(&snap, source_change.is_snippet, edit)?;
document_changes.push(lsp_ext::SnippetDocumentChangeOperation::Edit(edit));
}
let workspace_edit =
@ -522,11 +526,11 @@ pub(crate) fn snippet_workspace_edit(
}
pub(crate) fn workspace_edit(
world: &WorldSnapshot,
snap: &GlobalStateSnapshot,
source_change: SourceChange,
) -> Result<lsp_types::WorkspaceEdit> {
assert!(!source_change.is_snippet);
snippet_workspace_edit(world, source_change).map(|it| it.into())
snippet_workspace_edit(snap, source_change).map(|it| it.into())
}
impl From<lsp_ext::SnippetWorkspaceEdit> for lsp_types::WorkspaceEdit {
@ -565,13 +569,13 @@ impl From<lsp_ext::SnippetWorkspaceEdit> for lsp_types::WorkspaceEdit {
}
pub fn call_hierarchy_item(
world: &WorldSnapshot,
snap: &GlobalStateSnapshot,
target: NavigationTarget,
) -> Result<lsp_types::CallHierarchyItem> {
let name = target.name().to_string();
let detail = target.description().map(|it| it.to_string());
let kind = symbol_kind(target.kind());
let (uri, range, selection_range) = location_info(world, target)?;
let (uri, range, selection_range) = location_info(snap, target)?;
Ok(lsp_types::CallHierarchyItem { name, kind, tags: None, detail, uri, range, selection_range })
}
@ -620,14 +624,14 @@ fn main() <fold>{
}
pub(crate) fn unresolved_code_action(
world: &WorldSnapshot,
snap: &GlobalStateSnapshot,
assist: Assist,
index: usize,
) -> Result<lsp_ext::CodeAction> {
let res = lsp_ext::CodeAction {
title: assist.label,
id: Some(format!("{}:{}", assist.id.0.to_owned(), index.to_string())),
group: assist.group.filter(|_| world.config.client_caps.code_action_group).map(|gr| gr.0),
group: assist.group.filter(|_| snap.config.client_caps.code_action_group).map(|gr| gr.0),
kind: Some(String::new()),
edit: None,
command: None,
@ -636,25 +640,25 @@ pub(crate) fn unresolved_code_action(
}
pub(crate) fn resolved_code_action(
world: &WorldSnapshot,
snap: &GlobalStateSnapshot,
assist: ResolvedAssist,
) -> Result<lsp_ext::CodeAction> {
let change = assist.source_change;
unresolved_code_action(world, assist.assist, 0).and_then(|it| {
unresolved_code_action(snap, assist.assist, 0).and_then(|it| {
Ok(lsp_ext::CodeAction {
id: None,
edit: Some(snippet_workspace_edit(world, change)?),
edit: Some(snippet_workspace_edit(snap, change)?),
..it
})
})
}
pub(crate) fn runnable(
world: &WorldSnapshot,
snap: &GlobalStateSnapshot,
file_id: FileId,
runnable: Runnable,
) -> Result<lsp_ext::Runnable> {
let spec = CargoTargetSpec::for_file(world, file_id)?;
let spec = CargoTargetSpec::for_file(snap, file_id)?;
let target = spec.as_ref().map(|s| s.target.clone());
let (cargo_args, executable_args) =
CargoTargetSpec::runnable_args(spec, &runnable.kind, &runnable.cfg_exprs)?;
@ -667,14 +671,14 @@ pub(crate) fn runnable(
target.map_or_else(|| "run binary".to_string(), |t| format!("run {}", t))
}
};
let location = location_link(world, None, runnable.nav)?;
let location = location_link(snap, None, runnable.nav)?;
Ok(lsp_ext::Runnable {
label,
location: Some(location),
kind: lsp_ext::RunnableKind::Cargo,
args: lsp_ext::CargoRunnable {
workspace_root: world.workspace_root_for(file_id).map(|root| root.to_owned()),
workspace_root: snap.workspace_root_for(file_id).map(|root| root.to_owned()),
cargo_args,
executable_args,
},

View file

@ -58,55 +58,6 @@ use std::collections::Spam;
eprintln!("completion took {:?}", completion_start.elapsed());
}
#[test]
fn test_runnables_no_project() {
if skip_slow_tests() {
return;
}
let server = project(
r"
//- lib.rs
#[test]
fn foo() {
}
",
);
server.wait_until_workspace_is_loaded();
server.request::<Runnables>(
RunnablesParams { text_document: server.doc_id("lib.rs"), position: None },
json!([
{
"args": {
"cargoArgs": ["test"],
"executableArgs": ["foo", "--nocapture"],
},
"kind": "cargo",
"label": "test foo",
"location": {
"targetRange": {
"end": { "character": 1, "line": 2 },
"start": { "character": 0, "line": 0 }
},
"targetSelectionRange": {
"end": { "character": 6, "line": 1 },
"start": { "character": 3, "line": 1 }
},
"targetUri": "file:///[..]/lib.rs"
}
},
{
"args": {
"cargoArgs": ["check", "--workspace"],
"executableArgs": [],
},
"kind": "cargo",
"label": "cargo check --workspace"
}
]),
);
}
#[test]
fn test_runnables_project() {
if skip_slow_tests() {
@ -347,6 +298,7 @@ fn main() {}
}
]
},
"kind": "quickfix",
"title": "Create module"
}]),
);
@ -379,8 +331,7 @@ fn test_missing_module_code_action_in_json_project() {
"root_module": path.join("src/lib.rs"),
"deps": [],
"edition": "2015",
"atom_cfgs": [],
"key_value_cfgs": {}
"cfg": [ "cfg_atom_1", "feature=cfg_1"],
} ]
});
@ -418,6 +369,7 @@ fn main() {{}}
}
]
},
"kind": "quickfix",
"title": "Create module"
}]),
);

View file

@ -19,8 +19,9 @@ use serde_json::{to_string_pretty, Value};
use tempfile::TempDir;
use test_utils::{find_mismatch, parse_fixture};
use ra_project_model::ProjectManifest;
use rust_analyzer::{
config::{ClientCapsConfig, Config},
config::{ClientCapsConfig, Config, LinkedProject},
main_loop,
};
@ -42,7 +43,7 @@ impl<'a> Project<'a> {
self
}
pub fn root(mut self, path: &str) -> Project<'a> {
pub(crate) fn root(mut self, path: &str) -> Project<'a> {
self.roots.push(path.into());
self
}
@ -74,7 +75,16 @@ impl<'a> Project<'a> {
paths.push((path, entry.text));
}
let roots = self.roots.into_iter().map(|root| tmp_dir.path().join(root)).collect();
let mut roots =
self.roots.into_iter().map(|root| tmp_dir.path().join(root)).collect::<Vec<_>>();
if roots.is_empty() {
roots.push(tmp_dir.path().to_path_buf());
}
let linked_projects = roots
.into_iter()
.map(|it| ProjectManifest::discover_single(&it).unwrap())
.map(LinkedProject::from)
.collect::<Vec<_>>();
let mut config = Config {
client_caps: ClientCapsConfig {
@ -84,6 +94,7 @@ impl<'a> Project<'a> {
..Default::default()
},
with_sysroot: self.with_sysroot,
linked_projects,
..Config::default()
};
@ -91,7 +102,7 @@ impl<'a> Project<'a> {
f(&mut config)
}
Server::new(tmp_dir, config, roots, paths)
Server::new(tmp_dir, config, paths)
}
}
@ -109,20 +120,12 @@ pub struct Server {
}
impl Server {
fn new(
dir: TempDir,
config: Config,
roots: Vec<PathBuf>,
files: Vec<(PathBuf, String)>,
) -> Server {
let path = dir.path().to_path_buf();
let roots = if roots.is_empty() { vec![path] } else { roots };
fn new(dir: TempDir, config: Config, files: Vec<(PathBuf, String)>) -> Server {
let (connection, client) = Connection::memory();
let _thread = jod_thread::Builder::new()
.name("test server".to_string())
.spawn(move || main_loop(roots, config, connection).unwrap())
.spawn(move || main_loop(config, connection).unwrap())
.expect("failed to spawn a thread");
let res =

View file

@ -124,3 +124,8 @@ pub fn replace(buf: &mut String, from: char, to: &str) {
// FIXME: do this in place.
*buf = buf.replace(from, to)
}
pub fn split1(haystack: &str, delim: char) -> Option<(&str, &str)> {
let idx = haystack.find(delim)?;
Some((&haystack[..idx], &haystack[idx + delim.len_utf8()..]))
}

View file

@ -14,4 +14,5 @@ serde_json = "1.0.48"
relative-path = "1.0.0"
rustc-hash = "1.1.0"
ra_cfg = { path = "../ra_cfg" }
ra_cfg = { path = "../ra_cfg" }
stdx = { path = "../stdx" }

View file

@ -15,6 +15,7 @@ use std::{
};
pub use ra_cfg::CfgOptions;
use stdx::split1;
pub use relative_path::{RelativePath, RelativePathBuf};
pub use rustc_hash::FxHashMap;
@ -332,11 +333,6 @@ fn parse_meta(meta: &str) -> FixtureMeta {
FixtureMeta::File(FileMeta { path, crate_name: krate, deps, edition, cfg, env })
}
fn split1(haystack: &str, delim: char) -> Option<(&str, &str)> {
let idx = haystack.find(delim)?;
Some((&haystack[..idx], &haystack[idx + delim.len_utf8()..]))
}
/// Adjusts the indentation of the first line to the minimum indentation of the rest of the lines.
/// This allows fixtures to start off in a different indentation, e.g. to align the first line with
/// the other lines visually: