diff --git a/Cargo.lock b/Cargo.lock index 7432a82080d0..469b3c5daef2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1893,6 +1893,7 @@ dependencies = [ "serde", "serde_derive", "serde_json", + "span", "stdx", "syntax", "syntax-bridge", diff --git a/crates/base-db/src/change.rs b/crates/base-db/src/change.rs index da2fb27571c2..f946037df942 100644 --- a/crates/base-db/src/change.rs +++ b/crates/base-db/src/change.rs @@ -3,6 +3,7 @@ use std::fmt; +use rustc_hash::FxHashSet; use salsa::Durability; use triomphe::Arc; use vfs::FileId; @@ -49,8 +50,16 @@ impl FileChange { pub fn apply(self, db: &mut dyn RootQueryDb) -> Option { let _p = tracing::info_span!("FileChange::apply").entered(); if let Some(roots) = self.roots { + let mut local_roots = FxHashSet::default(); + let mut library_roots = FxHashSet::default(); for (idx, root) in roots.into_iter().enumerate() { let root_id = SourceRootId(idx as u32); + if root.is_library { + library_roots.insert(root_id); + } else { + local_roots.insert(root_id); + } + let durability = source_root_durability(&root); for file_id in root.iter() { db.set_file_source_root_with_durability(file_id, root_id, durability); @@ -58,6 +67,8 @@ impl FileChange { db.set_source_root_with_durability(root_id, Arc::new(root), durability); } + db.set_local_roots_with_durability(Arc::new(local_roots), Durability::MEDIUM); + db.set_library_roots_with_durability(Arc::new(library_roots), Durability::MEDIUM); } for (file_id, text) in self.files_changed { diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs index ad17f1730bef..881ab847be38 100644 --- a/crates/base-db/src/lib.rs +++ b/crates/base-db/src/lib.rs @@ -263,6 +263,16 @@ pub trait RootQueryDb: SourceDatabase + salsa::Database { #[salsa::invoke(input::transitive_rev_deps)] #[salsa::transparent] fn transitive_rev_deps(&self, of: Crate) -> FxHashSet; + + /// The set of "local" (that is, from the current workspace) roots. + /// Files in local roots are assumed to change frequently. + #[salsa::input] + fn local_roots(&self) -> Arc>; + + /// The set of roots for crates.io libraries. + /// Files in libraries are assumed to never change. + #[salsa::input] + fn library_roots(&self) -> Arc>; } pub fn transitive_deps(db: &dyn SourceDatabase, crate_id: Crate) -> FxHashSet { diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs index 0837308d5b60..0374213f65f3 100644 --- a/crates/hir-def/src/nameres.rs +++ b/crates/hir-def/src/nameres.rs @@ -519,6 +519,23 @@ impl DefMap { .map(|(id, _data)| id) } + pub fn inline_modules_for_macro_file( + &self, + file_id: MacroCallId, + ) -> impl Iterator + '_ { + self.modules + .iter() + .filter(move |(_id, data)| { + (match data.origin { + ModuleOrigin::Inline { definition_tree_id, .. 
} => { + definition_tree_id.file_id().macro_file() + } + _ => None, + }) == Some(file_id) + }) + .map(|(id, _data)| id) + } + pub fn modules(&self) -> impl Iterator + '_ { self.modules.iter() } diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index ac61b2200970..157695c8ea32 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -1068,6 +1068,7 @@ intern::impl_internable!(ModPath, attrs::AttrInput); #[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)] #[doc(alias = "MacroFileId")] +#[derive(PartialOrd, Ord)] pub struct MacroCallId { pub loc: MacroCallLoc, } @@ -1086,7 +1087,7 @@ impl From for span::MacroCallId { } } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, salsa_macros::Supertype)] pub enum HirFileId { FileId(EditionedFileId), MacroFile(MacroCallId), diff --git a/crates/hir-expand/src/prettify_macro_expansion_.rs b/crates/hir-expand/src/prettify_macro_expansion_.rs index 6431d46d39e9..c1ffcaef0806 100644 --- a/crates/hir-expand/src/prettify_macro_expansion_.rs +++ b/crates/hir-expand/src/prettify_macro_expansion_.rs @@ -20,6 +20,7 @@ pub fn prettify_macro_expansion( let span_offset = syn.text_range().start(); let target_crate = target_crate_id.data(db); let mut syntax_ctx_id_to_dollar_crate_replacement = FxHashMap::default(); + syntax_bridge::prettify_macro_expansion::prettify_macro_expansion( syn, &mut |dollar_crate| { diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index 247bb6939831..c4cedde170b0 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -284,18 +284,25 @@ impl Semantics<'_, DB> { self.imp.resolve_variant(record_lit).map(VariantDef::from) } - pub fn file_to_module_def(&self, file: impl Into) -> Option { - self.imp.file_to_module_defs(file.into()).next() + pub fn file_to_module_def(&self, file: EditionedFileId) -> Option { + self.imp.ed_file_to_module_defs(file).next() } - pub fn file_to_module_defs(&self, file: impl Into) -> impl Iterator { - self.imp.file_to_module_defs(file.into()) + pub fn file_to_module_def2(&self, file: FileId) -> Option { + self.imp.file_to_module_defs(file).next() } pub fn hir_file_to_module_def(&self, file: impl Into) -> Option { self.imp.hir_file_to_module_defs(file.into()).next() } + pub fn file_to_module_defs( + &self, + file: impl Into, + ) -> impl Iterator { + self.imp.ed_file_to_module_defs(file.into()) + } + pub fn hir_file_to_module_defs( &self, file: impl Into, @@ -380,6 +387,19 @@ impl<'db> SemanticsImpl<'db> { } } + /// If not crate is found for the file, try to return the last crate in topological order. + pub fn first_crate_hir(&self, file: HirFileId) -> Option { + match file { + HirFileId::FileId(editioned_file_id) => { + self.first_crate(editioned_file_id.file_id(self.db)) + } + HirFileId::MacroFile(macro_call_id) => { + let macro_call = self.db.lookup_intern_macro_call(macro_call_id); + Some(macro_call.krate.into()) + } + } + } + pub fn attach_first_edition(&self, file: FileId) -> Option { Some(EditionedFileId::new( self.db, @@ -412,6 +432,7 @@ impl<'db> SemanticsImpl<'db> { HirFileId::FileId(file_id) => { let module = self.file_to_module_defs(file_id.file_id(self.db)).next()?; let def_map = crate_def_map(self.db, module.krate().id); + match def_map[module.id.local_id].origin { ModuleOrigin::CrateRoot { .. } => None, ModuleOrigin::File { declaration, declaration_tree_id, .. 
} => { @@ -770,7 +791,7 @@ impl<'db> SemanticsImpl<'db> { // FIXME: Type the return type /// Returns the range (pre-expansion) in the string literal corresponding to the resolution, /// absolute file range (post-expansion) - /// of the part in the format string, the corresponding string token and the resolution if it + /// of the part in the format string (post-expansion), the corresponding string token and the resolution if it /// exists. pub fn check_for_format_args_template_with_file( &self, @@ -904,7 +925,6 @@ impl<'db> SemanticsImpl<'db> { None => return res, }; let file = self.find_file(node.syntax()); - if first == last { // node is just the token, so descend the token self.descend_into_macros_all( @@ -1877,9 +1897,15 @@ impl<'db> SemanticsImpl<'db> { self.with_ctx(|ctx| ctx.file_to_def(file).to_owned()).into_iter().map(Module::from) } + fn ed_file_to_module_defs(&self, file: EditionedFileId) -> impl Iterator { + self.with_ctx(|ctx| ctx.file_to_def(file.file_id(self.db)).to_owned()) + .into_iter() + .map(Module::from) + } + fn hir_file_to_module_defs(&self, file: HirFileId) -> impl Iterator { // FIXME: Do we need to care about inline modules for macro expansions? - self.file_to_module_defs(file.original_file_respecting_includes(self.db).file_id(self.db)) + self.ed_file_to_module_defs(file.original_file_respecting_includes(self.db)) } pub fn scope(&self, node: &SyntaxNode) -> Option> { diff --git a/crates/ide-assists/src/handlers/convert_bool_to_enum.rs b/crates/ide-assists/src/handlers/convert_bool_to_enum.rs index f73b8c4fd0f1..e69d01626759 100644 --- a/crates/ide-assists/src/handlers/convert_bool_to_enum.rs +++ b/crates/ide-assists/src/handlers/convert_bool_to_enum.rs @@ -208,7 +208,7 @@ fn replace_usages( target_module: &hir::Module, delayed_mutations: &mut Vec<(ImportScope, ast::Path)>, ) { - for (file_id, references) in usages { + for (file_id, references) in usages.map_out_of_macros(&ctx.sema) { edit.edit_file(file_id.file_id(ctx.db())); let refs_with_imports = augment_references_with_imports(ctx, references, target_module); diff --git a/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs b/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs index 32c4ae2e869e..bf6d12e39895 100644 --- a/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs +++ b/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs @@ -156,7 +156,7 @@ fn edit_struct_references( }; let usages = strukt_def.usages(&ctx.sema).include_self_refs().all(); - for (file_id, refs) in usages { + for (file_id, refs) in usages.map_out_of_macros(&ctx.sema) { edit.edit_file(file_id.file_id(ctx.db())); for r in refs { process_struct_name_reference(ctx, r, edit); @@ -234,7 +234,7 @@ fn edit_field_references( }; let def = Definition::Field(field); let usages = def.usages(&ctx.sema).all(); - for (file_id, refs) in usages { + for (file_id, refs) in usages.map_out_of_macros(&ctx.sema) { edit.edit_file(file_id.file_id(ctx.db())); for r in refs { if let Some(name_ref) = r.name.as_name_ref() { diff --git a/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs b/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs index cca4cb9d8f77..64f075c736cc 100644 --- a/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs +++ b/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs @@ -6,7 +6,7 @@ use ide_db::{ defs::Definition, helpers::mod_path_to_ast, imports::insert_use::{ImportScope, insert_use}, - 
search::{FileReference, UsageSearchResult}, + search::{FileReference, RealFileUsageSearchResult}, source_change::SourceChangeBuilder, syntax_helpers::node_ext::{for_each_tail_expr, walk_expr}, }; @@ -70,7 +70,8 @@ pub(crate) fn convert_tuple_return_type_to_struct( let ret_type = edit.make_mut(ret_type); let fn_ = edit.make_mut(fn_); - let usages = Definition::Function(fn_def).usages(&ctx.sema).all(); + let usages = + Definition::Function(fn_def).usages(&ctx.sema).all().map_out_of_macros(&ctx.sema); let struct_name = format!("{}Result", stdx::to_camel_case(&fn_name.to_string())); let parent = fn_.syntax().ancestors().find_map(>::cast); add_tuple_struct_def( @@ -101,7 +102,7 @@ pub(crate) fn convert_tuple_return_type_to_struct( fn replace_usages( edit: &mut SourceChangeBuilder, ctx: &AssistContext<'_>, - usages: &UsageSearchResult, + usages: &RealFileUsageSearchResult, struct_name: &str, target_module: &hir::Module, ) { @@ -231,7 +232,7 @@ fn augment_references_with_imports( fn add_tuple_struct_def( edit: &mut SourceChangeBuilder, ctx: &AssistContext<'_>, - usages: &UsageSearchResult, + usages: &RealFileUsageSearchResult, parent: &SyntaxNode, tuple_ty: &ast::TupleType, struct_name: &str, diff --git a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs index 80756197fb70..16a488bd19c4 100644 --- a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs +++ b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs @@ -143,7 +143,8 @@ fn edit_struct_references( Either::Left(s) => Definition::Adt(hir::Adt::Struct(s)), Either::Right(v) => Definition::Variant(v), }; - let usages = strukt_def.usages(&ctx.sema).include_self_refs().all(); + let usages = + strukt_def.usages(&ctx.sema).include_self_refs().all().map_out_of_macros(&ctx.sema); let edit_node = |edit: &mut SourceChangeBuilder, node: SyntaxNode| -> Option<()> { match_ast! 
{ @@ -228,7 +229,7 @@ fn edit_field_references( None => continue, }; let def = Definition::Field(field); - let usages = def.usages(&ctx.sema).all(); + let usages = def.usages(&ctx.sema).all().map_out_of_macros(&ctx.sema); for (file_id, refs) in usages { edit.edit_file(file_id.file_id(ctx.db())); for r in refs { diff --git a/crates/ide-assists/src/handlers/destructure_tuple_binding.rs b/crates/ide-assists/src/handlers/destructure_tuple_binding.rs index f09389f8302f..69306ef045b4 100644 --- a/crates/ide-assists/src/handlers/destructure_tuple_binding.rs +++ b/crates/ide-assists/src/handlers/destructure_tuple_binding.rs @@ -128,6 +128,7 @@ fn collect_data(ident_pat: IdentPat, ctx: &AssistContext<'_>) -> Option Option { let db = ctx.sema.db; - let module = ctx.sema.file_to_module_def(ctx.vfs_file_id())?; + let module = ctx.sema.file_to_module_def(ctx.file_id())?; let edition = module.krate().edition(ctx.db()); let (name, range, ty) = match f { diff --git a/crates/ide-assists/src/handlers/inline_call.rs b/crates/ide-assists/src/handlers/inline_call.rs index b7b8bc604a51..5b73f64a6325 100644 --- a/crates/ide-assists/src/handlers/inline_call.rs +++ b/crates/ide-assists/src/handlers/inline_call.rs @@ -103,14 +103,14 @@ pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) -> "Inline into all callers", name.syntax().text_range(), |builder| { - let mut usages = usages.all(); + let mut usages = usages.all().map_out_of_macros(&ctx.sema); let current_file_usage = usages.references.remove(&def_file); let mut remove_def = true; let mut inline_refs_for_file = |file_id: EditionedFileId, refs: Vec| { + let call_krate = ctx.sema.file_to_module_def(file_id).map(|it| it.krate()); let file_id = file_id.file_id(ctx.db()); builder.edit_file(file_id); - let call_krate = ctx.sema.file_to_module_def(file_id).map(|it| it.krate()); let count = refs.len(); // The collects are required as we are otherwise iterating while mutating 🙅‍♀️🙅‍♂️ let (name_refs, name_refs_use) = split_refs_and_uses(builder, refs, Some); @@ -140,7 +140,7 @@ pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) -> remove_def = false; } }; - for (file_id, refs) in usages.into_iter() { + for (file_id, refs) in usages { inline_refs_for_file(file_id, refs); } match current_file_usage { @@ -196,7 +196,7 @@ pub(crate) fn inline_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< let name_ref: ast::NameRef = ctx.find_node_at_offset()?; let call_info = CallInfo::from_name_ref( name_ref.clone(), - ctx.sema.file_to_module_def(ctx.vfs_file_id())?.krate().into(), + ctx.sema.file_to_module_def(ctx.file_id())?.krate().into(), )?; let (function, label) = match &call_info.node { ast::CallableExpr::Call(call) => { @@ -337,6 +337,7 @@ fn inline( Definition::Local(local) .usages(sema) .all() + .map_out_of_macros(sema) .references .remove(&function_def_file_id) .unwrap_or_default() diff --git a/crates/ide-assists/src/handlers/inline_local_variable.rs b/crates/ide-assists/src/handlers/inline_local_variable.rs index 5d4bdc6ec76c..3f4fd6ec3389 100644 --- a/crates/ide-assists/src/handlers/inline_local_variable.rs +++ b/crates/ide-assists/src/handlers/inline_local_variable.rs @@ -2,7 +2,7 @@ use hir::{PathResolution, Semantics}; use ide_db::{ EditionedFileId, RootDatabase, defs::Definition, - search::{FileReference, FileReferenceNode, UsageSearchResult}, + search::{FileReference, FileReferenceNode, RealFileUsageSearchResult}, }; use syntax::{ SyntaxElement, TextRange, @@ -142,7 +142,8 @@ fn inline_let( } let local = 
sema.to_def(&bind_pat)?; - let UsageSearchResult { mut references } = Definition::Local(local).usages(sema).all(); + let RealFileUsageSearchResult { mut references } = + Definition::Local(local).usages(sema).all().map_out_of_macros(sema); match references.remove(&file_id) { Some(references) => Some(InlineData { let_stmt, @@ -189,7 +190,8 @@ fn inline_usage( let let_stmt = ast::LetStmt::cast(bind_pat.syntax().parent()?)?; - let UsageSearchResult { mut references } = Definition::Local(local).usages(sema).all(); + let RealFileUsageSearchResult { mut references } = + Definition::Local(local).usages(sema).all().map_out_of_macros(sema); let mut references = references.remove(&file_id)?; let delete_let = references.len() == 1; references.retain(|fref| fref.name.as_name_ref() == Some(&name)); diff --git a/crates/ide-assists/src/handlers/inline_macro.rs b/crates/ide-assists/src/handlers/inline_macro.rs index b09bef36ae15..dbf849ad4b4f 100644 --- a/crates/ide-assists/src/handlers/inline_macro.rs +++ b/crates/ide-assists/src/handlers/inline_macro.rs @@ -38,7 +38,7 @@ use crate::{AssistContext, AssistId, Assists}; pub(crate) fn inline_macro(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let unexpanded = ctx.find_node_at_offset::()?; let macro_call = ctx.sema.to_def(&unexpanded)?; - let target_crate_id = ctx.sema.file_to_module_def(ctx.vfs_file_id())?.krate().into(); + let target_crate_id = ctx.sema.file_to_module_def(ctx.file_id())?.krate().into(); let text_range = unexpanded.syntax().text_range(); acc.add( diff --git a/crates/ide-assists/src/handlers/inline_type_alias.rs b/crates/ide-assists/src/handlers/inline_type_alias.rs index 4511072b041b..fb590e935d5b 100644 --- a/crates/ide-assists/src/handlers/inline_type_alias.rs +++ b/crates/ide-assists/src/handlers/inline_type_alias.rs @@ -93,7 +93,7 @@ pub(crate) fn inline_type_alias_uses(acc: &mut Assists, ctx: &AssistContext<'_>) } }; - for (file_id, refs) in usages.into_iter() { + for (file_id, refs) in usages.map_out_of_macros(&ctx.sema) { inline_refs_for_file(file_id.file_id(ctx.db()), refs); } if !definition_deleted { diff --git a/crates/ide-assists/src/handlers/move_const_to_impl.rs b/crates/ide-assists/src/handlers/move_const_to_impl.rs index 0c1dc9eb9349..bad5f02680fe 100644 --- a/crates/ide-assists/src/handlers/move_const_to_impl.rs +++ b/crates/ide-assists/src/handlers/move_const_to_impl.rs @@ -93,7 +93,8 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> file_id: ctx.file_id(), range: parent_fn.syntax().text_range(), })) - .all(); + .all() + .map_out_of_macros(&ctx.sema); let range_to_delete = match const_.syntax().next_sibling_or_token() { Some(s) if matches!(s.kind(), SyntaxKind::WHITESPACE) => { diff --git a/crates/ide-assists/src/handlers/move_from_mod_rs.rs b/crates/ide-assists/src/handlers/move_from_mod_rs.rs index a36d3136a16d..8e60023190b5 100644 --- a/crates/ide-assists/src/handlers/move_from_mod_rs.rs +++ b/crates/ide-assists/src/handlers/move_from_mod_rs.rs @@ -22,7 +22,7 @@ use crate::{ // ``` pub(crate) fn move_from_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let source_file = ctx.find_node_at_offset::()?; - let module = ctx.sema.file_to_module_def(ctx.vfs_file_id())?; + let module = ctx.sema.file_to_module_def(ctx.file_id())?; // Enable this assist if the user select all "meaningful" content in the source file let trimmed_selected_range = trimmed_text_range(&source_file, ctx.selection_trimmed()); let trimmed_file_range = trimmed_text_range(&source_file, 
source_file.syntax().text_range()); diff --git a/crates/ide-assists/src/handlers/move_to_mod_rs.rs b/crates/ide-assists/src/handlers/move_to_mod_rs.rs index 5e95b264fc8e..0824b4e8282d 100644 --- a/crates/ide-assists/src/handlers/move_to_mod_rs.rs +++ b/crates/ide-assists/src/handlers/move_to_mod_rs.rs @@ -22,7 +22,7 @@ use crate::{ // ``` pub(crate) fn move_to_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let source_file = ctx.find_node_at_offset::()?; - let module = ctx.sema.file_to_module_def(ctx.vfs_file_id())?; + let module = ctx.sema.file_to_module_def(ctx.file_id())?; // Enable this assist if the user select all "meaningful" content in the source file let trimmed_selected_range = trimmed_text_range(&source_file, ctx.selection_trimmed()); let trimmed_file_range = trimmed_text_range(&source_file, source_file.syntax().text_range()); diff --git a/crates/ide-assists/src/handlers/promote_local_to_const.rs b/crates/ide-assists/src/handlers/promote_local_to_const.rs index 603be4d66733..caa562b2f722 100644 --- a/crates/ide-assists/src/handlers/promote_local_to_const.rs +++ b/crates/ide-assists/src/handlers/promote_local_to_const.rs @@ -71,7 +71,8 @@ pub(crate) fn promote_local_to_const(acc: &mut Assists, ctx: &AssistContext<'_>) let make = SyntaxFactory::with_mappings(); let mut editor = edit.make_editor(let_stmt.syntax()); let name = to_upper_snake_case(&name.to_string()); - let usages = Definition::Local(local).usages(&ctx.sema).all(); + let usages = + Definition::Local(local).usages(&ctx.sema).all().map_out_of_macros(&ctx.sema); if let Some(usages) = usages.references.get(&ctx.file_id()) { let name_ref = make.name_ref(&name); diff --git a/crates/ide-assists/src/handlers/remove_unused_param.rs b/crates/ide-assists/src/handlers/remove_unused_param.rs index 8b824c7c7f49..cec917492d85 100644 --- a/crates/ide-assists/src/handlers/remove_unused_param.rs +++ b/crates/ide-assists/src/handlers/remove_unused_param.rs @@ -85,7 +85,8 @@ pub(crate) fn remove_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) -> for element in elements { editor.delete(element); } - for (file_id, references) in fn_def.usages(&ctx.sema).all() { + for (file_id, references) in fn_def.usages(&ctx.sema).all().map_out_of_macros(&ctx.sema) + { process_usages(ctx, builder, file_id, references, param_position, is_self_present); } builder.add_file_edits(ctx.vfs_file_id(), editor); diff --git a/crates/ide-assists/src/handlers/unnecessary_async.rs b/crates/ide-assists/src/handlers/unnecessary_async.rs index ac10a829bbf1..29b77ef70df2 100644 --- a/crates/ide-assists/src/handlers/unnecessary_async.rs +++ b/crates/ide-assists/src/handlers/unnecessary_async.rs @@ -94,9 +94,9 @@ fn find_all_references( ctx: &AssistContext<'_>, def: &Definition, ) -> impl Iterator { - def.usages(&ctx.sema).all().into_iter().flat_map(|(file_id, references)| { - references.into_iter().map(move |reference| (file_id, reference)) - }) + def.usages(&ctx.sema).all().map_out_of_macros(&ctx.sema).into_iter().flat_map( + |(file_id, references)| references.into_iter().map(move |reference| (file_id, reference)), + ) } /// Finds the await expression for the given `NameRef`. diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs index 008b6fdbe2c6..9bbd12aaa9f8 100644 --- a/crates/ide-db/src/apply_change.rs +++ b/crates/ide-db/src/apply_change.rs @@ -1,12 +1,9 @@ //! Applies changes to the IDE state transactionally. 
-use base_db::SourceRootId; use profile::Bytes; -use rustc_hash::FxHashSet; use salsa::{Database as _, Durability}; -use triomphe::Arc; -use crate::{ChangeWithProcMacros, RootDatabase, symbol_index::SymbolsDatabase}; +use crate::{ChangeWithProcMacros, RootDatabase}; impl RootDatabase { pub fn request_cancellation(&mut self) { @@ -18,20 +15,6 @@ impl RootDatabase { let _p = tracing::info_span!("RootDatabase::apply_change").entered(); self.request_cancellation(); tracing::trace!("apply_change {:?}", change); - if let Some(roots) = &change.source_change.roots { - let mut local_roots = FxHashSet::default(); - let mut library_roots = FxHashSet::default(); - for (idx, root) in roots.iter().enumerate() { - let root_id = SourceRootId(idx as u32); - if root.is_library { - library_roots.insert(root_id); - } else { - local_roots.insert(root_id); - } - } - self.set_local_roots_with_durability(Arc::new(local_roots), Durability::MEDIUM); - self.set_library_roots_with_durability(Arc::new(library_roots), Durability::MEDIUM); - } change.apply(self); } diff --git a/crates/ide-db/src/helpers.rs b/crates/ide-db/src/helpers.rs index 340429037e67..8d27578f3106 100644 --- a/crates/ide-db/src/helpers.rs +++ b/crates/ide-db/src/helpers.rs @@ -3,8 +3,8 @@ use std::collections::VecDeque; use base_db::SourceDatabase; -use hir::{Crate, ItemInNs, ModuleDef, Name, Semantics}; -use span::{Edition, FileId}; +use hir::{Crate, HirFileId, ItemInNs, ModuleDef, Name, Semantics}; +use span::Edition; use syntax::{ AstToken, SyntaxKind, SyntaxToken, ToSmolStr, TokenAtOffset, ast::{self, make}, @@ -60,11 +60,11 @@ pub fn mod_path_to_ast(path: &hir::ModPath, edition: Edition) -> ast::Path { /// Iterates all `ModuleDef`s and `Impl` blocks of the given file. pub fn visit_file_defs( sema: &Semantics<'_, RootDatabase>, - file_id: FileId, + file_id: HirFileId, cb: &mut dyn FnMut(Definition), ) { let db = sema.db; - let module = match sema.file_to_module_def(file_id) { + let module = match sema.hir_file_to_module_def(file_id) { Some(it) => it, None => return, }; diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs index c94be7e164e2..595b726c84b1 100644 --- a/crates/ide-db/src/lib.rs +++ b/crates/ide-db/src/lib.rs @@ -12,6 +12,7 @@ pub mod famous_defs; pub mod helpers; pub mod items_locator; pub mod label; +pub mod navigation_target; pub mod path_transform; pub mod prime_caches; pub mod rename; @@ -60,7 +61,7 @@ use hir::{ }; use triomphe::Arc; -use crate::{line_index::LineIndex, symbol_index::SymbolsDatabase}; +use crate::line_index::LineIndex; pub use rustc_hash::{FxHashMap, FxHashSet, FxHasher}; pub use ::line_index; diff --git a/crates/ide/src/navigation_target.rs b/crates/ide-db/src/navigation_target.rs similarity index 50% rename from crates/ide/src/navigation_target.rs rename to crates/ide-db/src/navigation_target.rs index 7dc18141bdbc..5e69d673eea1 100644 --- a/crates/ide/src/navigation_target.rs +++ b/crates/ide-db/src/navigation_target.rs @@ -1,33 +1,37 @@ //! See [`NavigationTarget`]. 
-use std::fmt; +use std::{fmt, iter}; use arrayvec::ArrayVec; use either::Either; use hir::{ AssocItem, FieldSource, HasContainer, HasCrate, HasSource, HirDisplay, HirFileId, InFile, - LocalSource, ModuleSource, db::ExpandDatabase, symbols::FileSymbol, -}; -use ide_db::{ - FileId, FileRange, RootDatabase, SymbolKind, - defs::Definition, - documentation::{Documentation, HasDocs}, + InFileWrapper, LocalSource, ModuleSource, db::ExpandDatabase, symbols::FileSymbol, }; use span::Edition; -use stdx::never; +use stdx::{TupleExt, never}; use syntax::{ - AstNode, SmolStr, SyntaxNode, TextRange, ToSmolStr, + AstNode, SmolStr, TextRange, ToSmolStr, ast::{self, HasName}, format_smolstr, }; +use crate::{ + FileId, FileRange, RootDatabase, SymbolKind, + defs::Definition, + documentation::{Documentation, HasDocs}, +}; + +pub type RealNavigationTarget = NavigationTarget; +pub type HirNavigationTarget = NavigationTarget; + /// `NavigationTarget` represents an element in the editor's UI which you can /// click on to navigate to a particular piece of code. /// /// Typically, a `NavigationTarget` corresponds to some element in the source /// code, like a function or a struct, but this is not strictly required. #[derive(Clone, PartialEq, Eq, Hash)] -pub struct NavigationTarget { +pub struct NavigationTarget { pub file_id: FileId, /// Range which encompasses the whole element. /// @@ -58,7 +62,7 @@ pub struct NavigationTarget { pub alias: Option, } -impl fmt::Debug for NavigationTarget { +impl fmt::Debug for NavigationTarget { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let mut f = f.debug_struct("NavigationTarget"); macro_rules! opt { @@ -76,60 +80,94 @@ impl fmt::Debug for NavigationTarget { } } -pub(crate) trait ToNav { - fn to_nav(&self, db: &RootDatabase) -> UpmappingResult; +pub trait ToNav { + fn to_nav_hir(&self, db: &RootDatabase) -> HirNavigationTarget; } pub trait TryToNav { - fn try_to_nav(&self, db: &RootDatabase) -> Option>; + fn try_to_nav_hir(&self, db: &RootDatabase) -> Option; + fn try_to_nav(&self, db: &RootDatabase) -> Option> { + self.try_to_nav_hir(db).map(|it| HirNavigationTarget::upmap(it, db)) + } } impl TryToNav for Either { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav_hir(&self, db: &RootDatabase) -> Option { match self { - Either::Left(it) => it.try_to_nav(db), - Either::Right(it) => it.try_to_nav(db), + Either::Left(it) => it.try_to_nav_hir(db), + Either::Right(it) => it.try_to_nav_hir(db), } } } -impl NavigationTarget { - pub fn focus_or_full_range(&self) -> TextRange { - self.focus_range.unwrap_or(self.full_range) +impl RealNavigationTarget { + pub fn focus_or_full_file_range(&self) -> FileRange { + FileRange { file_id: self.file_id, range: self.focus_or_full_range() } + } +} + +impl HirNavigationTarget { + pub fn focus_or_full_file_range(&self, db: &RootDatabase) -> FileRange { + InFile { file_id: self.file_id, value: self.focus_or_full_range() } + .original_node_file_range(db) + .0 + .into_file_id(db) } - pub(crate) fn from_module_to_decl( + /// Upmaps this nav target to the `target` [`HirFileId`] if possible. + /// + /// If successful, the result entries are guaranteed to have the `target` file id. 
+ pub fn upmap_to( + self, db: &RootDatabase, - module: hir::Module, - ) -> UpmappingResult { - let edition = module.krate().edition(db); - let name = - module.name(db).map(|it| it.display_no_db(edition).to_smolstr()).unwrap_or_default(); - match module.declaration_source(db) { - Some(InFile { value, file_id }) => { - orig_range_with_focus(db, file_id, value.syntax(), value.name()).map( - |(FileRange { file_id, range: full_range }, focus_range)| { - let mut res = NavigationTarget::from_syntax( - file_id, - name.clone(), - focus_range, - full_range, - SymbolKind::Module, - ); - res.docs = module.docs(db); - res.description = Some( - module.display(db, module.krate().to_display_target(db)).to_string(), - ); - res - }, - ) - } - _ => module.to_nav(db), + target: HirFileId, + ) -> Option> { + if self.file_id == target { + return Some(vec![self]); } + let ranges = + orig_ranges_with_focus_in(db, self.file_id, self.full_range, self.focus_range, target)?; + Some( + ranges + .into_iter() + .map(|(range, focus)| { + NavigationTarget { + file_id: target, + full_range: range, + focus_range: focus, + name: self.name.clone(), + kind: self.kind, + container_name: self.container_name.clone(), + description: self.description.clone(), + // FIXME: possibly expensive clone! + docs: self.docs.clone(), + alias: self.alias.clone(), + } + }) + .collect(), + ) } - #[cfg(test)] - pub(crate) fn debug_render(&self) -> String { + pub fn upmap(self, db: &RootDatabase) -> UpmappingResult { + orig_range_with_focus(db, self.file_id, self.full_range, self.focus_range).map( + |(range, focus)| NavigationTarget { + file_id: range.file_id, + full_range: range.range, + focus_range: focus, + name: self.name.clone(), + kind: self.kind, + container_name: self.container_name.clone(), + description: self.description.clone(), + // FIXME: possibly expensive clone! 
+ docs: self.docs.clone(), + alias: self.alias.clone(), + }, + ) + } +} + +impl NavigationTarget { + pub fn debug_render(&self) -> String { let mut buf = format!( "{} {:?} {:?} {:?}", self.name, @@ -145,29 +183,60 @@ impl NavigationTarget { } buf } +} + +impl HirNavigationTarget { + pub fn from_module_to_decl(db: &RootDatabase, module: hir::Module) -> HirNavigationTarget { + let edition = module.krate().edition(db); + let name = + module.name(db).map(|it| it.display(db, edition).to_smolstr()).unwrap_or_default(); + match module.declaration_source(db) { + Some(InFile { value, file_id }) => { + let display_target = module.krate().to_display_target(db); + let mut res = NavigationTarget::from_syntax( + file_id, + name.clone(), + value.name().map(|it| it.syntax().text_range()), + value.syntax().text_range(), + SymbolKind::Module, + ); + res.docs = module.docs(db); + res.description = Some(module.display(db, display_target).to_string()); + res + } + _ => module.to_nav_hir(db), + } + } +} + +impl NavigationTarget { + pub fn focus_or_full_range(&self) -> TextRange { + self.focus_range.unwrap_or(self.full_range) + } /// Allows `NavigationTarget` to be created from a `NameOwner` - pub(crate) fn from_named( - db: &RootDatabase, - InFile { file_id, value }: InFile<&dyn ast::HasName>, + pub fn from_named( + InFileWrapper { file_id, value }: InFileWrapper, kind: SymbolKind, - ) -> UpmappingResult { + ) -> Self { let name: SmolStr = value.name().map(|it| it.text().into()).unwrap_or_else(|| "_".into()); - orig_range_with_focus(db, file_id, value.syntax(), value.name()).map( - |(FileRange { file_id, range: full_range }, focus_range)| { - NavigationTarget::from_syntax(file_id, name.clone(), focus_range, full_range, kind) - }, + NavigationTarget::from_syntax( + file_id, + name.clone(), + value.name().map(|it| it.syntax().text_range()), + value.syntax().text_range(), + kind, ) } - pub(crate) fn from_syntax( + pub fn from_syntax( file_id: FileId, name: SmolStr, focus_range: Option, full_range: TextRange, kind: SymbolKind, - ) -> NavigationTarget { + ) -> Self { NavigationTarget { file_id, name, @@ -183,88 +252,61 @@ impl NavigationTarget { } impl TryToNav for FileSymbol { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav_hir(&self, db: &RootDatabase) -> Option { let edition = self.def.module(db).map(|it| it.krate().edition(db)).unwrap_or(Edition::CURRENT); let display_target = self.def.krate(db).to_display_target(db); - Some( - orig_range_with_focus_r( - db, - self.loc.hir_file_id, - self.loc.ptr.text_range(), - Some(self.loc.name_ptr.text_range()), - ) - .map(|(FileRange { file_id, range: full_range }, focus_range)| { - NavigationTarget { - file_id, - name: self.is_alias.then(|| self.def.name(db)).flatten().map_or_else( - || self.name.as_str().into(), - |it| it.display_no_db(edition).to_smolstr(), - ), - alias: self.is_alias.then(|| self.name.as_str().into()), - kind: Some(self.def.into()), - full_range, - focus_range, - container_name: self.container_name.clone(), - description: match self.def { - hir::ModuleDef::Module(it) => { - Some(it.display(db, display_target).to_string()) - } - hir::ModuleDef::Function(it) => { - Some(it.display(db, display_target).to_string()) - } - hir::ModuleDef::Adt(it) => Some(it.display(db, display_target).to_string()), - hir::ModuleDef::Variant(it) => { - Some(it.display(db, display_target).to_string()) - } - hir::ModuleDef::Const(it) => { - Some(it.display(db, display_target).to_string()) - } - hir::ModuleDef::Static(it) => { - Some(it.display(db, 
display_target).to_string()) - } - hir::ModuleDef::Trait(it) => { - Some(it.display(db, display_target).to_string()) - } - hir::ModuleDef::TraitAlias(it) => { - Some(it.display(db, display_target).to_string()) - } - hir::ModuleDef::TypeAlias(it) => { - Some(it.display(db, display_target).to_string()) - } - hir::ModuleDef::Macro(it) => { - Some(it.display(db, display_target).to_string()) - } - hir::ModuleDef::BuiltinType(_) => None, - }, - docs: None, - } - }), - ) + + Some(NavigationTarget { + file_id: self.loc.hir_file_id, + name: self.is_alias.then(|| self.def.name(db)).flatten().map_or_else( + || self.name.as_str().into(), + |it| it.display_no_db(edition).to_smolstr(), + ), + alias: self.is_alias.then(|| self.name.as_str().into()), + kind: Some(self.def.into()), + full_range: self.loc.ptr.text_range(), + focus_range: Some(self.loc.name_ptr.text_range()), + container_name: self.container_name.clone(), + description: match self.def { + hir::ModuleDef::Module(it) => Some(it.display(db, display_target).to_string()), + hir::ModuleDef::Function(it) => Some(it.display(db, display_target).to_string()), + hir::ModuleDef::Adt(it) => Some(it.display(db, display_target).to_string()), + hir::ModuleDef::Variant(it) => Some(it.display(db, display_target).to_string()), + hir::ModuleDef::Const(it) => Some(it.display(db, display_target).to_string()), + hir::ModuleDef::Static(it) => Some(it.display(db, display_target).to_string()), + hir::ModuleDef::Trait(it) => Some(it.display(db, display_target).to_string()), + hir::ModuleDef::TraitAlias(it) => Some(it.display(db, display_target).to_string()), + hir::ModuleDef::TypeAlias(it) => Some(it.display(db, display_target).to_string()), + hir::ModuleDef::Macro(it) => Some(it.display(db, display_target).to_string()), + hir::ModuleDef::BuiltinType(_) => None, + }, + docs: None, + }) } } impl TryToNav for Definition { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav_hir(&self, db: &RootDatabase) -> Option { match self { - Definition::Local(it) => Some(it.to_nav(db)), - Definition::Label(it) => it.try_to_nav(db), - Definition::Module(it) => Some(it.to_nav(db)), - Definition::Crate(it) => Some(it.to_nav(db)), - Definition::Macro(it) => it.try_to_nav(db), - Definition::Field(it) => it.try_to_nav(db), - Definition::SelfType(it) => it.try_to_nav(db), - Definition::GenericParam(it) => it.try_to_nav(db), - Definition::Function(it) => it.try_to_nav(db), - Definition::Adt(it) => it.try_to_nav(db), - Definition::Variant(it) => it.try_to_nav(db), - Definition::Const(it) => it.try_to_nav(db), - Definition::Static(it) => it.try_to_nav(db), - Definition::Trait(it) => it.try_to_nav(db), - Definition::TraitAlias(it) => it.try_to_nav(db), - Definition::TypeAlias(it) => it.try_to_nav(db), - Definition::ExternCrateDecl(it) => it.try_to_nav(db), - Definition::InlineAsmOperand(it) => it.try_to_nav(db), + Definition::Local(it) => Some(it.to_nav_hir(db)), + Definition::Label(it) => it.try_to_nav_hir(db), + Definition::Module(it) => Some(it.to_nav_hir(db)), + Definition::Crate(it) => Some(it.to_nav_hir(db)), + Definition::Macro(it) => it.try_to_nav_hir(db), + Definition::Field(it) => it.try_to_nav_hir(db), + Definition::SelfType(it) => it.try_to_nav_hir(db), + Definition::GenericParam(it) => it.try_to_nav_hir(db), + Definition::Function(it) => it.try_to_nav_hir(db), + Definition::Adt(it) => it.try_to_nav_hir(db), + Definition::Variant(it) => it.try_to_nav_hir(db), + Definition::Const(it) => it.try_to_nav_hir(db), + Definition::Static(it) => it.try_to_nav_hir(db), + 
Definition::Trait(it) => it.try_to_nav_hir(db), + Definition::TraitAlias(it) => it.try_to_nav_hir(db), + Definition::TypeAlias(it) => it.try_to_nav_hir(db), + Definition::ExternCrateDecl(it) => it.try_to_nav_hir(db), + Definition::InlineAsmOperand(it) => it.try_to_nav_hir(db), Definition::BuiltinLifetime(_) | Definition::BuiltinType(_) | Definition::TupleField(_) @@ -272,24 +314,24 @@ impl TryToNav for Definition { | Definition::InlineAsmRegOrRegClass(_) | Definition::BuiltinAttr(_) => None, // FIXME: The focus range should be set to the helper declaration - Definition::DeriveHelper(it) => it.derive().try_to_nav(db), + Definition::DeriveHelper(it) => it.derive().try_to_nav_hir(db), } } } impl TryToNav for hir::ModuleDef { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav_hir(&self, db: &RootDatabase) -> Option { match self { - hir::ModuleDef::Module(it) => Some(it.to_nav(db)), - hir::ModuleDef::Function(it) => it.try_to_nav(db), - hir::ModuleDef::Adt(it) => it.try_to_nav(db), - hir::ModuleDef::Variant(it) => it.try_to_nav(db), - hir::ModuleDef::Const(it) => it.try_to_nav(db), - hir::ModuleDef::Static(it) => it.try_to_nav(db), - hir::ModuleDef::Trait(it) => it.try_to_nav(db), - hir::ModuleDef::TraitAlias(it) => it.try_to_nav(db), - hir::ModuleDef::TypeAlias(it) => it.try_to_nav(db), - hir::ModuleDef::Macro(it) => it.try_to_nav(db), + hir::ModuleDef::Module(it) => Some(it.to_nav_hir(db)), + hir::ModuleDef::Function(it) => it.try_to_nav_hir(db), + hir::ModuleDef::Adt(it) => it.try_to_nav_hir(db), + hir::ModuleDef::Variant(it) => it.try_to_nav_hir(db), + hir::ModuleDef::Const(it) => it.try_to_nav_hir(db), + hir::ModuleDef::Static(it) => it.try_to_nav_hir(db), + hir::ModuleDef::Trait(it) => it.try_to_nav_hir(db), + hir::ModuleDef::TraitAlias(it) => it.try_to_nav_hir(db), + hir::ModuleDef::TypeAlias(it) => it.try_to_nav_hir(db), + hir::ModuleDef::Macro(it) => it.try_to_nav_hir(db), hir::ModuleDef::BuiltinType(_) => None, } } @@ -378,27 +420,20 @@ where D: HasSource + ToNavFromAst + Copy + HasDocs + HirDisplay + HasCrate, D::Ast: ast::HasName, { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav_hir(&self, db: &RootDatabase) -> Option { let src = self.source(db)?; - Some( - NavigationTarget::from_named( - db, - src.as_ref().map(|it| it as &dyn ast::HasName), - D::KIND, - ) - .map(|mut res| { - res.docs = self.docs(db); - res.description = - Some(self.display(db, self.krate(db).to_display_target(db)).to_string()); - res.container_name = self.container_name(db); - res - }), - ) + let mut navigation_target = + NavigationTarget::from_named(src.as_ref().map(|it| it as &dyn ast::HasName), D::KIND); + navigation_target.docs = self.docs(db); + navigation_target.description = + Some(self.display(db, self.krate(db).to_display_target(db)).to_string()); + navigation_target.container_name = self.container_name(db); + Some(navigation_target) } } impl ToNav for hir::Module { - fn to_nav(&self, db: &RootDatabase) -> UpmappingResult { + fn to_nav_hir(&self, db: &RootDatabase) -> HirNavigationTarget { let InFile { file_id, value } = self.definition_source(db); let edition = self.krate(db).edition(db); @@ -410,28 +445,24 @@ impl ToNav for hir::Module { ModuleSource::BlockExpr(node) => (node.syntax(), None), }; - orig_range_with_focus(db, file_id, syntax, focus).map( - |(FileRange { file_id, range: full_range }, focus_range)| { - NavigationTarget::from_syntax( - file_id, - name.clone(), - focus_range, - full_range, - SymbolKind::Module, - ) - }, + 
NavigationTarget::from_syntax( + file_id, + name.clone(), + focus.map(|it| it.syntax().text_range()), + syntax.text_range(), + SymbolKind::Module, ) } } impl ToNav for hir::Crate { - fn to_nav(&self, db: &RootDatabase) -> UpmappingResult { - self.root_module().to_nav(db) + fn to_nav_hir(&self, db: &RootDatabase) -> HirNavigationTarget { + self.root_module().to_nav_hir(db) } } impl TryToNav for hir::Impl { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav_hir(&self, db: &RootDatabase) -> Option { let InFile { file_id, value } = self.source(db)?; let derive_path = self.as_builtin_derive_path(db); @@ -439,203 +470,171 @@ impl TryToNav for hir::Impl { Some(attr) => (attr.file_id.into(), None, attr.value.syntax()), None => (file_id, value.self_ty(), value.syntax()), }; - - Some(orig_range_with_focus(db, file_id, syntax, focus).map( - |(FileRange { file_id, range: full_range }, focus_range)| { - NavigationTarget::from_syntax( - file_id, - "impl".into(), - focus_range, - full_range, - SymbolKind::Impl, - ) - }, + Some(NavigationTarget::from_syntax( + file_id, + "impl".into(), + focus.map(|it| it.syntax().text_range()), + syntax.text_range(), + SymbolKind::Impl, )) } } impl TryToNav for hir::ExternCrateDecl { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav_hir(&self, db: &RootDatabase) -> Option { let src = self.source(db)?; let InFile { file_id, value } = src; let focus = value .rename() .map_or_else(|| value.name_ref().map(Either::Left), |it| it.name().map(Either::Right)); + let krate = self.module(db).krate(); let edition = krate.edition(db); + let mut nav = NavigationTarget::from_syntax( + file_id, + self.alias_or_name(db) + .unwrap_or_else(|| self.name(db)) + .display(db, edition) + .to_smolstr(), + focus.map(|it| it.syntax().text_range()), + value.syntax().text_range(), + SymbolKind::Module, + ); - Some(orig_range_with_focus(db, file_id, value.syntax(), focus).map( - |(FileRange { file_id, range: full_range }, focus_range)| { - let mut res = NavigationTarget::from_syntax( - file_id, - self.alias_or_name(db) - .unwrap_or_else(|| self.name(db)) - .display_no_db(edition) - .to_smolstr(), - focus_range, - full_range, - SymbolKind::Module, - ); - - res.docs = self.docs(db); - res.description = Some(self.display(db, krate.to_display_target(db)).to_string()); - res.container_name = container_name(db, *self, edition); - res - }, - )) + nav.docs = self.docs(db); + nav.description = Some(self.display(db, krate.to_display_target(db)).to_string()); + nav.container_name = container_name(db, *self, edition); + Some(nav) } } impl TryToNav for hir::Field { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav_hir(&self, db: &RootDatabase) -> Option { let src = self.source(db)?; let krate = self.parent_def(db).module(db).krate(); - let field_source = match &src.value { + Some(match &src.value { FieldSource::Named(it) => { - NavigationTarget::from_named(db, src.with_value(it), SymbolKind::Field).map( - |mut res| { - res.docs = self.docs(db); - res.description = - Some(self.display(db, krate.to_display_target(db)).to_string()); - res - }, - ) + let mut nav = NavigationTarget::from_named(src.with_value(it), SymbolKind::Field); + nav.docs = self.docs(db); + nav.description = Some(self.display(db, krate.to_display_target(db)).to_string()); + nav } - FieldSource::Pos(it) => orig_range(db, src.file_id, it.syntax()).map( - |(FileRange { file_id, range: full_range }, focus_range)| { - NavigationTarget::from_syntax( - file_id, - format_smolstr!("{}", 
self.index()), - focus_range, - full_range, - SymbolKind::Field, - ) - }, + FieldSource::Pos(it) => NavigationTarget::from_syntax( + src.file_id, + format_smolstr!("{}", self.index()), + None, + it.syntax().text_range(), + SymbolKind::Field, ), - }; - Some(field_source) + }) } } impl TryToNav for hir::Macro { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav_hir(&self, db: &RootDatabase) -> Option { let src = self.source(db)?; let name_owner: &dyn ast::HasName = match &src.value { Either::Left(it) => it, Either::Right(it) => it, }; - Some( - NavigationTarget::from_named( - db, - src.as_ref().with_value(name_owner), - self.kind(db).into(), - ) - .map(|mut res| { - res.docs = self.docs(db); - res - }), - ) + let mut nav = + NavigationTarget::from_named(src.as_ref().with_value(name_owner), self.kind(db).into()); + nav.docs = self.docs(db); + Some(nav) } } impl TryToNav for hir::Adt { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav_hir(&self, db: &RootDatabase) -> Option { match self { - hir::Adt::Struct(it) => it.try_to_nav(db), - hir::Adt::Union(it) => it.try_to_nav(db), - hir::Adt::Enum(it) => it.try_to_nav(db), + hir::Adt::Struct(it) => it.try_to_nav_hir(db), + hir::Adt::Union(it) => it.try_to_nav_hir(db), + hir::Adt::Enum(it) => it.try_to_nav_hir(db), } } } impl TryToNav for hir::AssocItem { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav_hir(&self, db: &RootDatabase) -> Option { match self { - AssocItem::Function(it) => it.try_to_nav(db), - AssocItem::Const(it) => it.try_to_nav(db), - AssocItem::TypeAlias(it) => it.try_to_nav(db), + AssocItem::Function(it) => it.try_to_nav_hir(db), + AssocItem::Const(it) => it.try_to_nav_hir(db), + AssocItem::TypeAlias(it) => it.try_to_nav_hir(db), } } } impl TryToNav for hir::GenericParam { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav_hir(&self, db: &RootDatabase) -> Option { match self { - hir::GenericParam::TypeParam(it) => it.try_to_nav(db), - hir::GenericParam::ConstParam(it) => it.try_to_nav(db), - hir::GenericParam::LifetimeParam(it) => it.try_to_nav(db), + hir::GenericParam::TypeParam(it) => it.try_to_nav_hir(db), + hir::GenericParam::ConstParam(it) => it.try_to_nav_hir(db), + hir::GenericParam::LifetimeParam(it) => it.try_to_nav_hir(db), } } } impl ToNav for LocalSource { - fn to_nav(&self, db: &RootDatabase) -> UpmappingResult { + fn to_nav_hir(&self, db: &RootDatabase) -> HirNavigationTarget { let InFile { file_id, value } = &self.source; let file_id = *file_id; let local = self.local; - let (node, name) = match &value { + let (node, name_n) = match &value { Either::Left(bind_pat) => (bind_pat.syntax(), bind_pat.name()), Either::Right(it) => (it.syntax(), it.name()), }; let edition = self.local.parent(db).module(db).krate().edition(db); - - orig_range_with_focus(db, file_id, node, name).map( - |(FileRange { file_id, range: full_range }, focus_range)| { - let name = local.name(db).display_no_db(edition).to_smolstr(); - let kind = if local.is_self(db) { - SymbolKind::SelfParam - } else if local.is_param(db) { - SymbolKind::ValueParam - } else { - SymbolKind::Local - }; - NavigationTarget { - file_id, - name, - alias: None, - kind: Some(kind), - full_range, - focus_range, - container_name: None, - description: None, - docs: None, - } - }, - ) + let name = local.name(db).display_no_db(edition).to_smolstr(); + let kind = if local.is_self(db) { + SymbolKind::SelfParam + } else if local.is_param(db) { + SymbolKind::ValueParam + } else { + SymbolKind::Local + 
}; + NavigationTarget { + file_id, + name, + alias: None, + kind: Some(kind), + full_range: node.text_range(), + focus_range: name_n.map(|it| it.syntax().text_range()), + container_name: None, + description: None, + docs: None, + } } } impl ToNav for hir::Local { - fn to_nav(&self, db: &RootDatabase) -> UpmappingResult { - self.primary_source(db).to_nav(db) + fn to_nav_hir(&self, db: &RootDatabase) -> HirNavigationTarget { + self.primary_source(db).to_nav_hir(db) } } impl TryToNav for hir::Label { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav_hir(&self, db: &RootDatabase) -> Option { let InFile { file_id, value } = self.source(db)?; // Labels can't be keywords, so no escaping needed. let name = self.name(db).display_no_db(Edition::Edition2015).to_smolstr(); - Some(orig_range_with_focus(db, file_id, value.syntax(), value.lifetime()).map( - |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget { - file_id, - name: name.clone(), - alias: None, - kind: Some(SymbolKind::Label), - full_range, - focus_range, - container_name: None, - description: None, - docs: None, - }, - )) + Some(NavigationTarget { + file_id, + name: name.clone(), + alias: None, + kind: Some(SymbolKind::Label), + full_range: value.syntax().text_range(), + focus_range: value.lifetime().map(|it| it.syntax().text_range()), + container_name: None, + description: None, + docs: None, + }) } } impl TryToNav for hir::TypeParam { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav_hir(&self, db: &RootDatabase) -> Option { let InFile { file_id, value } = self.merge().source(db)?; let edition = self.module(db).krate().edition(db); let name = self.name(db).display_no_db(edition).to_smolstr(); @@ -655,52 +654,48 @@ impl TryToNav for hir::TypeParam { }; let focus = value.as_ref().either(|it| it.name(), |it| it.name()); - Some(orig_range_with_focus(db, file_id, syntax, focus).map( - |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget { - file_id, - name: name.clone(), - alias: None, - kind: Some(SymbolKind::TypeParam), - full_range, - focus_range, - container_name: None, - description: None, - docs: None, - }, - )) + Some(NavigationTarget { + file_id, + name: name.clone(), + alias: None, + kind: Some(SymbolKind::TypeParam), + full_range: syntax.text_range(), + focus_range: focus.map(|it| it.syntax().text_range()), + container_name: None, + description: None, + docs: None, + }) } } impl TryToNav for hir::TypeOrConstParam { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { - self.split(db).try_to_nav(db) + fn try_to_nav_hir(&self, db: &RootDatabase) -> Option { + self.split(db).try_to_nav_hir(db) } } impl TryToNav for hir::LifetimeParam { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav_hir(&self, db: &RootDatabase) -> Option { let InFile { file_id, value } = self.source(db)?; // Lifetimes cannot be keywords, so not escaping needed. 
let name = self.name(db).display_no_db(Edition::Edition2015).to_smolstr(); - Some(orig_range(db, file_id, value.syntax()).map( - |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget { - file_id, - name: name.clone(), - alias: None, - kind: Some(SymbolKind::LifetimeParam), - full_range, - focus_range, - container_name: None, - description: None, - docs: None, - }, - )) + Some(NavigationTarget { + file_id, + name: name.clone(), + alias: None, + kind: Some(SymbolKind::LifetimeParam), + full_range: value.syntax().text_range(), + focus_range: value.lifetime().map(|it| it.syntax().text_range()), + container_name: None, + description: None, + docs: None, + }) } } impl TryToNav for hir::ConstParam { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav_hir(&self, db: &RootDatabase) -> Option { let InFile { file_id, value } = self.merge().source(db)?; let edition = self.module(db).krate().edition(db); let name = self.name(db).display_no_db(edition).to_smolstr(); @@ -713,44 +708,38 @@ impl TryToNav for hir::ConstParam { } }; - Some(orig_range_with_focus(db, file_id, value.syntax(), value.name()).map( - |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget { - file_id, - name: name.clone(), - alias: None, - kind: Some(SymbolKind::ConstParam), - full_range, - focus_range, - container_name: None, - description: None, - docs: None, - }, - )) + Some(NavigationTarget { + file_id, + name: name.clone(), + alias: None, + kind: Some(SymbolKind::ConstParam), + full_range: value.syntax().text_range(), + focus_range: value.name().map(|it| it.syntax().text_range()), + container_name: None, + description: None, + docs: None, + }) } } impl TryToNav for hir::InlineAsmOperand { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav_hir(&self, db: &RootDatabase) -> Option { let InFile { file_id, value } = &self.source(db)?; let file_id = *file_id; - Some(orig_range_with_focus(db, file_id, value.syntax(), value.name()).map( - |(FileRange { file_id, range: full_range }, focus_range)| { - let edition = self.parent(db).module(db).krate().edition(db); - NavigationTarget { - file_id, - name: self - .name(db) - .map_or_else(|| "_".into(), |it| it.display(db, edition).to_smolstr()), - alias: None, - kind: Some(SymbolKind::Local), - full_range, - focus_range, - container_name: None, - description: None, - docs: None, - } - }, - )) + let edition = self.parent(db).module(db).krate().edition(db); + Some(NavigationTarget { + file_id, + name: self + .name(db) + .map_or_else(|| "_".into(), |it| it.display(db, edition).to_smolstr()), + alias: None, + kind: Some(SymbolKind::Local), + full_range: value.syntax().text_range(), + focus_range: value.name().map(|it| it.syntax().text_range()), + container_name: None, + description: None, + docs: None, + }) } } @@ -792,35 +781,37 @@ impl UpmappingResult { } } -/// Returns the original range of the syntax node, and the range of the name mapped out of macro expansions -/// May return two results if the mapped node originates from a macro definition in which case the -/// second result is the creating macro call. 
fn orig_range_with_focus( - db: &RootDatabase, - hir_file: HirFileId, - value: &SyntaxNode, - name: Option, -) -> UpmappingResult<(FileRange, Option)> { - orig_range_with_focus_r( - db, - hir_file, - value.text_range(), - name.map(|it| it.syntax().text_range()), - ) -} - -pub(crate) fn orig_range_with_focus_r( db: &RootDatabase, hir_file: HirFileId, value: TextRange, focus_range: Option, ) -> UpmappingResult<(FileRange, Option)> { - let Some(name) = focus_range else { return orig_range_r(db, hir_file, value) }; + let macro_file = match hir_file { + HirFileId::FileId(editioned_file_id) => { + return UpmappingResult { + call_site: ( + FileRange { file_id: editioned_file_id.file_id(db), range: value }, + focus_range, + ), + def_site: None, + }; + } + HirFileId::MacroFile(macro_file) => macro_file, + }; + let call_site_fallback = || UpmappingResult { + call_site: ( + InFile::new(hir_file, value).original_node_file_range(db).0.into_file_id(db), + None, + ), + def_site: None, + }; + + let Some(name) = focus_range else { return call_site_fallback() }; - let call_kind = || db.lookup_intern_macro_call(hir_file.macro_file().unwrap()).kind; + let call_kind = || db.lookup_intern_macro_call(macro_file).kind; - let def_range = - || db.lookup_intern_macro_call(hir_file.macro_file().unwrap()).def.definition_range(db); + let def_range = || db.lookup_intern_macro_call(macro_file).def.definition_range(db); // FIXME: Also make use of the syntax context to determine which site we are at? let value_range = InFile::new(hir_file, value).original_node_file_range_opt(db); @@ -892,7 +883,7 @@ pub(crate) fn orig_range_with_focus_r( } } // lost name? can't happen for single tokens - None => return orig_range_r(db, hir_file, value), + None => return call_site_fallback(), }; UpmappingResult { @@ -924,55 +915,92 @@ pub(crate) fn orig_range_with_focus_r( } } -fn orig_range( - db: &RootDatabase, - hir_file: HirFileId, - value: &SyntaxNode, -) -> UpmappingResult<(FileRange, Option)> { - UpmappingResult { - call_site: ( - InFile::new(hir_file, value).original_file_range_rooted(db).into_file_id(db), - None, - ), - def_site: None, - } -} - -fn orig_range_r( +// FIXME: Return type should mark each item as being call site, defe site, etc +pub(crate) fn orig_ranges_with_focus_in( db: &RootDatabase, hir_file: HirFileId, value: TextRange, -) -> UpmappingResult<(FileRange, Option)> { - UpmappingResult { - call_site: ( - InFile::new(hir_file, value).original_node_file_range(db).0.into_file_id(db), - None, - ), - def_site: None, + focus_range: Option, + target: HirFileId, +) -> Option)>> { + let (mut current, target) = match (hir_file, target) { + (HirFileId::FileId(file_id), HirFileId::FileId(target_file_id)) + if file_id == target_file_id => + { + return Some(vec![(value, focus_range)]); + } + (HirFileId::FileId(_), HirFileId::FileId(_) | HirFileId::MacroFile(_)) => return None, + (HirFileId::MacroFile(_), HirFileId::FileId(target_file_id)) => { + let r = orig_range_with_focus(db, hir_file, value, focus_range); + if r.call_site.0.file_id != target_file_id.file_id(db) { + return None; + } + let mut ranges = vec![]; + ranges.push((r.call_site.0.range, r.call_site.1)); + + if let Some((def_range, def_focus)) = + r.def_site.filter(|it| it.0.file_id == target_file_id.file_id(db)) + { + ranges.push((def_range.range, def_focus)); + } + return Some(ranges); + } + (HirFileId::MacroFile(current), HirFileId::MacroFile(target)) => (current, target), + }; + let expansion_span_map = db.expansion_span_map(current); + let span = 
expansion_span_map.span_at(value.start()); + // FIXME: Use this + let _focus_span = + focus_range.map(|focus_range| expansion_span_map.span_at(focus_range.start())); + loop { + let parent = current.parent(db).macro_file()?; + if parent == target { + let arg_map = db.expansion_span_map(parent); + let arg_node = current.call_node(db); + let arg_range = arg_node.text_range(); + let res = arg_map + .ranges_with_span_exact(span) + .filter(|(range, _)| range.intersect(arg_range).is_some()) + .map(TupleExt::head) + .zip(iter::repeat(None)) + .collect::>(); + break res.is_empty().then_some(res); + } + current = parent; } } #[cfg(test)] mod tests { use expect_test::expect; + use test_fixture::WithFixture; - use crate::{Query, fixture}; + use crate::{ + RootDatabase, + navigation_target::TryToNav, + symbol_index::{self, Query}, + }; #[test] fn test_nav_for_symbol() { - let (analysis, _) = fixture::file( + let (db, _) = &RootDatabase::with_single_file( r#" enum FooInner { } fn foo() { enum FooInner { } } "#, ); - let navs = analysis.symbol_search(Query::new("FooInner".to_owned()), !0).unwrap(); + let navs = symbol_index::world_symbols(db, Query::new("FooInner".to_owned())) + .into_iter() + .filter_map(|s| s.try_to_nav_hir(db)) + .collect::>(); expect![[r#" [ NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..17, focus_range: 5..13, @@ -982,7 +1010,9 @@ fn foo() { enum FooInner { } } }, NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 29..46, focus_range: 34..42, @@ -998,20 +1028,23 @@ fn foo() { enum FooInner { } } #[test] fn test_world_symbols_are_case_sensitive() { - let (analysis, _) = fixture::file( + let (db, _) = &RootDatabase::with_single_file( r#" fn foo() {} struct Foo; "#, ); - let navs = analysis.symbol_search(Query::new("foo".to_owned()), !0).unwrap(); + let navs = symbol_index::world_symbols(db, Query::new("foo".to_owned())) + .into_iter() + .filter_map(|s| s.try_to_nav_hir(db)) + .collect::>(); assert_eq!(navs.len(), 2) } #[test] fn test_ensure_hidden_symbols_are_not_returned() { - let (analysis, _) = fixture::file( + let (db, _) = &RootDatabase::with_single_file( r#" fn foo() {} struct Foo; @@ -1020,14 +1053,24 @@ static __FOO_CALLSITE: () = (); ); // It doesn't show the hidden symbol - let navs = analysis.symbol_search(Query::new("foo".to_owned()), !0).unwrap(); + + let navs = symbol_index::world_symbols(db, Query::new("foo".to_owned())) + .into_iter() + .filter_map(|s| s.try_to_nav_hir(db)) + .collect::>(); assert_eq!(navs.len(), 2); - let navs = analysis.symbol_search(Query::new("_foo".to_owned()), !0).unwrap(); + + let navs = symbol_index::world_symbols(db, Query::new("_foo".to_owned())) + .into_iter() + .filter_map(|s| s.try_to_nav_hir(db)) + .collect::>(); assert_eq!(navs.len(), 0); // Unless we explicitly search for a `__` prefix - let query = Query::new("__foo".to_owned()); - let navs = analysis.symbol_search(query, !0).unwrap(); + let navs = symbol_index::world_symbols(db, Query::new("__foo".to_owned())) + .into_iter() + .filter_map(|s| s.try_to_nav_hir(db)) + .collect::>(); assert_eq!(navs.len(), 1); } } diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs index 4e737e27f050..1efbd6f8ba55 100644 --- a/crates/ide-db/src/rename.rs +++ b/crates/ide-db/src/rename.rs @@ -320,7 +320,7 @@ fn rename_mod( } let def = Definition::Module(module); - let usages = def.usages(sema).all(); + let usages = def.usages(sema).all().map_out_of_macros(sema); let ref_edits = 
usages.iter().map(|(file_id, references)| { let edition = file_id.edition(sema.db); ( @@ -380,7 +380,7 @@ fn rename_reference( } let def = convert_to_def_in_trait(sema.db, def); - let usages = def.usages(sema).all(); + let usages = def.usages(sema).all().map_out_of_macros(sema); if !usages.is_empty() && ident_kind == IdentifierKind::Underscore { cov_mark::hit!(rename_underscore_multiple); diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index 4efb83ba3235..a5bf07e6e7fa 100644 --- a/crates/ide-db/src/search.rs +++ b/crates/ide-db/src/search.rs @@ -9,15 +9,15 @@ use std::{cell::LazyCell, cmp::Reverse}; use base_db::{RootQueryDb, SourceDatabase}; use either::Either; +use hir::db::ExpandDatabase; use hir::{ - Adt, AsAssocItem, DefWithBody, EditionedFileId, FileRange, FileRangeWrapper, HasAttrs, - HasContainer, HasSource, InFile, InFileWrapper, InRealFile, InlineAsmOperand, ItemContainer, + Adt, AsAssocItem, DefWithBody, EditionedFileId, FileRange, HasAttrs, HasContainer, HasSource, + HirFileId, HirFileRange, InFile, InFileWrapper, InRealFile, InlineAsmOperand, ItemContainer, ModuleSource, PathResolution, Semantics, Visibility, sym, }; use memchr::memmem::Finder; use parser::SyntaxKind; use rustc_hash::{FxHashMap, FxHashSet}; -use salsa::Database; use syntax::{ AstNode, AstToken, SmolStr, SyntaxElement, SyntaxNode, TextRange, TextSize, ToSmolStr, ast::{self, HasName, Rename}, @@ -25,6 +25,7 @@ use syntax::{ }; use triomphe::Arc; +use crate::navigation_target::orig_ranges_with_focus_in; use crate::{ RootDatabase, defs::{Definition, NameClass, NameRefClass}, @@ -33,7 +34,7 @@ use crate::{ #[derive(Debug, Default, Clone)] pub struct UsageSearchResult { - pub references: FxHashMap>, + pub references: FxHashMap>, } impl UsageSearchResult { @@ -45,6 +46,129 @@ impl UsageSearchResult { self.references.len() } + pub fn iter(&self) -> impl Iterator + '_ { + self.references.iter().map(|(&file_id, refs)| (file_id, &**refs)) + } + + pub fn file_ranges(&self) -> impl Iterator + '_ { + self.references.iter().flat_map(|(&file_id, refs)| { + refs.iter().map(move |&FileReference { range, .. 
}| HirFileRange { file_id, range }) + }) + } + + pub fn map_out_of_macros( + self, + sema: &Semantics<'_, RootDatabase>, + ) -> RealFileUsageSearchResult { + let mut references = >>::default(); + self.references + .into_iter() + .flat_map(|(file_id, refs)| { + refs.into_iter().map(move |reference| { + if let FileReferenceNode::FormatStringEntry(ast, name_range) = reference.name { + // FIXME: Clean this up + let r = ast.syntax().text_range(); + let FileRange { file_id, range } = + InFile::new(file_id, r).original_node_file_range_rooted(sema.db); + let relative = name_range - r.start(); + let mut new_range = relative + range.start(); + if new_range.end() > range.end() { + new_range = TextRange::new(new_range.start(), range.end()); + } + return ( + file_id, + FileReference { + range: new_range, + name: FileReferenceNode::FormatStringEntry(ast, new_range), + category: reference.category, + }, + ); + } + let FileRange { file_id, range } = InFile::new(file_id, reference.range) + .original_node_file_range_rooted(sema.db); + ( + file_id, + FileReference { range, name: reference.name, category: reference.category }, + ) + }) + }) + .for_each(|(file_id, ref_)| references.entry(file_id).or_default().push(ref_)); + RealFileUsageSearchResult { references } + } + + pub fn map_out_of_macros_to( + mut self, + sema: &Semantics<'_, RootDatabase>, + target: HirFileId, + ) -> Vec { + let mut references = self.references.remove(&target).unwrap_or_default(); + for (file_id, refs) in self.references { + references.extend( + refs.into_iter() + .filter_map(move |reference| { + orig_ranges_with_focus_in( + sema.db, + file_id, + reference.node_range(), + None, + target, + ) + .map(|it| { + it.into_iter().map(move |(range, _focus)| match &reference.name { + FileReferenceNode::FormatStringEntry(ast, name_range) => { + let r = ast.syntax().text_range(); + let relative = name_range - r.start(); + let mut new_range = relative + range.start(); + if new_range.end() > range.end() { + new_range = TextRange::new(new_range.start(), range.end()); + } + FileReference { + range: new_range, + name: FileReferenceNode::FormatStringEntry( + ast.clone(), + new_range, + ), + category: reference.category, + } + } + name => FileReference { + range, + name: name.clone(), + category: reference.category, + }, + }) + }) + }) + .flatten(), + ); + } + references + } +} + +impl IntoIterator for UsageSearchResult { + type Item = (HirFileId, Vec); + type IntoIter = > as IntoIterator>::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.references.into_iter() + } +} + +#[derive(Debug, Default, Clone)] +pub struct RealFileUsageSearchResult { + pub references: FxHashMap>, +} + +impl RealFileUsageSearchResult { + pub fn is_empty(&self) -> bool { + self.references.is_empty() + } + + pub fn len(&self) -> usize { + self.references.len() + } + pub fn iter(&self) -> impl Iterator + '_ { self.references.iter().map(|(&file_id, refs)| (file_id, &**refs)) } @@ -56,7 +180,7 @@ impl UsageSearchResult { } } -impl IntoIterator for UsageSearchResult { +impl IntoIterator for RealFileUsageSearchResult { type Item = (EditionedFileId, Vec); type IntoIter = > as IntoIterator>::IntoIter; @@ -67,13 +191,19 @@ impl IntoIterator for UsageSearchResult { #[derive(Debug, Clone)] pub struct FileReference { - /// The range of the reference in the original file + /// The range of the reference in the (macro-)file pub range: TextRange, /// The node of the reference in the (macro-)file pub name: FileReferenceNode, pub category: ReferenceCategory, } +impl FileReference { + 
pub fn node_range(&self) -> TextRange { + self.name.syntax().text_range() + } +} + #[derive(Debug, Clone)] pub enum FileReferenceNode { Name(ast::Name), @@ -149,11 +279,11 @@ bitflags::bitflags! { /// e.g. for things like local variables. #[derive(Clone, Debug)] pub struct SearchScope { - entries: FxHashMap>, + entries: FxHashMap>, } impl SearchScope { - fn new(entries: FxHashMap>) -> SearchScope { + fn new(entries: FxHashMap>) -> SearchScope { SearchScope { entries } } @@ -169,7 +299,7 @@ impl SearchScope { entries.extend( source_root .iter() - .map(|id| (EditionedFileId::new(db, id, crate_data.edition), None)), + .map(|id| (EditionedFileId::new(db, id, crate_data.edition).into(), None)), ); } SearchScope { entries } @@ -186,7 +316,7 @@ impl SearchScope { entries.extend( source_root .iter() - .map(|id| (EditionedFileId::new(db, id, rev_dep.edition(db)), None)), + .map(|id| (EditionedFileId::new(db, id, rev_dep.edition(db)).into(), None)), ); } SearchScope { entries } @@ -201,7 +331,7 @@ impl SearchScope { SearchScope { entries: source_root .iter() - .map(|id| (EditionedFileId::new(db, id, of.edition(db)), None)) + .map(|id| (EditionedFileId::new(db, id, of.edition(db)).into(), None)) .collect(), } } @@ -219,12 +349,12 @@ impl SearchScope { (file_id.original_file(db), Some(value)) } }; - entries.entry(file_id).or_insert(range); + entries.entry(file_id.into()).or_insert(range); let mut to_visit: Vec<_> = module.children(db).collect(); while let Some(module) = to_visit.pop() { if let Some(file_id) = module.as_source_file_id(db) { - entries.insert(file_id, None); + entries.insert(file_id.into(), None); } to_visit.extend(module.children(db)); } @@ -238,17 +368,27 @@ impl SearchScope { /// Build a empty search scope spanning the given file. pub fn single_file(file: EditionedFileId) -> SearchScope { + SearchScope::new(std::iter::once((file.into(), None)).collect()) + } + + /// Build a empty search scope spanning the given file. + pub fn single_hir_file(file: HirFileId) -> SearchScope { SearchScope::new(std::iter::once((file, None)).collect()) } /// Build a empty search scope spanning the text range of the given file. pub fn file_range(range: FileRange) -> SearchScope { + SearchScope::new(std::iter::once((range.file_id.into(), Some(range.range))).collect()) + } + + /// Build a empty search scope spanning the text range of the given file. + pub fn hir_file_range(range: HirFileRange) -> SearchScope { SearchScope::new(std::iter::once((range.file_id, Some(range.range))).collect()) } /// Build a empty search scope spanning the given files. 
pub fn files(files: &[EditionedFileId]) -> SearchScope { - SearchScope::new(files.iter().map(|f| (*f, None)).collect()) + SearchScope::new(files.iter().map(|&f| (f.into(), None)).collect()) } pub fn intersection(&self, other: &SearchScope) -> SearchScope { @@ -278,8 +418,8 @@ impl SearchScope { } impl IntoIterator for SearchScope { - type Item = (EditionedFileId, Option); - type IntoIter = std::collections::hash_map::IntoIter>; + type Item = (HirFileId, Option); + type IntoIter = std::collections::hash_map::IntoIter>; fn into_iter(self) -> Self::IntoIter { self.entries.into_iter() @@ -483,13 +623,24 @@ impl<'a> FindUsages<'a> { fn scope_files<'b>( db: &'b RootDatabase, scope: &'b SearchScope, - ) -> impl Iterator, EditionedFileId, TextRange)> + 'b { - scope.entries.iter().map(|(&file_id, &search_range)| { - let text = db.file_text(file_id.file_id(db)).text(db); - let search_range = - search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text))); + ) -> impl Iterator, HirFileId, TextRange)> + 'b { + scope.entries.iter().map(|(&file_id, &search_range)| match file_id { + HirFileId::FileId(editioned_file_id) => { + let text = db.file_text(editioned_file_id.file_id(db)).text(db); + let search_range = + search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text))); - (text, file_id, search_range) + (text, file_id, search_range) + } + HirFileId::MacroFile(macro_file_id) => { + let text = Arc::from( + db.parse_macro_expansion(macro_file_id).value.0.syntax_node().to_string(), + ); + + let search_range = + search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text))); + (text, file_id, search_range) + } }) } @@ -524,10 +675,10 @@ impl<'a> FindUsages<'a> { fn find_nodes<'b>( sema: &'b Semantics<'_, RootDatabase>, name: &str, - file_id: EditionedFileId, node: &syntax::SyntaxNode, + file_id: HirFileId, offset: TextSize, - ) -> impl Iterator + 'b { + ) -> impl Iterator> + 'b { node.token_at_offset(offset) .find(|it| { // `name` is stripped of raw ident prefix. See the comment on name retrieval below. @@ -535,16 +686,27 @@ impl<'a> FindUsages<'a> { }) .into_iter() .flat_map(move |token| { - if sema.is_inside_macro_call(InFile::new(file_id.into(), &token)) { - sema.descend_into_macros_exact(token) + if sema.is_inside_macro_call(InFile::new(file_id, &token)) { + sema.descend_into_macros_exact_with_file(token) } else { - <_>::from([token]) + <_>::from([InFile::new(file_id, token)]) } .into_iter() - .filter_map(|it| it.parent()) + .filter_map(|it| it.map(|it| it.parent()).transpose()) }) } + fn find_name_refs<'b>( + sema: &'b Semantics<'_, RootDatabase>, + name: &str, + node: &syntax::SyntaxNode, + file_id: HirFileId, + offset: TextSize, + ) -> impl Iterator> + 'b { + Self::find_nodes(sema, name, node, file_id, offset) + .filter_map(|it| it.map(ast::NameRef::cast).transpose()) + } + /// Performs a special fast search for associated functions. This is mainly intended /// to speed up `new()` which can take a long time. /// @@ -562,7 +724,7 @@ impl<'a> FindUsages<'a> { // FIXME: Extend this to other cases, such as associated types/consts/enum variants (note those can be `use`d). 
fn short_associated_function_fast_search( &self, - sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool, + sink: &mut dyn FnMut(HirFileId, FileReference) -> bool, search_scope: &SearchScope, name: &str, ) -> bool { @@ -616,7 +778,7 @@ impl<'a> FindUsages<'a> { fn collect_possible_aliases( sema: &Semantics<'_, RootDatabase>, container: Adt, - ) -> Option<(FxHashSet, Vec>)> { + ) -> Option<(FxHashSet, Vec)> { fn insert_type_alias( db: &RootDatabase, to_process: &mut Vec<(SmolStr, SearchScope)>, @@ -652,18 +814,19 @@ impl<'a> FindUsages<'a> { for (file_text, file_id, search_range) in FindUsages::scope_files(db, ¤t_to_process_search_scope) { - let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone()); + let tree = LazyCell::new(move || sema.parse_or_expand(file_id)); for offset in FindUsages::match_indices(&file_text, &finder, search_range) { let usages = FindUsages::find_nodes( sema, ¤t_to_process, - file_id, &tree, + file_id, offset, ) .filter(|it| matches!(it.kind(), SyntaxKind::NAME | SyntaxKind::NAME_REF)); for usage in usages { + let InFileWrapper { file_id, value: usage } = usage; if let Some(alias) = usage.parent().and_then(|it| { let path = ast::PathSegment::cast(it)?.parent_path(); let use_tree = ast::UseTree::cast(path.syntax().parent()?)?; @@ -734,7 +897,10 @@ impl<'a> FindUsages<'a> { }; (|| { let impl_ = impl_?; - is_possibly_self.push(sema.original_range(impl_.syntax())); + is_possibly_self.push(HirFileRange { + file_id, + range: impl_.syntax().text_range(), + }); let assoc_items = impl_.assoc_item_list()?; let type_aliases = assoc_items .syntax() @@ -806,21 +972,22 @@ impl<'a> FindUsages<'a> { this: &FindUsages<'_>, finder: &Finder<'_>, name: &str, - files: impl Iterator, EditionedFileId, TextRange)>, + files: impl Iterator, HirFileId, TextRange)>, mut container_predicate: impl FnMut( &SyntaxNode, - InFileWrapper, + InFileWrapper, ) -> bool, - sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool, + sink: &mut dyn FnMut(HirFileId, FileReference) -> bool, ) { for (file_text, file_id, search_range) in files { - let tree = LazyCell::new(move || this.sema.parse(file_id).syntax().clone()); + let tree = LazyCell::new(move || this.sema.parse_or_expand(file_id)); for offset in FindUsages::match_indices(&file_text, finder, search_range) { - let usages = FindUsages::find_nodes(this.sema, name, file_id, &tree, offset) - .filter_map(ast::NameRef::cast); + let usages = + FindUsages::find_name_refs(this.sema, name, &tree, file_id, offset); for usage in usages { let found_usage = usage + .value .syntax() .parent() .and_then(ast::PathSegment::cast) @@ -832,7 +999,7 @@ impl<'a> FindUsages<'a> { }) .unwrap_or(false); if found_usage { - this.found_name_ref(&usage, sink); + this.found_name_ref(&usage.value, &mut |it| sink(usage.file_id, it)); } } } @@ -859,10 +1026,23 @@ impl<'a> FindUsages<'a> { name, is_possibly_self.into_iter().map(|position| { ( - self.sema - .db - .file_text(position.file_id.file_id(self.sema.db)) - .text(self.sema.db), + match position.file_id { + HirFileId::FileId(editioned_file_id) => self + .sema + .db + .file_text(editioned_file_id.file_id(self.sema.db)) + .text(self.sema.db), + + HirFileId::MacroFile(macro_file_id) => Arc::from( + self.sema + .db + .parse_macro_expansion(macro_file_id) + .value + .0 + .syntax_node() + .to_string(), + ), + }, position.file_id, position.range, ) @@ -896,7 +1076,7 @@ impl<'a> FindUsages<'a> { true } - pub fn search(&self, sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool) { + pub fn search(&self, 
sink: &mut dyn FnMut(HirFileId, FileReference) -> bool) { let _p = tracing::info_span!("FindUsages:search").entered(); let sema = self.sema; @@ -956,20 +1136,23 @@ impl<'a> FindUsages<'a> { let include_self_kw_refs = self.include_self_kw_refs.as_ref().map(|ty| (ty, Finder::new("Self"))); for (text, file_id, search_range) in Self::scope_files(sema.db, &search_scope) { - let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone()); + let tree = LazyCell::new(move || sema.parse_or_expand(file_id)); // Search for occurrences of the items name for offset in Self::match_indices(&text, finder, search_range) { let ret = tree.token_at_offset(offset).any(|token| { - if let Some((range, _frange, string_token, Some(nameres))) = - sema.check_for_format_args_template(token.clone(), offset) + let Some(str_token) = ast::String::cast(token.clone()) else { return false }; + if let Some((_range, frange, string_token, Some(nameres))) = sema + .check_for_format_args_template_with_file( + InFile::new(file_id, str_token), + offset, + ) { return self.found_format_args_ref( - file_id, - range, + frange.range, string_token, nameres, - sink, + &mut |it| sink(frange.file_id, it), ); } false @@ -978,13 +1161,19 @@ impl<'a> FindUsages<'a> { return; } - for name in Self::find_nodes(sema, name, file_id, &tree, offset) - .filter_map(ast::NameLike::cast) + for node in Self::find_nodes(sema, name, &tree, file_id, offset) + .filter_map(|it| it.map(ast::NameLike::cast).transpose()) { - if match name { - ast::NameLike::NameRef(name_ref) => self.found_name_ref(&name_ref, sink), - ast::NameLike::Name(name) => self.found_name(&name, sink), - ast::NameLike::Lifetime(lifetime) => self.found_lifetime(&lifetime, sink), + if match node.value { + ast::NameLike::NameRef(name_ref) => { + self.found_name_ref(&name_ref, &mut |it| sink(node.file_id, it)) + } + ast::NameLike::Name(name) => { + self.found_name(&name, &mut |it| sink(node.file_id, it)) + } + ast::NameLike::Lifetime(lifetime) => { + self.found_lifetime(&lifetime, &mut |it| sink(node.file_id, it)) + } } { return; } @@ -993,10 +1182,10 @@ impl<'a> FindUsages<'a> { // Search for occurrences of the `Self` referring to our type if let Some((self_ty, finder)) = &include_self_kw_refs { for offset in Self::match_indices(&text, finder, search_range) { - for name_ref in Self::find_nodes(sema, "Self", file_id, &tree, offset) - .filter_map(ast::NameRef::cast) - { - if self.found_self_ty_name_ref(self_ty, &name_ref, sink) { + for name_ref in Self::find_name_refs(sema, "Self", &tree, file_id, offset) { + if self.found_self_ty_name_ref(self_ty, &name_ref.value, &mut |it| { + sink(name_ref.file_id, it) + }) { return; } } @@ -1013,25 +1202,24 @@ impl<'a> FindUsages<'a> { let finder = &Finder::new("super"); for (text, file_id, search_range) in Self::scope_files(sema.db, &scope) { - self.sema.db.unwind_if_revision_cancelled(); - - let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone()); + let tree = LazyCell::new(move || sema.parse_or_expand(file_id)); for offset in Self::match_indices(&text, finder, search_range) { - for name_ref in Self::find_nodes(sema, "super", file_id, &tree, offset) - .filter_map(ast::NameRef::cast) - { - if self.found_name_ref(&name_ref, sink) { + for name_ref in Self::find_name_refs(sema, "super", &tree, file_id, offset) { + if self + .found_name_ref(&name_ref.value, &mut |it| sink(name_ref.file_id, it)) + { return; } } } if let Some(finder) = &is_crate_root { for offset in Self::match_indices(&text, finder, search_range) { - for name_ref in 
Self::find_nodes(sema, "crate", file_id, &tree, offset) - .filter_map(ast::NameRef::cast) + for name_ref in Self::find_name_refs(sema, "crate", &tree, file_id, offset) { - if self.found_name_ref(&name_ref, sink) { + if self.found_name_ref(&name_ref.value, &mut |it| { + sink(name_ref.file_id, it) + }) { return; } } @@ -1044,14 +1232,13 @@ impl<'a> FindUsages<'a> { match self.def { Definition::Module(module) if self.search_self_mod => { let src = module.definition_source(sema.db); - let file_id = src.file_id.original_file(sema.db); - let (file_id, search_range) = match src.value { - ModuleSource::Module(m) => (file_id, Some(m.syntax().text_range())), - ModuleSource::BlockExpr(b) => (file_id, Some(b.syntax().text_range())), - ModuleSource::SourceFile(_) => (file_id, None), + let search_range = match src.value { + ModuleSource::Module(m) => Some(m.syntax().text_range()), + ModuleSource::BlockExpr(b) => Some(b.syntax().text_range()), + ModuleSource::SourceFile(_) => None, }; - let search_range = if let Some(&range) = search_scope.entries.get(&file_id) { + let search_range = if let Some(&range) = search_scope.entries.get(&src.file_id) { match (range, search_range) { (None, range) | (range, None) => range, (Some(range), Some(search_range)) => match range.intersect(search_range) { @@ -1063,19 +1250,36 @@ impl<'a> FindUsages<'a> { return; }; - let file_text = sema.db.file_text(file_id.file_id(self.sema.db)); - let text = file_text.text(sema.db); + let (t1_, t2_); + let text = match src.file_id { + HirFileId::FileId(editioned_file_id) => { + let file_text = sema.db.file_text(editioned_file_id.file_id(self.sema.db)); + t1_ = file_text.text(sema.db); + &*t1_ + } + + HirFileId::MacroFile(macro_file_id) => { + t2_ = sema + .db + .parse_macro_expansion(macro_file_id) + .value + .0 + .syntax_node() + .to_string(); + &*t2_ + } + }; let search_range = - search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text))); + search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(text))); - let tree = LazyCell::new(|| sema.parse(file_id).syntax().clone()); + let tree = LazyCell::new(|| sema.parse_or_expand(src.file_id)); let finder = &Finder::new("self"); - for offset in Self::match_indices(&text, finder, search_range) { - for name_ref in Self::find_nodes(sema, "self", file_id, &tree, offset) - .filter_map(ast::NameRef::cast) - { - if self.found_self_module_name_ref(&name_ref, sink) { + for offset in Self::match_indices(text, finder, search_range) { + for name_ref in Self::find_name_refs(sema, "self", &tree, src.file_id, offset) { + if self.found_self_module_name_ref(&name_ref.value, &mut |it| { + sink(name_ref.file_id, it) + }) { return; } } @@ -1089,7 +1293,7 @@ impl<'a> FindUsages<'a> { &self, self_ty: &hir::Type<'_>, name_ref: &ast::NameRef, - sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool, + sink: &mut dyn FnMut(FileReference) -> bool, ) -> bool { // See https://github.com/rust-lang/rust-analyzer/pull/15864/files/e0276dc5ddc38c65240edb408522bb869f15afb4#r1389848845 let ty_eq = |ty: hir::Type<'_>| match (ty.as_adt(), self_ty.as_adt()) { @@ -1102,13 +1306,12 @@ impl<'a> FindUsages<'a> { Some(NameRefClass::Definition(Definition::SelfType(impl_), _)) if ty_eq(impl_.self_ty(self.sema.db)) => { - let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let reference = FileReference { - range, + range: name_ref.syntax().text_range(), name: FileReferenceNode::NameRef(name_ref.clone()), category: ReferenceCategory::empty(), }; - sink(file_id, reference) + 
sink(reference) } _ => false, } @@ -1117,22 +1320,21 @@ impl<'a> FindUsages<'a> { fn found_self_module_name_ref( &self, name_ref: &ast::NameRef, - sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool, + sink: &mut dyn FnMut(FileReference) -> bool, ) -> bool { match NameRefClass::classify(self.sema, name_ref) { Some(NameRefClass::Definition(def @ Definition::Module(_), _)) if def == self.def => { - let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let category = if is_name_ref_in_import(name_ref) { ReferenceCategory::IMPORT } else { ReferenceCategory::empty() }; let reference = FileReference { - range, + range: name_ref.syntax().text_range(), name: FileReferenceNode::NameRef(name_ref.clone()), category, }; - sink(file_id, reference) + sink(reference) } _ => false, } @@ -1140,11 +1342,10 @@ impl<'a> FindUsages<'a> { fn found_format_args_ref( &self, - file_id: EditionedFileId, range: TextRange, token: ast::String, res: Either, - sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool, + sink: &mut dyn FnMut(FileReference) -> bool, ) -> bool { let def = res.either(Definition::from, Definition::from); if def == self.def { @@ -1153,7 +1354,7 @@ impl<'a> FindUsages<'a> { name: FileReferenceNode::FormatStringEntry(token, range), category: ReferenceCategory::READ, }; - sink(file_id, reference) + sink(reference) } else { false } @@ -1162,17 +1363,16 @@ impl<'a> FindUsages<'a> { fn found_lifetime( &self, lifetime: &ast::Lifetime, - sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool, + sink: &mut dyn FnMut(FileReference) -> bool, ) -> bool { match NameRefClass::classify_lifetime(self.sema, lifetime) { Some(NameRefClass::Definition(def, _)) if def == self.def => { - let FileRange { file_id, range } = self.sema.original_range(lifetime.syntax()); let reference = FileReference { - range, + range: lifetime.syntax().text_range(), name: FileReferenceNode::Lifetime(lifetime.clone()), category: ReferenceCategory::empty(), }; - sink(file_id, reference) + sink(reference) } _ => false, } @@ -1181,7 +1381,7 @@ impl<'a> FindUsages<'a> { fn found_name_ref( &self, name_ref: &ast::NameRef, - sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool, + sink: &mut dyn FnMut(FileReference) -> bool, ) -> bool { match NameRefClass::classify(self.sema, name_ref) { Some(NameRefClass::Definition(def, _)) @@ -1190,13 +1390,12 @@ impl<'a> FindUsages<'a> { || matches!(self.assoc_item_container, Some(hir::AssocItemContainer::Trait(_))) && convert_to_def_in_trait(self.sema.db, def) == self.def => { - let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let reference = FileReference { - range, + range: name_ref.syntax().text_range(), name: FileReferenceNode::NameRef(name_ref.clone()), category: ReferenceCategory::new(self.sema, &def, name_ref), }; - sink(file_id, reference) + sink(reference) } // FIXME: special case type aliases, we can't filter between impl and trait defs here as we lack the substitutions // so we always resolve all assoc type aliases to both their trait def and impl defs @@ -1206,23 +1405,21 @@ impl<'a> FindUsages<'a> { && convert_to_def_in_trait(self.sema.db, def) == convert_to_def_in_trait(self.sema.db, self.def) => { - let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let reference = FileReference { - range, + range: name_ref.syntax().text_range(), name: FileReferenceNode::NameRef(name_ref.clone()), category: ReferenceCategory::new(self.sema, &def, name_ref), }; - sink(file_id, reference) + 
sink(reference) } Some(NameRefClass::Definition(def, _)) if self.include_self_kw_refs.is_some() => { if self.include_self_kw_refs == def_to_ty(self.sema, &def) { - let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let reference = FileReference { - range, + range: name_ref.syntax().text_range(), name: FileReferenceNode::NameRef(name_ref.clone()), category: ReferenceCategory::new(self.sema, &def, name_ref), }; - sink(file_id, reference) + sink(reference) } else { false } @@ -1232,8 +1429,6 @@ impl<'a> FindUsages<'a> { field_ref: field, adt_subst: _, }) => { - let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); - let field = Definition::Field(field); let local = Definition::Local(local); let access = match self.def { @@ -1246,44 +1441,38 @@ impl<'a> FindUsages<'a> { _ => return false, }; let reference = FileReference { - range, + range: name_ref.syntax().text_range(), name: FileReferenceNode::NameRef(name_ref.clone()), category: access, }; - sink(file_id, reference) + sink(reference) } _ => false, } } - fn found_name( - &self, - name: &ast::Name, - sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool, - ) -> bool { + fn found_name(&self, name: &ast::Name, sink: &mut dyn FnMut(FileReference) -> bool) -> bool { match NameClass::classify(self.sema, name) { Some(NameClass::PatFieldShorthand { local_def: _, field_ref, adt_subst: _ }) if matches!( self.def, Definition::Field(_) if Definition::Field(field_ref) == self.def ) => { - let FileRange { file_id, range } = self.sema.original_range(name.syntax()); let reference = FileReference { - range, + range: name.syntax().text_range(), name: FileReferenceNode::Name(name.clone()), // FIXME: mutable patterns should have `Write` access category: ReferenceCategory::READ, }; - sink(file_id, reference) + sink(reference) } Some(NameClass::ConstReference(def)) if self.def == def => { - let FileRange { file_id, range } = self.sema.original_range(name.syntax()); let reference = FileReference { - range, + range: name.syntax().text_range(), name: FileReferenceNode::Name(name.clone()), category: ReferenceCategory::empty(), }; - sink(file_id, reference) + sink(reference) } Some(NameClass::Definition(def)) if def != self.def => { match (&self.assoc_item_container, self.def) { @@ -1302,13 +1491,12 @@ impl<'a> FindUsages<'a> { if convert_to_def_in_trait(self.sema.db, def) == self.def => {} _ => return false, } - let FileRange { file_id, range } = self.sema.original_range(name.syntax()); let reference = FileReference { - range, + range: name.syntax().text_range(), name: FileReferenceNode::Name(name.clone()), category: ReferenceCategory::empty(), }; - sink(file_id, reference) + sink(reference) } _ => false, } diff --git a/crates/ide-db/src/symbol_index.rs b/crates/ide-db/src/symbol_index.rs index c15cade84a50..7ab4e7902d22 100644 --- a/crates/ide-db/src/symbol_index.rs +++ b/crates/ide-db/src/symbol_index.rs @@ -36,7 +36,6 @@ use hir::{ symbols::{FileSymbol, SymbolCollector}, }; use rayon::prelude::*; -use rustc_hash::FxHashSet; use triomphe::Arc; use crate::RootDatabase; @@ -118,16 +117,6 @@ pub trait SymbolsDatabase: HirDatabase + SourceDatabase { #[salsa::transparent] /// The symbol indices of modules that make up a given crate. fn crate_symbols(&self, krate: Crate) -> Box<[Arc]>; - - /// The set of "local" (that is, from the current workspace) roots. - /// Files in local roots are assumed to change frequently. 
- #[salsa::input] - fn local_roots(&self) -> Arc>; - - /// The set of roots for crates.io libraries. - /// Files in libraries are assumed to never change. - #[salsa::input] - fn library_roots(&self) -> Arc>; } fn library_symbols(db: &dyn SymbolsDatabase, source_root_id: SourceRootId) -> Arc { @@ -394,6 +383,7 @@ impl Query { mod tests { use expect_test::expect_file; + use rustc_hash::FxHashSet; use salsa::Durability; use test_fixture::{WORKSPACE, WithFixture}; diff --git a/crates/ide-db/src/test_data/test_doc_alias.txt b/crates/ide-db/src/test_data/test_doc_alias.txt index 30d1df4f8e55..1f28335b13b7 100644 --- a/crates/ide-db/src/test_data/test_doc_alias.txt +++ b/crates/ide-db/src/test_data/test_doc_alias.txt @@ -3,7 +3,7 @@ Module { id: ModuleId { krate: Crate( - Id(3000), + Id(2800), ), block: None, local_id: Idx::(0), @@ -16,7 +16,7 @@ Struct( Struct { id: StructId( - 3401, + 2c01, ), }, ), @@ -24,7 +24,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -50,7 +50,7 @@ Struct( Struct { id: StructId( - 3400, + 2c00, ), }, ), @@ -58,7 +58,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -84,7 +84,7 @@ Struct( Struct { id: StructId( - 3400, + 2c00, ), }, ), @@ -92,7 +92,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -118,7 +118,7 @@ Struct( Struct { id: StructId( - 3400, + 2c00, ), }, ), @@ -126,7 +126,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -152,7 +152,7 @@ Struct( Struct { id: StructId( - 3400, + 2c00, ), }, ), @@ -160,7 +160,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -186,7 +186,7 @@ Struct( Struct { id: StructId( - 3401, + 2c01, ), }, ), @@ -194,7 +194,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -220,7 +220,7 @@ Struct( Struct { id: StructId( - 3400, + 2c00, ), }, ), @@ -228,7 +228,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { diff --git a/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/crates/ide-db/src/test_data/test_symbol_index_collection.txt index de046e70c673..0b7c799bd2b6 100644 --- a/crates/ide-db/src/test_data/test_symbol_index_collection.txt +++ b/crates/ide-db/src/test_data/test_symbol_index_collection.txt @@ -3,7 +3,7 @@ Module { id: ModuleId { krate: Crate( - Id(3000), + Id(2800), ), block: None, local_id: Idx::(0), @@ -15,14 +15,14 @@ def: TypeAlias( TypeAlias { id: TypeAliasId( - 6800, + 6000, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -47,14 +47,14 @@ def: Const( Const { id: ConstId( - 6000, + 5800, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -79,14 +79,14 @@ def: Const( Const { id: ConstId( - 6002, + 5802, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -112,7 +112,7 @@ Enum( Enum { id: EnumId( - 4c00, + 4400, ), }, ), @@ -120,7 +120,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -146,7 +146,7 @@ 
Macro { id: Macro2Id( Macro2Id( - 4800, + 4000, ), ), }, @@ -154,7 +154,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -180,7 +180,7 @@ Macro { id: Macro2Id( Macro2Id( - 4800, + 4000, ), ), }, @@ -188,7 +188,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -213,14 +213,14 @@ def: Static( Static { id: StaticId( - 6400, + 5c00, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -246,7 +246,7 @@ Struct( Struct { id: StructId( - 4401, + 3c01, ), }, ), @@ -254,7 +254,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -280,7 +280,7 @@ Struct( Struct { id: StructId( - 4400, + 3c00, ), }, ), @@ -288,7 +288,7 @@ loc: DeclarationLocation { hir_file_id: MacroFile( MacroCallId( - Id(3800), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -314,7 +314,7 @@ Struct( Struct { id: StructId( - 4405, + 3c05, ), }, ), @@ -322,7 +322,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -350,7 +350,7 @@ Struct( Struct { id: StructId( - 4406, + 3c06, ), }, ), @@ -358,7 +358,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -386,7 +386,7 @@ Struct( Struct { id: StructId( - 4407, + 3c07, ), }, ), @@ -394,7 +394,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -420,7 +420,7 @@ Struct( Struct { id: StructId( - 4402, + 3c02, ), }, ), @@ -428,7 +428,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -453,14 +453,14 @@ def: Trait( Trait { id: TraitId( - 5800, + 5000, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -486,7 +486,7 @@ Macro { id: Macro2Id( Macro2Id( - 4800, + 4000, ), ), }, @@ -494,7 +494,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -520,7 +520,7 @@ Union( Union { id: UnionId( - 5000, + 4800, ), }, ), @@ -528,7 +528,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -554,7 +554,7 @@ Module { id: ModuleId { krate: Crate( - Id(3000), + Id(2800), ), block: None, local_id: Idx::(1), @@ -564,7 +564,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -590,7 +590,7 @@ Module { id: ModuleId { krate: Crate( - Id(3000), + Id(2800), ), block: None, local_id: Idx::(2), @@ -600,7 +600,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -626,7 +626,7 @@ Macro { id: MacroRulesId( MacroRulesId( - 3401, + 2c01, ), ), }, @@ -634,7 +634,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -659,14 +659,14 @@ def: Function( Function { id: FunctionId( - 5c02, + 5402, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -693,14 +693,14 @@ def: Function( Function { id: FunctionId( - 5c01, + 5401, ), }, ), loc: DeclarationLocation { 
hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -728,7 +728,7 @@ Macro { id: MacroRulesId( MacroRulesId( - 3400, + 2c00, ), ), }, @@ -736,7 +736,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -761,14 +761,14 @@ def: Function( Function { id: FunctionId( - 5c00, + 5400, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -794,7 +794,7 @@ Macro { id: MacroRulesId( MacroRulesId( - 3401, + 2c01, ), ), }, @@ -802,7 +802,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -827,14 +827,14 @@ def: Function( Function { id: FunctionId( - 5c03, + 5403, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -862,7 +862,7 @@ Module { id: ModuleId { krate: Crate( - Id(3000), + Id(2800), ), block: None, local_id: Idx::(1), @@ -875,7 +875,7 @@ Struct( Struct { id: StructId( - 4403, + 3c03, ), }, ), @@ -883,7 +883,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { @@ -909,7 +909,7 @@ Module { id: ModuleId { krate: Crate( - Id(3000), + Id(2800), ), block: None, local_id: Idx::(2), @@ -921,14 +921,14 @@ def: Trait( Trait { id: TraitId( - 5800, + 5000, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2001), + Id(1801), ), ), ptr: SyntaxNodePtr { @@ -954,7 +954,7 @@ Macro { id: Macro2Id( Macro2Id( - 4800, + 4000, ), ), }, @@ -962,7 +962,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2001), + Id(1801), ), ), ptr: SyntaxNodePtr { @@ -988,7 +988,7 @@ Struct( Struct { id: StructId( - 4404, + 3c04, ), }, ), @@ -996,7 +996,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2001), + Id(1801), ), ), ptr: SyntaxNodePtr { @@ -1022,7 +1022,7 @@ Macro { id: Macro2Id( Macro2Id( - 4800, + 4000, ), ), }, @@ -1030,7 +1030,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2001), + Id(1801), ), ), ptr: SyntaxNodePtr { @@ -1056,7 +1056,7 @@ Struct( Struct { id: StructId( - 4404, + 3c04, ), }, ), @@ -1064,7 +1064,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2001), + Id(1801), ), ), ptr: SyntaxNodePtr { diff --git a/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt b/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt index 22872b577f71..9d6448d24377 100644 --- a/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt +++ b/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt @@ -5,7 +5,7 @@ Struct( Struct { id: StructId( - 3800, + 3000, ), }, ), @@ -13,7 +13,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2001), + Id(1801), ), ), ptr: SyntaxNodePtr { diff --git a/crates/ide-db/src/test_data/test_symbols_with_imports.txt b/crates/ide-db/src/test_data/test_symbols_with_imports.txt index 9f98bf87e2e8..3aee3c15f894 100644 --- a/crates/ide-db/src/test_data/test_symbols_with_imports.txt +++ b/crates/ide-db/src/test_data/test_symbols_with_imports.txt @@ -5,7 +5,7 @@ Struct( Struct { id: StructId( - 3800, + 3000, ), }, ), @@ -13,7 +13,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2001), + Id(1801), ), ), ptr: SyntaxNodePtr { @@ -39,7 +39,7 @@ Struct( Struct { id: StructId( - 3800, + 3000, ), }, ), @@ -47,7 +47,7 
@@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(1800), ), ), ptr: SyntaxNodePtr { diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs index 7da799e0d490..a549cfee1458 100644 --- a/crates/ide-diagnostics/src/handlers/missing_fields.rs +++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs @@ -220,7 +220,7 @@ fn get_default_constructor( let krate = ctx .sema - .file_to_module_def(d.file.original_file(ctx.sema.db).file_id(ctx.sema.db))? + .file_to_module_def2(d.file.original_file(ctx.sema.db).file_id(ctx.sema.db))? .krate(); let module = krate.root_module(); diff --git a/crates/ide-diagnostics/src/handlers/private_field.rs b/crates/ide-diagnostics/src/handlers/private_field.rs index 69cd0d27cb06..48237e39d49d 100644 --- a/crates/ide-diagnostics/src/handlers/private_field.rs +++ b/crates/ide-diagnostics/src/handlers/private_field.rs @@ -35,7 +35,7 @@ pub(crate) fn field_is_private_fixes( fix_range: TextRange, ) -> Option> { let def_crate = private_field.krate(sema.db); - let usage_crate = sema.file_to_module_def(usage_file_id.file_id(sema.db))?.krate(); + let usage_crate = sema.file_to_module_def(usage_file_id)?.krate(); let mut visibility_text = if usage_crate == def_crate { "pub(crate) " } else { "pub " }; let source = private_field.source(sema.db)?; diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index 72bd66d1c8bb..acd3531702b4 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -349,8 +349,8 @@ pub fn semantic_diagnostics( let editioned_file_id = sema .attach_first_edition(file_id) .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id)); + let edition = editioned_file_id.edition(db); - let (file_id, edition) = editioned_file_id.unpack(db); let mut res = Vec::new(); let parse = sema.parse(editioned_file_id); @@ -371,7 +371,7 @@ pub fn semantic_diagnostics( ); } - let module = sema.file_to_module_def(file_id); + let module = sema.file_to_module_def(editioned_file_id); let is_nightly = matches!( module.and_then(|m| db.toolchain_channel(m.krate().into())), diff --git a/crates/ide-ssr/src/lib.rs b/crates/ide-ssr/src/lib.rs index e4b20f3f1aad..610697b2d9fc 100644 --- a/crates/ide-ssr/src/lib.rs +++ b/crates/ide-ssr/src/lib.rs @@ -80,7 +80,7 @@ pub use crate::{errors::SsrError, from_comment::ssr_from_comment, matching::Matc use crate::{errors::bail, matching::MatchFailureReason}; use hir::{FileRange, Semantics}; -use ide_db::symbol_index::SymbolsDatabase; +use ide_db::base_db::RootQueryDb; use ide_db::text_edit::TextEdit; use ide_db::{EditionedFileId, FileId, FxHashMap, RootDatabase, base_db::SourceDatabase}; use resolving::ResolvedRule; diff --git a/crates/ide-ssr/src/search.rs b/crates/ide-ssr/src/search.rs index 99a98fb2a713..0eb48bcfb651 100644 --- a/crates/ide-ssr/src/search.rs +++ b/crates/ide-ssr/src/search.rs @@ -7,8 +7,9 @@ use crate::{ use hir::FileRange; use ide_db::{ EditionedFileId, FileId, FxHashSet, + base_db::RootQueryDb, defs::Definition, - search::{SearchScope, UsageSearchResult}, + search::{RealFileUsageSearchResult, SearchScope}, }; use syntax::{AstNode, SyntaxKind, SyntaxNode, ast}; @@ -18,7 +19,7 @@ use syntax::{AstNode, SyntaxKind, SyntaxNode, ast}; /// them more than once. 
#[derive(Default)] pub(crate) struct UsageCache { - usages: Vec<(Definition, UsageSearchResult)>, + usages: Vec<(Definition, RealFileUsageSearchResult)>, } impl<'db> MatchFinder<'db> { @@ -114,14 +115,14 @@ impl<'db> MatchFinder<'db> { &self, usage_cache: &'a mut UsageCache, definition: Definition, - ) -> &'a UsageSearchResult { + ) -> &'a RealFileUsageSearchResult { // Logically if a lookup succeeds we should just return it. Unfortunately returning it would // extend the lifetime of the borrow, then we wouldn't be able to do the insertion on a // cache miss. This is a limitation of NLL and is fixed with Polonius. For now we do two // lookups in the case of a cache hit. if usage_cache.find(&definition).is_none() { let usages = definition.usages(&self.sema).in_scope(&self.search_scope()).all(); - usage_cache.usages.push((definition, usages)); + usage_cache.usages.push((definition, usages.map_out_of_macros(&self.sema))); return &usage_cache.usages.last().unwrap().1; } usage_cache.find(&definition).unwrap() @@ -156,7 +157,6 @@ impl<'db> MatchFinder<'db> { if self.restrict_ranges.is_empty() { // Unrestricted search. use ide_db::base_db::SourceDatabase; - use ide_db::symbol_index::SymbolsDatabase; for &root in self.sema.db.local_roots().iter() { let sr = self.sema.db.source_root(root).source_root(self.sema.db); for file_id in sr.iter() { @@ -259,7 +259,7 @@ fn is_search_permitted(node: &SyntaxNode) -> bool { } impl UsageCache { - fn find(&mut self, definition: &Definition) -> Option<&UsageSearchResult> { + fn find(&mut self, definition: &Definition) -> Option<&RealFileUsageSearchResult> { // We expect a very small number of cache entries (generally 1), so a linear scan should be // fast enough and avoids the need to implement Hash for Definition. for (d, refs) in &self.usages { diff --git a/crates/ide-ssr/src/tests.rs b/crates/ide-ssr/src/tests.rs index 46b633b8a325..274d9f414cd3 100644 --- a/crates/ide-ssr/src/tests.rs +++ b/crates/ide-ssr/src/tests.rs @@ -2,7 +2,7 @@ use expect_test::{Expect, expect}; use hir::{FilePosition, FileRange}; use ide_db::{ EditionedFileId, FxHashSet, - base_db::{SourceDatabase, salsa::Durability}, + base_db::{RootQueryDb, SourceDatabase, salsa::Durability}, }; use test_utils::RangeOrOffset; use triomphe::Arc; @@ -66,7 +66,6 @@ fn parser_undefined_placeholder_in_replacement() { /// `code` may optionally contain a cursor marker `$0`. If it doesn't, then the position will be /// the start of the file. If there's a second cursor marker, then we'll return a single range. 
pub(crate) fn single_file(code: &str) -> (ide_db::RootDatabase, FilePosition, Vec) { - use ide_db::symbol_index::SymbolsDatabase; use test_fixture::{WORKSPACE, WithFixture}; let (mut db, file_id, range_or_offset) = if code.contains(test_utils::CURSOR_MARKER) { ide_db::RootDatabase::with_range_or_offset(code) diff --git a/crates/ide/src/annotations.rs b/crates/ide/src/annotations.rs index 05196ac98c03..34a0c7f96a4c 100644 --- a/crates/ide/src/annotations.rs +++ b/crates/ide/src/annotations.rs @@ -1,16 +1,13 @@ -use hir::{HasSource, InFile, InRealFile, Semantics}; -use ide_db::{ - FileId, FilePosition, FileRange, FxIndexSet, RootDatabase, defs::Definition, - helpers::visit_file_defs, -}; +use hir::{HasSource, HirFileId, HirFilePosition, HirFileRange, InFile, Semantics}; +use ide_db::{FxIndexSet, RootDatabase, defs::Definition, helpers::visit_file_defs}; use itertools::Itertools; use syntax::{AstNode, TextRange, ast::HasName}; use crate::{ - NavigationTarget, RunnableKind, + RunnableKind, annotations::fn_references::find_all_methods, goto_implementation::goto_implementation, - navigation_target, + navigation_target::HirNavigationTarget, references::find_all_refs, runnables::{Runnable, runnables}, }; @@ -32,8 +29,8 @@ pub struct Annotation { #[derive(Debug, Hash, PartialEq, Eq)] pub enum AnnotationKind { Runnable(Runnable), - HasImpls { pos: FilePosition, data: Option> }, - HasReferences { pos: FilePosition, data: Option> }, + HasImpls { pos: HirFilePosition, data: Option> }, + HasReferences { pos: HirFilePosition, data: Option> }, } pub struct AnnotationConfig { @@ -54,7 +51,7 @@ pub enum AnnotationLocation { pub(crate) fn annotations( db: &RootDatabase, config: &AnnotationConfig, - file_id: FileId, + file_id: HirFileId, ) -> Vec { let mut annotations = FxIndexSet::default(); @@ -76,17 +73,17 @@ pub(crate) fn annotations( AnnotationLocation::AboveName => cmd_target, AnnotationLocation::AboveWholeItem => range, }; - let target_pos = FilePosition { file_id, offset: cmd_target.start() }; + let target_pos = HirFilePosition { file_id, offset: cmd_target.start() }; (annotation_range, target_pos) }; visit_file_defs(&Semantics::new(db), file_id, &mut |def| { let range = match def { Definition::Const(konst) if config.annotate_references => { - konst.source(db).and_then(|node| name_range(db, node, file_id)) + konst.source(db).and_then(|node| name_range(node, file_id)) } Definition::Trait(trait_) if config.annotate_references || config.annotate_impls => { - trait_.source(db).and_then(|node| name_range(db, node, file_id)) + trait_.source(db).and_then(|node| name_range(node, file_id)) } Definition::Adt(adt) => match adt { hir::Adt::Enum(enum_) => { @@ -95,7 +92,7 @@ pub(crate) fn annotations( .variants(db) .into_iter() .filter_map(|variant| { - variant.source(db).and_then(|node| name_range(db, node, file_id)) + variant.source(db).and_then(|node| name_range(node, file_id)) }) .for_each(|range| { let (annotation_range, target_position) = mk_ranges(range); @@ -109,14 +106,14 @@ pub(crate) fn annotations( }) } if config.annotate_references || config.annotate_impls { - enum_.source(db).and_then(|node| name_range(db, node, file_id)) + enum_.source(db).and_then(|node| name_range(node, file_id)) } else { None } } _ => { if config.annotate_references || config.annotate_impls { - adt.source(db).and_then(|node| name_range(db, node, file_id)) + adt.source(db).and_then(|node| name_range(node, file_id)) } else { None } @@ -145,32 +142,13 @@ pub(crate) fn annotations( } fn name_range( - db: &RootDatabase, node: InFile, 
- source_file_id: FileId, + source_file_id: HirFileId, ) -> Option<(TextRange, Option)> { - if let Some(name) = node.value.name().map(|name| name.syntax().text_range()) { - // if we have a name, try mapping that out of the macro expansion as we can put the - // annotation on that name token - // See `test_no_annotations_macro_struct_def` vs `test_annotations_macro_struct_def_call_site` - let res = navigation_target::orig_range_with_focus_r( - db, - node.file_id, - node.value.syntax().text_range(), - Some(name), - ); - if res.call_site.0.file_id == source_file_id { - if let Some(name_range) = res.call_site.1 { - return Some((res.call_site.0.range, Some(name_range))); - } - } - }; - // otherwise try upmapping the entire node out of attributes - let InRealFile { file_id, value } = node.original_ast_node_rooted(db)?; - if file_id.file_id(db) == source_file_id { + if node.file_id == source_file_id { Some(( - value.syntax().text_range(), - value.name().map(|name| name.syntax().text_range()), + node.value.syntax().text_range(), + node.value.name().map(|name| name.syntax().text_range()), )) } else { None @@ -207,7 +185,7 @@ pub(crate) fn resolve_annotation(db: &RootDatabase, mut annotation: Annotation) .into_iter() .flat_map(|res| res.references) .flat_map(|(file_id, access)| { - access.into_iter().map(move |(range, _)| FileRange { file_id, range }) + access.into_iter().map(move |(range, _)| HirFileRange { file_id, range }) }) .collect() }); @@ -251,7 +229,7 @@ mod tests { let (analysis, file_id) = fixture::file(ra_fixture); let annotations: Vec = analysis - .annotations(config, file_id) + .annotations(config, file_id.into()) .unwrap() .into_iter() .map(|annotation| analysis.resolve_annotation(annotation).unwrap()) @@ -283,7 +261,9 @@ fn main() { kind: HasReferences { pos: FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 6, }, @@ -291,7 +271,9 @@ fn main() { [ FileRangeWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), range: 78..82, }, @@ -304,7 +286,9 @@ fn main() { kind: HasReferences { pos: FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 30, }, @@ -318,7 +302,9 @@ fn main() { kind: HasReferences { pos: FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 53, }, @@ -334,7 +320,9 @@ fn main() { use_name_in_title: false, nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 50..85, focus_range: 53..57, @@ -373,7 +361,9 @@ fn main() { kind: HasImpls { pos: FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 7, }, @@ -387,7 +377,9 @@ fn main() { kind: HasReferences { pos: FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 7, }, @@ -395,7 +387,9 @@ fn main() { [ FileRangeWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), range: 41..45, }, @@ -408,7 +402,9 @@ fn main() { kind: HasReferences { pos: FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 17, }, @@ -424,7 +420,9 @@ fn main() { use_name_in_title: false, nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 14..48, focus_range: 17..21, @@ -467,7 +465,9 @@ fn main() { kind: HasImpls { pos: FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 7, }, @@ -475,7 +475,9 @@ fn main() { [ NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + 
Id(1800), + ), ), full_range: 36..64, focus_range: 57..61, @@ -491,7 +493,9 @@ fn main() { kind: HasReferences { pos: FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 7, }, @@ -499,13 +503,17 @@ fn main() { [ FileRangeWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), range: 57..61, }, FileRangeWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), range: 93..97, }, @@ -518,7 +526,9 @@ fn main() { kind: HasImpls { pos: FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 20, }, @@ -526,7 +536,9 @@ fn main() { [ NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 36..64, focus_range: 57..61, @@ -542,7 +554,9 @@ fn main() { kind: HasReferences { pos: FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 20, }, @@ -550,7 +564,9 @@ fn main() { [ FileRangeWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), range: 41..52, }, @@ -563,7 +579,9 @@ fn main() { kind: HasReferences { pos: FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 69, }, @@ -579,7 +597,9 @@ fn main() { use_name_in_title: false, nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 66..100, focus_range: 69..73, @@ -614,7 +634,9 @@ fn main() {} kind: HasReferences { pos: FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 3, }, @@ -630,7 +652,9 @@ fn main() {} use_name_in_title: false, nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..12, focus_range: 3..7, @@ -673,7 +697,9 @@ fn main() { kind: HasImpls { pos: FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 7, }, @@ -681,7 +707,9 @@ fn main() { [ NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 14..56, focus_range: 19..23, @@ -697,7 +725,9 @@ fn main() { kind: HasReferences { pos: FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 7, }, @@ -705,13 +735,17 @@ fn main() { [ FileRangeWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), range: 19..23, }, FileRangeWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), range: 74..78, }, @@ -724,7 +758,9 @@ fn main() { kind: HasReferences { pos: FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 33, }, @@ -732,7 +768,9 @@ fn main() { [ FileRangeWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), range: 79..90, }, @@ -745,7 +783,9 @@ fn main() { kind: HasReferences { pos: FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 61, }, @@ -761,7 +801,9 @@ fn main() { use_name_in_title: false, nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 58..95, focus_range: 61..65, @@ -801,7 +843,9 @@ mod tests { kind: HasReferences { pos: FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 3, }, @@ -817,7 +861,9 @@ mod tests { use_name_in_title: false, nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..12, focus_range: 3..7, @@ -841,7 +887,9 @@ mod tests { use_name_in_title: false, nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 14..64, 
focus_range: 18..23, @@ -868,7 +916,9 @@ mod tests { use_name_in_title: false, nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 30..62, focus_range: 45..57, @@ -947,36 +997,7 @@ m! { }; "#, expect![[r#" - [ - Annotation { - range: 83..87, - kind: HasImpls { - pos: FilePositionWrapper { - file_id: FileId( - 0, - ), - offset: 83, - }, - data: Some( - [], - ), - }, - }, - Annotation { - range: 83..87, - kind: HasReferences { - pos: FilePositionWrapper { - file_id: FileId( - 0, - ), - offset: 83, - }, - data: Some( - [], - ), - }, - }, - ] + [] "#]], ); } @@ -996,7 +1017,9 @@ struct Foo; kind: HasImpls { pos: FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 67, }, @@ -1010,7 +1033,9 @@ struct Foo; kind: HasReferences { pos: FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 67, }, diff --git a/crates/ide/src/annotations/fn_references.rs b/crates/ide/src/annotations/fn_references.rs index 427a2eff8201..2e810bd36f9b 100644 --- a/crates/ide/src/annotations/fn_references.rs +++ b/crates/ide/src/annotations/fn_references.rs @@ -1,20 +1,19 @@ //! This module implements a methods and free functions search in the specified file. //! We have to skip tests, so cannot reuse file_structure module. -use hir::Semantics; +use hir::{HirFileId, Semantics}; use ide_assists::utils::test_related_attribute_syn; use ide_db::RootDatabase; use syntax::{AstNode, SyntaxNode, TextRange, ast, ast::HasName}; -use crate::FileId; - pub(super) fn find_all_methods( db: &RootDatabase, - file_id: FileId, + file_id: HirFileId, ) -> Vec<(TextRange, Option)> { let sema = Semantics::new(db); - let source_file = sema.parse_guess_edition(file_id); - source_file.syntax().descendants().filter_map(method_range).collect() + let file_id = sema.adjust_edition(file_id); + let syntax = sema.parse_or_expand(sema.adjust_edition(file_id)); + syntax.descendants().filter_map(method_range).collect() } fn method_range(item: SyntaxNode) -> Option<(TextRange, Option)> { @@ -50,7 +49,7 @@ mod tests { "#, ); - let refs = super::find_all_methods(&analysis.db, pos.file_id); + let refs = super::find_all_methods(&analysis.db, pos.file_id.into()); check_result(&refs, &[3..=13, 27..=33, 47..=57]); } @@ -65,7 +64,7 @@ mod tests { "#, ); - let refs = super::find_all_methods(&analysis.db, pos.file_id); + let refs = super::find_all_methods(&analysis.db, pos.file_id.into()); check_result(&refs, &[19..=22, 35..=38]); } @@ -86,7 +85,7 @@ mod tests { "#, ); - let refs = super::find_all_methods(&analysis.db, pos.file_id); + let refs = super::find_all_methods(&analysis.db, pos.file_id.into()); check_result(&refs, &[28..=34]); } diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs index 7a0405939d10..aa3043a62f13 100644 --- a/crates/ide/src/call_hierarchy.rs +++ b/crates/ide/src/call_hierarchy.rs @@ -1,22 +1,22 @@ //! 
Entry point for call-hierarchy -use std::iter; - -use hir::Semantics; +use hir::{HirFileRange, Semantics}; use ide_db::{ - FileRange, FxIndexMap, RootDatabase, + FxIndexMap, RootDatabase, defs::{Definition, NameClass, NameRefClass}, helpers::pick_best_token, search::FileReference, }; use syntax::{AstNode, SyntaxKind::IDENT, ast}; -use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav, goto_definition}; +use crate::{ + HirFilePosition, RangeInfo, TryToNav, goto_definition, navigation_target::HirNavigationTarget, +}; #[derive(Debug, Clone)] pub struct CallItem { - pub target: NavigationTarget, - pub ranges: Vec, + pub target: HirNavigationTarget, + pub ranges: Vec, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] @@ -27,20 +27,19 @@ pub struct CallHierarchyConfig { pub(crate) fn call_hierarchy( db: &RootDatabase, - position: FilePosition, -) -> Option>> { + position: HirFilePosition, +) -> Option>> { goto_definition::goto_definition(db, position) } pub(crate) fn incoming_calls( db: &RootDatabase, CallHierarchyConfig { exclude_tests }: CallHierarchyConfig, - FilePosition { file_id, offset }: FilePosition, + HirFilePosition { file_id, offset }: HirFilePosition, ) -> Option> { let sema = &Semantics::new(db); - let file = sema.parse_guess_edition(file_id); - let file = file.syntax(); + let file = &sema.parse_or_expand(sema.adjust_edition(file_id)); let mut calls = CallLocations::default(); let references = sema @@ -58,7 +57,7 @@ pub(crate) fn incoming_calls( }) .flat_map(|func| func.usages(sema).all()); - for (_, references) in references { + for (file_id, references) in references { let references = references.iter().filter_map(|FileReference { name, .. }| name.as_name_ref()); for name in references { @@ -67,7 +66,7 @@ pub(crate) fn incoming_calls( let def = ast::Fn::cast(node).and_then(|fn_| sema.to_def(&fn_))?; // We should return def before check if it is a test, so that we // will not continue to search for outer fn in nested fns - def.try_to_nav(sema.db).map(|nav| (def, nav)) + def.try_to_nav_hir(sema.db).map(|nav| (def, nav)) }); if let Some((def, nav)) = def_nav { @@ -75,11 +74,8 @@ pub(crate) fn incoming_calls( continue; } - let range = sema.original_range(name.syntax()); - calls.add(nav.call_site, range.into_file_id(db)); - if let Some(other) = nav.def_site { - calls.add(other, range.into_file_id(db)); - } + let range = HirFileRange { file_id, range: name.syntax().text_range() }; + calls.add(nav, range); } } } @@ -90,11 +86,10 @@ pub(crate) fn incoming_calls( pub(crate) fn outgoing_calls( db: &RootDatabase, CallHierarchyConfig { exclude_tests }: CallHierarchyConfig, - FilePosition { file_id, offset }: FilePosition, + HirFilePosition { file_id, offset }: HirFilePosition, ) -> Option> { let sema = Semantics::new(db); - let file = sema.parse_guess_edition(file_id); - let file = file.syntax(); + let file = &sema.parse_or_expand(sema.adjust_edition(file_id)); let token = pick_best_token(file.token_at_offset(offset), |kind| match kind { IDENT => 1, _ => 0, @@ -122,39 +117,39 @@ pub(crate) fn outgoing_calls( if exclude_tests && it.is_test(db) { return None; } - it.try_to_nav(db) + it.try_to_nav_hir(db) } - hir::CallableKind::TupleEnumVariant(it) => it.try_to_nav(db), - hir::CallableKind::TupleStruct(it) => it.try_to_nav(db), + hir::CallableKind::TupleEnumVariant(it) => it.try_to_nav_hir(db), + hir::CallableKind::TupleStruct(it) => it.try_to_nav_hir(db), _ => None, } - .zip(Some(sema.original_range(expr.syntax()))) + .zip(Some(HirFileRange { file_id, range: expr.syntax().text_range() 
})) } ast::CallableExpr::MethodCall(expr) => { let function = sema.resolve_method_call(&expr)?; if exclude_tests && function.is_test(db) { return None; } - function - .try_to_nav(db) - .zip(Some(sema.original_range(expr.name_ref()?.syntax()))) + function.try_to_nav_hir(db).zip(Some(HirFileRange { + file_id, + range: expr.name_ref()?.syntax().text_range(), + })) } }?; - Some(nav_target.into_iter().zip(iter::repeat(range))) + Some((nav_target, range)) }) - .flatten() - .for_each(|(nav, range)| calls.add(nav, range.into_file_id(db))); + .for_each(|(nav, range)| calls.add(nav, range)); Some(calls.into_items()) } #[derive(Default)] struct CallLocations { - funcs: FxIndexMap>, + funcs: FxIndexMap>, } impl CallLocations { - fn add(&mut self, target: NavigationTarget, range: FileRange) { + fn add(&mut self, target: HirNavigationTarget, range: HirFileRange) { self.funcs.entry(target).or_default().push(range); } @@ -166,7 +161,7 @@ impl CallLocations { #[cfg(test)] mod tests { use expect_test::{Expect, expect}; - use ide_db::FilePosition; + use hir::HirFilePosition; use itertools::Itertools; use crate::fixture; @@ -191,7 +186,7 @@ mod tests { let (analysis, pos) = fixture::position(ra_fixture); - let mut navs = analysis.call_hierarchy(pos).unwrap().unwrap().info; + let mut navs = analysis.call_hierarchy(pos.into()).unwrap().unwrap().info; assert_eq!(navs.len(), 1); let nav = navs.pop().unwrap(); expected_nav.assert_eq(&nav.debug_render()); @@ -199,7 +194,7 @@ mod tests { let config = crate::CallHierarchyConfig { exclude_tests }; let item_pos = - FilePosition { file_id: nav.file_id, offset: nav.focus_or_full_range().start() }; + HirFilePosition { file_id: nav.file_id, offset: nav.focus_or_full_range().start() }; let incoming_calls = analysis.incoming_calls(config, item_pos).unwrap().unwrap(); expected_incoming.assert_eq(&incoming_calls.into_iter().map(debug_render).join("\n")); @@ -218,9 +213,11 @@ fn caller() { call$0ee(); } "#, - expect![["callee Function FileId(0) 0..14 3..9"]], - expect!["caller Function FileId(0) 15..44 18..24 : FileId(0):33..39"], - expect![[]], + expect!["callee Function FileId(EditionedFileId(Id(1800))) 0..14 3..9"], + expect![ + "caller Function FileId(EditionedFileId(Id(1800))) 15..44 18..24 : FileId(EditionedFileId(Id(1800))):33..39" + ], + expect![], ); } @@ -235,9 +232,11 @@ fn caller() { callee(); } "#, - expect![["callee Function FileId(0) 0..14 3..9"]], - expect!["caller Function FileId(0) 15..44 18..24 : FileId(0):33..39"], - expect![[]], + expect!["callee Function FileId(EditionedFileId(Id(1800))) 0..14 3..9"], + expect![ + "caller Function FileId(EditionedFileId(Id(1800))) 15..44 18..24 : FileId(EditionedFileId(Id(1800))):33..39" + ], + expect![], ); } @@ -253,9 +252,11 @@ fn caller() { callee(); } "#, - expect![["callee Function FileId(0) 0..14 3..9"]], - expect!["caller Function FileId(0) 15..58 18..24 : FileId(0):33..39, FileId(0):47..53"], - expect![[]], + expect!["callee Function FileId(EditionedFileId(Id(1800))) 0..14 3..9"], + expect![ + "caller Function FileId(EditionedFileId(Id(1800))) 15..58 18..24 : FileId(EditionedFileId(Id(1800))):33..39, FileId(EditionedFileId(Id(1800))):47..53" + ], + expect![], ); } @@ -274,11 +275,11 @@ fn caller2() { callee(); } "#, - expect![["callee Function FileId(0) 0..14 3..9"]], + expect!["callee Function FileId(EditionedFileId(Id(1800))) 0..14 3..9"], expect![[r#" - caller1 Function FileId(0) 15..45 18..25 : FileId(0):34..40 - caller2 Function FileId(0) 47..77 50..57 : FileId(0):66..72"#]], - expect![[]], + caller1 Function 
FileId(EditionedFileId(Id(1800))) 15..45 18..25 : FileId(EditionedFileId(Id(1800))):34..40 + caller2 Function FileId(EditionedFileId(Id(1800))) 47..77 50..57 : FileId(EditionedFileId(Id(1800))):66..72"#]], + expect![], ); } @@ -303,11 +304,11 @@ mod tests { } } "#, - expect![["callee Function FileId(0) 0..14 3..9"]], + expect!["callee Function FileId(EditionedFileId(Id(1800))) 0..14 3..9"], expect![[r#" - caller1 Function FileId(0) 15..45 18..25 : FileId(0):34..40 - test_caller Function FileId(0) 95..149 110..121 tests : FileId(0):134..140"#]], - expect![[]], + caller1 Function FileId(EditionedFileId(Id(1800))) 15..45 18..25 : FileId(EditionedFileId(Id(1800))):34..40 + test_caller Function FileId(EditionedFileId(Id(1800))) 95..149 110..121 tests : FileId(EditionedFileId(Id(1800))):134..140"#]], + expect![], ); } @@ -327,9 +328,11 @@ fn caller() { //- /foo/mod.rs pub fn callee() {} "#, - expect!["callee Function FileId(1) 0..18 7..13 foo"], - expect!["caller Function FileId(0) 27..56 30..36 : FileId(0):45..51"], - expect![[]], + expect!["callee Function FileId(EditionedFileId(Id(1801))) 0..18 7..13 foo"], + expect![ + "caller Function FileId(EditionedFileId(Id(1800))) 27..56 30..36 : FileId(EditionedFileId(Id(1800))):45..51" + ], + expect![], ); } @@ -345,9 +348,11 @@ fn call$0er() { callee(); } "#, - expect![["caller Function FileId(0) 15..58 18..24"]], - expect![[]], - expect!["callee Function FileId(0) 0..14 3..9 : FileId(0):33..39, FileId(0):47..53"], + expect!["caller Function FileId(EditionedFileId(Id(1800))) 15..58 18..24"], + expect![], + expect![ + "callee Function FileId(EditionedFileId(Id(1800))) 0..14 3..9 : FileId(EditionedFileId(Id(1800))):33..39, FileId(EditionedFileId(Id(1800))):47..53" + ], ); } @@ -367,9 +372,9 @@ fn call$0er() { //- /foo/mod.rs pub fn callee() {} "#, - expect![["caller Function FileId(0) 27..56 30..36"]], - expect![[]], - expect!["callee Function FileId(1) 0..18 7..13 foo : FileId(0):45..51"], + expect!["caller Function FileId(EditionedFileId(Id(1800))) 27..56 30..36"], + expect![], + expect!["callee Function FileId(EditionedFileId(Id(1801))) 0..18 7..13 foo : FileId(EditionedFileId(Id(1800))):45..51"], ); } @@ -391,9 +396,13 @@ fn caller3() { } "#, - expect![["caller2 Function FileId(0) 33..64 36..43"]], - expect!["caller1 Function FileId(0) 0..31 3..10 : FileId(0):19..26"], - expect!["caller3 Function FileId(0) 66..83 69..76 : FileId(0):52..59"], + expect!["caller2 Function FileId(EditionedFileId(Id(1800))) 33..64 36..43"], + expect![ + "caller1 Function FileId(EditionedFileId(Id(1800))) 0..31 3..10 : FileId(EditionedFileId(Id(1800))):19..26" + ], + expect![ + "caller3 Function FileId(EditionedFileId(Id(1800))) 66..83 69..76 : FileId(EditionedFileId(Id(1800))):52..59" + ], ); } @@ -412,9 +421,13 @@ fn main() { a$0() } "#, - expect![["a Function FileId(0) 0..18 3..4"]], - expect!["main Function FileId(0) 31..52 34..38 : FileId(0):47..48"], - expect!["b Function FileId(0) 20..29 23..24 : FileId(0):13..14"], + expect!["a Function FileId(EditionedFileId(Id(1800))) 0..18 3..4"], + expect![ + "main Function FileId(EditionedFileId(Id(1800))) 31..52 34..38 : FileId(EditionedFileId(Id(1800))):47..48" + ], + expect![ + "b Function FileId(EditionedFileId(Id(1800))) 20..29 23..24 : FileId(EditionedFileId(Id(1800))):13..14" + ], ); check_hierarchy( @@ -430,9 +443,11 @@ fn main() { a() } "#, - expect![["b Function FileId(0) 20..29 23..24"]], - expect!["a Function FileId(0) 0..18 3..4 : FileId(0):13..14"], - expect![[]], + expect!["b Function 
FileId(EditionedFileId(Id(1800))) 20..29 23..24"], + expect![ + "a Function FileId(EditionedFileId(Id(1800))) 0..18 3..4 : FileId(EditionedFileId(Id(1800))):13..14" + ], + expect![], ); } @@ -456,9 +471,11 @@ fn caller() { call!(call$0ee); } "#, - expect![[r#"callee Function FileId(0) 144..159 152..158"#]], - expect!["caller Function FileId(0) 160..194 163..169 : FileId(0):184..190"], - expect![[]], + expect!["callee Function MacroFile(MacroCallId(Id(3400))) 0..10 2..8"], + expect![ + "caller Function FileId(EditionedFileId(Id(1800))) 160..194 163..169 : MacroFile(MacroCallId(Id(3401))):0..6" + ], + expect![], ); check_hierarchy( false, @@ -478,9 +495,11 @@ fn caller() { call!(callee); } "#, - expect![[r#"callee Function FileId(0) 144..159 152..158"#]], - expect!["caller Function FileId(0) 160..194 163..169 : FileId(0):184..190"], - expect![[]], + expect!["callee Function MacroFile(MacroCallId(Id(3400))) 0..10 2..8"], + expect![ + "caller Function FileId(EditionedFileId(Id(1800))) 160..194 163..169 : MacroFile(MacroCallId(Id(3401))):0..6" + ], + expect![], ); } @@ -504,10 +523,10 @@ fn caller$0() { call!(callee); } "#, - expect![[r#"caller Function FileId(0) 160..194 163..169"#]], - expect![[]], + expect!["caller Function FileId(EditionedFileId(Id(1800))) 160..194 163..169"], + expect![], // FIXME - expect![[]], + expect![], ); } @@ -535,9 +554,11 @@ macro_rules! call { } } "#, - expect!["callee Function FileId(0) 22..37 30..36"], - expect!["caller Function FileId(0) 38..72 41..47 : FileId(0):62..68"], - expect![[]], + expect!["callee Function MacroFile(MacroCallId(Id(3400))) 0..10 2..8"], + expect![ + "caller Function FileId(EditionedFileId(Id(1800))) 38..72 41..47 : MacroFile(MacroCallId(Id(3401))):0..6" + ], + expect![], ); check_hierarchy( false, @@ -561,9 +582,11 @@ macro_rules! call { } } "#, - expect!["callee Function FileId(0) 22..37 30..36"], - expect!["caller Function FileId(0) 38..72 41..47 : FileId(0):62..68"], - expect![[]], + expect!["callee Function MacroFile(MacroCallId(Id(3400))) 0..10 2..8"], + expect![ + "caller Function FileId(EditionedFileId(Id(1800))) 38..72 41..47 : MacroFile(MacroCallId(Id(3401))):0..6" + ], + expect![], ); check_hierarchy( false, @@ -590,12 +613,11 @@ macro_rules! call { } } "#, - expect!["callee Function FileId(0) 22..37 30..36"], + expect!["callee Function MacroFile(MacroCallId(Id(3400))) 0..10 2..8"], expect![[r#" - caller Function FileId(0) 38..43 : FileId(0):44..50 - caller Function FileId(1) 130..136 130..136 : FileId(0):44..50 - callee Function FileId(0) 38..52 44..50 : FileId(0):44..50"#]], - expect![[]], + caller Function MacroFile(MacroCallId(Id(3401))) 0..20 2..8 : MacroFile(MacroCallId(Id(3401))):11..17 + callee Function MacroFile(MacroCallId(Id(3401))) 20..40 22..28 : MacroFile(MacroCallId(Id(3401))):31..37"#]], + expect![], ); } @@ -624,10 +646,10 @@ macro_rules! call { } } "#, - expect!["caller Function FileId(0) 38..72 41..47"], - expect![[]], + expect!["caller Function FileId(EditionedFileId(Id(1800))) 38..72 41..47"], + expect![], // FIXME - expect![[]], + expect![], ); check_hierarchy( false, @@ -652,10 +674,10 @@ macro_rules! 
call { } } "#, - expect!["caller Function FileId(0) 38..72 41..47"], - expect![[]], + expect!["caller Function FileId(EditionedFileId(Id(1800))) 38..72 41..47"], + expect![], // FIXME - expect![[]], + expect![], ); } @@ -678,9 +700,11 @@ fn caller() { S1::callee(); } "#, - expect!["callee Function FileId(0) 15..27 18..24 T1"], - expect!["caller Function FileId(0) 82..115 85..91 : FileId(0):104..110"], - expect![[]], + expect!["callee Function FileId(EditionedFileId(Id(1800))) 15..27 18..24 T1"], + expect![ + "caller Function FileId(EditionedFileId(Id(1800))) 82..115 85..91 : FileId(EditionedFileId(Id(1800))):104..110" + ], + expect![], ); } @@ -706,14 +730,18 @@ fn f3() { f1(); f2(); } "#, - expect!["f1 Function FileId(0) 25..52 28..30"], - expect![[r#" - main Function FileId(0) 0..23 3..7 : FileId(0):16..18 - f2 Function FileId(0) 54..81 57..59 : FileId(0):68..70 - f3 Function FileId(0) 83..118 94..96 : FileId(0):105..107"#]], - expect![[r#" - f2 Function FileId(0) 54..81 57..59 : FileId(0):39..41 - f3 Function FileId(0) 83..118 94..96 : FileId(0):45..47"#]], + expect!["f1 Function FileId(EditionedFileId(Id(1800))) 25..52 28..30"], + expect![ + r#" + main Function FileId(EditionedFileId(Id(1800))) 0..23 3..7 : FileId(EditionedFileId(Id(1800))):16..18 + f2 Function FileId(EditionedFileId(Id(1800))) 54..81 57..59 : FileId(EditionedFileId(Id(1800))):68..70 + f3 Function FileId(EditionedFileId(Id(1800))) 83..118 94..96 : FileId(EditionedFileId(Id(1800))):105..107"# + ], + expect![ + r#" + f2 Function FileId(EditionedFileId(Id(1800))) 54..81 57..59 : FileId(EditionedFileId(Id(1800))):39..41 + f3 Function FileId(EditionedFileId(Id(1800))) 83..118 94..96 : FileId(EditionedFileId(Id(1800))):45..47"# + ], ); check_hierarchy( @@ -736,11 +764,13 @@ fn f3() { f1(); f2(); } "#, - expect!["f1 Function FileId(0) 25..52 28..30"], + expect!["f1 Function FileId(EditionedFileId(Id(1800))) 25..52 28..30"], expect![[r#" - main Function FileId(0) 0..23 3..7 : FileId(0):16..18 - f2 Function FileId(0) 54..81 57..59 : FileId(0):68..70"#]], - expect!["f2 Function FileId(0) 54..81 57..59 : FileId(0):39..41"], + main Function FileId(EditionedFileId(Id(1800))) 0..23 3..7 : FileId(EditionedFileId(Id(1800))):16..18 + f2 Function FileId(EditionedFileId(Id(1800))) 54..81 57..59 : FileId(EditionedFileId(Id(1800))):68..70"#]], + expect![ + "f2 Function FileId(EditionedFileId(Id(1800))) 54..81 57..59 : FileId(EditionedFileId(Id(1800))):39..41" + ], ); } } diff --git a/crates/ide/src/child_modules.rs b/crates/ide/src/child_modules.rs index b781596187b9..0a8597c50d7b 100644 --- a/crates/ide/src/child_modules.rs +++ b/crates/ide/src/child_modules.rs @@ -1,11 +1,8 @@ -use hir::Semantics; -use ide_db::{FilePosition, RootDatabase}; -use syntax::{ - algo::find_node_at_offset, - ast::{self, AstNode}, -}; +use hir::{HirFilePosition, Semantics}; +use ide_db::RootDatabase; +use syntax::{algo::find_node_at_offset, ast}; -use crate::NavigationTarget; +use crate::{HirNavigationTarget, NavigationTarget}; // Feature: Child Modules // @@ -16,11 +13,15 @@ use crate::NavigationTarget; // | VS Code | **rust-analyzer: Locate child modules** | /// This returns `Vec` because a module may be included from several places. 
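The doc comment above notes that `child_modules` returns a `Vec` because one module's source file may be included from several places. A minimal stand-alone illustration of that situation (not part of this patch; two hypothetical files shown together) is a file mounted twice via `#[path]`, so the items in it belong to two distinct modules and module navigation has more than one answer:

    // lib.rs: foo.rs is mounted twice, producing two separate modules.
    #[path = "foo.rs"]
    mod a;
    #[path = "foo.rs"]
    mod b;

    // foo.rs: this single file now backs both `crate::a` and `crate::b`.
    pub fn item() {}
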
-pub(crate) fn child_modules(db: &RootDatabase, position: FilePosition) -> Vec { +pub(crate) fn child_modules( + db: &RootDatabase, + position: HirFilePosition, +) -> Vec { let sema = Semantics::new(db); - let source_file = sema.parse_guess_edition(position.file_id); + let file_id = sema.adjust_edition(position.file_id); + let source_file = sema.parse_or_expand(file_id); // First go to the parent module which contains the cursor - let module = find_node_at_offset::(source_file.syntax(), position.offset); + let module = find_node_at_offset::(&source_file, position.offset); match module { Some(module) => { @@ -28,14 +29,14 @@ pub(crate) fn child_modules(db: &RootDatabase, position: FilePosition) -> Vec { // Return all the child modules inside the source file - sema.file_to_module_defs(position.file_id) + sema.hir_file_to_module_defs(file_id) .flat_map(|module| module.children(db)) - .map(|module| NavigationTarget::from_module_to_decl(db, module).call_site()) + .map(|module| NavigationTarget::from_module_to_decl(db, module)) .collect() } } @@ -43,18 +44,21 @@ pub(crate) fn child_modules(db: &RootDatabase, position: FilePosition) -> Vec>(); - assert_eq!(expected.into_iter().map(|(fr, _)| fr).collect::>(), navs); + assert_eq!( + expected.into_iter().map(|(r, _)| r.into()).collect::>(), + navs + ); } #[test] diff --git a/crates/ide/src/doc_links/tests.rs b/crates/ide/src/doc_links/tests.rs index 6af156fa668f..6fd55a1ce926 100644 --- a/crates/ide/src/doc_links/tests.rs +++ b/crates/ide/src/doc_links/tests.rs @@ -24,7 +24,8 @@ fn check_external_docs( sysroot: Option<&str>, ) { let (analysis, position) = fixture::position(ra_fixture); - let links = analysis.external_docs(position, target_dir, sysroot).unwrap(); + let links = + analysis.external_docs(position.into_file_id(&analysis.db), target_dir, sysroot).unwrap(); let web_url = links.web_url; let local_url = links.local_url; @@ -45,7 +46,7 @@ fn check_external_docs( fn check_rewrite(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { let (analysis, position) = fixture::position(ra_fixture); let sema = &Semantics::new(&analysis.db); - let (cursor_def, docs, range) = def_under_cursor(sema, &position); + let (cursor_def, docs, range) = def_under_cursor(sema, position.into_file_id(&analysis.db)); let res = rewrite_links(sema.db, docs.as_str(), cursor_def, Some(range)); expect.assert_eq(&res) } @@ -53,10 +54,14 @@ fn check_rewrite(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect fn check_doc_links(#[rust_analyzer::rust_fixture] ra_fixture: &str) { let key_fn = |&(FileRange { file_id, range }, _): &_| (file_id, range.start()); - let (analysis, position, mut expected) = fixture::annotations(ra_fixture); + let (analysis, position, expected) = fixture::annotations(ra_fixture); + let mut expected = expected + .into_iter() + .map(|(range, s)| (range.into_file_id(&analysis.db), s)) + .collect::>(); expected.sort_by_key(key_fn); let sema = &Semantics::new(&analysis.db); - let (cursor_def, docs, range) = def_under_cursor(sema, &position); + let (cursor_def, docs, range) = def_under_cursor(sema, position.into_file_id(&analysis.db)); let defs = extract_definitions_from_docs(&docs); let actual: Vec<_> = defs .into_iter() @@ -79,7 +84,7 @@ fn check_doc_links(#[rust_analyzer::rust_fixture] ra_fixture: &str) { fn def_under_cursor( sema: &Semantics<'_, RootDatabase>, - position: &FilePosition, + position: FilePosition, ) -> (Definition, Documentation, DocsRangeMap) { let (docs, def) = sema .parse_guess_edition(position.file_id) diff 
--git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs index f31886b96976..1deb66103852 100644 --- a/crates/ide/src/expand_macro.rs +++ b/crates/ide/src/expand_macro.rs @@ -1,15 +1,13 @@ use hir::db::ExpandDatabase; -use hir::{ExpandResult, InFile, InRealFile, Semantics}; +use hir::{ExpandResult, HirFileId, HirFilePosition, InFile, InRealFile, Semantics}; use ide_db::{ - FileId, RootDatabase, base_db::Crate, helpers::pick_best_token, + RootDatabase, base_db::Crate, helpers::pick_best_token, syntax_helpers::prettify_macro_expansion, }; use span::{SpanMap, SyntaxContext, TextRange, TextSize}; use stdx::format_to; use syntax::{AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T, ast, ted}; -use crate::FilePosition; - pub struct ExpandedMacro { pub name: String, pub expansion: String, @@ -24,13 +22,19 @@ pub struct ExpandedMacro { // | VS Code | **rust-analyzer: Expand macro recursively at caret** | // // ![Expand Macro Recursively](https://user-images.githubusercontent.com/48062697/113020648-b3973180-917a-11eb-84a9-ecb921293dc5.gif) -pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option { +pub(crate) fn expand_macro( + db: &RootDatabase, + hir::FilePositionWrapper { mut file_id, offset }: HirFilePosition, +) -> Option { let sema = Semantics::new(db); - let file_id = sema.attach_first_edition(position.file_id)?; - let file = sema.parse(file_id); - let krate = sema.file_to_module_def(file_id.file_id(db))?.krate().into(); + file_id = sema.adjust_edition(file_id); + let file = &sema.parse_or_expand(file_id); + let krate = match file_id { + HirFileId::FileId(file_id) => sema.file_to_module_def(file_id)?.krate().into(), + HirFileId::MacroFile(macro_file_id) => db.lookup_intern_macro_call(macro_file_id).def.krate, + }; - let tok = pick_best_token(file.syntax().token_at_offset(position.offset), |kind| match kind { + let tok = pick_best_token(file.token_at_offset(offset), |kind| match kind { SyntaxKind::IDENT => 1, _ => 0, })?; @@ -66,14 +70,8 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option< let ExpandResult { err, value: expansion } = expansions.get(idx)?.clone(); let expansion_file_id = sema.hir_file_for(&expansion).macro_file()?; let expansion_span_map = db.expansion_span_map(expansion_file_id); - let mut expansion = format( - db, - SyntaxKind::MACRO_ITEMS, - position.file_id, - expansion, - &expansion_span_map, - krate, - ); + let mut expansion = + format(db, SyntaxKind::MACRO_ITEMS, expansion, &expansion_span_map, krate); if let Some(err) = err { expansion.insert_str( 0, @@ -87,10 +85,13 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option< return derive; } - let mut anc = sema - .descend_token_into_include_expansion(InRealFile::new(file_id, tok)) - .value - .parent_ancestors(); + let mut anc = match file_id { + HirFileId::FileId(file_id) => { + sema.descend_token_into_include_expansion(InRealFile::new(file_id, tok)).value + } + HirFileId::MacroFile(_) => tok, + } + .parent_ancestors(); let mut span_map = SpanMap::empty(); let mut error = String::new(); let (name, expanded, kind) = loop { @@ -127,7 +128,7 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option< // FIXME: // macro expansion may lose all white space information // But we hope someday we can use ra_fmt for that - let mut expansion = format(db, kind, position.file_id, expanded, &span_map, krate); + let mut expansion = format(db, kind, expanded, &span_map, krate); if !error.is_empty() { 
expansion.insert_str(0, &format!("Expansion had errors:{error}\n\n")); @@ -203,22 +204,21 @@ fn expand( fn format( db: &RootDatabase, kind: SyntaxKind, - file_id: FileId, expanded: SyntaxNode, span_map: &SpanMap, krate: Crate, ) -> String { let expansion = prettify_macro_expansion(db, expanded, span_map, krate).to_string(); - _format(db, kind, file_id, &expansion).unwrap_or(expansion) + _format(db, kind, &expansion, krate).unwrap_or(expansion) } #[cfg(any(test, target_arch = "wasm32", target_os = "emscripten"))] fn _format( _db: &RootDatabase, _kind: SyntaxKind, - _file_id: FileId, expansion: &str, + _krate: Crate, ) -> Option { // remove trailing spaces for test use itertools::Itertools; @@ -229,11 +229,9 @@ fn _format( fn _format( db: &RootDatabase, kind: SyntaxKind, - file_id: FileId, expansion: &str, + crate_id: Crate, ) -> Option { - use ide_db::base_db::RootQueryDb; - // hack until we get hygiene working (same character amount to preserve formatting as much as possible) const DOLLAR_CRATE_REPLACE: &str = "__r_a_"; const BUILTIN_REPLACE: &str = "builtin__POUND"; @@ -247,7 +245,6 @@ fn _format( }; let expansion = format!("{prefix}{expansion}{suffix}"); - let &crate_id = db.relevant_crates(file_id).iter().next()?; let edition = crate_id.data(db).edition; #[allow(clippy::disallowed_methods)] @@ -295,7 +292,7 @@ mod tests { #[track_caller] fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { let (analysis, pos) = fixture::position(ra_fixture); - let expansion = analysis.expand_macro(pos).unwrap().unwrap(); + let expansion = analysis.expand_macro(pos.into()).unwrap().unwrap(); let actual = format!("{}\n{}", expansion.name, expansion.expansion); expect.assert_eq(&actual); } @@ -319,7 +316,7 @@ $0concat!("test", 10, 'b', true);"#, //- minicore: asm $0asm!("0x300, x0");"#, ); - let expansion = analysis.expand_macro(pos).unwrap(); + let expansion = analysis.expand_macro(pos.into()).unwrap(); assert!(expansion.is_none()); } diff --git a/crates/ide/src/extend_selection.rs b/crates/ide/src/extend_selection.rs index a374f9752fcf..88e4713d62f6 100644 --- a/crates/ide/src/extend_selection.rs +++ b/crates/ide/src/extend_selection.rs @@ -326,9 +326,9 @@ mod tests { fn do_check(before: &str, afters: &[&str]) { let (analysis, position) = fixture::position(before); - let before = analysis.file_text(position.file_id).unwrap(); + let before = analysis.file_text(position.file_id.file_id(&analysis.db)).unwrap(); let range = TextRange::empty(position.offset); - let mut frange = FileRange { file_id: position.file_id, range }; + let mut frange = FileRange { file_id: position.file_id.file_id(&analysis.db), range }; for &after in afters { frange.range = analysis.extend_selection(frange).unwrap(); diff --git a/crates/ide/src/file_structure.rs b/crates/ide/src/file_structure.rs index 347da4e85b4a..bc48eeebe280 100644 --- a/crates/ide/src/file_structure.rs +++ b/crates/ide/src/file_structure.rs @@ -1,6 +1,6 @@ use ide_db::SymbolKind; use syntax::{ - AstNode, AstToken, NodeOrToken, SourceFile, SyntaxNode, SyntaxToken, TextRange, WalkEvent, + AstNode, AstToken, NodeOrToken, SyntaxNode, SyntaxToken, TextRange, WalkEvent, ast::{self, HasAttrs, HasGenericParams, HasName}, match_ast, }; @@ -36,11 +36,11 @@ pub enum StructureNodeKind { // | VS Code | Ctrl+Shift+O | // // ![File Structure](https://user-images.githubusercontent.com/48062697/113020654-b42fc800-917a-11eb-8388-e7dc4d92b02e.gif) -pub(crate) fn file_structure(file: &SourceFile) -> Vec { +pub(crate) fn file_structure(file: &SyntaxNode) -> 
Vec { let mut res = Vec::new(); let mut stack = Vec::new(); - for event in file.syntax().preorder_with_tokens() { + for event in file.preorder_with_tokens() { match event { WalkEvent::Enter(NodeOrToken::Node(node)) => { if let Some(mut symbol) = structure_node(&node) { @@ -251,12 +251,13 @@ fn structure_token(token: SyntaxToken) -> Option { #[cfg(test)] mod tests { use expect_test::{Expect, expect}; + use syntax::SourceFile; use super::*; fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { let file = SourceFile::parse(ra_fixture, span::Edition::CURRENT).ok().unwrap(); - let structure = file_structure(&file); + let structure = file_structure(file.syntax()); expect.assert_debug_eq(&structure) } diff --git a/crates/ide/src/fixture.rs b/crates/ide/src/fixture.rs index fbf89042fae1..022ff243436f 100644 --- a/crates/ide/src/fixture.rs +++ b/crates/ide/src/fixture.rs @@ -1,16 +1,17 @@ //! Utilities for creating `Analysis` instances for tests. +use hir::{EditionedFileId, FilePosition, FileRange}; use test_fixture::ChangeFixture; use test_utils::{RangeOrOffset, extract_annotations}; -use crate::{Analysis, AnalysisHost, FileId, FilePosition, FileRange}; +use crate::{Analysis, AnalysisHost}; /// Creates analysis for a single file. -pub(crate) fn file(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Analysis, FileId) { +pub(crate) fn file(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Analysis, EditionedFileId) { let mut host = AnalysisHost::default(); let change_fixture = ChangeFixture::parse(&host.db, ra_fixture); host.db.enable_proc_attr_macros(); host.db.apply_change(change_fixture.change); - (host.analysis(), change_fixture.files[0].file_id(&host.db)) + (host.analysis(), change_fixture.files[0]) } /// Creates analysis from a multi-file fixture, returns positions marked with $0. @@ -23,7 +24,7 @@ pub(crate) fn position( host.db.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); let offset = range_or_offset.expect_offset(); - (host.analysis(), FilePosition { file_id: file_id.file_id(&host.db), offset }) + (host.analysis(), FilePosition { file_id, offset }) } /// Creates analysis for a single file, returns range marked with a pair of $0. @@ -34,19 +35,19 @@ pub(crate) fn range(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Analysi host.db.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); let range = range_or_offset.expect_range(); - (host.analysis(), FileRange { file_id: file_id.file_id(&host.db), range }) + (host.analysis(), FileRange { file_id, range }) } /// Creates analysis for a single file, returns range marked with a pair of $0 or a position marked with $0. pub(crate) fn range_or_position( #[rust_analyzer::rust_fixture] ra_fixture: &str, -) -> (Analysis, FileId, RangeOrOffset) { +) -> (Analysis, EditionedFileId, RangeOrOffset) { let mut host = AnalysisHost::default(); let change_fixture = ChangeFixture::parse(&host.db, ra_fixture); host.db.enable_proc_attr_macros(); host.db.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); - (host.analysis(), file_id.file_id(&host.db), range_or_offset) + (host.analysis(), file_id, range_or_offset) } /// Creates analysis from a multi-file fixture, returns positions marked with $0. 
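The fixture helpers above all recover a cursor from a `$0` marker embedded in the fixture text. As a rough, self-contained sketch of that convention, assuming nothing beyond the standard library (the real parsing lives in `test_utils`/`test_fixture` and also handles ranges and annotations):

    /// Splits a fixture string at the first `$0` marker, returning the cursor
    /// offset and the text with the marker removed.
    fn extract_offset(fixture: &str) -> Option<(usize, String)> {
        let offset = fixture.find("$0")?;
        let mut text = String::with_capacity(fixture.len() - 2);
        text.push_str(&fixture[..offset]);
        text.push_str(&fixture[offset + 2..]);
        Some((offset, text))
    }

    fn main() {
        let (offset, text) = extract_offset("fn main() { let x$0 = 1; }").unwrap();
        assert_eq!(offset, 17);
        assert_eq!(text, "fn main() { let x = 1; }");
    }
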
@@ -60,19 +61,16 @@ pub(crate) fn annotations( let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); let offset = range_or_offset.expect_offset(); - let db = &host.db; let annotations = change_fixture .files .iter() .flat_map(|&file_id| { let file_text = host.analysis().file_text(file_id.file_id(&host.db)).unwrap(); let annotations = extract_annotations(&file_text); - annotations - .into_iter() - .map(move |(range, data)| (FileRange { file_id: file_id.file_id(db), range }, data)) + annotations.into_iter().map(move |(range, data)| (FileRange { file_id, range }, data)) }) .collect(); - (host.analysis(), FilePosition { file_id: file_id.file_id(&host.db), offset }, annotations) + (host.analysis(), FilePosition { file_id, offset }, annotations) } /// Creates analysis from a multi-file fixture with annotations without $0 @@ -91,9 +89,7 @@ pub(crate) fn annotations_without_marker( .flat_map(|&file_id| { let file_text = host.analysis().file_text(file_id.file_id(db)).unwrap(); let annotations = extract_annotations(&file_text); - annotations - .into_iter() - .map(move |(range, data)| (FileRange { file_id: file_id.file_id(db), range }, data)) + annotations.into_iter().map(move |(range, data)| (FileRange { file_id, range }, data)) }) .collect(); (host.analysis(), annotations) diff --git a/crates/ide/src/folding_ranges.rs b/crates/ide/src/folding_ranges.rs index 9bd8504733a4..6d6a1b1bfcd5 100755 --- a/crates/ide/src/folding_ranges.rs +++ b/crates/ide/src/folding_ranges.rs @@ -1,6 +1,6 @@ use ide_db::{FxHashSet, syntax_helpers::node_ext::vis_eq}; use syntax::{ - Direction, NodeOrToken, SourceFile, + Direction, NodeOrToken, SyntaxKind::{self, *}, SyntaxNode, TextRange, TextSize, ast::{self, AstNode, AstToken}, @@ -43,7 +43,7 @@ pub struct Fold { // // Defines folding regions for curly braced blocks, runs of consecutive use, mod, const or static // items, and `region` / `endregion` comment markers. -pub(crate) fn folding_ranges(file: &SourceFile) -> Vec { +pub(crate) fn folding_ranges(file: &SyntaxNode) -> Vec { let mut res = vec![]; let mut visited_comments = FxHashSet::default(); let mut visited_nodes = FxHashSet::default(); @@ -51,7 +51,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec { // regions can be nested, here is a LIFO buffer let mut region_starts: Vec = vec![]; - for element in file.syntax().descendants_with_tokens() { + for element in file.descendants_with_tokens() { // Fold items that span multiple lines if let Some(kind) = fold_kind(element.kind()) { let is_multiline = match &element { @@ -287,6 +287,7 @@ fn fold_range_for_multiline_match_arm(match_arm: ast::MatchArm) -> Option Option>> { + position @ HirFilePosition { file_id, offset }: HirFilePosition, +) -> Option>> { let sema = Semantics::new(db); - let file = sema.parse_guess_edition(file_id).syntax().clone(); + let file_id = sema.adjust_edition(file_id); + let file = sema.parse_or_expand(sema.adjust_edition(file_id)); let original_token = file .token_at_offset(offset) .find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?; let range = original_token.text_range(); - let info: Vec = sema + let info: Vec = sema .descend_into_macros_no_opaque(original_token, false) .iter() .filter_map(|token| { @@ -38,35 +40,34 @@ pub(crate) fn goto_declaration( ast::NameRef(name_ref) => match NameRefClass::classify(&sema, &name_ref)? { NameRefClass::Definition(it, _) => Some(it), NameRefClass::FieldShorthand { field_ref, .. 
} => - return field_ref.try_to_nav(db), + return field_ref.try_to_nav_hir(db), NameRefClass::ExternCrateShorthand { decl, .. } => - return decl.try_to_nav(db), + return decl.try_to_nav_hir(db), }, ast::Name(name) => match NameClass::classify(&sema, &name)? { NameClass::Definition(it) | NameClass::ConstReference(it) => Some(it), NameClass::PatFieldShorthand { field_ref, .. } => - return field_ref.try_to_nav(db), + return field_ref.try_to_nav_hir(db), }, _ => None } }; let assoc = match def? { Definition::Module(module) => { - return Some(NavigationTarget::from_module_to_decl(db, module)); + return Some(HirNavigationTarget::from_module_to_decl(db, module)); } Definition::Const(c) => c.as_assoc_item(db), Definition::TypeAlias(ta) => ta.as_assoc_item(db), Definition::Function(f) => f.as_assoc_item(db), - Definition::ExternCrateDecl(it) => return it.try_to_nav(db), + Definition::ExternCrateDecl(it) => return it.try_to_nav_hir(db), _ => None, }?; let trait_ = assoc.implemented_trait(db)?; let name = Some(assoc.name(db)?); let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?; - item.try_to_nav(db) + item.try_to_nav_hir(db) }) - .flatten() .collect(); if info.is_empty() { goto_definition(db, position) } else { Some(RangeInfo::new(range, info)) } @@ -74,7 +75,7 @@ pub(crate) fn goto_declaration( #[cfg(test)] mod tests { - use ide_db::FileRange; + use hir::HirFileRange; use itertools::Itertools; use crate::fixture; @@ -82,7 +83,7 @@ mod tests { fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str) { let (analysis, position, expected) = fixture::annotations(ra_fixture); let navs = analysis - .goto_declaration(position) + .goto_declaration(position.into()) .unwrap() .expect("no declaration or definition found") .info; @@ -90,17 +91,14 @@ mod tests { panic!("unresolved reference") } - let cmp = |&FileRange { file_id, range }: &_| (file_id, range.start()); + let cmp = |&HirFileRange { file_id, range }: &_| (file_id, range.start()); let navs = navs .into_iter() - .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }) - .sorted_by_key(cmp) - .collect::>(); - let expected = expected - .into_iter() - .map(|(FileRange { file_id, range }, _)| FileRange { file_id, range }) + .map(|nav| HirFileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }) .sorted_by_key(cmp) .collect::>(); + let expected = + expected.into_iter().map(|(r, _)| r.into()).sorted_by_key(cmp).collect::>(); assert_eq!(expected, navs); } diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index 29fc68bb50f1..0729f601b330 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs @@ -1,12 +1,13 @@ use std::{iter, mem::discriminant}; use crate::{ - FilePosition, NavigationTarget, RangeInfo, TryToNav, UpmappingResult, + RangeInfo, TryToNav, doc_links::token_as_doc_comment, - navigation_target::{self, ToNav}, + navigation_target::{HirNavigationTarget, ToNav}, }; use hir::{ - AsAssocItem, AssocItem, CallableKind, FileRange, HasCrate, InFile, ModuleDef, Semantics, sym, + AsAssocItem, AssocItem, CallableKind, EditionedFileId, HasCrate, HirFileId, HirFilePosition, + InFile, ModuleDef, Semantics, sym, }; use ide_db::{ RootDatabase, SymbolKind, @@ -16,7 +17,6 @@ use ide_db::{ helpers::pick_best_token, }; use itertools::Itertools; -use span::{Edition, FileId}; use syntax::{ AstNode, AstToken, SyntaxKind::*, @@ -38,12 +38,12 @@ use syntax::{ // ![Go to 
Definition](https://user-images.githubusercontent.com/48062697/113065563-025fbe00-91b1-11eb-83e4-a5a703610b23.gif) pub(crate) fn goto_definition( db: &RootDatabase, - FilePosition { file_id, offset }: FilePosition, -) -> Option>> { + HirFilePosition { file_id, offset }: HirFilePosition, +) -> Option>> { let sema = &Semantics::new(db); - let file = sema.parse_guess_edition(file_id).syntax().clone(); - let edition = - sema.attach_first_edition(file_id).map(|it| it.edition(db)).unwrap_or(Edition::CURRENT); + let file_id = sema.adjust_edition(file_id); + let file = sema.parse_or_expand(sema.adjust_edition(file_id)); + let edition = file_id.edition(sema.db); let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind { IDENT | INT_NUMBER @@ -62,8 +62,8 @@ pub(crate) fn goto_definition( })?; if let Some(doc_comment) = token_as_doc_comment(&original_token) { return doc_comment.get_definition_with_descend_at(sema, offset, |def, _, link_range| { - let nav = def.try_to_nav(db)?; - Some(RangeInfo::new(link_range, nav.collect())) + let nav = def.try_to_nav_hir(db)?; + Some(RangeInfo::new(link_range, vec![nav])) }); } @@ -116,9 +116,8 @@ pub(crate) fn goto_definition( if let Definition::ExternCrateDecl(crate_def) = def { return crate_def .resolved_crate(db) - .map(|it| it.root_module().to_nav(sema.db)) + .map(|it| it.root_module().to_nav_hir(sema.db)) .into_iter() - .flatten() .collect(); } try_filter_trait_item_definition(sema, &def) @@ -129,7 +128,7 @@ pub(crate) fn goto_definition( }) .flatten() .unique() - .collect::>(); + .collect::>(); Some(RangeInfo::new(original_token.text_range(), navs)) } @@ -138,7 +137,7 @@ pub(crate) fn goto_definition( fn find_definition_for_known_blanket_dual_impls( sema: &Semantics<'_, RootDatabase>, original_token: &SyntaxToken, -) -> Option> { +) -> Option> { let method_call = ast::MethodCallExpr::cast(original_token.parent()?.parent()?)?; let callable = sema.resolve_method_call_as_callable(&method_call)?; let CallableKind::Function(f) = callable.kind() else { return None }; @@ -208,8 +207,8 @@ fn find_definition_for_known_blanket_dual_impls( fn try_lookup_include_path( sema: &Semantics<'_, RootDatabase>, token: InFile, - file_id: FileId, -) -> Option { + hir_file_id: HirFileId, +) -> Option { let file = token.file_id.macro_file()?; // Check that we are in the eager argument expansion of an include macro @@ -221,10 +220,13 @@ fn try_lookup_include_path( } let path = token.value.value().ok()?; - let file_id = sema.db.resolve_path(AnchoredPath { anchor: file_id, path: &path })?; + let file_id = sema.db.resolve_path(AnchoredPath { + anchor: hir_file_id.original_file(sema.db).file_id(sema.db), + path: &path, + })?; let size = sema.db.file_text(file_id).text(sema.db).len().try_into().ok()?; - Some(NavigationTarget { - file_id, + Some(HirNavigationTarget { + file_id: EditionedFileId::new(sema.db, file_id, hir_file_id.edition(sema.db)).into(), full_range: TextRange::new(0.into(), size), name: path.into(), alias: None, @@ -239,7 +241,7 @@ fn try_lookup_include_path( fn try_lookup_macro_def_in_macro_use( sema: &Semantics<'_, RootDatabase>, token: SyntaxToken, -) -> Option { +) -> Option { let extern_crate = token.parent()?.ancestors().find_map(ast::ExternCrate::cast)?; let extern_crate = sema.to_def(&extern_crate)?; let krate = extern_crate.resolved_crate(sema.db)?; @@ -247,8 +249,8 @@ fn try_lookup_macro_def_in_macro_use( for mod_def in krate.root_module().declarations(sema.db) { if let ModuleDef::Macro(mac) = mod_def { if mac.name(sema.db).as_str() == 
token.text() { - if let Some(nav) = mac.try_to_nav(sema.db) { - return Some(nav.call_site); + if let Some(nav) = mac.try_to_nav_hir(sema.db) { + return Some(nav); } } } @@ -267,7 +269,7 @@ fn try_lookup_macro_def_in_macro_use( fn try_filter_trait_item_definition( sema: &Semantics<'_, RootDatabase>, def: &Definition, -) -> Option> { +) -> Option> { let db = sema.db; let assoc = def.as_assoc_item(db)?; match assoc { @@ -280,8 +282,8 @@ fn try_filter_trait_item_definition( .items(db) .iter() .filter(|itm| discriminant(*itm) == discriminant_value) - .find_map(|itm| (itm.name(db)? == name).then(|| itm.try_to_nav(db)).flatten()) - .map(|it| it.collect()) + .find_map(|itm| (itm.name(db)? == name).then(|| itm.try_to_nav_hir(db)).flatten()) + .map(|it| vec![it]) } } } @@ -289,7 +291,7 @@ fn try_filter_trait_item_definition( fn handle_control_flow_keywords( sema: &Semantics<'_, RootDatabase>, token: &SyntaxToken, -) -> Option> { +) -> Option> { match token.kind() { // For `fn` / `loop` / `while` / `for` / `async` / `match`, return the keyword it self, // so that VSCode will find the references when using `ctrl + click` @@ -337,7 +339,7 @@ pub(crate) fn find_fn_or_blocks( fn nav_for_exit_points( sema: &Semantics<'_, RootDatabase>, token: &SyntaxToken, -) -> Option> { +) -> Option> { let db = sema.db; let token_kind = token.kind(); @@ -349,31 +351,16 @@ fn nav_for_exit_points( match_ast! { match node { ast::Fn(fn_) => { - let mut nav = sema.to_def(&fn_)?.try_to_nav(db)?; + let mut nav = sema.to_def(&fn_)?.try_to_nav_hir(db)?; // For async token, we navigate to itself, which triggers // VSCode to find the references - let focus_token = if matches!(token_kind, T![async]) { + let range = if matches!(token_kind, T![async]) { fn_.async_token()? } else { fn_.fn_token()? 
- }; - - let focus_frange = InFile::new(file_id, focus_token.text_range()) - .original_node_file_range_opt(db) - .map(|(frange, _)| frange); - - if let Some(FileRange { file_id, range }) = focus_frange { - let contains_frange = |nav: &NavigationTarget| { - nav.file_id == file_id.file_id(db) && nav.full_range.contains_range(range) - }; - - if let Some(def_site) = nav.def_site.as_mut() { - if contains_frange(def_site) { - def_site.focus_range = Some(range); - } - } else if contains_frange(&nav.call_site) { - nav.call_site.focus_range = Some(range); - } + }.text_range(); + if nav.file_id == file_id && nav.full_range.contains_range(range) { + nav.focus_range = Some(range); } Some(nav) @@ -381,19 +368,19 @@ fn nav_for_exit_points( ast::ClosureExpr(c) => { let pipe_tok = c.param_list().and_then(|it| it.pipe_token())?.text_range(); let closure_in_file = InFile::new(file_id, c.into()); - Some(expr_to_nav(db, closure_in_file, Some(pipe_tok))) + Some(expr_to_nav(closure_in_file, Some(pipe_tok))) }, ast::BlockExpr(blk) => { match blk.modifier() { Some(ast::BlockModifier::Async(_)) => { let async_tok = blk.async_token()?.text_range(); let blk_in_file = InFile::new(file_id, blk.into()); - Some(expr_to_nav(db, blk_in_file, Some(async_tok))) + Some(expr_to_nav(blk_in_file, Some(async_tok))) }, Some(ast::BlockModifier::Try(_)) if token_kind != T![return] => { let try_tok = blk.try_token()?.text_range(); let blk_in_file = InFile::new(file_id, blk.into()); - Some(expr_to_nav(db, blk_in_file, Some(try_tok))) + Some(expr_to_nav(blk_in_file, Some(try_tok))) }, _ => None, } @@ -402,8 +389,7 @@ fn nav_for_exit_points( } } }) - .flatten() - .collect_vec(); + .collect(); Some(navs) } @@ -444,9 +430,7 @@ pub(crate) fn find_branch_root( fn nav_for_branch_exit_points( sema: &Semantics<'_, RootDatabase>, token: &SyntaxToken, -) -> Option> { - let db = sema.db; - +) -> Option> { let navs = match token.kind() { T![match] => find_branch_root(sema, token) .into_iter() @@ -455,9 +439,8 @@ fn nav_for_branch_exit_points( let match_expr = ast::MatchExpr::cast(node)?; let focus_range = match_expr.match_token()?.text_range(); let match_expr_in_file = InFile::new(file_id, match_expr.into()); - Some(expr_to_nav(db, match_expr_in_file, Some(focus_range))) + Some(expr_to_nav(match_expr_in_file, Some(focus_range))) }) - .flatten() .collect_vec(), T![=>] => find_branch_root(sema, token) @@ -470,9 +453,8 @@ fn nav_for_branch_exit_points( let file_id = sema.hir_file_for(match_expr.syntax()); let focus_range = match_arm.fat_arrow_token()?.text_range(); let match_expr_in_file = InFile::new(file_id, match_expr.into()); - Some(expr_to_nav(db, match_expr_in_file, Some(focus_range))) + Some(expr_to_nav(match_expr_in_file, Some(focus_range))) }) - .flatten() .collect_vec(), T![if] => find_branch_root(sema, token) @@ -482,9 +464,8 @@ fn nav_for_branch_exit_points( let if_expr = ast::IfExpr::cast(node)?; let focus_range = if_expr.if_token()?.text_range(); let if_expr_in_file = InFile::new(file_id, if_expr.into()); - Some(expr_to_nav(db, if_expr_in_file, Some(focus_range))) + Some(expr_to_nav(if_expr_in_file, Some(focus_range))) }) - .flatten() .collect_vec(), _ => return Some(Vec::new()), @@ -541,9 +522,7 @@ pub(crate) fn find_loops( fn nav_for_break_points( sema: &Semantics<'_, RootDatabase>, token: &SyntaxToken, -) -> Option> { - let db = sema.db; - +) -> Option> { let navs = find_loops(sema, token)? 
.into_iter() .filter_map(|expr| { @@ -557,31 +536,26 @@ fn nav_for_break_points( ast::Expr::BlockExpr(blk) => blk.label().unwrap().syntax().text_range(), _ => return None, }; - let nav = expr_to_nav(db, expr_in_file, Some(focus_range)); + let nav = expr_to_nav(expr_in_file, Some(focus_range)); Some(nav) }) - .flatten() - .collect_vec(); + .collect(); Some(navs) } -fn def_to_nav(db: &RootDatabase, def: Definition) -> Vec { - def.try_to_nav(db).map(|it| it.collect()).unwrap_or_default() +fn def_to_nav(db: &RootDatabase, def: Definition) -> Vec { + def.try_to_nav_hir(db).map(|it| vec![it]).unwrap_or_default() } fn expr_to_nav( - db: &RootDatabase, InFile { file_id, value }: InFile, focus_range: Option, -) -> UpmappingResult { +) -> HirNavigationTarget { let kind = SymbolKind::Label; let value_range = value.syntax().text_range(); - let navs = navigation_target::orig_range_with_focus_r(db, file_id, value_range, focus_range); - navs.map(|(hir::FileRangeWrapper { file_id, range }, focus_range)| { - NavigationTarget::from_syntax(file_id, "".into(), focus_range, range, kind) - }) + HirNavigationTarget::from_syntax(file_id, "".into(), focus_range, value_range, kind) } #[cfg(test)] @@ -594,17 +568,19 @@ mod tests { #[track_caller] fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str) { let (analysis, position, expected) = fixture::annotations(ra_fixture); - let navs = analysis.goto_definition(position).unwrap().expect("no definition found").info; + let navs = + analysis.goto_definition(position.into()).unwrap().expect("no definition found").info; let cmp = |&FileRange { file_id, range }: &_| (file_id, range.start()); let navs = navs .into_iter() - .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }) + .flat_map(|nav| nav.upmap(&analysis.db)) + .map(|nav| nav.focus_or_full_file_range()) .sorted_by_key(cmp) .collect::>(); let expected = expected .into_iter() - .map(|(FileRange { file_id, range }, _)| FileRange { file_id, range }) + .map(|(r, _)| r.into_file_id(&analysis.db)) .sorted_by_key(cmp) .collect::>(); @@ -613,14 +589,16 @@ mod tests { fn check_unresolved(#[rust_analyzer::rust_fixture] ra_fixture: &str) { let (analysis, position) = fixture::position(ra_fixture); - let navs = analysis.goto_definition(position).unwrap().expect("no definition found").info; + let navs = + analysis.goto_definition(position.into()).unwrap().expect("no definition found").info; assert!(navs.is_empty(), "didn't expect this to resolve anywhere: {navs:?}") } fn check_name(expected_name: &str, #[rust_analyzer::rust_fixture] ra_fixture: &str) { let (analysis, position, _) = fixture::annotations(ra_fixture); - let navs = analysis.goto_definition(position).unwrap().expect("no definition found").info; + let navs = + analysis.goto_definition(position.into()).unwrap().expect("no definition found").info; assert!(navs.len() < 2, "expected single navigation target but encountered {}", navs.len()); let Some(target) = navs.into_iter().next() else { panic!("expected single navigation target but encountered none"); diff --git a/crates/ide/src/goto_implementation.rs b/crates/ide/src/goto_implementation.rs index 02d96a647328..50f2bd51316e 100644 --- a/crates/ide/src/goto_implementation.rs +++ b/crates/ide/src/goto_implementation.rs @@ -1,4 +1,4 @@ -use hir::{AsAssocItem, Impl, Semantics}; +use hir::{AsAssocItem, HirFilePosition, Impl, Semantics}; use ide_db::{ RootDatabase, defs::{Definition, NameClass, NameRefClass}, @@ -6,7 +6,7 @@ use ide_db::{ }; use syntax::{AstNode, SyntaxKind::*, T, ast}; -use 
crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav}; +use crate::{RangeInfo, TryToNav, navigation_target::HirNavigationTarget}; // Feature: Go to Implementation // @@ -19,11 +19,11 @@ use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav}; // ![Go to Implementation](https://user-images.githubusercontent.com/48062697/113065566-02f85480-91b1-11eb-9288-aaad8abd8841.gif) pub(crate) fn goto_implementation( db: &RootDatabase, - FilePosition { file_id, offset }: FilePosition, -) -> Option>> { + HirFilePosition { file_id, offset }: HirFilePosition, +) -> Option>> { let sema = Semantics::new(db); - let source_file = sema.parse_guess_edition(file_id); - let syntax = source_file.syntax().clone(); + let file_id = sema.adjust_edition(file_id); + let syntax = sema.parse_or_expand(sema.adjust_edition(file_id)); let original_token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind { IDENT | T![self] | INT_NUMBER => 1, @@ -83,22 +83,20 @@ pub(crate) fn goto_implementation( Some(RangeInfo { range, info: navs }) } -fn impls_for_ty(sema: &Semantics<'_, RootDatabase>, ty: hir::Type<'_>) -> Vec { +fn impls_for_ty(sema: &Semantics<'_, RootDatabase>, ty: hir::Type<'_>) -> Vec { Impl::all_for_type(sema.db, ty) .into_iter() - .filter_map(|imp| imp.try_to_nav(sema.db)) - .flatten() + .filter_map(|imp| imp.try_to_nav_hir(sema.db)) .collect() } fn impls_for_trait( sema: &Semantics<'_, RootDatabase>, trait_: hir::Trait, -) -> Vec { +) -> Vec { Impl::all_for_trait(sema.db, trait_) .into_iter() - .filter_map(|imp| imp.try_to_nav(sema.db)) - .flatten() + .filter_map(|imp| imp.try_to_nav_hir(sema.db)) .collect() } @@ -106,7 +104,7 @@ fn impls_for_trait_item( sema: &Semantics<'_, RootDatabase>, trait_: hir::Trait, fun_name: hir::Name, -) -> Vec { +) -> Vec { Impl::all_for_trait(sema.db, trait_) .into_iter() .filter_map(|imp| { @@ -114,9 +112,8 @@ fn impls_for_trait_item( let itm_name = itm.name(sema.db)?; (itm_name == fun_name).then_some(*itm) })?; - item.try_to_nav(sema.db) + item.try_to_nav_hir(sema.db) }) - .flatten() .collect() } @@ -130,17 +127,21 @@ mod tests { fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str) { let (analysis, position, expected) = fixture::annotations(ra_fixture); - let navs = analysis.goto_implementation(position).unwrap().unwrap().info; + let navs = analysis.goto_implementation(position.into()).unwrap().unwrap().info; - let cmp = |frange: &FileRange| (frange.file_id, frange.range.start()); + let cmp = |&FileRange { file_id, range }: &_| (file_id, range.start()); let actual = navs .into_iter() - .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }) + .flat_map(|nav| nav.upmap(&analysis.db)) + .map(|nav| nav.focus_or_full_file_range()) + .sorted_by_key(cmp) + .collect::>(); + let expected = expected + .into_iter() + .map(|(r, _)| r.into_file_id(&analysis.db)) .sorted_by_key(cmp) .collect::>(); - let expected = - expected.into_iter().map(|(range, _)| range).sorted_by_key(cmp).collect::>(); assert_eq!(expected, actual); } diff --git a/crates/ide/src/goto_type_definition.rs b/crates/ide/src/goto_type_definition.rs index b80e81d39c6d..e22aa4af9d7d 100644 --- a/crates/ide/src/goto_type_definition.rs +++ b/crates/ide/src/goto_type_definition.rs @@ -1,8 +1,8 @@ -use hir::GenericParam; +use hir::{GenericParam, HirFilePosition}; use ide_db::{RootDatabase, defs::Definition, helpers::pick_best_token}; use syntax::{AstNode, SyntaxKind::*, SyntaxToken, T, ast, match_ast}; -use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav}; +use 
crate::{RangeInfo, TryToNav, navigation_target::HirNavigationTarget}; // Feature: Go to Type Definition // @@ -15,26 +15,25 @@ use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav}; // ![Go to Type Definition](https://user-images.githubusercontent.com/48062697/113020657-b560f500-917a-11eb-9007-0f809733a338.gif) pub(crate) fn goto_type_definition( db: &RootDatabase, - FilePosition { file_id, offset }: FilePosition, -) -> Option>> { + HirFilePosition { file_id, offset }: HirFilePosition, +) -> Option>> { let sema = hir::Semantics::new(db); - let file: ast::SourceFile = sema.parse_guess_edition(file_id); - let token: SyntaxToken = - pick_best_token(file.syntax().token_at_offset(offset), |kind| match kind { - IDENT | INT_NUMBER | T![self] => 3, - kind if kind.is_trivia() => 0, - T![;] => 1, - _ => 2, - })?; + let file_id = sema.adjust_edition(file_id); + let syntax = sema.parse_or_expand(sema.adjust_edition(file_id)); + + let token: SyntaxToken = pick_best_token(syntax.token_at_offset(offset), |kind| match kind { + IDENT | INT_NUMBER | T![self] => 3, + kind if kind.is_trivia() => 0, + T![;] => 1, + _ => 2, + })?; let mut res = Vec::new(); let mut push = |def: Definition| { - if let Some(navs) = def.try_to_nav(db) { - for nav in navs { - if !res.contains(&nav) { - res.push(nav); - } + if let Some(nav) = def.try_to_nav_hir(db) { + if !res.contains(&nav) { + res.push(nav); } } }; @@ -118,18 +117,19 @@ mod tests { fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str) { let (analysis, position, expected) = fixture::annotations(ra_fixture); - let navs = analysis.goto_type_definition(position).unwrap().unwrap().info; + let navs = analysis.goto_type_definition(position.into()).unwrap().unwrap().info; assert!(!navs.is_empty(), "navigation is empty"); let cmp = |&FileRange { file_id, range }: &_| (file_id, range.start()); let navs = navs .into_iter() - .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }) + .flat_map(|nav| nav.upmap(&analysis.db)) + .map(|nav| nav.focus_or_full_file_range()) .sorted_by_key(cmp) .collect::>(); let expected = expected .into_iter() - .map(|(file_range, _)| file_range) + .map(|(r, _)| r.into_file_id(&analysis.db)) .sorted_by_key(cmp) .collect::>(); assert_eq!(expected, navs); diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs index 356bd69aa44e..5b50dea4b196 100644 --- a/crates/ide/src/highlight_related.rs +++ b/crates/ide/src/highlight_related.rs @@ -1,6 +1,6 @@ use std::iter; -use hir::{EditionedFileId, FilePosition, FileRange, HirFileId, InFile, Semantics, db}; +use hir::{FileRange, HirFileId, HirFilePosition, HirFileRange, InFile, Semantics, db}; use ide_db::{ FxHashMap, FxHashSet, RootDatabase, defs::{Definition, IdentClass}, @@ -19,7 +19,10 @@ use syntax::{ match_ast, }; -use crate::{NavigationTarget, TryToNav, goto_definition, navigation_target::ToNav}; +use crate::{ + TryToNav, goto_definition, + navigation_target::{HirNavigationTarget, ToNav}, +}; #[derive(PartialEq, Eq, Hash)] pub struct HighlightedRange { @@ -40,7 +43,7 @@ pub struct HighlightRelatedConfig { pub branch_exit_points: bool, } -type HighlightMap = FxHashMap>; +type HighlightMap = FxHashMap>; // Feature: Highlight Related // @@ -57,13 +60,11 @@ type HighlightMap = FxHashMap>; pub(crate) fn highlight_related( sema: &Semantics<'_, RootDatabase>, config: HighlightRelatedConfig, - ide_db::FilePosition { offset, file_id }: ide_db::FilePosition, + HirFilePosition { offset, file_id }: HirFilePosition, ) -> Option> { let _p = 
tracing::info_span!("highlight_related").entered(); - let file_id = sema - .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition(sema.db, file_id)); - let syntax = sema.parse(file_id).syntax().clone(); + let file_id = sema.adjust_edition(file_id); + let syntax = sema.parse_or_expand(sema.adjust_edition(file_id)); let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind { T![?] => 4, // prefer `?` when the cursor is sandwiched like in `await$0?` @@ -99,7 +100,7 @@ pub(crate) fn highlight_related( T![|] if config.closure_captures => highlight_closure_captures(sema, token, file_id), T![move] if config.closure_captures => highlight_closure_captures(sema, token, file_id), _ if config.references => { - highlight_references(sema, token, FilePosition { file_id, offset }) + highlight_references(sema, token, HirFilePosition { file_id, offset }) } _ => None, } @@ -108,7 +109,7 @@ pub(crate) fn highlight_related( fn highlight_closure_captures( sema: &Semantics<'_, RootDatabase>, token: SyntaxToken, - file_id: EditionedFileId, + file_id: HirFileId, ) -> Option> { let closure = token.parent_ancestors().take(2).find_map(ast::ClosureExpr::cast)?; let search_range = closure.body()?.syntax().text_range(); @@ -121,13 +122,14 @@ fn highlight_closure_captures( .flat_map(|local| { let usages = Definition::Local(local) .usages(sema) - .in_scope(&SearchScope::file_range(FileRange { file_id, range: search_range })) + .in_scope(&SearchScope::hir_file_range(HirFileRange { + file_id, + range: search_range, + })) .include_self_refs() .all() - .references - .remove(&file_id) + .map_out_of_macros_to(sema, file_id) .into_iter() - .flatten() .map(|FileReference { category, range, .. }| HighlightedRange { range, category, @@ -140,8 +142,9 @@ fn highlight_closure_captures( local .sources(sema.db) .into_iter() - .flat_map(|x| x.to_nav(sema.db)) - .filter(|decl| decl.file_id == file_id.file_id(sema.db)) + .map(|x| x.to_nav_hir(sema.db)) + .filter_map(|it| it.upmap_to(sema.db, file_id)) + .flatten() .filter_map(|decl| decl.focus_range) .map(move |range| HighlightedRange { range, category }) .chain(usages) @@ -153,7 +156,7 @@ fn highlight_closure_captures( fn highlight_references( sema: &Semantics<'_, RootDatabase>, token: SyntaxToken, - FilePosition { file_id, offset }: FilePosition, + HirFilePosition { file_id, offset }: HirFilePosition, ) -> Option> { let defs = if let Some((range, _, _, resolution)) = sema.check_for_format_args_template(token.clone(), offset) @@ -170,18 +173,6 @@ fn highlight_references( } else { find_defs(sema, token.clone()) }; - let usages = defs - .iter() - .filter_map(|&d| { - d.usages(sema) - .in_scope(&SearchScope::single_file(file_id)) - .include_self_refs() - .all() - .references - .remove(&file_id) - }) - .flatten() - .map(|FileReference { category, range, .. }| HighlightedRange { range, category }); let mut res = FxHashSet::default(); for &def in &defs { // highlight trait usages @@ -215,19 +206,17 @@ fn highlight_references( res.extend( if use_tree { t.items(sema.db) } else { t.items_with_supertraits(sema.db) } .into_iter() - .filter_map(|item| { + .flat_map(|item| { Definition::from(item) .usages(sema) - .set_scope(Some(&SearchScope::file_range(FileRange { + .set_scope(Some(&SearchScope::hir_file_range(HirFileRange { file_id, range: trait_item_use_scope.text_range(), }))) .include_self_refs() .all() - .references - .remove(&file_id) + .map_out_of_macros_to(sema, file_id) }) - .flatten() .map(|FileReference { category, range, .. 
}| HighlightedRange { range, category, @@ -264,28 +253,27 @@ fn highlight_references( local .sources(sema.db) .into_iter() - .flat_map(|x| x.to_nav(sema.db)) - .filter(|decl| decl.file_id == file_id.file_id(sema.db)) + .map(|x| x.to_nav_hir(sema.db)) + .filter_map(|it| it.upmap_to(sema.db, file_id)) + .flatten() .filter_map(|decl| decl.focus_range) .map(|range| HighlightedRange { range, category }) - .for_each(|x| { - res.insert(x); - }); + .for_each(|x| _ = res.insert(x)); } def => { - let navs = match def { + let nav = match def { Definition::Module(module) => { - NavigationTarget::from_module_to_decl(sema.db, module) + HirNavigationTarget::from_module_to_decl(sema.db, module) } - def => match def.try_to_nav(sema.db) { + def => match def.try_to_nav_hir(sema.db) { Some(it) => it, None => continue, }, }; + let Some(navs) = nav.upmap_to(sema.db, file_id) else { + continue; + }; for nav in navs { - if nav.file_id != file_id.file_id(sema.db) { - continue; - } let hl_range = nav.focus_range.map(|range| { let category = if matches!(def, Definition::Local(l) if l.is_mut(sema.db)) { ReferenceCategory::WRITE @@ -302,6 +290,16 @@ fn highlight_references( } } + let usages = defs + .iter() + .flat_map(|&d| { + d.usages(sema) + .in_scope(&SearchScope::single_hir_file(file_id)) + .include_self_refs() + .all() + .map_out_of_macros_to(sema, file_id) + }) + .map(|FileReference { category, range, .. }| HighlightedRange { range, category }); res.extend(usages); if res.is_empty() { None } else { Some(res.into_iter().collect()) } } @@ -309,13 +307,13 @@ fn highlight_references( pub(crate) fn highlight_branch_exit_points( sema: &Semantics<'_, RootDatabase>, token: SyntaxToken, -) -> FxHashMap> { +) -> FxHashMap> { let mut highlights: HighlightMap = FxHashMap::default(); let push_to_highlights = |file_id, range, highlights: &mut HighlightMap| { if let Some(FileRange { file_id, range }) = original_frange(sema.db, file_id, range) { let hrange = HighlightedRange { category: ReferenceCategory::empty(), range }; - highlights.entry(file_id).or_default().insert(hrange); + highlights.entry(file_id.into()).or_default().insert(hrange); } }; @@ -393,12 +391,12 @@ fn hl_exit_points( def_token: Option, body: ast::Expr, ) -> Option { - let mut highlights: FxHashMap> = FxHashMap::default(); + let mut highlights: FxHashMap> = FxHashMap::default(); let mut push_to_highlights = |file_id, range| { if let Some(FileRange { file_id, range }) = original_frange(sema.db, file_id, range) { let hrange = HighlightedRange { category: ReferenceCategory::empty(), range }; - highlights.entry(file_id).or_default().insert(hrange); + highlights.entry(file_id.into()).or_default().insert(hrange); } }; @@ -463,7 +461,7 @@ fn hl_exit_points( pub(crate) fn highlight_exit_points( sema: &Semantics<'_, RootDatabase>, token: SyntaxToken, -) -> FxHashMap> { +) -> FxHashMap> { let mut res = FxHashMap::default(); for def in goto_definition::find_fn_or_blocks(sema, &token) { let new_map = match_ast! 
{ @@ -492,7 +490,7 @@ pub(crate) fn highlight_exit_points( pub(crate) fn highlight_break_points( sema: &Semantics<'_, RootDatabase>, token: SyntaxToken, -) -> FxHashMap> { +) -> FxHashMap> { pub(crate) fn hl( sema: &Semantics<'_, RootDatabase>, cursor_token_kind: SyntaxKind, @@ -500,12 +498,12 @@ pub(crate) fn highlight_break_points( label: Option, expr: ast::Expr, ) -> Option { - let mut highlights: FxHashMap> = FxHashMap::default(); + let mut highlights: FxHashMap> = FxHashMap::default(); let mut push_to_highlights = |file_id, range| { if let Some(FileRange { file_id, range }) = original_frange(sema.db, file_id, range) { let hrange = HighlightedRange { category: ReferenceCategory::empty(), range }; - highlights.entry(file_id).or_default().insert(hrange); + highlights.entry(file_id.into()).or_default().insert(hrange); } }; @@ -587,18 +585,18 @@ pub(crate) fn highlight_break_points( pub(crate) fn highlight_yield_points( sema: &Semantics<'_, RootDatabase>, token: SyntaxToken, -) -> FxHashMap> { +) -> FxHashMap> { fn hl( sema: &Semantics<'_, RootDatabase>, async_token: Option, body: Option, ) -> Option { - let mut highlights: FxHashMap> = FxHashMap::default(); + let mut highlights: FxHashMap> = FxHashMap::default(); let mut push_to_highlights = |file_id, range| { if let Some(FileRange { file_id, range }) = original_frange(sema.db, file_id, range) { let hrange = HighlightedRange { category: ReferenceCategory::empty(), range }; - highlights.entry(file_id).or_default().insert(hrange); + highlights.entry(file_id.into()).or_default().insert(hrange); } }; @@ -786,18 +784,18 @@ impl<'a> WalkExpandedExprCtx<'a> { pub(crate) fn highlight_unsafe_points( sema: &Semantics<'_, RootDatabase>, token: SyntaxToken, -) -> FxHashMap> { +) -> FxHashMap> { fn hl( sema: &Semantics<'_, RootDatabase>, unsafe_token: &SyntaxToken, block_expr: Option, - ) -> Option>> { - let mut highlights: FxHashMap> = FxHashMap::default(); + ) -> Option>> { + let mut highlights: FxHashMap> = FxHashMap::default(); let mut push_to_highlights = |file_id, range| { if let Some(FileRange { file_id, range }) = original_frange(sema.db, file_id, range) { let hrange = HighlightedRange { category: ReferenceCategory::empty(), range }; - highlights.entry(file_id).or_default().push(hrange); + highlights.entry(file_id.into()).or_default().push(hrange); } }; @@ -850,7 +848,7 @@ mod tests { ) { let (analysis, pos, annotations) = fixture::annotations(ra_fixture); - let hls = analysis.highlight_related(config, pos).unwrap().unwrap_or_default(); + let hls = analysis.highlight_related(config, pos.into()).unwrap().unwrap_or_default(); let mut expected = annotations.into_iter().map(|(r, access)| (r.range, access)).collect::>(); diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index e4d6279759ed..2980a5852386 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -7,11 +7,11 @@ use std::{iter, ops::Not}; use either::Either; use hir::{ - DisplayTarget, GenericDef, GenericSubstitution, HasCrate, HasSource, LangItem, Semantics, - db::DefDatabase, + DisplayTarget, GenericDef, GenericSubstitution, HasCrate, HasSource, HirFileId, HirFileRange, + LangItem, Semantics, db::DefDatabase, }; use ide_db::{ - FileRange, FxIndexSet, Ranker, RootDatabase, + FxIndexSet, Ranker, RootDatabase, defs::{Definition, IdentClass, NameRefClass, OperatorClass}, famous_defs::FamousDefs, helpers::pick_best_token, @@ -25,11 +25,11 @@ use syntax::{ }; use crate::{ - FileId, FilePosition, NavigationTarget, RangeInfo, Runnable, TryToNav, + HirFilePosition, 
RangeInfo, Runnable, TryToNav, doc_links::token_as_doc_comment, markdown_remove::remove_markdown, markup::Markup, - navigation_target::UpmappingResult, + navigation_target::HirNavigationTarget, runnables::{runnable_fn, runnable_mod}, }; #[derive(Clone, Debug, PartialEq, Eq)] @@ -75,11 +75,12 @@ pub enum HoverDocFormat { PlainText, } +#[allow(clippy::large_enum_variant)] #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub enum HoverAction { Runnable(Runnable), - Implementation(FilePosition), - Reference(FilePosition), + Implementation(HirFilePosition), + Reference(HirFilePosition), GoToType(Vec), } @@ -99,7 +100,7 @@ impl HoverAction { it.name(db).map(|name| name.display(db, edition).to_string()), edition, ), - nav: it.try_to_nav(db)?.call_site(), + nav: it.try_to_nav_hir(db)?, }) }) .collect::>(); @@ -110,7 +111,7 @@ impl HoverAction { #[derive(Debug, Clone, Eq, PartialEq, Hash)] pub struct HoverGotoTypeData { pub mod_path: String, - pub nav: NavigationTarget, + pub nav: HirNavigationTarget, } /// Contains the results when hovering over an item @@ -128,25 +129,24 @@ pub struct HoverResult { // ![Hover](https://user-images.githubusercontent.com/48062697/113020658-b5f98b80-917a-11eb-9f88-3dbc27320c95.gif) pub(crate) fn hover( db: &RootDatabase, - frange @ FileRange { file_id, range }: FileRange, + frange @ HirFileRange { file_id, range }: HirFileRange, config: &HoverConfig, ) -> Option> { let sema = &hir::Semantics::new(db); - let file = sema.parse_guess_edition(file_id).syntax().clone(); - let edition = - sema.attach_first_edition(file_id).map(|it| it.edition(db)).unwrap_or(Edition::CURRENT); - let display_target = sema.first_crate(file_id)?.to_display_target(db); + let file_id = sema.adjust_edition(file_id); + let file = sema.parse_or_expand(sema.adjust_edition(file_id)); + + let display_target = sema.first_crate_hir(file_id)?.to_display_target(db); let mut res = if range.is_empty() { hover_offset( sema, - FilePosition { file_id, offset: range.start() }, + HirFilePosition { file_id, offset: range.start() }, file, config, - edition, display_target, ) } else { - hover_ranged(sema, frange, file, config, edition, display_target) + hover_ranged(sema, frange, file, config, display_target) }?; if let HoverDocFormat::PlainText = config.format { @@ -158,12 +158,12 @@ pub(crate) fn hover( #[allow(clippy::field_reassign_with_default)] fn hover_offset( sema: &Semantics<'_, RootDatabase>, - FilePosition { file_id, offset }: FilePosition, + HirFilePosition { file_id, offset }: HirFilePosition, file: SyntaxNode, config: &HoverConfig, - edition: Edition, display_target: DisplayTarget, ) -> Option> { + let edition = file_id.edition(sema.db); let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind { IDENT | INT_NUMBER @@ -175,7 +175,7 @@ fn hover_offset( | T![_] => 4, // index and prefix ops and closure pipe T!['['] | T![']'] | T![?] | T![*] | T![-] | T![!] 
| T![|] => 3, - kind if kind.is_keyword(edition) => 2, + kind if kind.is_keyword(file_id.edition(sema.db)) => 2, T!['('] | T![')'] => 2, kind if kind.is_trivia() => 0, _ => 1, @@ -193,7 +193,6 @@ fn hover_offset( None, false, config, - edition, display_target, ); Some(RangeInfo::new(range, res)) @@ -212,7 +211,6 @@ fn hover_offset( None, false, config, - edition, display_target, ); return Some(RangeInfo::new(range, res)); @@ -304,7 +302,6 @@ fn hover_offset( macro_arm, hovered_definition, config, - edition, display_target, ) }) @@ -387,10 +384,9 @@ fn hover_offset( fn hover_ranged( sema: &Semantics<'_, RootDatabase>, - FileRange { range, .. }: FileRange, + HirFileRange { range, file_id }: HirFileRange, file: SyntaxNode, config: &HoverConfig, - edition: Edition, display_target: DisplayTarget, ) -> Option> { // FIXME: make this work in attributes @@ -399,6 +395,7 @@ fn hover_ranged( .ancestors() .take_while(|it| ast::MacroCall::can_cast(it.kind()) || !ast::Item::can_cast(it.kind())) .find_map(Either::::cast)?; + let edition = file_id.edition(sema.db); let res = match &expr_or_pat { Either::Left(ast::Expr::TryExpr(try_expr)) => { render::try_expr(sema, config, try_expr, edition, display_target) @@ -424,14 +421,13 @@ fn hover_ranged( // FIXME: Why is this pub(crate)? pub(crate) fn hover_for_definition( sema: &Semantics<'_, RootDatabase>, - file_id: FileId, + file_id: HirFileId, def: Definition, subst: Option>, scope_node: &SyntaxNode, macro_arm: Option, render_extras: bool, config: &HoverConfig, - edition: Edition, display_target: DisplayTarget, ) -> HoverResult { let famous_defs = match &def { @@ -466,7 +462,7 @@ pub(crate) fn hover_for_definition( render_extras, subst_types.as_ref(), config, - edition, + file_id.edition(db), display_target, ); HoverResult { @@ -475,7 +471,13 @@ pub(crate) fn hover_for_definition( show_fn_references_action(sema.db, def), show_implementations_action(sema.db, def), runnable_action(sema, def, file_id), - goto_type_action_for_def(sema.db, def, ¬able_traits, subst_types, edition), + goto_type_action_for_def( + sema.db, + def, + ¬able_traits, + subst_types, + file_id.edition(db), + ), ] .into_iter() .flatten() @@ -511,34 +513,30 @@ fn notable_traits<'db>( } fn show_implementations_action(db: &RootDatabase, def: Definition) -> Option { - fn to_action(nav_target: NavigationTarget) -> HoverAction { - HoverAction::Implementation(FilePosition { + fn to_action(nav_target: HirNavigationTarget) -> HoverAction { + HoverAction::Implementation(HirFilePosition { file_id: nav_target.file_id, offset: nav_target.focus_or_full_range().start(), }) } let adt = match def { - Definition::Trait(it) => { - return it.try_to_nav(db).map(UpmappingResult::call_site).map(to_action); - } + Definition::Trait(it) => return it.try_to_nav_hir(db).map(to_action), Definition::Adt(it) => Some(it), Definition::SelfType(it) => it.self_ty(db).as_adt(), _ => None, }?; - adt.try_to_nav(db).map(UpmappingResult::call_site).map(to_action) + adt.try_to_nav_hir(db).map(to_action) } fn show_fn_references_action(db: &RootDatabase, def: Definition) -> Option { match def { - Definition::Function(it) => { - it.try_to_nav(db).map(UpmappingResult::call_site).map(|nav_target| { - HoverAction::Reference(FilePosition { - file_id: nav_target.file_id, - offset: nav_target.focus_or_full_range().start(), - }) + Definition::Function(it) => it.try_to_nav_hir(db).map(|nav_target| { + HoverAction::Reference(HirFilePosition { + file_id: nav_target.file_id, + offset: nav_target.focus_or_full_range().start(), }) - } + }), _ => 
None, } } @@ -546,13 +544,13 @@ fn show_fn_references_action(db: &RootDatabase, def: Definition) -> Option, def: Definition, - file_id: FileId, + file_id: HirFileId, ) -> Option { match def { Definition::Module(it) => runnable_mod(sema, it).map(HoverAction::Runnable), Definition::Function(func) => { let src = func.source(sema.db)?; - if src.file_id.file_id().is_none_or(|f| f.file_id(sema.db) != file_id) { + if src.file_id.file_id().is_none_or(|f| f != file_id) { cov_mark::hit!(hover_macro_generated_struct_fn_doc_comment); cov_mark::hit!(hover_macro_generated_struct_fn_doc_attr); return None; diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index f63499aa0fd4..7a581fa5b953 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -1,5 +1,6 @@ use expect_test::{Expect, expect}; -use ide_db::{FileRange, base_db::SourceDatabase}; +use hir::HirFileRange; +use ide_db::base_db::SourceDatabase; use syntax::TextRange; use crate::{ @@ -30,7 +31,10 @@ fn check_hover_no_result(#[rust_analyzer::rust_fixture] ra_fixture: &str) { let hover = analysis .hover( &HoverConfig { links_in_hover: true, ..HOVER_BASE_CONFIG }, - FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) }, + HirFileRange { + file_id: position.file_id.into(), + range: TextRange::empty(position.offset), + }, ) .unwrap(); assert!(hover.is_none(), "hover not expected but found: {:?}", hover.unwrap()); @@ -42,13 +46,16 @@ fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { let hover = analysis .hover( &HoverConfig { links_in_hover: true, ..HOVER_BASE_CONFIG }, - FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) }, + HirFileRange { + file_id: position.file_id.into(), + range: TextRange::empty(position.offset), + }, ) .unwrap() .unwrap(); - let content = analysis.db.file_text(position.file_id); - let hovered_element = &content.text(&analysis.db)[hover.range]; + let content = analysis.db.file_text(position.file_id.file_id(&analysis.db)).text(&analysis.db); + let hovered_element = &content[hover.range]; let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup); expect.assert_eq(&actual) @@ -68,12 +75,15 @@ fn check_hover_fields_limit( max_fields_count: fields_count.into(), ..HOVER_BASE_CONFIG }, - FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) }, + HirFileRange { + file_id: position.file_id.into(), + range: TextRange::empty(position.offset), + }, ) .unwrap() .unwrap(); - let content = analysis.db.file_text(position.file_id).text(&analysis.db); + let content = analysis.db.file_text(position.file_id.file_id(&analysis.db)).text(&analysis.db); let hovered_element = &content[hover.range]; let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup); @@ -94,12 +104,15 @@ fn check_hover_enum_variants_limit( max_enum_variants_count: variants_count.into(), ..HOVER_BASE_CONFIG }, - FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) }, + HirFileRange { + file_id: position.file_id.into(), + range: TextRange::empty(position.offset), + }, ) .unwrap() .unwrap(); - let content = analysis.db.file_text(position.file_id).text(&analysis.db); + let content = analysis.db.file_text(position.file_id.file_id(&analysis.db)).text(&analysis.db); let hovered_element = &content[hover.range]; let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup); @@ -120,12 +133,15 @@ fn check_assoc_count( max_trait_assoc_items_count: Some(count), ..HOVER_BASE_CONFIG }, 
- FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) }, + HirFileRange { + file_id: position.file_id.into(), + range: TextRange::empty(position.offset), + }, ) .unwrap() .unwrap(); - let content = analysis.db.file_text(position.file_id).text(&analysis.db); + let content = analysis.db.file_text(position.file_id.file_id(&analysis.db)).text(&analysis.db); let hovered_element = &content[hover.range]; let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup); @@ -137,12 +153,15 @@ fn check_hover_no_links(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: let hover = analysis .hover( &HOVER_BASE_CONFIG, - FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) }, + HirFileRange { + file_id: position.file_id.into(), + range: TextRange::empty(position.offset), + }, ) .unwrap() .unwrap(); - let content = analysis.db.file_text(position.file_id).text(&analysis.db); + let content = analysis.db.file_text(position.file_id.file_id(&analysis.db)).text(&analysis.db); let hovered_element = &content[hover.range]; let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup); @@ -154,12 +173,15 @@ fn check_hover_no_memory_layout(#[rust_analyzer::rust_fixture] ra_fixture: &str, let hover = analysis .hover( &HoverConfig { memory_layout: None, ..HOVER_BASE_CONFIG }, - FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) }, + HirFileRange { + file_id: position.file_id.into(), + range: TextRange::empty(position.offset), + }, ) .unwrap() .unwrap(); - let content = analysis.db.file_text(position.file_id).text(&analysis.db); + let content = analysis.db.file_text(position.file_id.file_id(&analysis.db)).text(&analysis.db); let hovered_element = &content[hover.range]; let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup); @@ -175,12 +197,15 @@ fn check_hover_no_markdown(#[rust_analyzer::rust_fixture] ra_fixture: &str, expe format: HoverDocFormat::PlainText, ..HOVER_BASE_CONFIG }, - FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) }, + HirFileRange { + file_id: position.file_id.into(), + range: TextRange::empty(position.offset), + }, ) .unwrap() .unwrap(); - let content = analysis.db.file_text(position.file_id).text(&analysis.db); + let content = analysis.db.file_text(position.file_id.file_id(&analysis.db)).text(&analysis.db); let hovered_element = &content[hover.range]; let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup); @@ -192,7 +217,7 @@ fn check_actions(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect let mut hover = analysis .hover( &HoverConfig { links_in_hover: true, ..HOVER_BASE_CONFIG }, - FileRange { file_id, range: position.range_or_empty() }, + HirFileRange { file_id: file_id.into(), range: position.range_or_empty() }, ) .unwrap() .unwrap(); @@ -214,14 +239,14 @@ fn check_actions(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect fn check_hover_range(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { let (analysis, range) = fixture::range(ra_fixture); - let hover = analysis.hover(&HOVER_BASE_CONFIG, range).unwrap().unwrap(); + let hover = analysis.hover(&HOVER_BASE_CONFIG, range.into()).unwrap().unwrap(); expect.assert_eq(hover.info.markup.as_str()) } fn check_hover_range_actions(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { let (analysis, range) = fixture::range(ra_fixture); let mut hover = analysis - .hover(&HoverConfig { links_in_hover: true, ..HOVER_BASE_CONFIG }, range) + 
.hover(&HoverConfig { links_in_hover: true, ..HOVER_BASE_CONFIG }, range.into()) .unwrap() .unwrap(); // stub out ranges into minicore as they can change every now and then @@ -242,7 +267,7 @@ fn check_hover_range_actions(#[rust_analyzer::rust_fixture] ra_fixture: &str, ex fn check_hover_range_no_results(#[rust_analyzer::rust_fixture] ra_fixture: &str) { let (analysis, range) = fixture::range(ra_fixture); - let hover = analysis.hover(&HOVER_BASE_CONFIG, range).unwrap(); + let hover = analysis.hover(&HOVER_BASE_CONFIG, range.into()).unwrap(); assert!(hover.is_none()); } @@ -493,7 +518,9 @@ fn main() { mod_path: "ra_test_fixture::S2", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 10..20, focus_range: 17..19, @@ -506,7 +533,9 @@ fn main() { mod_path: "ra_test_fixture::S", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..9, focus_range: 7..8, @@ -519,7 +548,9 @@ fn main() { mod_path: "core::ops::function::FnOnce", nav: NavigationTarget { file_id: FileId( - 1, + EditionedFileId( + Id(1801), + ), ), full_range: 4294967295..4294967295, focus_range: 4294967295..4294967295, @@ -2370,7 +2401,9 @@ fn foo(Foo { b$0ar }: &Foo) {} mod_path: "ra_test_fixture::Bar", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..11, focus_range: 7..10, @@ -2406,7 +2439,9 @@ fn test() { Reference( FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 3, }, @@ -2417,7 +2452,9 @@ fn test() { mod_path: "ra_test_fixture::S", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 20..29, focus_range: 27..28, @@ -2452,7 +2489,9 @@ fn test() { Reference( FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 15, }, @@ -2463,7 +2502,9 @@ fn test() { mod_path: "ra_test_fixture::Bar", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..11, focus_range: 7..10, @@ -2497,7 +2538,9 @@ fn test() { Reference( FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 16, }, @@ -2508,7 +2551,9 @@ fn test() { mod_path: "ra_test_fixture::Bar", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..12, focus_range: 6..9, @@ -2712,7 +2757,9 @@ fn test_hover_trait_show_qualifiers() { Implementation( FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 13, }, @@ -3403,7 +3450,9 @@ fn test_hover_trait_has_impl_action() { Implementation( FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 6, }, @@ -3422,7 +3471,9 @@ fn test_hover_struct_has_impl_action() { Implementation( FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 7, }, @@ -3441,7 +3492,9 @@ fn test_hover_union_has_impl_action() { Implementation( FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 6, }, @@ -3460,7 +3513,9 @@ fn test_hover_enum_has_impl_action() { Implementation( FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 5, }, @@ -3479,7 +3534,9 @@ fn test_hover_self_has_impl_action() { Implementation( FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 7, }, @@ -3501,7 +3558,9 @@ fn foo_$0test() {} Reference( FilePositionWrapper { file_id: FileId( - 0, + 
EditionedFileId( + Id(1800), + ), ), offset: 11, }, @@ -3511,7 +3570,9 @@ fn foo_$0test() {} use_name_in_title: false, nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..24, focus_range: 11..19, @@ -3555,7 +3616,9 @@ mod tests$0 { use_name_in_title: false, nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..46, focus_range: 4..9, @@ -3595,7 +3658,9 @@ fn main() { let s$0t = S{ f1:0 }; } mod_path: "ra_test_fixture::S", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..19, focus_range: 7..8, @@ -3628,7 +3693,9 @@ fn main() { let s$0t = S{ f1:Arg(0) }; } mod_path: "ra_test_fixture::Arg", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..16, focus_range: 7..10, @@ -3641,7 +3708,9 @@ fn main() { let s$0t = S{ f1:Arg(0) }; } mod_path: "ra_test_fixture::S", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 17..37, focus_range: 24..25, @@ -3687,7 +3756,9 @@ fn main() { let s$0t = S{ f1: S{ f1: Arg(0) } }; } mod_path: "ra_test_fixture::Arg", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..16, focus_range: 7..10, @@ -3700,7 +3771,9 @@ fn main() { let s$0t = S{ f1: S{ f1: Arg(0) } }; } mod_path: "ra_test_fixture::S", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 17..37, focus_range: 24..25, @@ -3736,7 +3809,9 @@ fn main() { let s$0t = (A(1), B(2), M::C(3) ); } mod_path: "ra_test_fixture::A", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..14, focus_range: 7..8, @@ -3749,7 +3824,9 @@ fn main() { let s$0t = (A(1), B(2), M::C(3) ); } mod_path: "ra_test_fixture::B", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 15..29, focus_range: 22..23, @@ -3762,7 +3839,9 @@ fn main() { let s$0t = (A(1), B(2), M::C(3) ); } mod_path: "ra_test_fixture::M::C", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 42..60, focus_range: 53..54, @@ -3796,7 +3875,9 @@ fn main() { let s$0t = foo(); } mod_path: "ra_test_fixture::Foo", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..12, focus_range: 6..9, @@ -3830,7 +3911,9 @@ fn main() { let s$0t = foo(); } mod_path: "ra_test_fixture::Foo", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..15, focus_range: 6..9, @@ -3843,7 +3926,9 @@ fn main() { let s$0t = foo(); } mod_path: "ra_test_fixture::S", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 16..25, focus_range: 23..24, @@ -3877,7 +3962,9 @@ fn main() { let s$0t = foo(); } mod_path: "ra_test_fixture::Bar", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 13..25, focus_range: 19..22, @@ -3890,7 +3977,9 @@ fn main() { let s$0t = foo(); } mod_path: "ra_test_fixture::Foo", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..12, focus_range: 6..9, @@ -3927,7 +4016,9 @@ fn main() { let s$0t = foo(); } mod_path: "ra_test_fixture::Bar", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 16..31, focus_range: 22..25, @@ -3940,7 +4031,9 @@ fn main() { let s$0t = 
foo(); } mod_path: "ra_test_fixture::Foo", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..15, focus_range: 6..9, @@ -3953,7 +4046,9 @@ fn main() { let s$0t = foo(); } mod_path: "ra_test_fixture::S1", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 32..44, focus_range: 39..41, @@ -3966,7 +4061,9 @@ fn main() { let s$0t = foo(); } mod_path: "ra_test_fixture::S2", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 45..57, focus_range: 52..54, @@ -3997,7 +4094,9 @@ fn foo(ar$0g: &impl Foo) {} mod_path: "ra_test_fixture::Foo", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..12, focus_range: 6..9, @@ -4031,7 +4130,9 @@ fn foo(ar$0g: &impl Foo + Bar) {} mod_path: "ra_test_fixture::Bar", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 13..28, focus_range: 19..22, @@ -4044,7 +4145,9 @@ fn foo(ar$0g: &impl Foo + Bar) {} mod_path: "ra_test_fixture::Foo", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..12, focus_range: 6..9, @@ -4057,7 +4160,9 @@ fn foo(ar$0g: &impl Foo + Bar) {} mod_path: "ra_test_fixture::S", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 29..39, focus_range: 36..37, @@ -4098,7 +4203,9 @@ pub mod future { mod_path: "core::future::Future", nav: NavigationTarget { file_id: FileId( - 1, + EditionedFileId( + Id(1801), + ), ), full_range: 4294967295..4294967295, focus_range: 4294967295..4294967295, @@ -4112,7 +4219,9 @@ pub mod future { mod_path: "main::S", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..110, focus_range: 108..109, @@ -4144,7 +4253,9 @@ fn foo(ar$0g: &impl Foo) {} mod_path: "ra_test_fixture::Foo", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..15, focus_range: 6..9, @@ -4157,7 +4268,9 @@ fn foo(ar$0g: &impl Foo) {} mod_path: "ra_test_fixture::S", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 16..27, focus_range: 23..24, @@ -4194,7 +4307,9 @@ fn main() { let s$0t = foo(); } mod_path: "ra_test_fixture::B", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 48..61, focus_range: 55..56, @@ -4207,7 +4322,9 @@ fn main() { let s$0t = foo(); } mod_path: "ra_test_fixture::Foo", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..15, focus_range: 6..9, @@ -4220,7 +4337,9 @@ fn main() { let s$0t = foo(); } mod_path: "ra_test_fixture::S", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 16..25, focus_range: 23..24, @@ -4251,7 +4370,9 @@ fn foo(ar$0g: &dyn Foo) {} mod_path: "ra_test_fixture::Foo", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..12, focus_range: 6..9, @@ -4283,7 +4404,9 @@ fn foo(ar$0g: &dyn Foo) {} mod_path: "ra_test_fixture::Foo", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..15, focus_range: 6..9, @@ -4296,7 +4419,9 @@ fn foo(ar$0g: &dyn Foo) {} mod_path: "ra_test_fixture::S", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 16..27, focus_range: 23..24, @@ -4331,7 +4456,9 @@ fn 
foo(a$0rg: &impl ImplTrait>>>) {} mod_path: "ra_test_fixture::B", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 43..57, focus_range: 50..51, @@ -4344,7 +4471,9 @@ fn foo(a$0rg: &impl ImplTrait>>>) {} mod_path: "ra_test_fixture::DynTrait", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 22..42, focus_range: 28..36, @@ -4357,7 +4486,9 @@ fn foo(a$0rg: &impl ImplTrait>>>) {} mod_path: "ra_test_fixture::ImplTrait", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..21, focus_range: 6..15, @@ -4370,7 +4501,9 @@ fn foo(a$0rg: &impl ImplTrait>>>) {} mod_path: "ra_test_fixture::S", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 58..69, focus_range: 65..66, @@ -4412,7 +4545,9 @@ fn main() { let s$0t = test().get(); } mod_path: "ra_test_fixture::Foo", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..62, focus_range: 6..9, @@ -4445,7 +4580,9 @@ impl Foo {} mod_path: "ra_test_fixture::Bar", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..11, focus_range: 7..10, @@ -4477,7 +4614,9 @@ fn foo(t: T$0){} mod_path: "ra_test_fixture::Foo", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..12, focus_range: 6..9, @@ -4510,7 +4649,9 @@ impl Foo { mod_path: "ra_test_fixture::Foo", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..11, focus_range: 7..10, @@ -6814,7 +6955,10 @@ fn hover_feature() { analysis .hover( &HoverConfig { links_in_hover: true, ..HOVER_BASE_CONFIG }, - FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) }, + HirFileRange { + file_id: position.file_id.into(), + range: TextRange::empty(position.offset), + }, ) .unwrap() .unwrap(); @@ -7244,7 +7388,9 @@ fn foo() { mod_path: "ra_test_fixture::Foo", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 0..11, focus_range: 7..10, @@ -9213,7 +9359,9 @@ impl Iterator for S { Implementation( FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 7, }, @@ -9224,7 +9372,9 @@ impl Iterator for S { mod_path: "core::future::Future", nav: NavigationTarget { file_id: FileId( - 1, + EditionedFileId( + Id(1801), + ), ), full_range: 4294967295..4294967295, focus_range: 4294967295..4294967295, @@ -9238,7 +9388,9 @@ impl Iterator for S { mod_path: "core::iter::traits::iterator::Iterator", nav: NavigationTarget { file_id: FileId( - 1, + EditionedFileId( + Id(1801), + ), ), full_range: 4294967295..4294967295, focus_range: 4294967295..4294967295, @@ -9252,7 +9404,9 @@ impl Iterator for S { mod_path: "ra_test_fixture::Notable", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 21..59, focus_range: 49..56, @@ -9265,7 +9419,9 @@ impl Iterator for S { mod_path: "ra_test_fixture::S2", nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 10..20, focus_range: 17..19, @@ -10528,7 +10684,9 @@ macro_rules! str { Reference( FilePositionWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), offset: 92, }, @@ -10538,7 +10696,9 @@ macro_rules! 
str { use_name_in_title: false, nav: NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 81..301, focus_range: 92..96, diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index 19e5509681aa..783cc4ac91af 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -5,11 +5,14 @@ use std::{ use either::Either; use hir::{ - ClosureStyle, DisplayTarget, EditionedFileId, HasVisibility, HirDisplay, HirDisplayError, - HirWrite, InRealFile, ModuleDef, ModuleDefId, Semantics, sym, + ClosureStyle, DisplayTarget, HasVisibility, HirDisplay, HirDisplayError, HirFileId, + HirFileRange, HirWrite, InFile, ModuleDef, ModuleDefId, Semantics, sym, }; -use ide_db::{FileRange, RootDatabase, famous_defs::FamousDefs, text_edit::TextEditBuilder}; -use ide_db::{FxHashSet, text_edit::TextEdit}; +use ide_db::{ + FxHashSet, + text_edit::{TextEdit, TextEditBuilder}, +}; +use ide_db::{RootDatabase, famous_defs::FamousDefs}; use itertools::Itertools; use smallvec::{SmallVec, smallvec}; use stdx::never; @@ -19,7 +22,7 @@ use syntax::{ format_smolstr, match_ast, }; -use crate::{FileId, navigation_target::TryToNav}; +use crate::navigation_target::TryToNav; mod adjustment; mod bind_pat; @@ -78,17 +81,15 @@ mod range_exclusive; // ![Inlay hints](https://user-images.githubusercontent.com/48062697/113020660-b5f98b80-917a-11eb-8d70-3be3fd558cdd.png) pub(crate) fn inlay_hints( db: &RootDatabase, - file_id: FileId, + file_id: HirFileId, range_limit: Option, config: &InlayHintsConfig, ) -> Vec { let _p = tracing::info_span!("inlay_hints").entered(); let sema = Semantics::new(db); - let file_id = sema - .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id)); - let file = sema.parse(file_id); - let file = file.syntax(); + + let file_id = sema.adjust_edition(file_id); + let file = &sema.parse_or_expand(file_id); let mut acc = Vec::new(); @@ -127,7 +128,7 @@ struct InlayHintCtx { pub(crate) fn inlay_hints_resolve( db: &RootDatabase, - file_id: FileId, + file_id: HirFileId, resolve_range: TextRange, hash: u64, config: &InlayHintsConfig, @@ -135,11 +136,8 @@ pub(crate) fn inlay_hints_resolve( ) -> Option { let _p = tracing::info_span!("inlay_hints_resolve").entered(); let sema = Semantics::new(db); - let file_id = sema - .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id)); - let file = sema.parse(file_id); - let file = file.syntax(); + let file_id = sema.adjust_edition(file_id); + let file = &sema.parse_or_expand(file_id); let scope = sema.scope(file)?; let famous_defs = FamousDefs(&sema, scope.krate()); @@ -205,9 +203,9 @@ fn handle_event(ctx: &mut InlayHintCtx, node: WalkEvent) -> Option, ctx: &mut InlayHintCtx, - famous_defs @ FamousDefs(sema, _krate): &FamousDefs<'_, '_>, + famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, config: &InlayHintsConfig, - file_id: EditionedFileId, + file_id: HirFileId, display_target: DisplayTarget, node: SyntaxNode, ) { @@ -216,7 +214,7 @@ fn hints( sema, config, display_target, - InRealFile { file_id, value: node.clone() }, + InFile { file_id, value: node.clone() }, ); if let Some(any_has_generic_args) = ast::AnyHasGenericArgs::cast(node.clone()) { generic_param::hints(hints, famous_defs, config, any_has_generic_args); @@ -228,12 +226,12 @@ fn hints( chaining::hints(hints, famous_defs, config, display_target, &expr); adjustment::hints(hints, famous_defs, config, display_target, &expr); match expr { - 
ast::Expr::CallExpr(it) => param_name::hints(hints, famous_defs, config, ast::Expr::from(it)), + ast::Expr::CallExpr(it) => param_name::hints(hints, famous_defs, config, file_id, ast::Expr::from(it)), ast::Expr::MethodCallExpr(it) => { - param_name::hints(hints, famous_defs, config, ast::Expr::from(it)) + param_name::hints(hints, famous_defs, config, file_id, ast::Expr::from(it)) } ast::Expr::ClosureExpr(it) => { - closure_captures::hints(hints, famous_defs, config, it.clone()); + closure_captures::hints(hints, famous_defs, config, file_id, it.clone()); closure_ret::hints(hints, famous_defs, config, display_target, it) }, ast::Expr::RangeExpr(it) => range_exclusive::hints(hints, famous_defs, config, it), @@ -354,8 +352,8 @@ impl InlayHintsConfig { /// location link to actually resolve but where computing `finish` would be costly. fn lazy_location_opt( &self, - finish: impl FnOnce() -> Option, - ) -> Option> { + finish: impl FnOnce() -> Option, + ) -> Option> { if self.fields_to_resolve.resolve_label_location { Some(LazyProperty::Lazy) } else { @@ -543,7 +541,7 @@ impl InlayHintLabel { pub fn simple( s: impl Into, tooltip: Option>, - linked_location: Option>, + linked_location: Option>, ) -> InlayHintLabel { InlayHintLabel { parts: smallvec![InlayHintLabelPart { text: s.into(), linked_location, tooltip }], @@ -627,7 +625,7 @@ pub struct InlayHintLabelPart { /// refers to (not necessarily the location itself). /// When setting this, no tooltip must be set on the containing hint, or VS Code will display /// them both. - pub linked_location: Option>, + pub linked_location: Option>, /// The tooltip to show when hovering over the inlay hint, this may invoke other actions like /// hover requests to show. pub tooltip: Option>, @@ -671,7 +669,7 @@ struct InlayHintLabelBuilder<'a> { result: InlayHintLabel, last_part: String, resolve: bool, - location: Option>, + location: Option>, } impl fmt::Write for InlayHintLabelBuilder<'_> { @@ -689,9 +687,8 @@ impl HirWrite for InlayHintLabelBuilder<'_> { LazyProperty::Lazy } else { LazyProperty::Computed({ - let Some(location) = ModuleDef::from(def).try_to_nav(self.db) else { return }; - let location = location.call_site(); - FileRange { file_id: location.file_id, range: location.focus_or_full_range() } + let Some(location) = ModuleDef::from(def).try_to_nav_hir(self.db) else { return }; + HirFileRange { file_id: location.file_id, range: location.focus_or_full_range() } }) }); } @@ -915,8 +912,9 @@ mod tests { #[rust_analyzer::rust_fixture] ra_fixture: &str, ) { let (analysis, file_id) = fixture::file(ra_fixture); - let mut expected = extract_annotations(&analysis.file_text(file_id).unwrap()); - let inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap(); + let mut expected = + extract_annotations(&analysis.file_text(file_id.file_id(&analysis.db)).unwrap()); + let inlay_hints = analysis.inlay_hints(&config, file_id.into(), None).unwrap(); let actual = inlay_hints .into_iter() // FIXME: We trim the start because some inlay produces leading whitespace which is not properly supported by our annotation extraction @@ -935,7 +933,7 @@ mod tests { expect: Expect, ) { let (analysis, file_id) = fixture::file(ra_fixture); - let inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap(); + let inlay_hints = analysis.inlay_hints(&config, file_id.into(), None).unwrap(); let filtered = inlay_hints.into_iter().map(|hint| (hint.range, hint.label)).collect::>(); expect.assert_debug_eq(&filtered) @@ -950,7 +948,7 @@ mod tests { expect: Expect, ) { let 
(analysis, file_id) = fixture::file(ra_fixture); - let inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap(); + let inlay_hints = analysis.inlay_hints(&config, file_id.into(), None).unwrap(); let edits = inlay_hints .into_iter() @@ -961,7 +959,7 @@ mod tests { }) .expect("no edit returned"); - let mut actual = analysis.file_text(file_id).unwrap().to_string(); + let mut actual = analysis.file_text(file_id.file_id(&analysis.db)).unwrap().to_string(); edits.apply(&mut actual); expect.assert_eq(&actual); } @@ -972,7 +970,7 @@ mod tests { #[rust_analyzer::rust_fixture] ra_fixture: &str, ) { let (analysis, file_id) = fixture::file(ra_fixture); - let inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap(); + let inlay_hints = analysis.inlay_hints(&config, file_id.into(), None).unwrap(); let edits: Vec<_> = inlay_hints.into_iter().filter_map(|hint| hint.text_edit?.computed()).collect(); diff --git a/crates/ide/src/inlay_hints/bind_pat.rs b/crates/ide/src/inlay_hints/bind_pat.rs index 729349365e6c..5e02682e2d56 100644 --- a/crates/ide/src/inlay_hints/bind_pat.rs +++ b/crates/ide/src/inlay_hints/bind_pat.rs @@ -310,7 +310,7 @@ fn main(a: SliceIter<'_, Container>) { analysis .inlay_hints( &InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG }, - file_id, + file_id.into(), None, ) .unwrap(); @@ -420,11 +420,12 @@ fn main() { } "#; let (analysis, file_id) = fixture::file(fixture); - let expected = extract_annotations(&analysis.file_text(file_id).unwrap()); + let expected = + extract_annotations(&analysis.file_text(file_id.file_id(&analysis.db)).unwrap()); let inlay_hints = analysis .inlay_hints( &InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG }, - file_id, + file_id.into(), Some(TextRange::new(TextSize::from(491), TextSize::from(640))), ) .unwrap(); diff --git a/crates/ide/src/inlay_hints/bounds.rs b/crates/ide/src/inlay_hints/bounds.rs index f0003dae3f36..a8b44191c7c9 100644 --- a/crates/ide/src/inlay_hints/bounds.rs +++ b/crates/ide/src/inlay_hints/bounds.rs @@ -1,7 +1,8 @@ //! Implementation of trait bound hints. //! //! Currently this renders the implied `Sized` bound. 
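For illustration, a minimal sketch of the hint this pass emits, assuming the implied-Sized-bound hint is enabled in `InlayHintsConfig` (the rendered hint is shown as a trailing comment, not literal output):

fn foo<T>(t: T) {}
// displayed with the hint applied, roughly: fn foo<T: Sized>(t: T) {}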
-use ide_db::{FileRange, famous_defs::FamousDefs}; +use hir::HirFileRange; +use ide_db::famous_defs::FamousDefs; use syntax::ast::{self, AstNode, HasTypeBounds}; @@ -44,12 +45,10 @@ pub(super) fn hints( text: "Sized".to_owned(), linked_location: sized_trait.and_then(|it| { config.lazy_location_opt(|| { - it.try_to_nav(sema.db).map(|it| { - let n = it.call_site(); - FileRange { - file_id: n.file_id, - range: n.focus_or_full_range(), - } + // FIXME: Replace with a range fetch only + it.try_to_nav_hir(sema.db).map(|it| HirFileRange { + file_id: it.file_id, + range: it.focus_or_full_range(), }) }) }), @@ -141,7 +140,9 @@ fn foo() {} Computed( FileRangeWrapper { file_id: FileId( - 1, + EditionedFileId( + Id(1801), + ), ), range: 446..451, }, diff --git a/crates/ide/src/inlay_hints/chaining.rs b/crates/ide/src/inlay_hints/chaining.rs index ff157fa171b5..319fb434cc5e 100644 --- a/crates/ide/src/inlay_hints/chaining.rs +++ b/crates/ide/src/inlay_hints/chaining.rs @@ -99,7 +99,7 @@ mod tests { expect: Expect, ) { let (analysis, file_id) = fixture::file(ra_fixture); - let mut inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap(); + let mut inlay_hints = analysis.inlay_hints(&config, file_id.into(), None).unwrap(); inlay_hints.iter_mut().flat_map(|hint| &mut hint.label.parts).for_each(|hint| { if let Some(LazyProperty::Computed(loc)) = &mut hint.linked_location { loc.range = TextRange::empty(TextSize::from(0)); @@ -139,7 +139,9 @@ fn main() { Computed( FileRangeWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), range: 63..64, }, @@ -158,7 +160,9 @@ fn main() { Computed( FileRangeWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), range: 7..8, }, @@ -222,7 +226,9 @@ fn main() { Computed( FileRangeWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), range: 51..52, }, @@ -241,7 +247,9 @@ fn main() { Computed( FileRangeWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), range: 29..30, }, @@ -289,7 +297,9 @@ fn main() { Computed( FileRangeWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), range: 51..52, }, @@ -308,7 +318,9 @@ fn main() { Computed( FileRangeWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), range: 29..30, }, @@ -357,7 +369,9 @@ fn main() { Computed( FileRangeWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), range: 23..24, }, @@ -372,7 +386,9 @@ fn main() { Computed( FileRangeWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), range: 55..56, }, @@ -392,7 +408,9 @@ fn main() { Computed( FileRangeWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), range: 7..8, }, @@ -407,7 +425,9 @@ fn main() { Computed( FileRangeWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), range: 55..56, }, @@ -460,7 +480,9 @@ fn main() { Computed( FileRangeWrapper { file_id: FileId( - 1, + EditionedFileId( + Id(1801), + ), ), range: 0..0, }, @@ -475,7 +497,9 @@ fn main() { Computed( FileRangeWrapper { file_id: FileId( - 1, + EditionedFileId( + Id(1801), + ), ), range: 0..0, }, @@ -496,7 +520,9 @@ fn main() { Computed( FileRangeWrapper { file_id: FileId( - 1, + EditionedFileId( + Id(1801), + ), ), range: 0..0, }, @@ -511,7 +537,9 @@ fn main() { Computed( FileRangeWrapper { file_id: FileId( - 1, + EditionedFileId( + Id(1801), + ), ), range: 0..0, }, @@ -532,7 +560,9 @@ fn main() { Computed( FileRangeWrapper { file_id: FileId( - 1, + EditionedFileId( + Id(1801), + ), ), range: 0..0, }, @@ -547,7 +577,9 @@ fn main() 
{ Computed( FileRangeWrapper { file_id: FileId( - 1, + EditionedFileId( + Id(1801), + ), ), range: 0..0, }, @@ -568,7 +600,9 @@ fn main() { Computed( FileRangeWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), range: 0..0, }, @@ -616,7 +650,9 @@ fn main() { Computed( FileRangeWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), range: 7..13, }, @@ -635,7 +671,9 @@ fn main() { Computed( FileRangeWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), range: 7..13, }, @@ -654,7 +692,9 @@ fn main() { Computed( FileRangeWrapper { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), range: 7..13, }, @@ -664,25 +704,6 @@ fn main() { }, ], ), - ( - 222..228, - [ - InlayHintLabelPart { - text: "self", - linked_location: Some( - Computed( - FileRangeWrapper { - file_id: FileId( - 0, - ), - range: 42..46, - }, - ), - ), - tooltip: "", - }, - ], - ), ] "#]], ); diff --git a/crates/ide/src/inlay_hints/closing_brace.rs b/crates/ide/src/inlay_hints/closing_brace.rs index 05253b679489..c3395a8d73aa 100644 --- a/crates/ide/src/inlay_hints/closing_brace.rs +++ b/crates/ide/src/inlay_hints/closing_brace.rs @@ -3,8 +3,8 @@ //! fn g() { //! } /* fn g */ //! ``` -use hir::{DisplayTarget, HirDisplay, InRealFile, Semantics}; -use ide_db::{FileRange, RootDatabase}; +use hir::{DisplayTarget, HirDisplay, HirFileRange, InFile, Semantics}; +use ide_db::RootDatabase; use syntax::{ SyntaxKind, SyntaxNode, T, ast::{self, AstNode, HasLoopBody, HasName}, @@ -21,7 +21,7 @@ pub(super) fn hints( sema: &Semantics<'_, RootDatabase>, config: &InlayHintsConfig, display_target: DisplayTarget, - InRealFile { file_id, value: node }: InRealFile, + InFile { file_id, value: node }: InFile, ) -> Option<()> { let min_lines = config.closing_brace_hints_min_lines?; @@ -138,8 +138,7 @@ pub(super) fn hints( return None; } - let linked_location = - name_range.map(|range| FileRange { file_id: file_id.file_id(sema.db), range }); + let linked_location = name_range.map(|range| HirFileRange { file_id, range }); acc.push(InlayHint { range: closing_token.text_range(), kind: InlayKind::ClosingBrace, diff --git a/crates/ide/src/inlay_hints/closure_captures.rs b/crates/ide/src/inlay_hints/closure_captures.rs index 3186a566d2bc..f6db2bc7a3ab 100644 --- a/crates/ide/src/inlay_hints/closure_captures.rs +++ b/crates/ide/src/inlay_hints/closure_captures.rs @@ -1,9 +1,10 @@ //! Implementation of "closure captures" inlay hints. //! //! Tests live in [`bind_pat`][super::bind_pat] module. 
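For illustration, a rough sketch of where closure-capture hints attach under the `HirFileRange`-based linking below; the exact label text is not shown here, and the capture list is an assumption about how such a closure would be analyzed:

fn main() {
    let a = 1;
    let b = String::new();
    // A capture hint is attached at the closure header, one entry per captured
    // variable (`a` and `b` here); with this change each entry's linked location
    // is a HirFileRange pointing at the captured binding's name in its HIR file.
    let c = move || (a, b.len());
    let _ = c();
}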
+use hir::{HirFileId, HirFileRange}; use ide_db::famous_defs::FamousDefs; use ide_db::text_edit::{TextRange, TextSize}; -use stdx::{TupleExt, never}; +use stdx::never; use syntax::ast::{self, AstNode}; use crate::{ @@ -14,6 +15,7 @@ pub(super) fn hints( acc: &mut Vec, FamousDefs(sema, _): &FamousDefs<'_, '_>, config: &InlayHintsConfig, + _file_id: HirFileId, closure: ast::ClosureExpr, ) -> Option<()> { if !config.closure_capture_hints { @@ -72,13 +74,9 @@ pub(super) fn hints( // force cache the source file, otherwise sema lookup will potentially panic _ = sema.parse_or_expand(source.file()); - source.name().and_then(|name| { - name.syntax().original_file_range_opt(sema.db).map(TupleExt::head).map( - |frange| ide_db::FileRange { - file_id: frange.file_id.file_id(sema.db), - range: frange.range, - }, - ) + Some(HirFileRange { + file_id: source.file(), + range: source.name()?.syntax().text_range(), }) }), tooltip: None, diff --git a/crates/ide/src/inlay_hints/generic_param.rs b/crates/ide/src/inlay_hints/generic_param.rs index 6e1b3bdbdf03..cc44f5d3c935 100644 --- a/crates/ide/src/inlay_hints/generic_param.rs +++ b/crates/ide/src/inlay_hints/generic_param.rs @@ -1,5 +1,6 @@ //! Implementation of inlay hints for generic parameters. use either::Either; +use hir::HirFileRange; use ide_db::{active_parameter::generic_def_for_node, famous_defs::FamousDefs}; use syntax::{ AstNode, @@ -81,31 +82,27 @@ pub(crate) fn hints( return None; } - let range = sema.original_range_opt(arg.syntax())?.range; + let range = arg.syntax().text_range(); let colon = if config.render_colons { ":" } else { "" }; let label = InlayHintLabel::simple( format!("{}{colon}", param_name.display(sema.db, krate.edition(sema.db))), None, - config.lazy_location_opt(|| { - let source_syntax = match param { - hir::GenericParam::TypeParam(it) => { - sema.source(it.merge()).map(|it| it.value.syntax().clone()) - } - hir::GenericParam::ConstParam(it) => { - let syntax = sema.source(it.merge())?.value.syntax().clone(); - let const_param = ast::ConstParam::cast(syntax)?; - const_param.name().map(|it| it.syntax().clone()) - } - hir::GenericParam::LifetimeParam(it) => { - sema.source(it).map(|it| it.value.syntax().clone()) - } - }; - let linked_location = source_syntax.and_then(|it| sema.original_range_opt(&it)); - linked_location.map(|frange| ide_db::FileRange { - file_id: frange.file_id.file_id(sema.db), - range: frange.range, - }) + config.lazy_location_opt(|| match param { + hir::GenericParam::TypeParam(it) => { + sema.source(it.merge()).map(|it| it.node_file_range()) + } + hir::GenericParam::ConstParam(it) => { + let syntax = sema.source(it.merge())?; + let const_param = ast::ConstParam::cast(syntax.value.syntax().clone())?; + const_param.name().map(|it| HirFileRange { + file_id: syntax.file_id, + range: it.syntax().text_range(), + }) + } + hir::GenericParam::LifetimeParam(it) => { + sema.source(it).map(|it| it.node_file_range()) + } }), ); diff --git a/crates/ide/src/inlay_hints/implicit_drop.rs b/crates/ide/src/inlay_hints/implicit_drop.rs index bf4688e9d82b..8e0fb3c5f951 100644 --- a/crates/ide/src/inlay_hints/implicit_drop.rs +++ b/crates/ide/src/inlay_hints/implicit_drop.rs @@ -6,11 +6,11 @@ //! } //! 
``` use hir::{ - ChalkTyInterner, DefWithBody, + ChalkTyInterner, DefWithBody, HirFileRange, db::{DefDatabase as _, HirDatabase as _}, mir::{MirSpan, TerminatorKind}, }; -use ide_db::{FileRange, famous_defs::FamousDefs}; +use ide_db::famous_defs::FamousDefs; use syntax::{ ToSmolStr, @@ -105,12 +105,7 @@ pub(super) fn hints( .patterns_for_binding(binding_idx) .first() .and_then(|d| source_map.pat_syntax(*d).ok()) - .and_then(|d| { - Some(FileRange { - file_id: d.file_id.file_id()?.file_id(sema.db), - range: d.value.text_range(), - }) - }) + .map(|d| HirFileRange { file_id: d.file_id, range: d.value.text_range() }) }), ); label.prepend_str("drop("); diff --git a/crates/ide/src/inlay_hints/lifetime.rs b/crates/ide/src/inlay_hints/lifetime.rs index 0069452e7b90..86ae89964117 100644 --- a/crates/ide/src/inlay_hints/lifetime.rs +++ b/crates/ide/src/inlay_hints/lifetime.rs @@ -6,10 +6,10 @@ use std::iter; use ide_db::{FxHashMap, famous_defs::FamousDefs, syntax_helpers::node_ext::walk_ty}; use itertools::Itertools; -use syntax::{SmolStr, format_smolstr}; use syntax::{ - SyntaxKind, SyntaxToken, + SmolStr, SyntaxKind, SyntaxToken, ast::{self, AstNode, HasGenericParams, HasName}, + format_smolstr, }; use crate::{ diff --git a/crates/ide/src/inlay_hints/param_name.rs b/crates/ide/src/inlay_hints/param_name.rs index 5174228466c0..1e7e231d7a4d 100644 --- a/crates/ide/src/inlay_hints/param_name.rs +++ b/crates/ide/src/inlay_hints/param_name.rs @@ -7,7 +7,7 @@ use std::iter::zip; use either::Either; -use hir::Semantics; +use hir::{HirFileId, InFile, Semantics}; use ide_db::{RootDatabase, famous_defs::FamousDefs}; use stdx::to_lower_snake_case; @@ -19,13 +19,14 @@ pub(super) fn hints( acc: &mut Vec, FamousDefs(sema, krate): &FamousDefs<'_, '_>, config: &InlayHintsConfig, + file_id: HirFileId, expr: ast::Expr, ) -> Option<()> { if !config.parameter_hints { return None; } - let (callable, arg_list) = get_callable(sema, &expr)?; + let InFile { file_id: arg_file, value: (callable, arg_list) } = get_callable(sema, &expr)?; let unary_function = callable.n_params() == 1; let function_name = match callable.kind() { hir::CallableKind::Function(function) => Some(function.name(sema.db)), @@ -37,8 +38,11 @@ pub(super) fn hints( .into_iter() .zip(arg_list.args()) .filter_map(|(p, arg)| { - // Only annotate hints for expressions that exist in the original file - let range = sema.original_range_opt(arg.syntax())?; + // FIXME: macro mapping for the arg range + if arg_file != file_id { + return None; + } + let range = arg.syntax().text_range(); let param_name = p.name(sema.db)?; Some((p, param_name, arg, range)) }) @@ -51,7 +55,7 @@ pub(super) fn hints( arg, ) }) - .map(|(param, param_name, _, hir::FileRange { range, .. 
})| { + .map(|(param, param_name, _, range)| { let colon = if config.render_colons { ":" } else { "" }; let label = InlayHintLabel::simple( format!("{}{colon}", param_name.display(sema.db, krate.edition(sema.db))), @@ -65,10 +69,7 @@ pub(super) fn hints( _ => None, }, }?; - sema.original_range_opt(name_syntax.syntax()).map(|frange| ide_db::FileRange { - file_id: frange.file_id.file_id(sema.db), - range: frange.range, - }) + Some(source.with_value(name_syntax).node_file_range()) }), ); InlayHint { @@ -90,17 +91,25 @@ pub(super) fn hints( fn get_callable<'db>( sema: &Semantics<'db, RootDatabase>, expr: &ast::Expr, -) -> Option<(hir::Callable<'db>, ast::ArgList)> { +) -> Option, ast::ArgList)>> { match expr { ast::Expr::CallExpr(expr) => { let descended = sema.descend_node_into_attributes(expr.clone()).pop(); let expr = descended.as_ref().unwrap_or(expr); - sema.type_of_expr(&expr.expr()?)?.original.as_callable(sema.db).zip(expr.arg_list()) + let file_id = sema.hir_file_for(expr.syntax()); + sema.type_of_expr(&expr.expr()?)? + .original + .as_callable(sema.db) + .zip(expr.arg_list()) + .map(|it| InFile::new(file_id, it)) } ast::Expr::MethodCallExpr(expr) => { let descended = sema.descend_node_into_attributes(expr.clone()).pop(); let expr = descended.as_ref().unwrap_or(expr); - sema.resolve_method_call_as_callable(expr).zip(expr.arg_list()) + let file_id = sema.hir_file_for(expr.syntax()); + sema.resolve_method_call_as_callable(expr) + .zip(expr.arg_list()) + .map(|it| InFile::new(file_id, it)) } _ => None, } diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index b3b8deb61fc0..14b6d37d128b 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -16,7 +16,6 @@ mod fixture; mod markup; -mod navigation_target; mod annotations; mod call_hierarchy; @@ -62,21 +61,23 @@ use std::panic::{AssertUnwindSafe, UnwindSafe}; use cfg::CfgOptions; use fetch_crates::CrateInfo; -use hir::{ChangeWithProcMacros, EditionedFileId, crate_def_map, sym}; +use hir::{ + ChangeWithProcMacros, EditionedFileId, HirFileId, HirFilePosition, HirFileRange, crate_def_map, + db::ExpandDatabase, sym, +}; use ide_db::{ FxHashMap, FxIndexSet, LineIndexDatabase, base_db::{ CrateOrigin, CrateWorkspaceData, Env, FileSet, RootQueryDb, SourceDatabase, VfsPath, salsa::Cancelled, }, + navigation_target::ToNav, prime_caches, symbol_index, }; use syntax::SourceFile; use triomphe::Arc; use view_memory_layout::{RecursiveMemoryLayout, view_memory_layout}; -use crate::navigation_target::ToNav; - pub use crate::{ annotations::{Annotation, AnnotationConfig, AnnotationKind, AnnotationLocation}, call_hierarchy::{CallHierarchyConfig, CallItem}, @@ -101,7 +102,6 @@ pub use crate::{ PackageInformation, SymbolInformationKind, }, move_item::Direction, - navigation_target::{NavigationTarget, TryToNav, UpmappingResult}, references::ReferenceSearchResult, rename::RenameError, runnables::{Runnable, RunnableKind, TestId, UpdateTest}, @@ -130,6 +130,10 @@ pub use ide_db::{ documentation::Documentation, label::Label, line_index::{LineCol, LineIndex}, + navigation_target::{ + self, HirNavigationTarget, NavigationTarget, RealNavigationTarget, TryToNav, + UpmappingResult, + }, prime_caches::ParallelPrimeCachesProgress, search::{ReferenceCategory, SearchScope}, source_change::{FileSystemEdit, SnippetEdit, SourceChange}, @@ -231,7 +235,7 @@ impl Analysis { // Creates an analysis instance for a single file, without any external // dependencies, stdlib support or ability to apply changes. 
See // `AnalysisHost` for creating a fully-featured analysis. - pub fn from_single_file(text: String) -> (Analysis, FileId) { + pub fn from_single_file(text: String) -> (Analysis, EditionedFileId) { let mut host = AnalysisHost::default(); let file_id = FileId::from_raw(0); let mut file_set = FileSet::default(); @@ -271,7 +275,7 @@ impl Analysis { change.set_crate_graph(crate_graph); host.apply_change(change); - (host.analysis(), file_id) + (host.analysis(), EditionedFileId::new(&host.db, file_id, Edition::CURRENT)) } /// Debug info about the current state of the analysis. @@ -387,7 +391,7 @@ impl Analysis { self.with_db(fetch_crates::fetch_crates) } - pub fn expand_macro(&self, position: FilePosition) -> Cancellable> { + pub fn expand_macro(&self, position: HirFilePosition) -> Cancellable> { self.with_db(|db| expand_macro::expand_macro(db, position)) } @@ -430,20 +434,16 @@ impl Analysis { /// Returns a tree representation of symbols in the file. Useful to draw a /// file outline. - pub fn file_structure(&self, file_id: FileId) -> Cancellable> { + pub fn file_structure(&self, file_id: HirFileId) -> Cancellable> { // FIXME: Edition - self.with_db(|db| { - let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id); - - file_structure::file_structure(&db.parse(editioned_file_id_wrapper).tree()) - }) + self.with_db(|db| file_structure::file_structure(&db.parse_or_expand(file_id))) } /// Returns a list of the places in the file where type hints can be displayed. pub fn inlay_hints( &self, config: &InlayHintsConfig, - file_id: FileId, + file_id: HirFileId, range: Option, ) -> Cancellable> { self.with_db(|db| inlay_hints::inlay_hints(db, file_id, range, config)) @@ -451,7 +451,7 @@ impl Analysis { pub fn inlay_hints_resolve( &self, config: &InlayHintsConfig, - file_id: FileId, + file_id: HirFileId, resolve_range: TextRange, hash: u64, hasher: impl Fn(&InlayHint) -> u64 + Send + UnwindSafe, @@ -462,22 +462,21 @@ impl Analysis { } /// Returns the set of folding ranges. - pub fn folding_ranges(&self, file_id: FileId) -> Cancellable> { - self.with_db(|db| { - let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id); - - folding_ranges::folding_ranges(&db.parse(editioned_file_id_wrapper).tree()) - }) + pub fn folding_ranges(&self, file_id: HirFileId) -> Cancellable> { + self.with_db(|db| folding_ranges::folding_ranges(&db.parse_or_expand(file_id))) } /// Fuzzy searches for a symbol. - pub fn symbol_search(&self, query: Query, limit: usize) -> Cancellable> { + pub fn symbol_search( + &self, + query: Query, + limit: usize, + ) -> Cancellable> { self.with_db(|db| { symbol_index::world_symbols(db, query) - .into_iter() // xx: should we make this a par iter? - .filter_map(|s| s.try_to_nav(db)) + .into_iter() + .filter_map(|s| s.try_to_nav_hir(db)) .take(limit) - .map(UpmappingResult::call_site) .collect::>() }) } @@ -485,38 +484,38 @@ impl Analysis { /// Returns the definitions from the symbol at `position`. pub fn goto_definition( &self, - position: FilePosition, - ) -> Cancellable>>> { + position: HirFilePosition, + ) -> Cancellable>>> { self.with_db(|db| goto_definition::goto_definition(db, position)) } /// Returns the declaration from the symbol at `position`. pub fn goto_declaration( &self, - position: FilePosition, - ) -> Cancellable>>> { + position: HirFilePosition, + ) -> Cancellable>>> { self.with_db(|db| goto_declaration::goto_declaration(db, position)) } /// Returns the impls from the symbol at `position`. 
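Taken together, the `lib.rs` hunks above move the `Analysis` entry points from raw `FileId`/`FilePosition` to the macro-aware `HirFileId`/`HirFilePosition`, and `from_single_file` now hands back an `EditionedFileId`. A minimal sketch of how a caller might drive the reworked surface; the `Into<HirFileId>` conversion and the exact import paths are assumptions taken from how the updated tests call these APIs, not part of the patch itself:

use hir::{HirFileId, HirFilePosition};
use ide::{Analysis, Cancellable};
use syntax::TextSize;

fn outline_and_goto(text: String, offset: u32) -> Cancellable<()> {
    // `from_single_file` now returns an `EditionedFileId` instead of a bare `FileId`.
    let (analysis, file_id) = Analysis::from_single_file(text);
    // A non-macro file converts into the macro-aware `HirFileId`.
    let hir_file: HirFileId = file_id.into();
    // Structure and folding queries now operate directly on a `HirFileId`.
    let _outline = analysis.file_structure(hir_file)?;
    let _folds = analysis.folding_ranges(hir_file)?;
    // Navigation queries take a `HirFilePosition` and yield `HirNavigationTarget`s.
    let pos = HirFilePosition { file_id: hir_file, offset: TextSize::new(offset) };
    if let Some(defs) = analysis.goto_definition(pos)? {
        for nav in defs.info {
            eprintln!("definition at {:?} {:?}", nav.file_id, nav.focus_or_full_range());
        }
    }
    Ok(())
}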
pub fn goto_implementation( &self, - position: FilePosition, - ) -> Cancellable>>> { + position: HirFilePosition, + ) -> Cancellable>>> { self.with_db(|db| goto_implementation::goto_implementation(db, position)) } /// Returns the type definitions for the symbol at `position`. pub fn goto_type_definition( &self, - position: FilePosition, - ) -> Cancellable>>> { + position: HirFilePosition, + ) -> Cancellable>>> { self.with_db(|db| goto_type_definition::goto_type_definition(db, position)) } pub fn find_all_refs( &self, - position: FilePosition, + position: HirFilePosition, search_scope: Option, ) -> Cancellable>> { let search_scope = AssertUnwindSafe(search_scope); @@ -530,7 +529,7 @@ impl Analysis { pub fn hover( &self, config: &HoverConfig, - range: FileRange, + range: HirFileRange, ) -> Cancellable>> { self.with_db(|db| hover::hover(db, range, config)) } @@ -566,8 +565,8 @@ impl Analysis { /// Computes call hierarchy candidates for the given file position. pub fn call_hierarchy( &self, - position: FilePosition, - ) -> Cancellable>>> { + position: HirFilePosition, + ) -> Cancellable>>> { self.with_db(|db| call_hierarchy::call_hierarchy(db, position)) } @@ -575,7 +574,7 @@ impl Analysis { pub fn incoming_calls( &self, config: CallHierarchyConfig, - position: FilePosition, + position: HirFilePosition, ) -> Cancellable>> { self.with_db(|db| call_hierarchy::incoming_calls(db, config, position)) } @@ -584,18 +583,24 @@ impl Analysis { pub fn outgoing_calls( &self, config: CallHierarchyConfig, - position: FilePosition, + position: HirFilePosition, ) -> Cancellable>> { self.with_db(|db| call_hierarchy::outgoing_calls(db, config, position)) } /// Returns a `mod name;` declaration which created the current module. - pub fn parent_module(&self, position: FilePosition) -> Cancellable> { + pub fn parent_module( + &self, + position: HirFilePosition, + ) -> Cancellable> { self.with_db(|db| parent_module::parent_module(db, position)) } /// Returns vec of `mod name;` declaration which are created by the current module. - pub fn child_modules(&self, position: FilePosition) -> Cancellable> { + pub fn child_modules( + &self, + position: HirFilePosition, + ) -> Cancellable> { self.with_db(|db| child_modules::child_modules(db, position)) } @@ -635,7 +640,7 @@ impl Analysis { } /// Returns the set of possible targets to run for the current file. - pub fn runnables(&self, file_id: FileId) -> Cancellable> { + pub fn runnables(&self, file_id: HirFileId) -> Cancellable> { self.with_db(|db| runnables::runnables(db, file_id)) } @@ -656,7 +661,7 @@ impl Analysis { pub fn highlight( &self, highlight_config: HighlightConfig, - file_id: FileId, + file_id: HirFileId, ) -> Cancellable> { self.with_db(|db| syntax_highlighting::highlight(db, highlight_config, file_id, None)) } @@ -665,7 +670,7 @@ impl Analysis { pub fn highlight_related( &self, config: HighlightRelatedConfig, - position: FilePosition, + position: HirFilePosition, ) -> Cancellable>> { self.with_db(|db| { highlight_related::highlight_related(&Semantics::new(db), config, position) @@ -676,7 +681,7 @@ impl Analysis { pub fn highlight_range( &self, highlight_config: HighlightConfig, - frange: FileRange, + frange: HirFileRange, ) -> Cancellable> { self.with_db(|db| { syntax_highlighting::highlight(db, highlight_config, frange.file_id, Some(frange.range)) @@ -684,7 +689,7 @@ impl Analysis { } /// Computes syntax highlighting for the given file. 
- pub fn highlight_as_html(&self, file_id: FileId, rainbow: bool) -> Cancellable { + pub fn highlight_as_html(&self, file_id: HirFileId, rainbow: bool) -> Cancellable { self.with_db(|db| syntax_highlighting::highlight_as_html(db, file_id, rainbow)) } @@ -818,7 +823,7 @@ impl Analysis { pub fn annotations( &self, config: &AnnotationConfig, - file_id: FileId, + file_id: HirFileId, ) -> Cancellable> { self.with_db(|db| annotations::annotations(db, config, file_id)) } @@ -859,12 +864,17 @@ impl Analysis { /// /// Salsa implements cancellation by unwinding with a special value and /// catching it on the API boundary. - fn with_db(&self, f: F) -> Cancellable + pub fn with_db(&self, f: F) -> Cancellable where - F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe, + F: FnOnce(&RootDatabase) -> T, { let snap = self.db.clone(); - Cancelled::catch(|| f(&snap)) + let f = AssertUnwindSafe(f); + Cancelled::catch(|| ({ f }.0)(&snap)) + } + + pub fn db(&self) -> &RootDatabase { + &self.db } } diff --git a/crates/ide/src/moniker.rs b/crates/ide/src/moniker.rs index 795c1f2ca3c0..d5fa54b208ba 100644 --- a/crates/ide/src/moniker.rs +++ b/crates/ide/src/moniker.rs @@ -409,7 +409,7 @@ mod tests { #[track_caller] fn no_moniker(#[rust_analyzer::rust_fixture] ra_fixture: &str) { let (analysis, position) = fixture::position(ra_fixture); - if let Some(x) = analysis.moniker(position).unwrap() { + if let Some(x) = analysis.moniker(position.into_file_id(&analysis.db)).unwrap() { assert_eq!(x.info.len(), 0, "Moniker found but no moniker expected: {x:?}"); } } @@ -422,7 +422,11 @@ mod tests { kind: MonikerKind, ) { let (analysis, position) = fixture::position(ra_fixture); - let x = analysis.moniker(position).unwrap().expect("no moniker found").info; + let x = analysis + .moniker(position.into_file_id(&analysis.db)) + .unwrap() + .expect("no moniker found") + .info; assert_eq!(x.len(), 1); match x.into_iter().next().unwrap() { MonikerResult::Local { enclosing_moniker: Some(x) } => { @@ -447,7 +451,11 @@ mod tests { kind: MonikerKind, ) { let (analysis, position) = fixture::position(ra_fixture); - let x = analysis.moniker(position).unwrap().expect("no moniker found").info; + let x = analysis + .moniker(position.into_file_id(&analysis.db)) + .unwrap() + .expect("no moniker found") + .info; assert_eq!(x.len(), 1); match x.into_iter().next().unwrap() { MonikerResult::Local { enclosing_moniker } => { diff --git a/crates/ide/src/move_item.rs b/crates/ide/src/move_item.rs index f3bb3df1cd8d..2a033f1fc4c4 100644 --- a/crates/ide/src/move_item.rs +++ b/crates/ide/src/move_item.rs @@ -184,8 +184,11 @@ mod tests { direction: Direction, ) { let (analysis, range) = fixture::range(ra_fixture); - let edit = analysis.move_item(range, direction).unwrap().unwrap_or_default(); - let mut file = analysis.file_text(range.file_id).unwrap().to_string(); + let edit = analysis + .move_item(range.into_file_id(&analysis.db), direction) + .unwrap() + .unwrap_or_default(); + let mut file = analysis.file_text(range.file_id.file_id(&analysis.db)).unwrap().to_string(); edit.apply(&mut file); expect.assert_eq(&file); } diff --git a/crates/ide/src/parent_module.rs b/crates/ide/src/parent_module.rs index 50219cee57db..80cada18315e 100644 --- a/crates/ide/src/parent_module.rs +++ b/crates/ide/src/parent_module.rs @@ -1,6 +1,6 @@ -use hir::{Semantics, crate_def_map}; +use hir::{HirFilePosition, Semantics, crate_def_map}; use ide_db::{ - FileId, FilePosition, RootDatabase, + FileId, RootDatabase, base_db::{Crate, RootQueryDb}, }; use itertools::Itertools; 
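The `with_db` hunk above drops the `UnwindSafe` bound on the closure by wrapping it in `AssertUnwindSafe` before handing it to `Cancelled::catch`. A generic sketch of that wrapping pattern using only `std`; the names here are illustrative and not the rust-analyzer types:

use std::panic::{self, AssertUnwindSafe};

// Runs `f`, catching any unwind (for example a cancellation panic) at the boundary.
// `AssertUnwindSafe` lets a closure without an `UnwindSafe` bound cross
// `catch_unwind`; the caller takes responsibility for state consistency.
fn catch_boundary<T>(f: impl FnOnce() -> T) -> Result<T, Box<dyn std::any::Any + Send>> {
    let f = AssertUnwindSafe(f);
    panic::catch_unwind(move || (f.0)())
}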
@@ -9,7 +9,7 @@ use syntax::{ ast::{self, AstNode}, }; -use crate::NavigationTarget; +use crate::navigation_target::HirNavigationTarget; // Feature: Parent Module // @@ -22,11 +22,15 @@ use crate::NavigationTarget; // ![Parent Module](https://user-images.githubusercontent.com/48062697/113065580-04c21800-91b1-11eb-9a32-00086161c0bd.gif) /// This returns `Vec` because a module may be included from several places. -pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec { +pub(crate) fn parent_module( + db: &RootDatabase, + mut position: HirFilePosition, +) -> Vec { let sema = Semantics::new(db); - let source_file = sema.parse_guess_edition(position.file_id); + position.file_id = sema.adjust_edition(position.file_id); + let syntax = sema.parse_or_expand(position.file_id); - let mut module = find_node_at_offset::(source_file.syntax(), position.offset); + let mut module = find_node_at_offset::(&syntax, position.offset); // If cursor is literally on `mod foo`, go to the grandpa. if let Some(m) = &module { @@ -43,11 +47,11 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec sema .to_def(&module) .into_iter() - .flat_map(|module| NavigationTarget::from_module_to_decl(db, module)) + .map(|module| HirNavigationTarget::from_module_to_decl(db, module)) .collect(), None => sema - .file_to_module_defs(position.file_id) - .flat_map(|module| NavigationTarget::from_module_to_decl(db, module)) + .hir_file_to_module_defs(position.file_id) + .map(|module| HirNavigationTarget::from_module_to_decl(db, module)) .collect(), } } @@ -66,18 +70,21 @@ pub(crate) fn crates_for(db: &RootDatabase, file_id: FileId) -> Vec { #[cfg(test)] mod tests { - use ide_db::FileRange; + use hir::HirFileRange; use crate::fixture; fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str) { let (analysis, position, expected) = fixture::annotations(ra_fixture); - let navs = analysis.parent_module(position).unwrap(); + let navs = analysis.parent_module(position.into()).unwrap(); let navs = navs .iter() - .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }) + .map(|nav| HirFileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }) .collect::>(); - assert_eq!(expected.into_iter().map(|(fr, _)| fr).collect::>(), navs); + assert_eq!( + expected.into_iter().map(|(r, _)| r.into()).collect::>(), + navs + ); } #[test] @@ -151,7 +158,7 @@ $0 mod foo; "#, ); - assert_eq!(analysis.crates_for(file_id).unwrap().len(), 1); + assert_eq!(analysis.crates_for(file_id.file_id(&analysis.db)).unwrap().len(), 1); } #[test] @@ -166,6 +173,6 @@ mod baz; mod baz; "#, ); - assert_eq!(analysis.crates_for(file_id).unwrap().len(), 2); + assert_eq!(analysis.crates_for(file_id.file_id(&analysis.db)).unwrap().len(), 2); } } diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs index fe874bc99b40..7d0ed771b328 100644 --- a/crates/ide/src/references.rs +++ b/crates/ide/src/references.rs @@ -17,16 +17,14 @@ //! - The type name in a struct/enum/variant definition //! Then only constructor/initialization usages will be shown, filtering out other references. 
-use hir::{PathResolution, Semantics}; +use hir::{HirFileId, HirFilePosition, PathResolution, Semantics}; use ide_db::{ - FileId, RootDatabase, + FxHashMap, RootDatabase, defs::{Definition, NameClass, NameRefClass}, helpers::pick_best_token, search::{ReferenceCategory, SearchScope, UsageSearchResult}, }; use itertools::Itertools; -use nohash_hasher::IntMap; -use span::Edition; use syntax::{ AstNode, SyntaxKind::*, @@ -35,7 +33,9 @@ use syntax::{ match_ast, }; -use crate::{FilePosition, HighlightedRange, NavigationTarget, TryToNav, highlight_related}; +use crate::{ + HighlightedRange, TryToNav, highlight_related, navigation_target::HirNavigationTarget, +}; /// Result of a reference search operation. #[derive(Debug, Clone)] @@ -50,14 +50,14 @@ pub struct ReferenceSearchResult { /// The map key is the file ID, and the value is a vector of (range, category) pairs. /// - range: The text range of the reference in the file /// - category: Metadata about how the reference is used (read/write/etc) - pub references: IntMap>, + pub references: FxHashMap>, } /// Information about the declaration site of a searched item. #[derive(Debug, Clone)] pub struct Declaration { /// Navigation information to jump to the declaration - pub nav: NavigationTarget, + pub nav: HirNavigationTarget, /// Whether the declared item is mutable (relevant for variables) pub is_mut: bool, } @@ -109,11 +109,12 @@ pub struct Declaration { /// In these cases, other kinds of references (like type references) are filtered out. pub(crate) fn find_all_refs( sema: &Semantics<'_, RootDatabase>, - position: FilePosition, + mut position: HirFilePosition, search_scope: Option, ) -> Option> { let _p = tracing::info_span!("find_all_refs").entered(); - let syntax = sema.parse_guess_edition(position.file_id).syntax().clone(); + position.file_id = sema.adjust_edition(position.file_id); + let syntax = sema.parse_or_expand(position.file_id); let make_searcher = |literal_search: bool| { move |def: Definition| { let mut usages = @@ -122,11 +123,11 @@ pub(crate) fn find_all_refs( retain_adt_literal_usages(&mut usages, def, sema); } - let mut references: IntMap> = usages + let references: FxHashMap> = usages .into_iter() .map(|(file_id, refs)| { ( - file_id.file_id(sema.db), + file_id, refs.into_iter() .map(|file_ref| (file_ref.range, file_ref.category)) .unique() @@ -136,32 +137,20 @@ pub(crate) fn find_all_refs( .collect(); let declaration = match def { Definition::Module(module) => { - Some(NavigationTarget::from_module_to_decl(sema.db, module)) + Some(HirNavigationTarget::from_module_to_decl(sema.db, module)) } - def => def.try_to_nav(sema.db), + def => def.try_to_nav_hir(sema.db), } - .map(|nav| { - let (nav, extra_ref) = match nav.def_site { - Some(call) => (call, Some(nav.call_site)), - None => (nav.call_site, None), - }; - if let Some(extra_ref) = extra_ref { - references - .entry(extra_ref.file_id) - .or_default() - .push((extra_ref.focus_or_full_range(), ReferenceCategory::empty())); - } - Declaration { - is_mut: matches!(def, Definition::Local(l) if l.is_mut(sema.db)), - nav, - } + .map(|nav| Declaration { + is_mut: matches!(def, Definition::Local(l) if l.is_mut(sema.db)), + nav, }); ReferenceSearchResult { declaration, references } } }; // Find references for control-flow keywords. 
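The `find_all_refs` hunks above re-key the usage map by `EditionedFileId` in a plain `FxHashMap` (no more `IntMap` or def-site/call-site splitting) and store an un-upmapped `HirNavigationTarget` in `Declaration`. A small, hypothetical consumer of the reshaped result; field and method names follow the structs in this patch, while the import path is an assumption:

use ide::ReferenceSearchResult;

fn dump(results: &[ReferenceSearchResult]) {
    for result in results {
        if let Some(decl) = &result.declaration {
            // The declaration nav now stays in macro-space (`HirNavigationTarget`).
            eprintln!("decl: {:?} {:?}", decl.nav.file_id, decl.nav.focus_or_full_range());
        }
        for (file_id, refs) in &result.references {
            // Entries are keyed by `EditionedFileId` and list (range, category) pairs.
            for (range, _category) in refs {
                eprintln!("  ref in {file_id:?} at {range:?}");
            }
        }
    }
}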
- if let Some(res) = handle_control_flow_keywords(sema, position) { + if let Some(res) = handle_control_flow_keywords(sema, &syntax, position) { return Some(vec![res]); } @@ -290,7 +279,10 @@ fn retain_adt_literal_usages( /// - `None` otherwise /// /// The returned name is the name of the type whose constructor usages should be searched for. -fn name_for_constructor_search(syntax: &SyntaxNode, position: FilePosition) -> Option { +fn name_for_constructor_search( + syntax: &SyntaxNode, + position: HirFilePosition, +) -> Option { let token = syntax.token_at_offset(position.offset).right_biased()?; let token_parent = token.parent()?; let kind = token.kind(); @@ -391,14 +383,11 @@ fn is_lit_name_ref(name_ref: &ast::NameRef) -> bool { fn handle_control_flow_keywords( sema: &Semantics<'_, RootDatabase>, - FilePosition { file_id, offset }: FilePosition, + syntax: &SyntaxNode, + HirFilePosition { file_id, offset }: HirFilePosition, ) -> Option { - let file = sema.parse_guess_edition(file_id); - let edition = sema - .attach_first_edition(file_id) - .map(|it| it.edition(sema.db)) - .unwrap_or(Edition::CURRENT); - let token = pick_best_token(file.syntax().token_at_offset(offset), |kind| match kind { + let edition = file_id.edition(sema.db); + let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind { _ if kind.is_keyword(edition) => 4, T![=>] => 3, _ => 1, @@ -422,7 +411,7 @@ fn handle_control_flow_keywords( .into_iter() .map(|HighlightedRange { range, category }| (range, category)) .collect(); - (file_id.file_id(sema.db), ranges) + (file_id, ranges) }) .collect(); @@ -454,10 +443,10 @@ fn test() { } "#, expect![[r#" - test_func Function FileId(0) 0..17 3..12 + test_func Function FileId(EditionedFileId(Id(1800))) 0..17 3..12 - FileId(0) 35..44 - FileId(0) 75..84 test + FileId(EditionedFileId(Id(1800))) 35..44 + FileId(EditionedFileId(Id(1800))) 75..84 test "#]], ); @@ -475,10 +464,10 @@ fn test() { } "#, expect![[r#" - test_func Function FileId(0) 0..17 3..12 + test_func Function FileId(EditionedFileId(Id(1800))) 0..17 3..12 - FileId(0) 35..44 - FileId(0) 96..105 test + FileId(EditionedFileId(Id(1800))) 35..44 + FileId(EditionedFileId(Id(1800))) 96..105 test "#]], ); } @@ -496,10 +485,10 @@ fn test() { } "#, expect![[r#" - f Field FileId(0) 11..17 11..12 + f Field FileId(EditionedFileId(Id(1800))) 11..17 11..12 - FileId(0) 61..62 read test - FileId(0) 76..77 write test + FileId(EditionedFileId(Id(1800))) 61..62 read test + FileId(EditionedFileId(Id(1800))) 76..77 write test "#]], ); } @@ -520,9 +509,9 @@ fn main() { } "#, expect![[r#" - Foo Struct FileId(0) 0..26 7..10 + Foo Struct FileId(EditionedFileId(Id(1800))) 0..26 7..10 - FileId(0) 101..104 + FileId(EditionedFileId(Id(1800))) 101..104 "#]], ); } @@ -538,10 +527,10 @@ struct Foo$0 {} } "#, expect![[r#" - Foo Struct FileId(0) 0..13 7..10 + Foo Struct FileId(EditionedFileId(Id(1800))) 0..13 7..10 - FileId(0) 41..44 - FileId(0) 54..57 + FileId(EditionedFileId(Id(1800))) 41..44 + FileId(EditionedFileId(Id(1800))) 54..57 "#]], ); } @@ -557,9 +546,9 @@ struct Foo $0{} } "#, expect![[r#" - Foo Struct FileId(0) 0..16 7..10 + Foo Struct FileId(EditionedFileId(Id(1800))) 0..16 7..10 - FileId(0) 64..67 + FileId(EditionedFileId(Id(1800))) 64..67 "#]], ); } @@ -576,9 +565,9 @@ fn main() { } "#, expect![[r#" - Foo Struct FileId(0) 0..16 7..10 + Foo Struct FileId(EditionedFileId(Id(1800))) 0..16 7..10 - FileId(0) 54..57 + FileId(EditionedFileId(Id(1800))) 54..57 "#]], ); } @@ -597,9 +586,9 @@ fn main() { } "#, expect![[r#" - Foo Union 
FileId(0) 0..24 6..9 + Foo Union FileId(EditionedFileId(Id(1800))) 0..24 6..9 - FileId(0) 62..65 + FileId(EditionedFileId(Id(1800))) 62..65 "#]], ); } @@ -621,11 +610,11 @@ fn main() { } "#, expect![[r#" - Foo Enum FileId(0) 0..37 5..8 + Foo Enum FileId(EditionedFileId(Id(1800))) 0..37 5..8 - FileId(0) 74..77 - FileId(0) 90..93 - FileId(0) 108..111 + FileId(EditionedFileId(Id(1800))) 74..77 + FileId(EditionedFileId(Id(1800))) 90..93 + FileId(EditionedFileId(Id(1800))) 108..111 "#]], ); } @@ -645,9 +634,9 @@ fn main() { } "#, expect![[r#" - A Variant FileId(0) 15..27 15..16 + A Variant FileId(EditionedFileId(Id(1800))) 15..27 15..16 - FileId(0) 95..96 + FileId(EditionedFileId(Id(1800))) 95..96 "#]], ); } @@ -667,9 +656,9 @@ fn main() { } "#, expect![[r#" - A Variant FileId(0) 15..21 15..16 + A Variant FileId(EditionedFileId(Id(1800))) 15..21 15..16 - FileId(0) 89..90 + FileId(EditionedFileId(Id(1800))) 89..90 "#]], ); } @@ -688,10 +677,10 @@ fn main() { } "#, expect![[r#" - Foo Enum FileId(0) 0..26 5..8 + Foo Enum FileId(EditionedFileId(Id(1800))) 0..26 5..8 - FileId(0) 50..53 - FileId(0) 63..66 + FileId(EditionedFileId(Id(1800))) 50..53 + FileId(EditionedFileId(Id(1800))) 63..66 "#]], ); } @@ -710,9 +699,9 @@ fn main() { } "#, expect![[r#" - Foo Enum FileId(0) 0..32 5..8 + Foo Enum FileId(EditionedFileId(Id(1800))) 0..32 5..8 - FileId(0) 73..76 + FileId(EditionedFileId(Id(1800))) 73..76 "#]], ); } @@ -731,9 +720,9 @@ fn main() { } "#, expect![[r#" - Foo Enum FileId(0) 0..33 5..8 + Foo Enum FileId(EditionedFileId(Id(1800))) 0..33 5..8 - FileId(0) 70..73 + FileId(EditionedFileId(Id(1800))) 70..73 "#]], ); } @@ -754,12 +743,12 @@ fn main() { i = 5; }"#, expect![[r#" - i Local FileId(0) 20..25 24..25 write + i Local FileId(EditionedFileId(Id(1800))) 20..25 24..25 write - FileId(0) 50..51 write - FileId(0) 54..55 read - FileId(0) 76..77 write - FileId(0) 94..95 write + FileId(EditionedFileId(Id(1800))) 50..51 write + FileId(EditionedFileId(Id(1800))) 54..55 read + FileId(EditionedFileId(Id(1800))) 76..77 write + FileId(EditionedFileId(Id(1800))) 94..95 write "#]], ); } @@ -778,10 +767,10 @@ fn bar() { } "#, expect![[r#" - spam Local FileId(0) 19..23 19..23 + spam Local FileId(EditionedFileId(Id(1800))) 19..23 19..23 - FileId(0) 34..38 read - FileId(0) 41..45 read + FileId(EditionedFileId(Id(1800))) 34..38 read + FileId(EditionedFileId(Id(1800))) 41..45 read "#]], ); } @@ -793,9 +782,9 @@ fn bar() { fn foo(i : u32) -> u32 { i$0 } "#, expect![[r#" - i ValueParam FileId(0) 7..8 7..8 + i ValueParam FileId(EditionedFileId(Id(1800))) 7..8 7..8 - FileId(0) 25..26 read + FileId(EditionedFileId(Id(1800))) 25..26 read "#]], ); } @@ -807,9 +796,9 @@ fn foo(i : u32) -> u32 { i$0 } fn foo(i$0 : u32) -> u32 { i } "#, expect![[r#" - i ValueParam FileId(0) 7..8 7..8 + i ValueParam FileId(EditionedFileId(Id(1800))) 7..8 7..8 - FileId(0) 25..26 read + FileId(EditionedFileId(Id(1800))) 25..26 read "#]], ); } @@ -828,9 +817,9 @@ fn main(s: Foo) { } "#, expect![[r#" - spam Field FileId(0) 17..30 21..25 + spam Field FileId(EditionedFileId(Id(1800))) 17..30 21..25 - FileId(0) 67..71 read + FileId(EditionedFileId(Id(1800))) 67..71 read "#]], ); } @@ -845,7 +834,7 @@ impl Foo { } "#, expect![[r#" - f Function FileId(0) 27..43 30..31 + f Function FileId(EditionedFileId(Id(1800))) 27..43 30..31 (no references) "#]], @@ -863,7 +852,7 @@ enum Foo { } "#, expect![[r#" - B Variant FileId(0) 22..23 22..23 + B Variant FileId(EditionedFileId(Id(1800))) 22..23 22..23 (no references) "#]], @@ -881,7 +870,7 @@ enum Foo { } "#, 
expect![[r#" - field Field FileId(0) 26..35 26..31 + field Field FileId(EditionedFileId(Id(1800))) 26..35 26..31 (no references) "#]], @@ -905,11 +894,11 @@ impl S { } "#, expect![[r#" - S Struct FileId(0) 0..38 7..8 + S Struct FileId(EditionedFileId(Id(1800))) 0..38 7..8 - FileId(0) 48..49 - FileId(0) 71..75 - FileId(0) 86..90 + FileId(EditionedFileId(Id(1800))) 48..49 + FileId(EditionedFileId(Id(1800))) 71..75 + FileId(EditionedFileId(Id(1800))) 86..90 "#]], ) } @@ -931,9 +920,9 @@ impl TestTrait for () { } "#, expect![[r#" - Assoc TypeAlias FileId(0) 92..108 97..102 + Assoc TypeAlias FileId(EditionedFileId(Id(1800))) 92..108 97..102 - FileId(0) 31..36 + FileId(EditionedFileId(Id(1800))) 31..36 "#]], ) } @@ -973,10 +962,10 @@ fn f() { } "#, expect![[r#" - Foo Struct FileId(1) 17..51 28..31 foo + Foo Struct FileId(EditionedFileId(Id(1801))) 17..51 28..31 foo - FileId(0) 53..56 - FileId(2) 79..82 + FileId(EditionedFileId(Id(1800))) 53..56 + FileId(EditionedFileId(Id(1802))) 79..82 "#]], ); } @@ -1000,9 +989,9 @@ pub struct Foo { } "#, expect![[r#" - foo Module FileId(0) 0..8 4..7 + foo Module FileId(EditionedFileId(Id(1800))) 0..8 4..7 - FileId(0) 14..17 import + FileId(EditionedFileId(Id(1800))) 14..17 import "#]], ); } @@ -1018,9 +1007,9 @@ mod foo; use self$0; "#, expect![[r#" - foo Module FileId(0) 0..8 4..7 + foo Module FileId(EditionedFileId(Id(1800))) 0..8 4..7 - FileId(1) 4..8 import + FileId(EditionedFileId(Id(1801))) 4..8 import "#]], ); } @@ -1033,9 +1022,9 @@ use self$0; use self$0; "#, expect![[r#" - Module FileId(0) 0..10 + Module FileId(EditionedFileId(Id(1800))) 0..10 - FileId(0) 4..8 import + FileId(EditionedFileId(Id(1800))) 4..8 import "#]], ); } @@ -1061,10 +1050,10 @@ pub(super) struct Foo$0 { } "#, expect![[r#" - Foo Struct FileId(2) 0..41 18..21 some + Foo Struct FileId(EditionedFileId(Id(1802))) 0..41 18..21 some - FileId(1) 20..23 import - FileId(1) 47..50 + FileId(EditionedFileId(Id(1801))) 20..23 import + FileId(EditionedFileId(Id(1801))) 47..50 "#]], ); } @@ -1089,10 +1078,10 @@ pub(super) struct Foo$0 { code, None, expect![[r#" - quux Function FileId(0) 19..35 26..30 + quux Function FileId(EditionedFileId(Id(1800))) 19..35 26..30 - FileId(1) 16..20 - FileId(2) 16..20 + FileId(EditionedFileId(Id(1801))) 16..20 + FileId(EditionedFileId(Id(1802))) 16..20 "#]], ); @@ -1102,9 +1091,9 @@ pub(super) struct Foo$0 { SearchScope::single_file(EditionedFileId::current_edition(db, FileId::from_raw(2))) }), expect![[r#" - quux Function FileId(0) 19..35 26..30 + quux Function FileId(EditionedFileId(Id(1800))) 19..35 26..30 - FileId(2) 16..20 + FileId(EditionedFileId(Id(1802))) 16..20 "#]], ); } @@ -1122,10 +1111,10 @@ fn foo() { } "#, expect![[r#" - m1 Macro FileId(0) 0..46 29..31 + m1 Macro FileId(EditionedFileId(Id(1800))) 0..46 29..31 - FileId(0) 63..65 - FileId(0) 73..75 + FileId(EditionedFileId(Id(1800))) 63..65 + FileId(EditionedFileId(Id(1800))) 73..75 "#]], ); } @@ -1140,10 +1129,10 @@ fn foo() { } "#, expect![[r#" - i Local FileId(0) 19..24 23..24 write + i Local FileId(EditionedFileId(Id(1800))) 19..24 23..24 write - FileId(0) 34..35 write - FileId(0) 38..39 read + FileId(EditionedFileId(Id(1800))) 34..35 write + FileId(EditionedFileId(Id(1800))) 38..39 read "#]], ); } @@ -1162,10 +1151,10 @@ fn foo() { } "#, expect![[r#" - f Field FileId(0) 15..21 15..16 + f Field FileId(EditionedFileId(Id(1800))) 15..21 15..16 - FileId(0) 55..56 read - FileId(0) 68..69 write + FileId(EditionedFileId(Id(1800))) 55..56 read + FileId(EditionedFileId(Id(1800))) 68..69 write "#]], ); } 
@@ -1180,9 +1169,9 @@ fn foo() { } "#, expect![[r#" - i Local FileId(0) 19..20 19..20 + i Local FileId(EditionedFileId(Id(1800))) 19..20 19..20 - FileId(0) 26..27 write + FileId(EditionedFileId(Id(1800))) 26..27 write "#]], ); } @@ -1204,9 +1193,9 @@ fn main() { } "#, expect![[r#" - new Function FileId(0) 54..81 61..64 + new Function FileId(EditionedFileId(Id(1800))) 54..81 61..64 - FileId(0) 126..129 + FileId(EditionedFileId(Id(1800))) 126..129 "#]], ); } @@ -1226,10 +1215,10 @@ use crate::f; fn g() { f(); } "#, expect![[r#" - f Function FileId(0) 22..31 25..26 + f Function FileId(EditionedFileId(Id(1800))) 22..31 25..26 - FileId(1) 11..12 import - FileId(1) 24..25 + FileId(EditionedFileId(Id(1801))) 11..12 import + FileId(EditionedFileId(Id(1801))) 24..25 "#]], ); } @@ -1249,9 +1238,9 @@ fn f(s: S) { } "#, expect![[r#" - field Field FileId(0) 15..24 15..20 + field Field FileId(EditionedFileId(Id(1800))) 15..24 15..20 - FileId(0) 68..73 read + FileId(EditionedFileId(Id(1800))) 68..73 read "#]], ); } @@ -1273,9 +1262,9 @@ fn f(e: En) { } "#, expect![[r#" - field Field FileId(0) 32..41 32..37 + field Field FileId(EditionedFileId(Id(1800))) 32..41 32..37 - FileId(0) 102..107 read + FileId(EditionedFileId(Id(1800))) 102..107 read "#]], ); } @@ -1297,9 +1286,9 @@ fn f() -> m::En { } "#, expect![[r#" - field Field FileId(0) 56..65 56..61 + field Field FileId(EditionedFileId(Id(1800))) 56..65 56..61 - FileId(0) 125..130 read + FileId(EditionedFileId(Id(1800))) 125..130 read "#]], ); } @@ -1322,10 +1311,10 @@ impl Foo { } "#, expect![[r#" - self SelfParam FileId(0) 47..51 47..51 + self SelfParam FileId(EditionedFileId(Id(1800))) 47..51 47..51 - FileId(0) 71..75 read - FileId(0) 152..156 read + FileId(EditionedFileId(Id(1800))) 71..75 read + FileId(EditionedFileId(Id(1800))) 152..156 read "#]], ); } @@ -1343,9 +1332,9 @@ impl Foo { } "#, expect![[r#" - self SelfParam FileId(0) 47..51 47..51 + self SelfParam FileId(EditionedFileId(Id(1800))) 47..51 47..51 - FileId(0) 63..67 read + FileId(EditionedFileId(Id(1800))) 63..67 read "#]], ); } @@ -1367,11 +1356,11 @@ fn main() { } "#, expect![[r#" - FileId(0) 24..26 - FileId(0) 42..43 - FileId(0) 55..57 - FileId(0) 74..75 - FileId(0) 97..98 + FileId(EditionedFileId(Id(1800))) 24..26 + FileId(EditionedFileId(Id(1800))) 42..43 + FileId(EditionedFileId(Id(1800))) 55..57 + FileId(EditionedFileId(Id(1800))) 74..75 + FileId(EditionedFileId(Id(1800))) 97..98 "#]], ); } @@ -1390,11 +1379,11 @@ fn main() { } "#, expect![[r#" - FileId(0) 16..21 - FileId(0) 61..81 - FileId(0) 102..118 - FileId(0) 139..159 - FileId(0) 177..193 + FileId(EditionedFileId(Id(1800))) 16..21 + FileId(EditionedFileId(Id(1800))) 61..81 + FileId(EditionedFileId(Id(1800))) 102..118 + FileId(EditionedFileId(Id(1800))) 139..159 + FileId(EditionedFileId(Id(1800))) 177..193 "#]], ); } @@ -1413,8 +1402,8 @@ fn main() { } "#, expect![[r#" - FileId(0) 58..60 - FileId(0) 61..81 + FileId(EditionedFileId(Id(1800))) 58..60 + FileId(EditionedFileId(Id(1800))) 61..81 "#]], ); } @@ -1441,11 +1430,11 @@ fn main() { } "#, expect![[r#" - FileId(0) 24..26 - FileId(0) 65..66 - FileId(0) 140..141 - FileId(0) 167..168 - FileId(0) 215..216 + FileId(EditionedFileId(Id(1800))) 24..26 + FileId(EditionedFileId(Id(1800))) 65..66 + FileId(EditionedFileId(Id(1800))) 140..141 + FileId(EditionedFileId(Id(1800))) 167..168 + FileId(EditionedFileId(Id(1800))) 215..216 "#]], ); } @@ -1466,11 +1455,11 @@ fn main() { } "#, expect![[r#" - FileId(0) 24..29 - FileId(0) 80..81 - FileId(0) 124..125 - FileId(0) 155..156 - FileId(0) 
171..172 + FileId(EditionedFileId(Id(1800))) 24..29 + FileId(EditionedFileId(Id(1800))) 80..81 + FileId(EditionedFileId(Id(1800))) 124..125 + FileId(EditionedFileId(Id(1800))) 155..156 + FileId(EditionedFileId(Id(1800))) 171..172 "#]], ); } @@ -1493,12 +1482,12 @@ fn main() { } "#, expect![[r#" - FileId(0) 24..26 - FileId(0) 60..61 - FileId(0) 73..75 - FileId(0) 102..103 - FileId(0) 153..154 - FileId(0) 173..174 + FileId(EditionedFileId(Id(1800))) 24..26 + FileId(EditionedFileId(Id(1800))) 60..61 + FileId(EditionedFileId(Id(1800))) 73..75 + FileId(EditionedFileId(Id(1800))) 102..103 + FileId(EditionedFileId(Id(1800))) 153..154 + FileId(EditionedFileId(Id(1800))) 173..174 "#]], ); } @@ -1513,8 +1502,10 @@ fn main() { expect: Expect, ) { let (analysis, pos) = fixture::position(ra_fixture); - let refs = - analysis.find_all_refs(pos, search_scope.map(|it| it(&analysis.db))).unwrap().unwrap(); + let refs = analysis + .find_all_refs(pos.into(), search_scope.map(|it| it(&analysis.db))) + .unwrap() + .unwrap(); let mut actual = String::new(); for mut refs in refs { @@ -1558,13 +1549,13 @@ fn foo<'a, 'b: 'a>(x: &'a$0 ()) -> &'a () where &'a (): Foo<'a> { } "#, expect![[r#" - 'a LifetimeParam FileId(0) 55..57 + 'a LifetimeParam FileId(EditionedFileId(Id(1800))) 55..57 55..57 - FileId(0) 63..65 - FileId(0) 71..73 - FileId(0) 82..84 - FileId(0) 95..97 - FileId(0) 106..108 + FileId(EditionedFileId(Id(1800))) 63..65 + FileId(EditionedFileId(Id(1800))) 71..73 + FileId(EditionedFileId(Id(1800))) 82..84 + FileId(EditionedFileId(Id(1800))) 95..97 + FileId(EditionedFileId(Id(1800))) 106..108 "#]], ); } @@ -1576,10 +1567,10 @@ fn foo<'a, 'b: 'a>(x: &'a$0 ()) -> &'a () where &'a (): Foo<'a> { type Foo<'a, T> where T: 'a$0 = &'a T; "#, expect![[r#" - 'a LifetimeParam FileId(0) 9..11 + 'a LifetimeParam FileId(EditionedFileId(Id(1800))) 9..11 9..11 - FileId(0) 25..27 - FileId(0) 31..33 + FileId(EditionedFileId(Id(1800))) 25..27 + FileId(EditionedFileId(Id(1800))) 31..33 "#]], ); } @@ -1598,11 +1589,11 @@ impl<'a> Foo<'a> for &'a () { } "#, expect![[r#" - 'a LifetimeParam FileId(0) 47..49 + 'a LifetimeParam FileId(EditionedFileId(Id(1800))) 47..49 47..49 - FileId(0) 55..57 - FileId(0) 64..66 - FileId(0) 89..91 + FileId(EditionedFileId(Id(1800))) 55..57 + FileId(EditionedFileId(Id(1800))) 64..66 + FileId(EditionedFileId(Id(1800))) 89..91 "#]], ); } @@ -1618,9 +1609,9 @@ fn main() { } "#, expect![[r#" - a Local FileId(0) 59..60 59..60 + a Local FileId(EditionedFileId(Id(1800))) 59..60 59..60 - FileId(0) 80..81 read + MacroFile(MacroCallId(Id(4400))) 0..1 read "#]], ); } @@ -1636,9 +1627,9 @@ fn main() { } "#, expect![[r#" - a Local FileId(0) 59..60 59..60 + a Local FileId(EditionedFileId(Id(1800))) 59..60 59..60 - FileId(0) 80..81 read + MacroFile(MacroCallId(Id(4400))) 0..1 read "#]], ); } @@ -1657,10 +1648,10 @@ fn foo<'a>() -> &'a () { } "#, expect![[r#" - 'a Label FileId(0) 29..32 29..31 + 'a Label FileId(EditionedFileId(Id(1800))) 29..32 29..31 - FileId(0) 80..82 - FileId(0) 108..110 + FileId(EditionedFileId(Id(1800))) 80..82 + FileId(EditionedFileId(Id(1800))) 108..110 "#]], ); } @@ -1674,9 +1665,9 @@ fn foo() -> usize { } "#, expect![[r#" - FOO ConstParam FileId(0) 7..23 13..16 + FOO ConstParam FileId(EditionedFileId(Id(1800))) 7..23 13..16 - FileId(0) 42..45 + FileId(EditionedFileId(Id(1800))) 42..45 "#]], ); } @@ -1690,9 +1681,9 @@ trait Foo$0 where Self: {} impl Foo for () {} "#, expect![[r#" - Foo Trait FileId(0) 0..24 6..9 + Foo Trait FileId(EditionedFileId(Id(1800))) 0..24 6..9 - FileId(0) 31..34 + 
FileId(EditionedFileId(Id(1800))) 31..34 "#]], ); } @@ -1708,10 +1699,10 @@ trait Foo where Self$0 { impl Foo for () {} "#, expect![[r#" - Self TypeParam FileId(0) 0..44 6..9 + Self TypeParam FileId(EditionedFileId(Id(1800))) 0..44 6..9 - FileId(0) 16..20 - FileId(0) 37..41 + FileId(EditionedFileId(Id(1800))) 16..20 + FileId(EditionedFileId(Id(1800))) 37..41 "#]], ); } @@ -1727,11 +1718,11 @@ impl Foo for () {} } "#, expect![[r#" - Foo Struct FileId(0) 0..11 7..10 + Foo Struct FileId(EditionedFileId(Id(1800))) 0..11 7..10 - FileId(0) 18..21 - FileId(0) 28..32 - FileId(0) 50..54 + FileId(EditionedFileId(Id(1800))) 18..21 + FileId(EditionedFileId(Id(1800))) 28..32 + FileId(EditionedFileId(Id(1800))) 50..54 "#]], ); check( @@ -1743,11 +1734,11 @@ impl Foo where Self: { } "#, expect![[r#" - impl Impl FileId(0) 13..57 18..21 + impl Impl FileId(EditionedFileId(Id(1800))) 13..57 18..21 - FileId(0) 18..21 - FileId(0) 28..32 - FileId(0) 50..54 + FileId(EditionedFileId(Id(1800))) 18..21 + FileId(EditionedFileId(Id(1800))) 28..32 + FileId(EditionedFileId(Id(1800))) 50..54 "#]], ); } @@ -1767,9 +1758,9 @@ impl Foo { "#, expect![[r#" - Bar Variant FileId(0) 11..16 11..14 + Bar Variant FileId(EditionedFileId(Id(1800))) 11..16 11..14 - FileId(0) 89..92 + FileId(EditionedFileId(Id(1800))) 89..92 "#]], ); } @@ -1783,11 +1774,11 @@ trait Bar$0 = Foo where Self: ; fn foo(_: impl Bar, _: &dyn Bar) {} "#, expect![[r#" - Bar TraitAlias FileId(0) 13..42 19..22 + Bar TraitAlias FileId(EditionedFileId(Id(1800))) 13..42 19..22 - FileId(0) 53..56 - FileId(0) 66..69 - FileId(0) 79..82 + FileId(EditionedFileId(Id(1800))) 53..56 + FileId(EditionedFileId(Id(1800))) 66..69 + FileId(EditionedFileId(Id(1800))) 79..82 "#]], ); } @@ -1799,9 +1790,9 @@ fn foo(_: impl Bar, _: &dyn Bar) {} trait Foo = where Self$0: ; "#, expect![[r#" - Self TypeParam FileId(0) 0..25 6..9 + Self TypeParam FileId(EditionedFileId(Id(1800))) 0..25 6..9 - FileId(0) 18..22 + FileId(EditionedFileId(Id(1800))) 18..22 "#]], ); } @@ -1816,9 +1807,9 @@ fn test$0() { } "#, expect![[r#" - test Function FileId(0) 0..33 11..15 + test Function FileId(EditionedFileId(Id(1800))) 0..33 11..15 - FileId(0) 24..28 test + FileId(EditionedFileId(Id(1800))) 24..28 test "#]], ); } @@ -1838,12 +1829,12 @@ fn main() { } "#, expect![[r#" - A Const FileId(0) 0..18 6..7 + A Const FileId(EditionedFileId(Id(1800))) 0..18 6..7 - FileId(0) 42..43 - FileId(0) 54..55 - FileId(0) 97..98 - FileId(0) 101..102 + FileId(EditionedFileId(Id(1800))) 42..43 + FileId(EditionedFileId(Id(1800))) 54..55 + FileId(EditionedFileId(Id(1800))) 97..98 + FileId(EditionedFileId(Id(1800))) 101..102 "#]], ); } @@ -1855,8 +1846,8 @@ fn main() { fn foo(_: bool) -> bo$0ol { true } "#, expect![[r#" - FileId(0) 10..14 - FileId(0) 19..23 + FileId(EditionedFileId(Id(1800))) 10..14 + FileId(EditionedFileId(Id(1800))) 19..23 "#]], ); } @@ -1875,11 +1866,11 @@ pub use level2::Foo; pub use level1::Foo; "#, expect![[r#" - Foo Struct FileId(0) 0..15 11..14 + Foo Struct FileId(EditionedFileId(Id(1800))) 0..15 11..14 - FileId(1) 16..19 import - FileId(2) 16..19 import - FileId(3) 16..19 import + FileId(EditionedFileId(Id(1801))) 16..19 import + FileId(EditionedFileId(Id(1802))) 16..19 import + FileId(EditionedFileId(Id(1803))) 16..19 import "#]], ); } @@ -1905,11 +1896,11 @@ foo!(); lib::foo!(); "#, expect![[r#" - foo Macro FileId(1) 0..61 29..32 + foo Macro FileId(EditionedFileId(Id(1801))) 0..61 29..32 - FileId(0) 46..49 import - FileId(2) 0..3 - FileId(3) 5..8 + FileId(EditionedFileId(Id(1800))) 46..49 import + 
FileId(EditionedFileId(Id(1802))) 0..3 + FileId(EditionedFileId(Id(1803))) 5..8 "#]], ); } @@ -1927,9 +1918,9 @@ m$0!(); "#, expect![[r#" - m Macro FileId(0) 0..32 13..14 + m Macro FileId(EditionedFileId(Id(1800))) 0..32 13..14 - FileId(0) 64..65 + FileId(EditionedFileId(Id(1800))) 64..65 "#]], ); } @@ -1956,14 +1947,14 @@ fn f() { } "#, expect![[r#" - func Function FileId(0) 137..146 140..144 module + func Function MacroFile(MacroCallId(Id(3400))) 10..23 15..19 module - FileId(0) 181..185 + FileId(EditionedFileId(Id(1800))) 181..185 - func Function FileId(0) 137..146 140..144 + func Function MacroFile(MacroCallId(Id(3400))) 24..37 29..33 - FileId(0) 161..165 + FileId(EditionedFileId(Id(1800))) 161..165 "#]], ) } @@ -1979,9 +1970,9 @@ fn func$0() { } "#, expect![[r#" - func Function FileId(0) 25..50 28..32 + func Function MacroFile(MacroCallId(Id(3c00))) 0..17 2..6 - FileId(0) 41..45 + MacroFile(MacroCallId(Id(3c00))) 9..13 "#]], ) } @@ -2000,9 +1991,9 @@ trait Trait { } "#, expect![[r#" - func Function FileId(0) 48..87 51..55 Trait + func Function MacroFile(MacroCallId(Id(4000))) 0..23 2..6 Trait - FileId(0) 74..78 + MacroFile(MacroCallId(Id(4000))) 15..19 "#]], ) } @@ -2019,9 +2010,9 @@ use proc_macros::identity; fn func() {} "#, expect![[r#" - identity Attribute FileId(1) 1..107 32..40 + identity Attribute FileId(EditionedFileId(Id(1801))) 1..107 32..40 - FileId(0) 43..51 + FileId(EditionedFileId(Id(1800))) 43..51 "#]], ); check( @@ -2031,7 +2022,7 @@ fn func() {} fn func$0() {} "#, expect![[r#" - func Attribute FileId(0) 28..64 55..59 + func Attribute FileId(EditionedFileId(Id(1800))) 28..64 55..59 (no references) "#]], @@ -2049,9 +2040,9 @@ use proc_macros::mirror; mirror$0! {} "#, expect![[r#" - mirror ProcMacro FileId(1) 1..77 22..28 + mirror ProcMacro FileId(EditionedFileId(Id(1801))) 1..77 22..28 - FileId(0) 26..32 + FileId(EditionedFileId(Id(1800))) 26..32 "#]], ) } @@ -2068,10 +2059,10 @@ use proc_macros::DeriveIdentity; struct Foo; "#, expect![[r#" - derive_identity Derive FileId(2) 1..107 45..60 + derive_identity Derive FileId(EditionedFileId(Id(1802))) 1..107 45..60 - FileId(0) 17..31 import - FileId(0) 56..70 + FileId(EditionedFileId(Id(1800))) 17..31 import + MacroFile(MacroCallId(Id(4400))) 17..31 "#]], ); check( @@ -2081,7 +2072,7 @@ struct Foo; pub fn deri$0ve(_stream: TokenStream) -> TokenStream {} "#, expect![[r#" - derive Derive FileId(0) 28..125 79..85 + derive Derive FileId(EditionedFileId(Id(1800))) 28..125 79..85 (no references) "#]], @@ -2111,12 +2102,12 @@ fn f() { } "#, expect![[r#" - CONST Const FileId(0) 18..37 24..29 Trait + CONST Const FileId(EditionedFileId(Id(1800))) 18..37 24..29 Trait - FileId(0) 71..76 - FileId(0) 125..130 - FileId(0) 183..188 - FileId(0) 206..211 + FileId(EditionedFileId(Id(1800))) 71..76 + FileId(EditionedFileId(Id(1800))) 125..130 + FileId(EditionedFileId(Id(1800))) 183..188 + FileId(EditionedFileId(Id(1800))) 206..211 "#]], ); check( @@ -2140,12 +2131,12 @@ fn f() { } "#, expect![[r#" - TypeAlias TypeAlias FileId(0) 18..33 23..32 Trait + TypeAlias TypeAlias FileId(EditionedFileId(Id(1800))) 18..33 23..32 Trait - FileId(0) 66..75 - FileId(0) 117..126 - FileId(0) 181..190 - FileId(0) 207..216 + FileId(EditionedFileId(Id(1800))) 66..75 + FileId(EditionedFileId(Id(1800))) 117..126 + FileId(EditionedFileId(Id(1800))) 181..190 + FileId(EditionedFileId(Id(1800))) 207..216 "#]], ); check( @@ -2169,12 +2160,12 @@ fn f() { } "#, expect![[r#" - function Function FileId(0) 18..34 21..29 Trait + function Function 
FileId(EditionedFileId(Id(1800))) 18..34 21..29 Trait - FileId(0) 65..73 - FileId(0) 112..120 - FileId(0) 166..174 - FileId(0) 192..200 + FileId(EditionedFileId(Id(1800))) 65..73 + FileId(EditionedFileId(Id(1800))) 112..120 + FileId(EditionedFileId(Id(1800))) 166..174 + FileId(EditionedFileId(Id(1800))) 192..200 "#]], ); } @@ -2202,9 +2193,9 @@ fn f() { } "#, expect![[r#" - CONST Const FileId(0) 65..88 71..76 + CONST Const FileId(EditionedFileId(Id(1800))) 65..88 71..76 - FileId(0) 183..188 + FileId(EditionedFileId(Id(1800))) 183..188 "#]], ); check( @@ -2228,12 +2219,12 @@ fn f() { } "#, expect![[r#" - TypeAlias TypeAlias FileId(0) 61..81 66..75 + TypeAlias TypeAlias FileId(EditionedFileId(Id(1800))) 61..81 66..75 - FileId(0) 23..32 - FileId(0) 117..126 - FileId(0) 181..190 - FileId(0) 207..216 + FileId(EditionedFileId(Id(1800))) 23..32 + FileId(EditionedFileId(Id(1800))) 117..126 + FileId(EditionedFileId(Id(1800))) 181..190 + FileId(EditionedFileId(Id(1800))) 207..216 "#]], ); check( @@ -2257,9 +2248,9 @@ fn f() { } "#, expect![[r#" - function Function FileId(0) 62..78 65..73 + function Function FileId(EditionedFileId(Id(1800))) 62..78 65..73 - FileId(0) 166..174 + FileId(EditionedFileId(Id(1800))) 166..174 "#]], ); } @@ -2287,9 +2278,9 @@ fn f() { } "#, expect![[r#" - CONST Const FileId(0) 65..88 71..76 + CONST Const FileId(EditionedFileId(Id(1800))) 65..88 71..76 - FileId(0) 183..188 + FileId(EditionedFileId(Id(1800))) 183..188 "#]], ); check( @@ -2313,12 +2304,12 @@ fn f() { } "#, expect![[r#" - TypeAlias TypeAlias FileId(0) 18..33 23..32 Trait + TypeAlias TypeAlias FileId(EditionedFileId(Id(1800))) 18..33 23..32 Trait - FileId(0) 66..75 - FileId(0) 117..126 - FileId(0) 181..190 - FileId(0) 207..216 + FileId(EditionedFileId(Id(1800))) 66..75 + FileId(EditionedFileId(Id(1800))) 117..126 + FileId(EditionedFileId(Id(1800))) 181..190 + FileId(EditionedFileId(Id(1800))) 207..216 "#]], ); check( @@ -2342,9 +2333,9 @@ fn f() { } "#, expect![[r#" - function Function FileId(0) 62..78 65..73 + function Function FileId(EditionedFileId(Id(1800))) 62..78 65..73 - FileId(0) 166..174 + FileId(EditionedFileId(Id(1800))) 166..174 "#]], ); } @@ -2369,9 +2360,9 @@ impl Foo for Bar { fn method() {} "#, expect![[r#" - method Function FileId(0) 16..39 19..25 Foo + method Function FileId(EditionedFileId(Id(1800))) 16..39 19..25 Foo - FileId(0) 101..107 + FileId(EditionedFileId(Id(1800))) 101..107 "#]], ); check( @@ -2392,9 +2383,9 @@ impl Foo for Bar { fn method() {} "#, expect![[r#" - method Field FileId(0) 60..70 60..66 + method Field FileId(EditionedFileId(Id(1800))) 60..70 60..66 - FileId(0) 136..142 read + FileId(EditionedFileId(Id(1800))) 136..142 read "#]], ); check( @@ -2415,7 +2406,7 @@ impl Foo for Bar { fn method() {} "#, expect![[r#" - method Function FileId(0) 98..148 101..107 + method Function FileId(EditionedFileId(Id(1800))) 98..148 101..107 (no references) "#]], @@ -2438,9 +2429,9 @@ impl Foo for Bar { fn method() {} "#, expect![[r#" - method Field FileId(0) 60..70 60..66 + method Field FileId(EditionedFileId(Id(1800))) 60..70 60..66 - FileId(0) 136..142 read + FileId(EditionedFileId(Id(1800))) 136..142 read "#]], ); check( @@ -2461,7 +2452,7 @@ impl Foo for Bar { fn method$0() {} "#, expect![[r#" - method Function FileId(0) 151..165 154..160 + method Function FileId(EditionedFileId(Id(1800))) 151..165 154..160 (no references) "#]], @@ -2476,9 +2467,9 @@ fn r#fn$0() {} fn main() { r#fn(); } "#, expect![[r#" - r#fn Function FileId(0) 0..12 3..7 + r#fn Function 
FileId(EditionedFileId(Id(1800))) 0..12 3..7 - FileId(0) 25..29 + FileId(EditionedFileId(Id(1800))) 25..29 "#]], ); } @@ -2497,11 +2488,11 @@ fn test() { } "#, expect![[r#" - a Local FileId(0) 20..21 20..21 + a Local FileId(EditionedFileId(Id(1800))) 20..21 20..21 - FileId(0) 56..57 read - FileId(0) 60..61 read - FileId(0) 68..69 read + MacroFile(MacroCallId(Id(4007))) 28..29 read + MacroFile(MacroCallId(Id(4007))) 32..33 read + MacroFile(MacroCallId(Id(4007))) 39..40 read "#]], ); } @@ -2536,9 +2527,9 @@ fn main() { } "#, expect![[r#" - FileId(0) 136..138 - FileId(0) 207..213 - FileId(0) 264..270 + FileId(EditionedFileId(Id(1800))) 136..138 + FileId(EditionedFileId(Id(1800))) 207..213 + FileId(EditionedFileId(Id(1800))) 264..270 "#]], ) } @@ -2557,10 +2548,10 @@ fn$0 foo() -> u32 { } "#, expect![[r#" - FileId(0) 0..2 - FileId(0) 40..46 - FileId(0) 62..63 - FileId(0) 69..80 + FileId(EditionedFileId(Id(1800))) 0..2 + FileId(EditionedFileId(Id(1800))) 40..46 + FileId(EditionedFileId(Id(1800))) 62..63 + FileId(EditionedFileId(Id(1800))) 69..80 "#]], ); } @@ -2578,10 +2569,10 @@ pub async$0 fn foo() { } "#, expect![[r#" - FileId(0) 4..9 - FileId(0) 48..53 - FileId(0) 63..68 - FileId(0) 114..119 + FileId(EditionedFileId(Id(1800))) 4..9 + FileId(EditionedFileId(Id(1800))) 48..53 + FileId(EditionedFileId(Id(1800))) 63..68 + FileId(EditionedFileId(Id(1800))) 114..119 "#]], ); } @@ -2598,9 +2589,9 @@ fn main() { } "#, expect![[r#" - FileId(0) 16..19 - FileId(0) 40..45 - FileId(0) 55..63 + FileId(EditionedFileId(Id(1800))) 16..19 + FileId(EditionedFileId(Id(1800))) 40..45 + FileId(EditionedFileId(Id(1800))) 55..63 "#]], ) } @@ -2617,8 +2608,8 @@ fn main() { } "#, expect![[r#" - FileId(0) 16..19 - FileId(0) 40..45 + FileId(EditionedFileId(Id(1800))) 16..19 + FileId(EditionedFileId(Id(1800))) 40..45 "#]], ) } @@ -2634,8 +2625,8 @@ fn main() { } "#, expect![[r#" - FileId(0) 16..19 - FileId(0) 29..37 + FileId(EditionedFileId(Id(1800))) 16..19 + FileId(EditionedFileId(Id(1800))) 29..37 "#]], ) } @@ -2658,10 +2649,10 @@ fn foo() { } "#, expect![[r#" - FileId(0) 15..27 - FileId(0) 39..44 - FileId(0) 127..139 - FileId(0) 178..183 + FileId(EditionedFileId(Id(1800))) 15..27 + FileId(EditionedFileId(Id(1800))) 39..44 + FileId(EditionedFileId(Id(1800))) 127..139 + FileId(EditionedFileId(Id(1800))) 178..183 "#]], ); } @@ -2682,9 +2673,9 @@ fn main() { } "#, expect![[r#" - FileId(0) 16..18 - FileId(0) 51..57 - FileId(0) 78..84 + FileId(EditionedFileId(Id(1800))) 16..18 + FileId(EditionedFileId(Id(1800))) 51..57 + FileId(EditionedFileId(Id(1800))) 78..84 "#]], ) } @@ -2702,8 +2693,8 @@ fn main() { } "#, expect![[r#" - FileId(0) 16..19 - FileId(0) 84..89 + FileId(EditionedFileId(Id(1800))) 16..19 + FileId(EditionedFileId(Id(1800))) 84..89 "#]], ) } @@ -2719,8 +2710,8 @@ fn main() { } "#, expect![[r#" - FileId(0) 16..21 - FileId(0) 32..38 + FileId(EditionedFileId(Id(1800))) 16..21 + FileId(EditionedFileId(Id(1800))) 32..38 "#]], ) } @@ -2756,12 +2747,12 @@ fn main() { } "#, expect![[r#" - FileId(0) 46..48 - FileId(0) 106..108 - FileId(0) 122..149 - FileId(0) 135..141 - FileId(0) 165..181 - FileId(1) 6..12 + FileId(EditionedFileId(Id(1800))) 46..48 + FileId(EditionedFileId(Id(1800))) 106..108 + FileId(EditionedFileId(Id(1800))) 122..149 + FileId(EditionedFileId(Id(1800))) 135..141 + FileId(EditionedFileId(Id(1800))) 165..181 + FileId(EditionedFileId(Id(1801))) 6..12 "#]], ) } @@ -2787,10 +2778,10 @@ fn baz() { } "#, expect![[r#" - new Function FileId(0) 27..38 30..33 + new Function FileId(EditionedFileId(Id(1800))) 
27..38 30..33 - FileId(0) 62..65 - FileId(0) 91..94 + FileId(EditionedFileId(Id(1800))) 62..65 + FileId(EditionedFileId(Id(1800))) 91..94 "#]], ); } @@ -2837,11 +2828,11 @@ type Itself = T; pub(in super::super) type Baz = Itself; "#, expect![[r#" - new Function FileId(0) 42..53 45..48 + new Function FileId(EditionedFileId(Id(1800))) 42..53 45..48 - FileId(0) 83..86 - FileId(1) 40..43 - FileId(1) 106..109 + FileId(EditionedFileId(Id(1800))) 83..86 + FileId(EditionedFileId(Id(1801))) 40..43 + FileId(EditionedFileId(Id(1801))) 106..109 "#]], ); } @@ -2876,12 +2867,12 @@ impl super::Foo { fn foo() { ::Assoc::new(); } "#, expect![[r#" - new Function FileId(0) 40..51 43..46 + new Function FileId(EditionedFileId(Id(1800))) 40..51 43..46 - FileId(0) 73..76 - FileId(0) 195..198 - FileId(1) 40..43 - FileId(1) 99..102 + FileId(EditionedFileId(Id(1800))) 73..76 + FileId(EditionedFileId(Id(1800))) 195..198 + FileId(EditionedFileId(Id(1801))) 40..43 + FileId(EditionedFileId(Id(1801))) 99..102 "#]], ); } @@ -2902,10 +2893,10 @@ impl Foo { } "#, expect![[r#" - new Function FileId(0) 27..38 30..33 + new Function FileId(EditionedFileId(Id(1800))) 27..38 30..33 - FileId(0) 68..71 - FileId(0) 123..126 + FileId(EditionedFileId(Id(1800))) 68..71 + FileId(EditionedFileId(Id(1800))) 123..126 "#]], ); } @@ -2933,10 +2924,10 @@ impl Foo { } "#, expect![[r#" - new Function FileId(0) 27..38 30..33 + new Function FileId(EditionedFileId(Id(1800))) 27..38 30..33 - FileId(0) 188..191 - FileId(0) 233..236 + FileId(EditionedFileId(Id(1800))) 188..191 + FileId(EditionedFileId(Id(1800))) 233..236 "#]], ); } @@ -2973,7 +2964,7 @@ fn bar() { } "#, expect![[r#" - new Function FileId(0) 27..38 30..33 + new Function FileId(EditionedFileId(Id(1800))) 27..38 30..33 (no references) "#]], @@ -2999,9 +2990,9 @@ impl Foo { } "#, expect![[r#" - new Function FileId(0) 27..38 30..33 + new Function FileId(EditionedFileId(Id(1800))) 27..38 30..33 - FileId(0) 131..134 + FileId(EditionedFileId(Id(1800))) 131..134 "#]], ); } @@ -3020,9 +3011,9 @@ fn howdy() { const FOO$0: i32 = 0; "#, expect![[r#" - FOO Const FileId(1) 0..19 6..9 + FOO Const MacroFile(MacroCallId(Id(3c01))) 0..15 5..8 - FileId(0) 45..48 + FileId(EditionedFileId(Id(1800))) 45..48 "#]], ); } @@ -3049,12 +3040,12 @@ fn main() { } "#, expect![[r#" - FileId(0) 92..94 - FileId(0) 128..129 - FileId(0) 141..143 - FileId(0) 177..178 - FileId(0) 237..238 - FileId(0) 257..258 + FileId(EditionedFileId(Id(1800))) 92..94 + FileId(EditionedFileId(Id(1800))) 128..129 + FileId(EditionedFileId(Id(1800))) 141..143 + FileId(EditionedFileId(Id(1800))) 177..178 + FileId(EditionedFileId(Id(1800))) 237..238 + FileId(EditionedFileId(Id(1800))) 257..258 "#]], ); } @@ -3080,11 +3071,11 @@ fn main() { } "#, expect![[r#" - FileId(0) 108..113 - FileId(0) 185..205 - FileId(0) 243..279 - FileId(0) 308..341 - FileId(0) 374..394 + FileId(EditionedFileId(Id(1800))) 108..113 + FileId(EditionedFileId(Id(1800))) 185..205 + FileId(EditionedFileId(Id(1800))) 243..279 + FileId(EditionedFileId(Id(1800))) 308..341 + FileId(EditionedFileId(Id(1800))) 374..394 "#]], ); } diff --git a/crates/ide/src/rename.rs b/crates/ide/src/rename.rs index fb84e8e6b474..59d2da3bd746 100644 --- a/crates/ide/src/rename.rs +++ b/crates/ide/src/rename.rs @@ -13,7 +13,7 @@ use ide_db::{ }; use itertools::Itertools; use stdx::{always, never}; -use syntax::{AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize, ast}; +use syntax::{AstNode, AstToken, SyntaxKind, SyntaxNode, TextRange, TextSize, ast}; use ide_db::text_edit::TextEdit; @@ -30,7 
+30,10 @@ pub(crate) fn prepare_rename( position: FilePosition, ) -> RenameResult> { let sema = Semantics::new(db); - let source_file = sema.parse_guess_edition(position.file_id); + let file_id = sema + .attach_first_edition(position.file_id) + .ok_or_else(|| format_err!("No references found at position"))?; + let source_file = sema.parse(file_id); let syntax = source_file.syntax(); let res = find_definitions(&sema, syntax, position, &Name::new_symbol_root(sym::underscore))? @@ -110,7 +113,8 @@ pub(crate) fn rename( bail!("Cannot rename alias reference to `self`") } }; - let mut usages = def.usages(&sema).all(); + + let mut usages = def.usages(&sema).all().map_out_of_macros(&sema); // FIXME: hack - removes the usage that triggered this rename operation. match usages.references.get_mut(&file_id).and_then(|refs| { @@ -163,6 +167,7 @@ pub(crate) fn will_rename_file( new_name_stem: &str, ) -> Option { let sema = Semantics::new(db); + let file_id = sema.attach_first_edition(file_id)?; let module = sema.file_to_module_def(file_id)?; let def = Definition::Module(module); let mut change = def.rename(&sema, new_name_stem, RenameDefinition::Yes).ok()?; @@ -209,12 +214,14 @@ fn find_definitions( new_name: &Name, ) -> RenameResult> { - let maybe_format_args = - syntax.token_at_offset(offset).find(|t| matches!(t.kind(), SyntaxKind::STRING)); + let maybe_format_args = syntax.token_at_offset(offset).find_map(ast::String::cast); - if let Some((range, _, _, Some(resolution))) = - maybe_format_args.and_then(|token| sema.check_for_format_args_template(token, offset)) - { + if let Some((range, _, _, Some(resolution))) = maybe_format_args.and_then(|token| { + sema.check_for_format_args_template_with_file( + InFile::new(sema.attach_first_edition(file_id)?.into(), token), + offset, + ) + }) { return Ok(vec![( FileRange { file_id, range }, SyntaxKind::STRING, @@ -394,7 +401,7 @@ fn rename_to_self( .ok_or_else(|| format_err!("No source for parameter found"))?; let def = Definition::Local(local); - let usages = def.usages(sema).all(); + let usages = def.usages(sema).all().map_out_of_macros(sema); let mut source_change = SourceChange::default(); source_change.extend(usages.iter().map(|(file_id, references)| { ( @@ -432,7 +439,8 @@ fn rename_self_to_param( sema.source(self_param).ok_or_else(|| format_err!("cannot find function source"))?; let def = Definition::Local(local); - let usages = def.usages(sema).all(); + let usages = def.usages(sema).all().map_out_of_macros(sema); + let edit = text_edit_from_self_param( &self_param, new_name.display(sema.db, file_id.edition(sema.db)).to_string(), @@ -512,12 +520,13 @@ mod tests { let ra_fixture_after = &trim_indent(ra_fixture_after); let (analysis, position) = fixture::position(ra_fixture_before); if !ra_fixture_after.starts_with("error: ") { - if let Err(err) = analysis.prepare_rename(position).unwrap() { + if let Err(err) = analysis.prepare_rename(position.into_file_id(&analysis.db)).unwrap() + { panic!("Prepare rename to '{new_name}' was failed: {err}") } } let rename_result = analysis - .rename(position, new_name) + .rename(position.into_file_id(&analysis.db), new_name) .unwrap_or_else(|err| panic!("Rename to '{new_name}' was cancelled: {err}")); match rename_result { Ok(source_change) => { @@ -549,10 +558,11 @@ mod tests { #[track_caller] fn check_conflicts(new_name: &str, #[rust_analyzer::rust_fixture] ra_fixture: &str) { let (analysis, position, conflicts) = fixture::annotations(ra_fixture); - let source_change = analysis.rename(position, new_name).unwrap().unwrap(); 
+ let source_change = + analysis.rename(position.into_file_id(&analysis.db), new_name).unwrap().unwrap(); let expected_conflicts = conflicts .into_iter() - .map(|(file_range, _)| (file_range.file_id, file_range.range)) + .map(|(file_range, _)| (file_range.file_id.file_id(&analysis.db), file_range.range)) .sorted_unstable_by_key(|(file_id, range)| (*file_id, range.start())) .collect_vec(); let found_conflicts = source_change @@ -576,8 +586,10 @@ mod tests { expect: Expect, ) { let (analysis, position) = fixture::position(ra_fixture); - let source_change = - analysis.rename(position, new_name).unwrap().expect("Expect returned a RenameError"); + let source_change = analysis + .rename(position.into_file_id(&analysis.db), new_name) + .unwrap() + .expect("Expect returned a RenameError"); expect.assert_eq(&filter_expect(source_change)) } @@ -588,7 +600,7 @@ mod tests { ) { let (analysis, position) = fixture::position(ra_fixture); let source_change = analysis - .will_rename_file(position.file_id, new_name) + .will_rename_file(position.file_id.file_id(&analysis.db), new_name) .unwrap() .expect("Expect returned a RenameError"); expect.assert_eq(&filter_expect(source_change)) @@ -597,11 +609,11 @@ mod tests { fn check_prepare(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { let (analysis, position) = fixture::position(ra_fixture); let result = analysis - .prepare_rename(position) + .prepare_rename(position.into_file_id(&analysis.db)) .unwrap_or_else(|err| panic!("PrepareRename was cancelled: {err}")); match result { Ok(RangeInfo { range, info: () }) => { - let source = analysis.file_text(position.file_id).unwrap(); + let source = analysis.file_text(position.file_id.file_id(&analysis.db)).unwrap(); expect.assert_eq(&format!("{range:?}: {}", &source[range])) } Err(RenameError(err)) => expect.assert_eq(&err), diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs index 9d1a5bae96fb..c15c89460597 100644 --- a/crates/ide/src/runnables.rs +++ b/crates/ide/src/runnables.rs @@ -4,8 +4,8 @@ use arrayvec::ArrayVec; use ast::HasName; use cfg::{CfgAtom, CfgExpr}; use hir::{ - AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate, HasSource, ModPath, Name, PathKind, Semantics, - Symbol, db::HirDatabase, sym, + AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate, HasSource, HirFileId, ModPath, Name, PathKind, + Semantics, Symbol, db::HirDatabase, sym, }; use ide_assists::utils::{has_test_related_attribute, test_related_attribute_syn}; use ide_db::{ @@ -26,12 +26,14 @@ use syntax::{ format_smolstr, }; -use crate::{FileId, NavigationTarget, ToNav, TryToNav, references}; +use crate::{ + NavigationTarget, ToNav, TryToNav, navigation_target::HirNavigationTarget, references, +}; #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct Runnable { pub use_name_in_title: bool, - pub nav: NavigationTarget, + pub nav: HirNavigationTarget, pub kind: RunnableKind, pub cfg: Option, pub update_test: UpdateTest, @@ -126,7 +128,7 @@ impl Runnable { // | VS Code | **rust-analyzer: Run** | // // ![Run](https://user-images.githubusercontent.com/48062697/113065583-055aae80-91b1-11eb-958f-d67efcaf6a2f.gif) -pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec { +pub(crate) fn runnables(db: &RootDatabase, file_id: HirFileId) -> Vec { let sema = Semantics::new(db); let mut res = Vec::new(); @@ -134,7 +136,10 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec { // In case an expansion creates multiple runnables we want to name them to avoid emitting a bunch of equally named 
runnables. let mut in_macro_expansion = FxIndexMap::>::default(); let mut add_opt = |runnable: Option, def| { - if let Some(runnable) = runnable.filter(|runnable| runnable.nav.file_id == file_id) { + if let Some(runnable) = runnable.and_then(|mut runnable| { + runnable.nav = runnable.nav.upmap_to(db, file_id)?.remove(0); + Some(runnable) + }) { if let Some(def) = def { let file_id = match def { Definition::Module(it) => { @@ -173,7 +178,7 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec { } }); - sema.file_to_module_defs(file_id) + sema.hir_file_to_module_defs(file_id) .map(|it| runnable_mod_outline_definition(&sema, it)) .for_each(|it| add_opt(it, None)); @@ -345,11 +350,9 @@ pub(crate) fn runnable_fn( let fn_source = sema.source(def)?; let nav = NavigationTarget::from_named( - sema.db, fn_source.as_ref().map(|it| it as &dyn ast::HasName), SymbolKind::Function, - ) - .call_site(); + ); let file_range = fn_source.syntax().original_file_range_with_macro_call_input(sema.db); let update_test = @@ -380,7 +383,7 @@ pub(crate) fn runnable_mod( let attrs = def.attrs(sema.db); let cfg = attrs.cfg(); - let nav = NavigationTarget::from_module_to_decl(sema.db, def).call_site(); + let nav = HirNavigationTarget::from_module_to_decl(sema.db, def); let module_source = sema.module_definition_node(def); let module_syntax = module_source.file_syntax(sema.db); @@ -410,7 +413,7 @@ pub(crate) fn runnable_impl( return None; } let cfg = attrs.cfg(); - let nav = def.try_to_nav(sema.db)?.call_site(); + let nav = def.try_to_nav_hir(sema.db)?; let ty = def.self_ty(sema.db); let adt_name = ty.as_adt()?.name(sema.db); let mut ty_args = ty.generic_parameters(sema.db, display_target).peekable(); @@ -477,7 +480,7 @@ fn runnable_mod_outline_definition( Some(Runnable { use_name_in_title: false, - nav: def.to_nav(sema.db).call_site(), + nav: def.to_nav_hir(sema.db), kind: RunnableKind::TestMod { path }, cfg, update_test, @@ -537,10 +540,9 @@ fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option { .map_or_else(|| TestId::Name(def_name.display_no_db(edition).to_smolstr()), TestId::Path); let mut nav = match def { - Definition::Module(def) => NavigationTarget::from_module_to_decl(db, def), - def => def.try_to_nav(db)?, - } - .call_site(); + Definition::Module(def) => HirNavigationTarget::from_module_to_decl(db, def), + def => def.try_to_nav_hir(db)?, + }; nav.focus_range = None; nav.description = None; nav.docs = None; @@ -762,7 +764,7 @@ mod tests { fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { let (analysis, position) = fixture::position(ra_fixture); let result = analysis - .runnables(position.file_id) + .runnables(position.file_id.into()) .unwrap() .into_iter() .map(|runnable| { @@ -782,7 +784,7 @@ mod tests { fn check_tests(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { let (analysis, position) = fixture::position(ra_fixture); - let tests = analysis.related_tests(position, None).unwrap(); + let tests = analysis.related_tests(position.into_file_id(&analysis.db), None).unwrap(); let navigation_targets = tests.into_iter().map(|runnable| runnable.nav).collect::>(); expect.assert_debug_eq(&navigation_targets); } @@ -820,14 +822,14 @@ mod not_a_root { "#, expect![[r#" [ - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..331, name: \"\", kind: Module })", - "(Bin, NavigationTarget { file_id: FileId(0), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", - "(Bin, NavigationTarget { file_id: FileId(0), 
full_range: 15..76, focus_range: 42..71, name: \"__cortex_m_rt_main_trampoline\", kind: Function })", - "(Bin, NavigationTarget { file_id: FileId(0), full_range: 78..154, focus_range: 113..149, name: \"__cortex_m_rt_main_trampoline_unsafe\", kind: Function })", - "(Test, NavigationTarget { file_id: FileId(0), full_range: 156..180, focus_range: 167..175, name: \"test_foo\", kind: Function })", - "(Test, NavigationTarget { file_id: FileId(0), full_range: 182..233, focus_range: 214..228, name: \"test_full_path\", kind: Function })", - "(Test, NavigationTarget { file_id: FileId(0), full_range: 235..269, focus_range: 256..264, name: \"test_foo\", kind: Function })", - "(Bench, NavigationTarget { file_id: FileId(0), full_range: 271..293, focus_range: 283..288, name: \"bench\", kind: Function })", + "(TestMod, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 0..331, name: \"\", kind: Module })", + "(Bin, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", + "(Bin, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 15..76, focus_range: 42..71, name: \"__cortex_m_rt_main_trampoline\", kind: Function })", + "(Bin, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 78..154, focus_range: 113..149, name: \"__cortex_m_rt_main_trampoline_unsafe\", kind: Function })", + "(Test, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 156..180, focus_range: 167..175, name: \"test_foo\", kind: Function })", + "(Test, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 182..233, focus_range: 214..228, name: \"test_full_path\", kind: Function })", + "(Test, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 235..269, focus_range: 256..264, name: \"test_foo\", kind: Function })", + "(Bench, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 271..293, focus_range: 283..288, name: \"bench\", kind: Function })", ] "#]], ); @@ -931,15 +933,15 @@ impl Test for StructWithRunnable {} "#, expect![[r#" [ - "(Bin, NavigationTarget { file_id: FileId(0), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 15..74, name: \"should_have_runnable\" })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 76..148, name: \"should_have_runnable_1\" })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 150..254, name: \"should_have_runnable_2\" })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 256..320, name: \"should_have_no_runnable_3\" })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 322..398, name: \"should_have_no_runnable_4\" })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 900..965, name: \"StructWithRunnable\" })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 967..1024, focus_range: 1003..1021, name: \"impl\", kind: Impl })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 1088..1154, focus_range: 1133..1151, name: \"impl\", kind: Impl })", + "(Bin, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 15..74, name: \"should_have_runnable\" })", + "(DocTest, NavigationTarget { file_id: 
FileId(EditionedFileId(Id(1800))), full_range: 76..148, name: \"should_have_runnable_1\" })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 150..254, name: \"should_have_runnable_2\" })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 256..320, name: \"should_have_no_runnable_3\" })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 322..398, name: \"should_have_no_runnable_4\" })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 900..965, name: \"StructWithRunnable\" })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 967..1024, focus_range: 1003..1021, name: \"impl\", kind: Impl })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 1088..1154, focus_range: 1133..1151, name: \"impl\", kind: Impl })", ] "#]], ); @@ -963,8 +965,8 @@ impl Data { "#, expect![[r#" [ - "(Bin, NavigationTarget { file_id: FileId(0), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 44..98, name: \"foo\" })", + "(Bin, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 44..98, name: \"foo\" })", ] "#]], ); @@ -988,8 +990,8 @@ impl Data<'a> { "#, expect![[r#" [ - "(Bin, NavigationTarget { file_id: FileId(0), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 52..106, name: \"foo\" })", + "(Bin, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 52..106, name: \"foo\" })", ] "#]], ); @@ -1013,8 +1015,8 @@ impl Data<'a, T, U> { "#, expect![[r#" [ - "(Bin, NavigationTarget { file_id: FileId(0), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 70..124, name: \"foo\" })", + "(Bin, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 70..124, name: \"foo\" })", ] "#]], ); @@ -1038,8 +1040,8 @@ impl Data { "#, expect![[r#" [ - "(Bin, NavigationTarget { file_id: FileId(0), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 79..133, name: \"foo\" })", + "(Bin, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 79..133, name: \"foo\" })", ] "#]], ); @@ -1063,8 +1065,8 @@ impl<'a, T, const N: usize> Data<'a, T, N> { "#, expect![[r#" [ - "(Bin, NavigationTarget { file_id: FileId(0), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 100..154, name: \"foo\" })", + "(Bin, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 1..13, focus_range: 4..8, name: 
\"main\", kind: Function })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 100..154, name: \"foo\" })", ] "#]], ); @@ -1082,8 +1084,8 @@ mod test_mod { "#, expect![[r#" [ - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 1..51, focus_range: 5..13, name: \"test_mod\", kind: Module, description: \"mod test_mod\" })", - "(Test, NavigationTarget { file_id: FileId(0), full_range: 20..49, focus_range: 35..44, name: \"test_foo1\", kind: Function })", + "(TestMod, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 1..51, focus_range: 5..13, name: \"test_mod\", kind: Module, description: \"mod test_mod\" })", + "(Test, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 20..49, focus_range: 35..44, name: \"test_foo1\", kind: Function })", ] "#]], ); @@ -1118,12 +1120,12 @@ mod root_tests { "#, expect![[r#" [ - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 22..323, focus_range: 26..40, name: \"nested_tests_0\", kind: Module, description: \"mod nested_tests_0\" })", - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 51..192, focus_range: 55..69, name: \"nested_tests_1\", kind: Module, description: \"mod nested_tests_1\" })", - "(Test, NavigationTarget { file_id: FileId(0), full_range: 84..126, focus_range: 107..121, name: \"nested_test_11\", kind: Function })", - "(Test, NavigationTarget { file_id: FileId(0), full_range: 140..182, focus_range: 163..177, name: \"nested_test_12\", kind: Function })", - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 202..286, focus_range: 206..220, name: \"nested_tests_2\", kind: Module, description: \"mod nested_tests_2\" })", - "(Test, NavigationTarget { file_id: FileId(0), full_range: 235..276, focus_range: 258..271, name: \"nested_test_2\", kind: Function })", + "(TestMod, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 22..323, focus_range: 26..40, name: \"nested_tests_0\", kind: Module, description: \"mod nested_tests_0\" })", + "(TestMod, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 51..192, focus_range: 55..69, name: \"nested_tests_1\", kind: Module, description: \"mod nested_tests_1\" })", + "(Test, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 84..126, focus_range: 107..121, name: \"nested_test_11\", kind: Function })", + "(Test, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 140..182, focus_range: 163..177, name: \"nested_test_12\", kind: Function })", + "(TestMod, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 202..286, focus_range: 206..220, name: \"nested_tests_2\", kind: Module, description: \"mod nested_tests_2\" })", + "(Test, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 235..276, focus_range: 258..271, name: \"nested_test_2\", kind: Function })", ] "#]], ); @@ -1141,8 +1143,8 @@ fn test_foo1() {} "#, expect![[r#" [ - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..51, name: \"\", kind: Module })", - "(Test, NavigationTarget { file_id: FileId(0), full_range: 1..50, focus_range: 36..45, name: \"test_foo1\", kind: Function }, Atom(KeyValue { key: \"feature\", value: \"foo\" }))", + "(TestMod, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 0..51, name: \"\", kind: Module })", + "(Test, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 1..50, focus_range: 36..45, name: 
\"test_foo1\", kind: Function }, Atom(KeyValue { key: \"feature\", value: \"foo\" }))", ] "#]], ); @@ -1160,8 +1162,8 @@ fn test_foo1() {} "#, expect![[r#" [ - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..73, name: \"\", kind: Module })", - "(Test, NavigationTarget { file_id: FileId(0), full_range: 1..72, focus_range: 58..67, name: \"test_foo1\", kind: Function }, All([Atom(KeyValue { key: \"feature\", value: \"foo\" }), Atom(KeyValue { key: \"feature\", value: \"bar\" })]))", + "(TestMod, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 0..73, name: \"\", kind: Module })", + "(Test, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 1..72, focus_range: 58..67, name: \"test_foo1\", kind: Function }, All([Atom(KeyValue { key: \"feature\", value: \"foo\" }), Atom(KeyValue { key: \"feature\", value: \"bar\" })]))", ] "#]], ); @@ -1200,7 +1202,7 @@ impl Foo { "#, expect![[r#" [ - "(DocTest, NavigationTarget { file_id: FileId(1), full_range: 27..81, name: \"foo\" })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1801))), full_range: 27..81, name: \"foo\" })", ] "#]], ); @@ -1239,12 +1241,12 @@ generate_main!(); "#, expect![[r#" [ - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..345, name: \"\", kind: Module })", - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 282..312, focus_range: 286..291, name: \"tests\", kind: Module, description: \"mod tests\" })", - "(Test, NavigationTarget { file_id: FileId(0), full_range: 298..307, name: \"foo_test\", kind: Function })", - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 313..323, name: \"tests2\", kind: Module, description: \"mod tests2\" }, true)", - "(Test, NavigationTarget { file_id: FileId(0), full_range: 313..323, name: \"foo_test2\", kind: Function }, true)", - "(Bin, NavigationTarget { file_id: FileId(0), full_range: 327..341, name: \"main\", kind: Function })", + "(TestMod, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 0..345, name: \"\", kind: Module })", + "(TestMod, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 282..312, focus_range: 286..291, name: \"tests\", kind: Module, description: \"mod tests\" })", + "(Test, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 298..307, name: \"foo_test\", kind: Function })", + "(TestMod, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 313..323, name: \"tests2\", kind: Module, description: \"mod tests2\" }, true)", + "(Test, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 313..323, name: \"foo_test2\", kind: Function }, true)", + "(Bin, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 327..341, name: \"main\", kind: Function })", ] "#]], ); @@ -1272,10 +1274,10 @@ foo!(); "#, expect![[r#" [ - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 210..214, name: \"foo_tests\", kind: Module, description: \"mod foo_tests\" }, true)", - "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..214, name: \"foo0\", kind: Function }, true)", - "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..214, name: \"foo1\", kind: Function }, true)", - "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..214, name: \"foo2\", kind: Function }, true)", + "(TestMod, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 210..214, name: \"foo_tests\", kind: Module, 
description: \"mod foo_tests\" }, true)", + "(Test, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 210..214, name: \"foo0\", kind: Function }, true)", + "(Test, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 210..214, name: \"foo1\", kind: Function }, true)", + "(Test, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 210..214, name: \"foo2\", kind: Function }, true)", ] "#]], ); @@ -1315,7 +1317,7 @@ fn t1() {} "#, expect![[r#" [ - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 1..7, focus_range: 5..6, name: \"m\", kind: Module, description: \"mod m\" })", + "(TestMod, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 1..7, focus_range: 5..6, name: \"m\", kind: Module, description: \"mod m\" })", ] "#]], ); @@ -1336,9 +1338,9 @@ fn t1() {} "#, expect![[r#" [ - "(TestMod, NavigationTarget { file_id: FileId(1), full_range: 0..39, name: \"m\", kind: Module })", - "(Test, NavigationTarget { file_id: FileId(1), full_range: 1..19, focus_range: 12..14, name: \"t0\", kind: Function })", - "(Test, NavigationTarget { file_id: FileId(1), full_range: 20..38, focus_range: 31..33, name: \"t1\", kind: Function })", + "(TestMod, NavigationTarget { file_id: FileId(EditionedFileId(Id(1801))), full_range: 0..39, name: \"m\", kind: Module })", + "(Test, NavigationTarget { file_id: FileId(EditionedFileId(Id(1801))), full_range: 1..19, focus_range: 12..14, name: \"t0\", kind: Function })", + "(Test, NavigationTarget { file_id: FileId(EditionedFileId(Id(1801))), full_range: 20..38, focus_range: 31..33, name: \"t1\", kind: Function })", ] "#]], ); @@ -1361,9 +1363,9 @@ mod module { "#, expect![[r#" [ - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 26..94, focus_range: 30..36, name: \"module\", kind: Module, description: \"mod module\" }, true)", - "(Test, NavigationTarget { file_id: FileId(0), full_range: 43..65, focus_range: 58..60, name: \"t0\", kind: Function }, true)", - "(Test, NavigationTarget { file_id: FileId(0), full_range: 70..92, focus_range: 85..87, name: \"t1\", kind: Function }, true)", + "(TestMod, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 26..94, focus_range: 30..36, name: \"module\", kind: Module, description: \"mod module\" }, true)", + "(Test, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 43..65, focus_range: 58..60, name: \"t0\", kind: Function }, true)", + "(Test, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 70..92, focus_range: 85..87, name: \"t1\", kind: Function }, true)", ] "#]], ); @@ -1400,7 +1402,9 @@ mod tests { [ NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 31..85, focus_range: 46..54, @@ -1433,7 +1437,9 @@ mod tests { [ NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 71..122, focus_range: 86..94, @@ -1473,7 +1479,9 @@ mod tests { [ NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 133..183, focus_range: 148..156, @@ -1513,7 +1521,9 @@ mod tests { [ NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 52..115, focus_range: 67..75, @@ -1522,7 +1532,9 @@ mod tests { }, NavigationTarget { file_id: FileId( - 0, + EditionedFileId( + Id(1800), + ), ), full_range: 121..185, focus_range: 136..145, @@ -1551,8 +1563,8 @@ impl Data<'a, A, 12, C, D> { "#, expect![[r#" [ - "(Bin, NavigationTarget { 
file_id: FileId(0), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 121..156, name: \"foo\" })", + "(Bin, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 121..156, name: \"foo\" })", ] "#]], ); @@ -1584,10 +1596,10 @@ impl Foo, ()> { "#, expect![[r#" [ - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 20..103, focus_range: 47..56, name: \"impl\", kind: Impl })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 63..101, name: \"t\" })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 105..188, focus_range: 126..146, name: \"impl\", kind: Impl })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 153..186, name: \"t\" })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 20..103, focus_range: 47..56, name: \"impl\", kind: Impl })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 63..101, name: \"t\" })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 105..188, focus_range: 126..146, name: \"impl\", kind: Impl })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 153..186, name: \"t\" })", ] "#]], ); @@ -1634,7 +1646,7 @@ macro_rules! foo { "#, expect![[r#" [ - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 1..94, name: \"foo\" })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 1..94, name: \"foo\" })", ] "#]], ); @@ -1682,14 +1694,14 @@ mod r#mod { "#, expect![[r#" [ - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 1..461, focus_range: 5..10, name: \"r#mod\", kind: Module, description: \"mod r#mod\" })", - "(Test, NavigationTarget { file_id: FileId(0), full_range: 17..41, focus_range: 32..36, name: \"r#fn\", kind: Function })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 47..84, name: \"r#for\", container_name: \"r#mod\" })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 90..146, name: \"r#struct\", container_name: \"r#mod\" })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 152..266, focus_range: 189..205, name: \"impl\", kind: Impl })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 216..260, name: \"r#fn\" })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 323..367, name: \"r#fn\" })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 401..459, focus_range: 445..456, name: \"impl\", kind: Impl })", + "(TestMod, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 1..461, focus_range: 5..10, name: \"r#mod\", kind: Module, description: \"mod r#mod\" })", + "(Test, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 17..41, focus_range: 32..36, name: \"r#fn\", kind: Function })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 47..84, name: \"r#for\", container_name: \"r#mod\" })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 90..146, name: \"r#struct\", container_name: \"r#mod\" })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 152..266, focus_range: 189..205, name: 
\"impl\", kind: Impl })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 216..260, name: \"r#fn\" })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 323..367, name: \"r#fn\" })", + "(DocTest, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 401..459, focus_range: 445..456, name: \"impl\", kind: Impl })", ] "#]], ) @@ -1722,9 +1734,9 @@ fn exp_main() {} "#, expect![[r#" [ - "(Bin, NavigationTarget { file_id: FileId(0), full_range: 36..80, focus_range: 67..75, name: \"exp_main\", kind: Function })", - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 83..168, focus_range: 100..115, name: \"test_mod_inline\", kind: Module, description: \"mod test_mod_inline\" }, Atom(Flag(\"test\")))", - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 192..218, focus_range: 209..217, name: \"test_mod\", kind: Module, description: \"mod test_mod\" }, Atom(Flag(\"test\")))", + "(Bin, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 36..80, focus_range: 67..75, name: \"exp_main\", kind: Function })", + "(TestMod, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 83..168, focus_range: 100..115, name: \"test_mod_inline\", kind: Module, description: \"mod test_mod_inline\" }, Atom(Flag(\"test\")))", + "(TestMod, NavigationTarget { file_id: FileId(EditionedFileId(Id(1800))), full_range: 192..218, focus_range: 209..217, name: \"test_mod\", kind: Module, description: \"mod test_mod\" }, Atom(Flag(\"test\")))", ] "#]], ) diff --git a/crates/ide/src/ssr.rs b/crates/ide/src/ssr.rs index 7df4499a0c2f..7a05a91bc09b 100644 --- a/crates/ide/src/ssr.rs +++ b/crates/ide/src/ssr.rs @@ -59,8 +59,8 @@ mod tests { use expect_test::expect; use ide_assists::{Assist, AssistResolveStrategy}; use ide_db::{ - FileRange, FxHashSet, RootDatabase, base_db::salsa::Durability, - symbol_index::SymbolsDatabase, + FileRange, FxHashSet, RootDatabase, + base_db::{RootQueryDb, salsa::Durability}, }; use test_fixture::WithFixture; use triomphe::Arc; diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index efee39c13db9..7b7b350ba1aa 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -2,7 +2,7 @@ //! 
read-only code browsers and emitting LSIF use arrayvec::ArrayVec; -use hir::{Crate, Module, Semantics, db::HirDatabase}; +use hir::{Crate, EditionedFileId, Module, Semantics, db::HirDatabase}; use ide_db::{ FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, base_db::{RootQueryDb, SourceDatabase, VfsPath}, @@ -149,8 +149,14 @@ pub enum VendoredLibrariesConfig<'a> { impl StaticIndex<'_> { fn add_file(&mut self, file_id: FileId) { + let sema = hir::Semantics::new(self.db); + let root = sema.parse_guess_edition(file_id).syntax().clone(); + let editioned_file_id = sema + .attach_first_edition(file_id) + .unwrap_or_else(|| EditionedFileId::new(sema.db, file_id, Edition::CURRENT)); + let edition = editioned_file_id.edition(sema.db); let current_crate = crates_for(self.db, file_id).pop().map(Into::into); - let folds = self.analysis.folding_ranges(file_id).unwrap(); + let folds = self.analysis.folding_ranges(editioned_file_id.into()).unwrap(); let inlay_hints = self .analysis .inlay_hints( @@ -184,17 +190,11 @@ impl StaticIndex<'_> { fields_to_resolve: InlayFieldsToResolve::empty(), range_exclusive_hints: false, }, - file_id, + editioned_file_id.into(), None, ) .unwrap(); // hovers - let sema = hir::Semantics::new(self.db); - let root = sema.parse_guess_edition(file_id).syntax().clone(); - let edition = sema - .attach_first_edition(file_id) - .map(|it| it.edition(self.db)) - .unwrap_or(Edition::CURRENT); let display_target = match sema.first_crate(file_id) { Some(krate) => krate.to_display_target(sema.db), None => return, @@ -231,14 +231,13 @@ impl StaticIndex<'_> { documentation: documentation_for_definition(&sema, def, scope_node), hover: Some(hover_for_definition( &sema, - file_id, + editioned_file_id.into(), def, None, scope_node, None, false, &hover_config, - edition, display_target, )), definition: def.try_to_nav(self.db).map(UpmappingResult::call_site).map(|it| { @@ -266,7 +265,7 @@ impl StaticIndex<'_> { result.tokens.push((range, id)); }; - if let Some(module) = sema.file_to_module_def(file_id) { + if let Some(module) = sema.file_to_module_def(editioned_file_id) { let def = Definition::Module(module); let range = root.text_range(); add_token(def, range, &root); @@ -340,7 +339,8 @@ mod tests { ) { let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture); let s = StaticIndex::compute(&analysis, vendored_libs_config); - let mut range_set: FxHashSet<_> = ranges.iter().map(|it| it.0).collect(); + let mut range_set: FxHashSet = + ranges.iter().map(|it| it.0.into_file_id(&analysis.db)).collect(); for f in s.files { for (range, _) in f.tokens { if range.start() == TextSize::from(0) { @@ -366,7 +366,8 @@ mod tests { ) { let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture); let s = StaticIndex::compute(&analysis, vendored_libs_config); - let mut range_set: FxHashSet<_> = ranges.iter().map(|it| it.0).collect(); + let mut range_set: FxHashSet = + ranges.iter().map(|it| it.0.into_file_id(&analysis.db)).collect(); for (_, t) in s.tokens.iter() { if let Some(t) = t.definition { if t.range.start() == TextSize::from(0) { @@ -391,7 +392,8 @@ mod tests { ) { let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture); let s = StaticIndex::compute(&analysis, vendored_libs_config); - let mut range_set: FxHashMap<_, i32> = ranges.iter().map(|it| (it.0, 0)).collect(); + let mut range_set: FxHashMap<_, i32> = + ranges.iter().map(|it| (it.0.into_file_id(&analysis.db), 0)).collect(); // Make sure that all references have at least one range. 
We use a HashMap instead of a // a HashSet so that we can have more than one reference at the same range. diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs index 3ca172977cb9..6e009d491e6a 100644 --- a/crates/ide/src/syntax_highlighting.rs +++ b/crates/ide/src/syntax_highlighting.rs @@ -15,7 +15,7 @@ mod tests; use std::ops::ControlFlow; use either::Either; -use hir::{DefWithBody, EditionedFileId, InFile, InRealFile, MacroKind, Name, Semantics}; +use hir::{DefWithBody, HirFileId, InFile, MacroKind, Name, Semantics}; use ide_db::{FxHashMap, FxHashSet, Ranker, RootDatabase, SymbolKind}; use syntax::{ AstNode, AstToken, NodeOrToken, @@ -25,7 +25,7 @@ use syntax::{ }; use crate::{ - FileId, HlMod, HlOperator, HlPunct, HlTag, + HlMod, HlOperator, HlPunct, HlTag, syntax_highlighting::{ escape::{highlight_escape_byte, highlight_escape_char, highlight_escape_string}, format::highlight_format_string, @@ -189,19 +189,16 @@ pub struct HighlightConfig { pub(crate) fn highlight( db: &RootDatabase, config: HighlightConfig, - file_id: FileId, + file_id: HirFileId, range_to_highlight: Option, ) -> Vec { let _p = tracing::info_span!("highlight").entered(); let sema = Semantics::new(db); - let file_id = sema - .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id)); // Determine the root based on the given range. let (root, range_to_highlight) = { - let file = sema.parse(file_id); - let source_file = file.syntax(); + let file_id = sema.adjust_edition(file_id); + let source_file = sema.parse_or_expand(file_id); match range_to_highlight { Some(range) => { let node = match source_file.covering_element(range) { @@ -216,7 +213,7 @@ pub(crate) fn highlight( let mut hl = highlights::Highlights::new(root.text_range()); let krate = sema.scope(&root).map(|it| it.krate()); - traverse(&mut hl, &sema, config, InRealFile::new(file_id, &root), krate, range_to_highlight); + traverse(&mut hl, &sema, config, InFile::new(file_id, &root), krate, range_to_highlight); hl.to_vec() } @@ -224,11 +221,12 @@ fn traverse( hl: &mut Highlights, sema: &Semantics<'_, RootDatabase>, config: HighlightConfig, - InRealFile { file_id, value: root }: InRealFile<&SyntaxNode>, + InFile { file_id, value: root }: InFile<&SyntaxNode>, krate: Option, range_to_highlight: TextRange, ) { - let is_unlinked = sema.file_to_module_def(file_id.file_id(sema.db)).is_none(); + let is_unlinked = + file_id.file_id().is_some_and(|file_id| sema.file_to_module_def(file_id).is_none()); enum AttrOrDerive { Attr(ast::Item), @@ -305,7 +303,7 @@ fn traverse( } if attr_or_derive_item.is_none() { - if sema.is_attr_macro_call(InFile::new(file_id.into(), &item)) { + if sema.is_attr_macro_call(InFile::new(file_id, &item)) { attr_or_derive_item = Some(AttrOrDerive::Attr(item)); } else { let adt = match item { @@ -317,8 +315,7 @@ fn traverse( match adt { Some(adt) if sema.is_derive_annotated(InFile::new( - file_id.into(), - &adt, + file_id, &adt, )) => { attr_or_derive_item = @@ -389,7 +386,7 @@ fn traverse( let (descended_element, current_body) = match element { // Attempt to descend tokens into macro-calls. 
NodeOrToken::Token(token) if in_macro => { - let descended = descend_token(sema, InRealFile::new(file_id, token)); + let descended = descend_token(sema, InFile::new(file_id, token)); let body = match &descended.value { NodeOrToken::Node(n) => { sema.body_for(InFile::new(descended.file_id, n.syntax())) @@ -400,7 +397,7 @@ fn traverse( }; (descended, body) } - n => (InFile::new(file_id.into(), n), body_stack.last().copied().flatten()), + n => (InFile::new(file_id, n), body_stack.last().copied().flatten()), }; // string highlight injections if let (Some(original_token), Some(descended_token)) = @@ -487,7 +484,7 @@ fn string_injections( hl: &mut Highlights, sema: &Semantics<'_, RootDatabase>, config: HighlightConfig, - file_id: EditionedFileId, + file_id: HirFileId, krate: Option, token: SyntaxToken, descended_token: &SyntaxToken, @@ -533,16 +530,16 @@ fn string_injections( fn descend_token( sema: &Semantics<'_, RootDatabase>, - token: InRealFile, + token: InFile, ) -> InFile> { if token.value.kind() == COMMENT { - return token.map(NodeOrToken::Token).into(); + return token.map(NodeOrToken::Token); } let ranker = Ranker::from_token(&token.value); let mut t = None; let mut r = 0; - sema.descend_into_macros_breakable(token.clone().into(), |tok, _ctx| { + sema.descend_into_macros_breakable(token.clone(), |tok, _ctx| { // FIXME: Consider checking ctx transparency for being opaque? let my_rank = ranker.rank_token(&tok.value); @@ -569,7 +566,7 @@ fn descend_token( ControlFlow::Continue(()) }); - let token = t.unwrap_or_else(|| token.into()); + let token = t.unwrap_or(token); token.map(|token| match token.parent().and_then(ast::NameLike::cast) { // Remap the token into the wrapping single token nodes Some(parent) => match (token.kind(), parent.syntax().kind()) { diff --git a/crates/ide/src/syntax_highlighting/html.rs b/crates/ide/src/syntax_highlighting/html.rs index 9fd807f031f1..12c7a90657ee 100644 --- a/crates/ide/src/syntax_highlighting/html.rs +++ b/crates/ide/src/syntax_highlighting/html.rs @@ -1,22 +1,18 @@ //! Renders a bit of code as HTML. -use hir::{EditionedFileId, Semantics}; +use hir::{HirFileId, Semantics}; use oorandom::Rand32; use stdx::format_to; -use syntax::AstNode; use crate::{ - FileId, RootDatabase, + RootDatabase, syntax_highlighting::{HighlightConfig, highlight}, }; -pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String { +pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: HirFileId, rainbow: bool) -> String { let sema = Semantics::new(db); - let file_id = sema - .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id)); - let file = sema.parse(file_id); - let file = file.syntax(); + let file_id = sema.adjust_edition(file_id); + let file = sema.parse_or_expand(file_id); fn rainbowify(seed: u64) -> String { let mut rng = Rand32::new(seed); format!( @@ -39,7 +35,7 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo macro_bang: true, syntactic_name_ref_highlighting: false, }, - file_id.file_id(db), + file_id, None, ); let text = file.to_string(); diff --git a/crates/ide/src/syntax_highlighting/inject.rs b/crates/ide/src/syntax_highlighting/inject.rs index 7f5c2c1ec849..24c32e0ae3ff 100644 --- a/crates/ide/src/syntax_highlighting/inject.rs +++ b/crates/ide/src/syntax_highlighting/inject.rs @@ -1,9 +1,9 @@ //! "Recursive" Syntax highlighting for code in doctests and fixtures. 
-use std::mem; +use std::{mem, panic::AssertUnwindSafe}; use either::Either; -use hir::{EditionedFileId, HirFileId, InFile, Semantics, sym}; +use hir::{HirFileId, InFile, Semantics, sym}; use ide_db::{ SymbolKind, active_parameter::ActiveParameter, defs::Definition, documentation::docs_with_rangemap, rust_doc::is_rust_fence, @@ -87,7 +87,7 @@ pub(super) fn ra_fixture( inject_doc_comment: config.inject_doc_comment, macro_bang: config.macro_bang, }, - tmp_file_id, + tmp_file_id.into(), ) .unwrap() { @@ -115,14 +115,13 @@ pub(super) fn doc_comment( hl: &mut Highlights, sema: &Semantics<'_, RootDatabase>, config: HighlightConfig, - src_file_id: EditionedFileId, + src_file_id: HirFileId, node: &SyntaxNode, ) { let (attributes, def) = match doc_attributes(sema, node) { Some(it) => it, None => return, }; - let src_file_id: HirFileId = src_file_id.into(); // Extract intra-doc links and emit highlights for them. if let Some((docs, doc_mapping)) = docs_with_rangemap(sema.db, &attributes) { @@ -241,6 +240,7 @@ pub(super) fn doc_comment( inj.add_unmapped("\n}"); let (analysis, tmp_file_id) = Analysis::from_single_file(inj.take_text()); + let tmp_file_id = AssertUnwindSafe(tmp_file_id.into()); if let Ok(ranges) = analysis.with_db(|db| { super::highlight( @@ -255,7 +255,7 @@ pub(super) fn doc_comment( inject_doc_comment: config.inject_doc_comment, macro_bang: config.macro_bang, }, - tmp_file_id, + { tmp_file_id }.0, None, ) }) { diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs index dd359326c61d..b1504acc8c2e 100644 --- a/crates/ide/src/syntax_highlighting/tests.rs +++ b/crates/ide/src/syntax_highlighting/tests.rs @@ -1,11 +1,12 @@ use std::time::Instant; use expect_test::{ExpectFile, expect_file}; +use hir::HirFileRange; use ide_db::SymbolKind; use span::Edition; use test_utils::{AssertLinear, bench, bench_fixture, skip_slow_tests}; -use crate::{FileRange, HighlightConfig, HlTag, TextRange, fixture}; +use crate::{HighlightConfig, HlTag, TextRange, fixture}; const HL_CONFIG: HighlightConfig = HighlightConfig { strings: true, @@ -1178,7 +1179,7 @@ struct Foo { let highlights = &analysis .highlight_range( HL_CONFIG, - FileRange { file_id, range: TextRange::at(45.into(), 1.into()) }, + HirFileRange { file_id: file_id.into(), range: TextRange::at(45.into(), 1.into()) }, ) .unwrap(); @@ -1194,7 +1195,7 @@ macro_rules! 
test {} }"# .trim(), ); - let _ = analysis.highlight(HL_CONFIG, file_id).unwrap(); + let _ = analysis.highlight(HL_CONFIG, file_id.into()).unwrap(); } #[test] @@ -1214,7 +1215,7 @@ trait Trait {} fn foo(x: &fn(&dyn Trait)) {} "#, ); - let _ = analysis.highlight(HL_CONFIG, file_id).unwrap(); + let _ = analysis.highlight(HL_CONFIG, file_id.into()).unwrap(); } /// Highlights the code given by the `ra_fixture` argument, renders the @@ -1226,7 +1227,7 @@ fn check_highlighting( rainbow: bool, ) { let (analysis, file_id) = fixture::file(ra_fixture.trim()); - let actual_html = &analysis.highlight_as_html(file_id, rainbow).unwrap(); + let actual_html = &analysis.highlight_as_html(file_id.into(), rainbow).unwrap(); expect.assert_eq(actual_html) } @@ -1242,7 +1243,7 @@ fn benchmark_syntax_highlighting_long_struct() { let hash = { let _pt = bench("syntax highlighting long struct"); analysis - .highlight(HL_CONFIG, file_id) + .highlight(HL_CONFIG, file_id.into()) .unwrap() .iter() .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct)) @@ -1268,7 +1269,7 @@ fn syntax_highlighting_not_quadratic() { let time = Instant::now(); let hash = analysis - .highlight(HL_CONFIG, file_id) + .highlight(HL_CONFIG, file_id.into()) .unwrap() .iter() .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct)) @@ -1293,7 +1294,7 @@ fn benchmark_syntax_highlighting_parser() { let hash = { let _pt = bench("syntax highlighting parser"); analysis - .highlight(HL_CONFIG, file_id) + .highlight(HL_CONFIG, file_id.into()) .unwrap() .iter() .filter(|it| { @@ -1318,7 +1319,7 @@ fn f<'de, T: Deserialize<'de>>() { "# .trim(), ); - let _ = analysis.highlight(HL_CONFIG, file_id).unwrap(); + let _ = analysis.highlight(HL_CONFIG, file_id.into()).unwrap(); } #[test] diff --git a/crates/ide/src/test_explorer.rs b/crates/ide/src/test_explorer.rs index 06cbd50e946a..8ce993036ef7 100644 --- a/crates/ide/src/test_explorer.rs +++ b/crates/ide/src/test_explorer.rs @@ -1,11 +1,11 @@ //! 
Discovers tests -use hir::{Crate, Module, ModuleDef, Semantics}; +use hir::{Crate, EditionedFileId, Module, ModuleDef, Semantics}; use ide_db::base_db; use ide_db::{FileId, RootDatabase, base_db::RootQueryDb}; use syntax::TextRange; -use crate::{NavigationTarget, Runnable, TryToNav, runnables::runnable_fn}; +use crate::{Runnable, TryToNav, navigation_target::HirNavigationTarget, runnables::runnable_fn}; #[derive(Debug)] pub enum TestItemKind { @@ -72,7 +72,7 @@ fn discover_tests_in_module( let module_id = format!("{prefix_id}::{module_name}"); let module_children = discover_tests_in_module(db, c, module_id.clone(), only_in_this_file); if !module_children.is_empty() { - let nav = NavigationTarget::from_module_to_decl(sema.db, c).call_site; + let nav = HirNavigationTarget::from_module_to_decl(sema.db, c).upmap(sema.db).call_site; r.push(TestItem { id: module_id, kind: TestItemKind::Module, @@ -94,15 +94,15 @@ fn discover_tests_in_module( if !f.is_test(db) { continue; } - let nav = f.try_to_nav(db).map(|r| r.call_site); + let nav = f.try_to_nav(db); let fn_name = f.name(db).as_str().to_owned(); r.push(TestItem { id: format!("{prefix_id}::{fn_name}"), kind: TestItemKind::Function, label: fn_name, parent: Some(prefix_id.clone()), - file: nav.as_ref().map(|n| n.file_id), - text_range: nav.as_ref().map(|n| n.focus_or_full_range()), + file: nav.as_ref().map(|n| n.call_site.file_id), + text_range: nav.as_ref().map(|n| n.call_site.focus_or_full_range()), runnable: runnable_fn(&sema, f), }); } @@ -122,7 +122,10 @@ pub(crate) fn discover_tests_in_crate_by_test_id( pub(crate) fn discover_tests_in_file(db: &RootDatabase, file_id: FileId) -> Vec { let sema = Semantics::new(db); - let Some(module) = sema.file_to_module_def(file_id) else { return vec![] }; + let Some(module) = sema.file_to_module_def(EditionedFileId::current_edition(db, file_id)) + else { + return vec![]; + }; let Some((mut tests, id)) = find_module_id_and_test_parents(&sema, module) else { return vec![]; }; @@ -155,7 +158,7 @@ fn find_module_id_and_test_parents( let module_name = &module.name(sema.db); let module_name = module_name.as_ref().map(|n| n.as_str()).unwrap_or("[mod without name]"); id += module_name; - let nav = NavigationTarget::from_module_to_decl(sema.db, module).call_site; + let nav = HirNavigationTarget::from_module_to_decl(sema.db, module).upmap(sema.db).call_site; r.push(TestItem { id: id.clone(), kind: TestItemKind::Module, diff --git a/crates/ide/src/typing/on_enter.rs b/crates/ide/src/typing/on_enter.rs index fdc583a15cc7..718097f09431 100644 --- a/crates/ide/src/typing/on_enter.rs +++ b/crates/ide/src/typing/on_enter.rs @@ -200,9 +200,10 @@ mod tests { fn apply_on_enter(before: &str) -> Option { let (analysis, position) = fixture::position(before); - let result = analysis.on_enter(position).unwrap()?; + let result = analysis.on_enter(position.into_file_id(&analysis.db)).unwrap()?; - let mut actual = analysis.file_text(position.file_id).unwrap().to_string(); + let mut actual = + analysis.file_text(position.file_id.file_id(&analysis.db)).unwrap().to_string(); result.apply(&mut actual); Some(actual) } diff --git a/crates/ide/src/view_memory_layout.rs b/crates/ide/src/view_memory_layout.rs index 63701a4d15e9..6315d556c2e7 100644 --- a/crates/ide/src/view_memory_layout.rs +++ b/crates/ide/src/view_memory_layout.rs @@ -219,7 +219,7 @@ mod tests { ) -> Option { let (analysis, position, _) = fixture::annotations(ra_fixture); - view_memory_layout(&analysis.db, position) + view_memory_layout(&analysis.db, 
position.into_file_id(&analysis.db)) } #[test] diff --git a/crates/ide/src/view_syntax_tree.rs b/crates/ide/src/view_syntax_tree.rs index ecd93e8b2819..448e8da13edd 100644 --- a/crates/ide/src/view_syntax_tree.rs +++ b/crates/ide/src/view_syntax_tree.rs @@ -179,7 +179,7 @@ mod tests { fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: expect_test::Expect) { let (analysis, file_id) = fixture::file(ra_fixture); - let syn = analysis.view_syntax_tree(file_id).unwrap(); + let syn = analysis.view_syntax_tree(file_id.file_id(&analysis.db)).unwrap(); expect.assert_eq(&syn) } diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index b301a7189b3c..37a114f7fc14 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml @@ -67,7 +67,9 @@ load-cargo.workspace = true proc-macro-api.workspace = true profile.workspace = true project-model.workspace = true +span = { workspace = true , features = ["salsa"]} stdx.workspace = true +syntax-bridge.workspace = true syntax.workspace = true parser.workspace = true toolchain.workspace = true @@ -90,7 +92,6 @@ xshell.workspace = true test-utils.workspace = true test-fixture.workspace = true -syntax-bridge.workspace = true [features] jemalloc = ["jemallocator", "profile/jemalloc"] diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index 0ee01982fea2..e048b3ce8380 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -1177,7 +1177,7 @@ impl flags::AnalysisStats { fields_to_resolve: InlayFieldsToResolve::empty(), range_exclusive_hints: true, }, - analysis.editioned_file_id_to_vfs(file_id), + file_id.into(), None, ); } @@ -1193,7 +1193,7 @@ impl flags::AnalysisStats { annotate_enum_variant_references: false, location: ide::AnnotationLocation::AboveName, }, - analysis.editioned_file_id_to_vfs(file_id), + file_id.into(), ) .unwrap() .into_iter() diff --git a/crates/rust-analyzer/src/cli/highlight.rs b/crates/rust-analyzer/src/cli/highlight.rs index 84607b9fd5d5..238ce2a32861 100644 --- a/crates/rust-analyzer/src/cli/highlight.rs +++ b/crates/rust-analyzer/src/cli/highlight.rs @@ -7,7 +7,7 @@ use crate::cli::{flags, read_stdin}; impl flags::Highlight { pub fn run(self) -> anyhow::Result<()> { let (analysis, file_id) = Analysis::from_single_file(read_stdin()?); - let html = analysis.highlight_as_html(file_id, self.rainbow).unwrap(); + let html = analysis.highlight_as_html(file_id.into(), self.rainbow).unwrap(); println!("{html}"); Ok(()) } diff --git a/crates/rust-analyzer/src/cli/lsif.rs b/crates/rust-analyzer/src/cli/lsif.rs index f3b0699d5515..f162bc1d4c39 100644 --- a/crates/rust-analyzer/src/cli/lsif.rs +++ b/crates/rust-analyzer/src/cli/lsif.rs @@ -125,6 +125,7 @@ impl LsifManager<'_, '_> { index: line_index, encoding: PositionEncoding::Wide(WideEncoding::Utf16), endings: LineEndings::Unix, + transform: Default::default(), }; let range_id = self.add_vertex(lsif::Vertex::Range { range: to_proto::range(&line_index, id.range), @@ -246,6 +247,7 @@ impl LsifManager<'_, '_> { index: line_index, encoding: PositionEncoding::Wide(WideEncoding::Utf16), endings: LineEndings::Unix, + transform: Default::default(), }; let result = folds .into_iter() diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs index d258c5d8191f..5a0efda57ed3 100644 --- a/crates/rust-analyzer/src/cli/scip.rs +++ b/crates/rust-analyzer/src/cli/scip.rs @@ -342,6 +342,7 @@ fn 
get_line_index(db: &RootDatabase, file_id: FileId) -> LineIndex { index: db.line_index(file_id), encoding: PositionEncoding::Utf8, endings: LineEndings::Unix, + transform: Default::default(), } } diff --git a/crates/rust-analyzer/src/cli/ssr.rs b/crates/rust-analyzer/src/cli/ssr.rs index e3e3a143de03..383c30e38376 100644 --- a/crates/rust-analyzer/src/cli/ssr.rs +++ b/crates/rust-analyzer/src/cli/ssr.rs @@ -1,7 +1,10 @@ //! Applies structured search replace rules from the command line. use anyhow::Context; -use ide_db::{EditionedFileId, base_db::SourceDatabase}; +use ide_db::{ + EditionedFileId, + base_db::{RootQueryDb, SourceDatabase}, +}; use ide_ssr::MatchFinder; use load_cargo::{LoadCargoConfig, ProcMacroServerChoice, load_workspace_at}; use project_model::{CargoConfig, RustLibSource}; @@ -50,7 +53,6 @@ impl flags::Search { /// for much else. pub fn run(self) -> anyhow::Result<()> { use ide_db::base_db::SourceDatabase; - use ide_db::symbol_index::SymbolsDatabase; let cargo_config = CargoConfig { all_targets: true, set_test: true, ..CargoConfig::default() }; let load_cargo_config = LoadCargoConfig { diff --git a/crates/rust-analyzer/src/cli/symbols.rs b/crates/rust-analyzer/src/cli/symbols.rs index 9fad6723afcd..a40ea894be86 100644 --- a/crates/rust-analyzer/src/cli/symbols.rs +++ b/crates/rust-analyzer/src/cli/symbols.rs @@ -7,7 +7,7 @@ impl flags::Symbols { pub fn run(self) -> anyhow::Result<()> { let text = read_stdin()?; let (analysis, file_id) = Analysis::from_single_file(text); - let structure = analysis.file_structure(file_id).unwrap(); + let structure = analysis.file_structure(file_id.into()).unwrap(); for s in structure { println!("{s:?}"); } diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index 62a28a1a685d..dceb22f47fb1 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -10,9 +10,12 @@ use std::{ }; use crossbeam_channel::{Receiver, Sender, unbounded}; -use hir::ChangeWithProcMacros; -use ide::{Analysis, AnalysisHost, Cancellable, FileId, SourceRootId}; -use ide_db::base_db::{Crate, ProcMacroPaths, SourceDatabase}; +use hir::{ChangeWithProcMacros, HirFileId, db::ExpandDatabase}; +use ide::{Analysis, AnalysisHost, Cancellable, Edition, FileId, SourceRootId}; +use ide_db::{ + EditionedFileId, + base_db::{Crate, ProcMacroPaths, SourceDatabase}, +}; use itertools::Itertools; use load_cargo::SourceRootConfig; use lsp_types::{SemanticTokens, Url}; @@ -24,6 +27,8 @@ use proc_macro_api::ProcMacroClient; use project_model::{ManifestPath, ProjectWorkspace, ProjectWorkspaceKind, WorkspaceBuildScripts}; use rustc_hash::{FxHashMap, FxHashSet}; use stdx::thread; +#[allow(deprecated)] +use syntax_bridge::prettify_macro_expansion::prettify_macro_expansion; use tracing::{Level, span, trace}; use triomphe::Arc; use vfs::{AbsPathBuf, AnchoredPathBuf, ChangeKind, Vfs, VfsPath}; @@ -33,7 +38,7 @@ use crate::{ diagnostics::{CheckFixes, DiagnosticCollection}, discover, flycheck::{FlycheckHandle, FlycheckMessage}, - line_index::{LineEndings, LineIndex}, + line_index::{LineEndings, LineIndex, PositionTransform}, lsp::{from_proto, to_proto::url_from_abs_path}, lsp_ext, main_loop::Task, @@ -721,10 +726,19 @@ impl GlobalStateSnapshot { url_to_file_id(&self.vfs_read(), url) } + /// Returns `None` if the file was excluded. 
+ pub(crate) fn url_to_hir_file_id(&self, url: &Url) -> anyhow::Result> { + url_to_hir_file_id(&self.analysis, &self.vfs_read(), url) + } + pub(crate) fn file_id_to_url(&self, id: FileId) -> Url { file_id_to_url(&self.vfs_read(), id) } + pub(crate) fn hir_file_id_to_url(&self, id: HirFileId) -> Url { + hir_file_id_to_url(&self.analysis, &self.vfs_read(), id) + } + /// Returns `None` if the file was excluded. pub(crate) fn vfs_path_to_file_id(&self, vfs_path: &VfsPath) -> anyhow::Result> { vfs_path_to_file_id(&self.vfs_read(), vfs_path) @@ -733,16 +747,62 @@ pub(crate) fn file_line_index(&self, file_id: FileId) -> Cancellable { let endings = self.vfs.read().1[&file_id]; let index = self.analysis.file_line_index(file_id)?; - let res = LineIndex { index, endings, encoding: self.config.caps().negotiated_encoding() }; + let res = LineIndex { + index, + endings, + encoding: self.config.caps().negotiated_encoding(), + transform: Default::default(), + }; Ok(res) } + pub(crate) fn hir_line_index(&self, file_id: HirFileId) -> Cancellable { + match file_id { + HirFileId::FileId(editioned_file_id) => { + self.file_line_index(editioned_file_id.file_id(self.analysis.db())) + } + HirFileId::MacroFile(macro_file_id) => { + // FIXME: Cache this + let s = self + .analysis + .with_db(|db| db.parse_macro_expansion(macro_file_id).value.0.syntax_node())?; + let mut transform = vec![]; + #[allow(deprecated)] + let s = prettify_macro_expansion(s, &mut |_| None, |mods| { + transform = mods + .iter() + .map(|(pos, kind)| (pos.offset(), *kind)) + .sorted_by(|&(a_off, a2), &(b_off, b2)| a_off.cmp(&b_off).then_with(|| { + // the prettify infra inserts these in reverse due to implementation + // reasons, but for our line assumptions we need to flip them so that + // the indent is not treated as part of the line + match (a2,b2) { + (syntax_bridge::prettify_macro_expansion::PrettifyWsKind::Indent(_) | syntax_bridge::prettify_macro_expansion::PrettifyWsKind::Space, syntax_bridge::prettify_macro_expansion::PrettifyWsKind::Newline) => std::cmp::Ordering::Greater, + (syntax_bridge::prettify_macro_expansion::PrettifyWsKind::Newline, syntax_bridge::prettify_macro_expansion::PrettifyWsKind::Indent(_) | syntax_bridge::prettify_macro_expansion::PrettifyWsKind::Space) => std::cmp::Ordering::Less, + (syntax_bridge::prettify_macro_expansion::PrettifyWsKind::Space,syntax_bridge::prettify_macro_expansion::PrettifyWsKind::Indent(_)) => std::cmp::Ordering::Greater, + (syntax_bridge::prettify_macro_expansion::PrettifyWsKind::Indent(_),syntax_bridge::prettify_macro_expansion::PrettifyWsKind::Space) => std::cmp::Ordering::Less, + _ => std::cmp::Ordering::Equal + } + })) + .collect(); + }); + let res = LineIndex { + index: Arc::new(ide_db::line_index::LineIndex::new(&s.to_string())), + endings: LineEndings::Unix, + encoding: self.config.caps().negotiated_encoding(), + transform: PositionTransform { insertions: transform }, + }; + Ok(res) + } + } + } + pub(crate) fn file_version(&self, file_id: FileId) -> Option { Some(self.mem_docs.get(self.vfs_read().file_path(file_id))?.version) } pub(crate) fn url_file_version(&self, url: &Url) -> Option { - let path = from_proto::vfs_path(url).ok()?; + let path = from_proto::url_to_vfs_path(url).ok()?.into_vfs()?; Some(self.mem_docs.get(&path)?.version) } @@ -818,10 +878,40 @@ pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url { url_from_abs_path(path) } +pub(crate) fn hir_file_id_to_url(analysis: &Analysis, vfs: &vfs::Vfs, id: HirFileId) -> Url { + match id { +
HirFileId::FileId(editioned_file_id) => { + file_id_to_url(vfs, editioned_file_id.file_id(analysis.db())) + } + HirFileId::MacroFile(macro_file_id) => lsp_types::Url::parse(&format!( + "rust-macro-file:{}.macro-file.rs", + ide_db::base_db::salsa::plumbing::AsId::as_id(&macro_file_id).as_bits() + )) + .unwrap(), + } +} + /// Returns `None` if the file was excluded. pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> anyhow::Result> { - let path = from_proto::vfs_path(url)?; - vfs_path_to_file_id(vfs, &path) + let path = from_proto::url_to_vfs_path(url)?; + match path { + from_proto::VfsOrMacroPath::Vfs(path) => vfs_path_to_file_id(vfs, &path), + from_proto::VfsOrMacroPath::Macro(..) => anyhow::bail!("unexpected macro file"), + } +} + +/// Returns `None` if the file was excluded. +pub(crate) fn url_to_hir_file_id( + analysis: &Analysis, + vfs: &vfs::Vfs, + url: &Url, +) -> anyhow::Result> { + let path = from_proto::url_to_vfs_path(url)?; + Ok(match path { + from_proto::VfsOrMacroPath::Vfs(path) => vfs_path_to_file_id(vfs, &path)? + .map(|file_id| EditionedFileId::new(analysis.db(), file_id, Edition::CURRENT).into()), + from_proto::VfsOrMacroPath::Macro(call) => Some(call.into()), + }) } /// Returns `None` if the file was excluded. diff --git a/crates/rust-analyzer/src/handlers/notification.rs b/crates/rust-analyzer/src/handlers/notification.rs index 200e972e4289..6088c025ba99 100644 --- a/crates/rust-analyzer/src/handlers/notification.rs +++ b/crates/rust-analyzer/src/handlers/notification.rs @@ -18,7 +18,10 @@ use crate::{ config::{Config, ConfigChange}, flycheck::Target, global_state::{FetchWorkspaceRequest, GlobalState}, - lsp::{from_proto, utils::apply_document_changes}, + lsp::{ + from_proto::{self, VfsOrMacroPath}, + utils::apply_document_changes, + }, lsp_ext::{self, RunFlycheckParams}, mem_docs::DocumentData, reload, @@ -61,7 +64,7 @@ pub(crate) fn handle_did_open_text_document( ) -> anyhow::Result<()> { let _p = tracing::info_span!("handle_did_open_text_document").entered(); - if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) { + if let Ok(VfsOrMacroPath::Vfs(path)) = from_proto::url_to_vfs_path(&params.text_document.uri) { let already_exists = state .mem_docs .insert( @@ -103,7 +106,7 @@ pub(crate) fn handle_did_change_text_document( ) -> anyhow::Result<()> { let _p = tracing::info_span!("handle_did_change_text_document").entered(); - if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) { + if let Ok(VfsOrMacroPath::Vfs(path)) = from_proto::url_to_vfs_path(&params.text_document.uri) { let Some(DocumentData { version, data }) = state.mem_docs.get_mut(&path) else { tracing::error!(?path, "unexpected DidChangeTextDocument"); return Ok(()); @@ -132,7 +135,7 @@ pub(crate) fn handle_did_close_text_document( ) -> anyhow::Result<()> { let _p = tracing::info_span!("handle_did_close_text_document").entered(); - if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) { + if let Ok(VfsOrMacroPath::Vfs(path)) = from_proto::url_to_vfs_path(&params.text_document.uri) { if state.mem_docs.remove(&path).is_err() { tracing::error!("orphan DidCloseTextDocument: {}", path); } @@ -155,9 +158,9 @@ pub(crate) fn handle_did_save_text_document( state: &mut GlobalState, params: DidSaveTextDocumentParams, ) -> anyhow::Result<()> { - if let Ok(vfs_path) = from_proto::vfs_path(&params.text_document.uri) { + if let Ok(VfsOrMacroPath::Vfs(path)) = from_proto::url_to_vfs_path(&params.text_document.uri) { let snap = state.snapshot(); - let file_id = try_default!(snap.vfs_path_to_file_id(&vfs_path)?); +
let file_id = try_default!(snap.vfs_path_to_file_id(&path)?); let sr = snap.analysis.source_root_id(file_id)?; if state.config.script_rebuild_on_save(Some(sr)) && state.build_deps_changed { @@ -168,7 +171,7 @@ pub(crate) fn handle_did_save_text_document( } // Re-fetch workspaces if a workspace related file has changed - if let Some(path) = vfs_path.as_path() { + if let Some(path) = path.as_path() { let additional_files = &state .config .discover_workspace_config() @@ -196,7 +199,7 @@ pub(crate) fn handle_did_save_text_document( } } - if !state.config.check_on_save(Some(sr)) || run_flycheck(state, vfs_path) { + if !state.config.check_on_save(Some(sr)) || run_flycheck(state, path) { return Ok(()); } } else if state.config.check_on_save(None) && state.config.flycheck_workspace(None) { @@ -290,8 +293,10 @@ pub(crate) fn handle_did_change_watched_files( params: DidChangeWatchedFilesParams, ) -> anyhow::Result<()> { for change in params.changes.iter().unique_by(|&it| &it.uri) { - if let Ok(path) = from_proto::abs_path(&change.uri) { - state.loader.handle.invalidate(path); + if let Ok(VfsOrMacroPath::Vfs(path)) = from_proto::url_to_vfs_path(&change.uri) { + if let Some(path) = path.into_abs_path() { + state.loader.handle.invalidate(path); + } } } Ok(()) @@ -450,7 +455,7 @@ pub(crate) fn handle_run_flycheck( ) -> anyhow::Result<()> { let _p = tracing::info_span!("handle_run_flycheck").entered(); if let Some(text_document) = params.text_document { - if let Ok(vfs_path) = from_proto::vfs_path(&text_document.uri) { + if let Ok(VfsOrMacroPath::Vfs(vfs_path)) = from_proto::url_to_vfs_path(&text_document.uri) { if run_flycheck(state, vfs_path) { return Ok(()); } diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs index a76a65220d3b..bf45d67b0d79 100644 --- a/crates/rust-analyzer/src/handlers/request.rs +++ b/crates/rust-analyzer/src/handlers/request.rs @@ -6,9 +6,10 @@ use std::{fs, io::Write as _, ops::Not, process::Stdio}; use anyhow::Context; use base64::{Engine, prelude::BASE64_STANDARD}; +use hir::{EditionedFileId, HirFileId, HirFilePosition, HirFileRange, db::ExpandDatabase}; use ide::{ AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, CompletionFieldsToResolve, - FilePosition, FileRange, HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, + Edition, FilePosition, FileRange, HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, ReferenceCategory, Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit, }; use ide_db::{FxHashMap, SymbolKind}; @@ -28,6 +29,8 @@ use project_model::{CargoWorkspace, ManifestPath, ProjectWorkspaceKind, TargetKi use serde_json::json; use stdx::{format_to, never}; use syntax::{TextRange, TextSize}; +#[allow(deprecated)] +use syntax_bridge::prettify_macro_expansion::prettify_macro_expansion; use triomphe::Arc; use vfs::{AbsPath, AbsPathBuf, FileId, VfsPath}; @@ -81,7 +84,7 @@ pub(crate) fn handle_analyzer_status( let mut file_id = None; if let Some(tdi) = params.text_document { - match from_proto::file_id(&snap, &tdi.uri) { + match snap.url_to_file_id(&tdi.uri) { Ok(Some(it)) => file_id = Some(it), Ok(None) => {} Err(_) => format_to!(buf, "file {} not found in vfs", tdi.uri), @@ -141,7 +144,7 @@ pub(crate) fn handle_view_syntax_tree( params: lsp_ext::ViewSyntaxTreeParams, ) -> anyhow::Result { let _p = tracing::info_span!("handle_view_syntax_tree").entered(); - let id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?); + let id =
try_default!(snap.url_to_file_id(&params.text_document.uri)?); let res = snap.analysis.view_syntax_tree(id)?; Ok(res) } @@ -180,7 +183,7 @@ pub(crate) fn handle_view_file_text( snap: GlobalStateSnapshot, params: lsp_types::TextDocumentIdentifier, ) -> anyhow::Result { - let file_id = try_default!(from_proto::file_id(&snap, &params.uri)?); + let file_id = try_default!(snap.url_to_file_id(&params.uri)?); Ok(snap.analysis.file_text(file_id)?.to_string()) } @@ -189,7 +192,7 @@ pub(crate) fn handle_view_item_tree( params: lsp_ext::ViewItemTreeParams, ) -> anyhow::Result { let _p = tracing::info_span!("handle_view_item_tree").entered(); - let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?); + let file_id = try_default!(snap.url_to_file_id(&params.text_document.uri)?); let res = snap.analysis.view_item_tree(file_id)?; Ok(res) } @@ -319,11 +322,11 @@ pub(crate) fn handle_expand_macro( params: lsp_ext::ExpandMacroParams, ) -> anyhow::Result> { let _p = tracing::info_span!("handle_expand_macro").entered(); - let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?); - let line_index = snap.file_line_index(file_id)?; + let file_id = try_default!(snap.url_to_hir_file_id(&params.text_document.uri)?); + let line_index = snap.hir_line_index(file_id)?; let offset = from_proto::offset(&line_index, params.position)?; - let res = snap.analysis.expand_macro(FilePosition { file_id, offset })?; + let res = snap.analysis.expand_macro(HirFilePosition { file_id, offset })?; Ok(res.map(|it| lsp_ext::ExpandedMacro { name: it.name, expansion: it.expansion })) } @@ -332,7 +335,7 @@ pub(crate) fn handle_selection_range( params: lsp_types::SelectionRangeParams, ) -> anyhow::Result>> { let _p = tracing::info_span!("handle_selection_range").entered(); - let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?); + let file_id = try_default!(snap.url_to_file_id(&params.text_document.uri)?); let line_index = snap.file_line_index(file_id)?; let res: anyhow::Result> = params .positions @@ -375,7 +378,7 @@ pub(crate) fn handle_matching_brace( params: lsp_ext::MatchingBraceParams, ) -> anyhow::Result> { let _p = tracing::info_span!("handle_matching_brace").entered(); - let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?); + let file_id = try_default!(snap.url_to_file_id(&params.text_document.uri)?); let line_index = snap.file_line_index(file_id)?; params .positions @@ -399,7 +402,7 @@ pub(crate) fn handle_join_lines( ) -> anyhow::Result> { let _p = tracing::info_span!("handle_join_lines").entered(); - let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?); + let file_id = try_default!(snap.url_to_file_id(&params.text_document.uri)?); let config = snap.config.join_lines(); let line_index = snap.file_line_index(file_id)?; @@ -496,10 +499,15 @@ pub(crate) fn handle_document_diagnostics( snap: GlobalStateSnapshot, params: lsp_types::DocumentDiagnosticParams, ) -> anyhow::Result { - let file_id = match from_proto::file_id(&snap, &params.text_document.uri)? { + let file_id = match snap.url_to_hir_file_id(&params.text_document.uri)? { Some(it) => it, None => return Ok(empty_diagnostic_report()), }; + + let Some(file_id) = file_id.file_id() else { + return Ok(empty_diagnostic_report()); + }; + let file_id = snap.analysis.editioned_file_id_to_vfs(file_id); let source_root = snap.analysis.source_root_id(file_id)?; if !snap.analysis.is_local_source_root(source_root)?
{ return Ok(empty_diagnostic_report()); @@ -563,8 +571,8 @@ pub(crate) fn handle_document_symbol( params: lsp_types::DocumentSymbolParams, ) -> anyhow::Result> { let _p = tracing::info_span!("handle_document_symbol").entered(); - let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?); - let line_index = snap.file_line_index(file_id)?; + let file_id = try_default!(snap.url_to_hir_file_id(&params.text_document.uri)?); + let line_index = snap.hir_line_index(file_id)?; let mut parents: Vec<(lsp_types::DocumentSymbol, Option)> = Vec::new(); @@ -609,7 +617,7 @@ pub(crate) fn handle_document_symbol( let res = if snap.config.hierarchical_symbols() { document_symbols.into() } else { - let url = to_proto::url(&snap, file_id); + let url = to_proto::url_hir(&snap, file_id); let mut symbol_information = Vec::::new(); for symbol in document_symbols { flatten_document_symbol(&symbol, None, &url, &mut symbol_information); @@ -803,12 +811,12 @@ pub(crate) fn handle_goto_definition( ) -> anyhow::Result> { let _p = tracing::info_span!("handle_goto_definition").entered(); let position = - try_default!(from_proto::file_position(&snap, params.text_document_position_params)?); + try_default!(from_proto::hir_file_position(&snap, params.text_document_position_params)?); let nav_info = match snap.analysis.goto_definition(position)? { None => return Ok(None), Some(it) => it, }; - let src = FileRange { file_id: position.file_id, range: nav_info.range }; + let src = HirFileRange { file_id: position.file_id, range: nav_info.range }; let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?; Ok(Some(res)) } @@ -818,7 +826,7 @@ pub(crate) fn handle_goto_declaration( params: lsp_types::request::GotoDeclarationParams, ) -> anyhow::Result> { let _p = tracing::info_span!("handle_goto_declaration").entered(); - let position = try_default!(from_proto::file_position( + let position = try_default!(from_proto::hir_file_position( &snap, params.text_document_position_params.clone() )?); @@ -826,7 +834,7 @@ pub(crate) fn handle_goto_declaration( None => return handle_goto_definition(snap, params), Some(it) => it, }; - let src = FileRange { file_id: position.file_id, range: nav_info.range }; + let src = HirFileRange { file_id: position.file_id, range: nav_info.range }; let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?; Ok(Some(res)) } @@ -837,12 +845,12 @@ pub(crate) fn handle_goto_implementation( ) -> anyhow::Result> { let _p = tracing::info_span!("handle_goto_implementation").entered(); let position = - try_default!(from_proto::file_position(&snap, params.text_document_position_params)?); + try_default!(from_proto::hir_file_position(&snap, params.text_document_position_params)?); let nav_info = match snap.analysis.goto_implementation(position)? { None => return Ok(None), Some(it) => it, }; - let src = FileRange { file_id: position.file_id, range: nav_info.range }; + let src = HirFileRange { file_id: position.file_id, range: nav_info.range }; let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?; Ok(Some(res)) } @@ -853,12 +861,12 @@ pub(crate) fn handle_goto_type_definition( ) -> anyhow::Result> { let _p = tracing::info_span!("handle_goto_type_definition").entered(); let position = - try_default!(from_proto::file_position(&snap, params.text_document_position_params)?); + try_default!(from_proto::hir_file_position(&snap, params.text_document_position_params)?); let nav_info = match snap.analysis.goto_type_definition(position)?
{ None => return Ok(None), Some(it) => it, }; - let src = FileRange { file_id: position.file_id, range: nav_info.range }; + let src = HirFileRange { file_id: position.file_id, range: nav_info.range }; let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?; Ok(Some(res)) } @@ -906,7 +914,7 @@ pub(crate) fn handle_parent_module( } // check if invoked at the crate root - let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?); + let file_id = try_default!(snap.url_to_file_id(&params.text_document.uri)?); let crate_id = match snap.analysis.crates_for(file_id)?.first() { Some(&crate_id) => crate_id, None => return Ok(None), @@ -930,7 +938,7 @@ pub(crate) fn handle_parent_module( } // locate parent module by semantics - let position = try_default!(from_proto::file_position(&snap, params)?); + let position = try_default!(from_proto::hir_file_position(&snap, params)?); let navs = snap.analysis.parent_module(position)?; let res = to_proto::goto_definition_response(&snap, None, navs)?; Ok(Some(res)) @@ -942,7 +950,7 @@ pub(crate) fn handle_child_modules( ) -> anyhow::Result> { let _p = tracing::info_span!("handle_child_modules").entered(); // locate child module by semantics - let position = try_default!(from_proto::file_position(&snap, params)?); + let position = try_default!(from_proto::hir_file_position(&snap, params)?); let navs = snap.analysis.child_modules(position)?; let res = to_proto::goto_definition_response(&snap, None, navs)?; Ok(Some(res)) } @@ -953,11 +961,14 @@ pub(crate) fn handle_runnables( params: lsp_ext::RunnablesParams, ) -> anyhow::Result> { let _p = tracing::info_span!("handle_runnables").entered(); - let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?); - let source_root = snap.analysis.source_root_id(file_id).ok(); - let line_index = snap.file_line_index(file_id)?; + let file_id = try_default!(snap.url_to_hir_file_id(&params.text_document.uri)?); + // let source_root = snap.analysis.source_root_id(file_id).ok(); + // FIXME + let source_root = None; + + let line_index = snap.hir_line_index(file_id)?; let offset = params.position.and_then(|it| from_proto::offset(&line_index, it).ok()); - let target_spec = TargetSpec::for_file(&snap, file_id)?; + let target_spec = TargetSpec::for_hir_file(&snap, file_id)?; let mut res = Vec::new(); for runnable in snap.analysis.runnables(file_id)?
{ @@ -1035,23 +1046,24 @@ pub(crate) fn handle_runnables( Some(TargetSpec::ProjectJson(_)) => {} None => { if !snap.config.linked_or_discovered_projects().is_empty() { - if let Some(path) = snap.file_id_to_file_path(file_id).parent() { - let mut cargo_args = vec!["check".to_owned(), "--workspace".to_owned()]; - cargo_args.extend(config.cargo_extra_args.iter().cloned()); - res.push(lsp_ext::Runnable { - label: "cargo check --workspace".to_owned(), - location: None, - kind: lsp_ext::RunnableKind::Cargo, - args: lsp_ext::RunnableArgs::Cargo(lsp_ext::CargoRunnableArgs { - workspace_root: None, - cwd: path.as_path().unwrap().to_path_buf().into(), - override_cargo: config.override_cargo, - cargo_args, - executable_args: Vec::new(), - environment: Default::default(), - }), - }); - }; + // FIXME + // if let Some(path) = snap.file_id_to_file_path(file_id).parent() { + // let mut cargo_args = vec!["check".to_owned(), "--workspace".to_owned()]; + // cargo_args.extend(config.cargo_extra_args.iter().cloned()); + // res.push(lsp_ext::Runnable { + // label: "cargo check --workspace".to_owned(), + // location: None, + // kind: lsp_ext::RunnableKind::Cargo, + // args: lsp_ext::RunnableArgs::Cargo(lsp_ext::CargoRunnableArgs { + // workspace_root: None, + // cwd: path.as_path().unwrap().to_path_buf().into(), + // override_cargo: config.override_cargo, + // cargo_args, + // executable_args: Vec::new(), + // environment: Default::default(), + // }), + // }); + // }; } } } @@ -1145,8 +1157,10 @@ pub(crate) fn handle_completion_resolve( let resolve_data: lsp_ext::CompletionResolveData = serde_json::from_value(data)?; - let file_id = from_proto::file_id(&snap, &resolve_data.position.text_document.uri)? + let file_id = snap + .url_to_file_id(&resolve_data.position.text_document.uri)? .expect("we never provide completions for excluded files"); + let line_index = snap.file_line_index(file_id)?; // FIXME: We should fix up the position when retrying the cancelled request instead let Ok(offset) = from_proto::offset(&line_index, resolve_data.position.position) else { @@ -1229,11 +1243,16 @@ pub(crate) fn handle_folding_range( params: FoldingRangeParams, ) -> anyhow::Result>> { let _p = tracing::info_span!("handle_folding_range").entered(); - let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?); + let file_id = try_default!(snap.url_to_hir_file_id(&params.text_document.uri)?); let folds = snap.analysis.folding_ranges(file_id)?; - let text = snap.analysis.file_text(file_id)?; - let line_index = snap.file_line_index(file_id)?; - let line_folding_only = snap.config.line_folding_only(); + let (text, line_folding_only) = match file_id { + HirFileId::FileId(editioned_file_id) => ( + snap.analysis.file_text(snap.analysis.editioned_file_id_to_vfs(editioned_file_id))?, + snap.config.line_folding_only(), + ), + HirFileId::MacroFile(_) => (Arc::from(String::new()) as Arc, false), + }; + let line_index = snap.hir_line_index(file_id)?; let res = folds .into_iter() .map(|it| to_proto::folding_range(&text, &line_index, line_folding_only, it)) @@ -1266,7 +1285,7 @@ pub(crate) fn handle_hover( PositionOrRange::Position(position) => Range::new(position, position), PositionOrRange::Range(range) => range, }; - let file_range = try_default!(from_proto::file_range(&snap, &params.text_document, range)?); + let file_range = try_default!(from_proto::hir_file_range(&snap, &params.text_document, range)?); let hover = snap.config.hover(); let info = match snap.analysis.hover(&hover, file_range)?
{ @@ -1274,7 +1293,7 @@ pub(crate) fn handle_hover( Some(info) => info, }; - let line_index = snap.file_line_index(file_range.file_id)?; + let line_index = snap.hir_line_index(file_range.file_id)?; let range = to_proto::range(&line_index, info.range); let markup_kind = hover.format; let hover = lsp_ext::Hover { @@ -1349,7 +1368,8 @@ pub(crate) fn handle_references( params: lsp_types::ReferenceParams, ) -> anyhow::Result>> { let _p = tracing::info_span!("handle_references").entered(); - let position = try_default!(from_proto::file_position(&snap, params.text_document_position)?); + let position = + try_default!(from_proto::hir_file_position(&snap, params.text_document_position)?); let exclude_imports = snap.config.find_all_refs_exclude_imports(); let exclude_tests = snap.config.find_all_refs_exclude_tests(); @@ -1363,7 +1383,7 @@ pub(crate) fn handle_references( .into_iter() .flat_map(|refs| { let decl = if include_declaration { - refs.declaration.map(|decl| FileRange { + refs.declaration.map(|decl| HirFileRange { file_id: decl.nav.file_id, range: decl.nav.focus_or_full_range(), }) @@ -1378,7 +1398,7 @@ pub(crate) fn handle_references( (!exclude_imports || !category.contains(ReferenceCategory::IMPORT)) && (!exclude_tests || !category.contains(ReferenceCategory::TEST)) }) - .map(move |(range, _)| FileRange { file_id, range }) + .map(move |(range, _)| HirFileRange { file_id, range }) }) .chain(decl) }) @@ -1420,7 +1440,7 @@ pub(crate) fn handle_code_action( return Ok(None); } - let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?); + let file_id = try_default!(snap.url_to_file_id(&params.text_document.uri)?); let line_index = snap.file_line_index(file_id)?; let frange = try_default!(from_proto::file_range(&snap, &params.text_document, params.range)?); let source_root = snap.analysis.source_root_id(file_id)?; @@ -1500,8 +1520,10 @@ pub(crate) fn handle_code_action_resolve( return Ok(code_action); }; - let file_id = from_proto::file_id(&snap, &params.code_action_params.text_document.uri)? + let file_id = snap + .url_to_file_id(&params.code_action_params.text_document.uri)? .expect("we never provide code actions for excluded files"); + if snap.file_version(file_id) != params.version { return Err(invalid_params_error("stale code action".to_owned()).into()); } @@ -1605,8 +1627,8 @@ pub(crate) fn handle_code_lens( return Ok(Some(Vec::default())); } - let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?); - let target_spec = TargetSpec::for_file(&snap, file_id)?; + let file_id = try_default!(snap.url_to_hir_file_id(&params.text_document.uri)?); + let target_spec = TargetSpec::for_hir_file(&snap, file_id)?; let annotations = snap.analysis.annotations( &AnnotationConfig { @@ -1670,17 +1692,16 @@ pub(crate) fn handle_document_highlight( ) -> anyhow::Result>> { let _p = tracing::info_span!("handle_document_highlight").entered(); let position = - try_default!(from_proto::file_position(&snap, params.text_document_position_params)?); - let line_index = snap.file_line_index(position.file_id)?; - let source_root = snap.analysis.source_root_id(position.file_id)?; + try_default!(from_proto::hir_file_position(&snap, params.text_document_position_params)?); + let line_index = snap.hir_line_index(position.file_id)?; + // FIXME + // let source_root = snap.analysis.source_root_id(position.file_id)?; - let refs = match snap - .analysis - .highlight_related(snap.config.highlight_related(Some(source_root)), position)?
- { - None => return Ok(None), - Some(refs) => refs, - }; + let refs = + match snap.analysis.highlight_related(snap.config.highlight_related(None), position)? { + None => return Ok(None), + Some(refs) => refs, + }; let res = refs .into_iter() .map(|ide::HighlightedRange { range, category }| lsp_types::DocumentHighlight { @@ -1719,12 +1740,12 @@ pub(crate) fn handle_inlay_hints( ) -> anyhow::Result>> { let _p = tracing::info_span!("handle_inlay_hints").entered(); let document_uri = &params.text_document.uri; - let FileRange { file_id, range } = try_default!(from_proto::file_range( + let HirFileRange { file_id, range } = try_default!(from_proto::hir_file_range( &snap, &TextDocumentIdentifier::new(document_uri.to_owned()), params.range, )?); - let line_index = snap.file_line_index(file_id)?; + let line_index = snap.hir_line_index(file_id)?; let range = TextRange::new( range.start().min(line_index.index.len()), range.end().min(line_index.index.len()), @@ -1773,6 +1794,7 @@ pub(crate) fn handle_inlay_hints_resolve( let mut forced_resolve_inlay_hints_config = snap.config.inlay_hints(); forced_resolve_inlay_hints_config.fields_to_resolve = InlayFieldsToResolve::empty(); + let file_id = EditionedFileId::new(snap.analysis.db(), file_id, Edition::CURRENT).into(); let resolve_hints = snap.analysis.inlay_hints_resolve( &forced_resolve_inlay_hints_config, file_id, @@ -1808,7 +1830,7 @@ pub(crate) fn handle_call_hierarchy_prepare( ) -> anyhow::Result>> { let _p = tracing::info_span!("handle_call_hierarchy_prepare").entered(); let position = - try_default!(from_proto::file_position(&snap, params.text_document_position_params)?); + try_default!(from_proto::hir_file_position(&snap, params.text_document_position_params)?); let nav_info = match snap.analysis.call_hierarchy(position)? { None => return Ok(None), @@ -1833,8 +1855,8 @@ pub(crate) fn handle_call_hierarchy_incoming( let item = params.item; let doc = TextDocumentIdentifier::new(item.uri); - let frange = try_default!(from_proto::file_range(&snap, &doc, item.selection_range)?); - let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() }; + let frange = try_default!(from_proto::hir_file_range(&snap, &doc, item.selection_range)?); + let fpos = HirFilePosition { file_id: frange.file_id, offset: frange.range.start() }; let config = snap.config.call_hierarchy(); let call_items = match snap.analysis.incoming_calls(config, fpos)? { @@ -1846,7 +1868,7 @@ pub(crate) fn handle_call_hierarchy_incoming( for call_item in call_items.into_iter() { let file_id = call_item.target.file_id; - let line_index = snap.file_line_index(file_id)?; + let line_index = snap.hir_line_index(file_id)?; let item = to_proto::call_hierarchy_item(&snap, call_item.target)?; res.push(CallHierarchyIncomingCall { from: item, @@ -1871,9 +1893,9 @@ pub(crate) fn handle_call_hierarchy_outgoing( let item = params.item; let doc = TextDocumentIdentifier::new(item.uri); - let frange = try_default!(from_proto::file_range(&snap, &doc, item.selection_range)?); - let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() }; - let line_index = snap.file_line_index(fpos.file_id)?; + let frange = try_default!(from_proto::hir_file_range(&snap, &doc, item.selection_range)?); + let fpos = HirFilePosition { file_id: frange.file_id, offset: frange.range.start() }; + let line_index = snap.hir_line_index(fpos.file_id)?; let config = snap.config.call_hierarchy(); let call_items = match snap.analysis.outgoing_calls(config, fpos)?
{ @@ -1906,9 +1928,8 @@ pub(crate) fn handle_semantic_tokens_full( ) -> anyhow::Result> { let _p = tracing::info_span!("handle_semantic_tokens_full").entered(); - let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?); - let text = snap.analysis.file_text(file_id)?; - let line_index = snap.file_line_index(file_id)?; + let file_id = try_default!(snap.url_to_hir_file_id(&params.text_document.uri)?); + let line_index = snap.hir_line_index(file_id)?; let mut highlight_config = snap.config.highlighting_config(); // Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet. @@ -1917,7 +1938,7 @@ pub(crate) fn handle_semantic_tokens_full( let highlights = snap.analysis.highlight(highlight_config, file_id)?; let semantic_tokens = to_proto::semantic_tokens( - &text, + // &text, &line_index, highlights, snap.config.semantics_tokens_augments_syntax_tokens(), @@ -1936,9 +1957,8 @@ pub(crate) fn handle_semantic_tokens_full_delta( ) -> anyhow::Result> { let _p = tracing::info_span!("handle_semantic_tokens_full_delta").entered(); - let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?); - let text = snap.analysis.file_text(file_id)?; - let line_index = snap.file_line_index(file_id)?; + let file_id = try_default!(snap.url_to_hir_file_id(&params.text_document.uri)?); + let line_index = snap.hir_line_index(file_id)?; let mut highlight_config = snap.config.highlighting_config(); // Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet. @@ -1947,7 +1967,7 @@ pub(crate) fn handle_semantic_tokens_full_delta( let highlights = snap.analysis.highlight(highlight_config, file_id)?; let semantic_tokens = to_proto::semantic_tokens( - &text, + // &text, &line_index, highlights, snap.config.semantics_tokens_augments_syntax_tokens(), @@ -1979,9 +1999,9 @@ pub(crate) fn handle_semantic_tokens_range( ) -> anyhow::Result> { let _p = tracing::info_span!("handle_semantic_tokens_range").entered(); - let frange = try_default!(from_proto::file_range(&snap, &params.text_document, params.range)?); - let text = snap.analysis.file_text(frange.file_id)?; - let line_index = snap.file_line_index(frange.file_id)?; + let frange = + try_default!(from_proto::hir_file_range(&snap, &params.text_document, params.range)?); + let line_index = snap.hir_line_index(frange.file_id)?; let mut highlight_config = snap.config.highlighting_config(); // Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet. @@ -1990,7 +2010,7 @@ pub(crate) fn handle_semantic_tokens_range( let highlights = snap.analysis.highlight_range(highlight_config, frange)?; let semantic_tokens = to_proto::semantic_tokens( - &text, + // &text, &line_index, highlights, snap.config.semantics_tokens_augments_syntax_tokens(), @@ -2046,7 +2066,7 @@ pub(crate) fn handle_open_cargo_toml( params: lsp_ext::OpenCargoTomlParams, ) -> anyhow::Result> { let _p = tracing::info_span!("handle_open_cargo_toml").entered(); - let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?); + let file_id = try_default!(snap.url_to_file_id(&params.text_document.uri)?); let cargo_spec = match TargetSpec::for_file(&snap, file_id)?
Some(TargetSpec::Cargo(it)) => it, @@ -2059,12 +2079,30 @@ pub(crate) fn handle_open_cargo_toml( Ok(Some(res)) } +pub(crate) fn macro_file_content( + snap: GlobalStateSnapshot, + url: lsp_types::TextDocumentIdentifier, +) -> anyhow::Result { + let file_id = snap.url_to_hir_file_id(&url.uri)?.expect("expected macro file"); + snap.analysis + .with_db(|db| { + #[allow(deprecated)] + prettify_macro_expansion( + db.parse_macro_expansion(file_id.macro_file().unwrap()).value.0.syntax_node(), + &mut |_| None, + |_| (), + ) + .to_string() + }) + .map_err(Into::into) +} + pub(crate) fn handle_move_item( snap: GlobalStateSnapshot, params: lsp_ext::MoveItemParams, ) -> anyhow::Result> { let _p = tracing::info_span!("handle_move_item").entered(); - let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?); + let file_id = try_default!(snap.url_to_file_id(&params.text_document.uri)?); let range = try_default!(from_proto::file_range(&snap, &params.text_document, params.range)?); let direction = match params.direction { @@ -2091,7 +2129,7 @@ pub(crate) fn handle_view_recursive_memory_layout( params: lsp_types::TextDocumentPositionParams, ) -> anyhow::Result> { let _p = tracing::info_span!("handle_view_recursive_memory_layout").entered(); - let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?); + let file_id = try_default!(snap.url_to_file_id(&params.text_document.uri)?); let line_index = snap.file_line_index(file_id)?; let offset = from_proto::offset(&line_index, params.position)?; @@ -2120,12 +2158,12 @@ fn to_command_link(command: lsp_types::Command, tooltip: String) -> lsp_ext::Com fn show_impl_command_link( snap: &GlobalStateSnapshot, - position: &FilePosition, + position: &HirFilePosition, ) -> Option { if snap.config.hover_actions().implementations && snap.config.client_commands().show_reference { if let Some(nav_data) = snap.analysis.goto_implementation(*position).unwrap_or(None) { - let uri = to_proto::url(snap, position.file_id); - let line_index = snap.file_line_index(position.file_id).ok()?; + let uri = to_proto::url_hir(snap, position.file_id); + let line_index = snap.hir_line_index(position.file_id).ok()?; let position = to_proto::position(&line_index, position.offset); let locations: Vec<_> = nav_data .info @@ -2146,18 +2184,18 @@ fn show_impl_command_link( fn show_ref_command_link( snap: &GlobalStateSnapshot, - position: &FilePosition, + position: &HirFilePosition, ) -> Option { if snap.config.hover_actions().references && snap.config.client_commands().show_reference { if let Some(ref_search_res) = snap.analysis.find_all_refs(*position, None).unwrap_or(None) { - let uri = to_proto::url(snap, position.file_id); - let line_index = snap.file_line_index(position.file_id).ok()?; + let uri = to_proto::url_hir(snap, position.file_id); + let line_index = snap.hir_line_index(position.file_id).ok()?; let position = to_proto::position(&line_index, position.offset); let locations: Vec<_> = ref_search_res .into_iter() .flat_map(|res| res.references) .flat_map(|(file_id, ranges)| { - ranges.into_iter().map(move |(range, _)| FileRange { file_id, range }) + ranges.into_iter().map(move |(range, _)| HirFileRange { file_id, range }) }) .unique() .filter_map(|range| to_proto::location(snap, range).ok()) @@ -2183,7 +2221,7 @@ fn runnable_action_links( return None; } - let target_spec = TargetSpec::for_file(snap, runnable.nav.file_id).ok()?; + let target_spec = TargetSpec::for_hir_file(snap, runnable.nav.file_id).ok()?; if should_skip_target(&runnable, target_spec.as_ref()) {
return None; } @@ -2279,7 +2317,7 @@ fn run_rustfmt( text_document: TextDocumentIdentifier, range: Option, ) -> anyhow::Result>> { - let file_id = try_default!(from_proto::file_id(snap, &text_document.uri)?); + let file_id = try_default!(snap.url_to_file_id(&text_document.uri)?); let file = snap.analysis.file_text(file_id)?; // Determine the edition of the crate the file belongs to (if there's multiple, we pick the @@ -2490,7 +2528,7 @@ pub(crate) fn internal_testing_fetch_config( Some(it) => Some( state .analysis - .source_root_id(try_default!(from_proto::file_id(&state, &it.uri)?)) + .source_root_id(try_default!(state.url_to_file_id(&it.uri)?)) .map_err(anyhow::Error::from)?, ), None => None, diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs index 84b7888258f8..958b8756bfbd 100644 --- a/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -10,10 +10,10 @@ //! in release mode in VS Code. There's however "rust-analyzer: Copy Run Command Line" //! which you can use to paste the command in terminal and add `--release` manually. -use hir::ChangeWithProcMacros; +use hir::{ChangeWithProcMacros, EditionedFileId}; use ide::{ AnalysisHost, CallableSnippets, CompletionConfig, CompletionFieldsToResolve, DiagnosticsConfig, - FilePosition, TextSize, + Edition, FilePosition, TextSize, }; use ide_db::{ SnippetCap, @@ -76,7 +76,12 @@ fn integrated_highlighting_benchmark() { { let _it = stdx::timeit("initial"); let analysis = host.analysis(); - analysis.highlight_as_html(file_id, false).unwrap(); + analysis + .highlight_as_html( + EditionedFileId::new(analysis.db(), file_id, Edition::CURRENT).into(), + false, + ) + .unwrap(); } { @@ -97,7 +102,12 @@ fn integrated_highlighting_benchmark() { let _it = stdx::timeit("after change"); let _span = profile::cpu_span(); let analysis = host.analysis(); - analysis.highlight_as_html(file_id, false).unwrap(); + analysis + .highlight_as_html( + EditionedFileId::new(analysis.db(), file_id, Edition::CURRENT).into(), + false, + ) + .unwrap(); } } diff --git a/crates/rust-analyzer/src/line_index.rs b/crates/rust-analyzer/src/line_index.rs index 951762074073..7936b15a34ba 100644 --- a/crates/rust-analyzer/src/line_index.rs +++ b/crates/rust-analyzer/src/line_index.rs @@ -5,8 +5,11 @@ //! This module does line ending conversion and detection (so that we can //! convert back to `\r\n` on the way out). 
+use ide::{TextRange, TextSize}; use ide_db::line_index::WideEncoding; +use itertools::Itertools; use memchr::memmem; +use syntax_bridge::prettify_macro_expansion::PrettifyWsKind; use triomphe::Arc; #[derive(Clone, Copy)] @@ -19,6 +22,107 @@ pub(crate) struct LineIndex { pub(crate) index: Arc, pub(crate) endings: LineEndings, pub(crate) encoding: PositionEncoding, + pub(crate) transform: PositionTransform, +} + +impl LineIndex { + pub(crate) fn line_col(&self, mut offset: TextSize) -> ide::LineCol { + if !self.transform.insertions.is_empty() { + offset += TextSize::new( + self.transform + .insertions + .iter() + .copied() + .take_while(|&(off, _)| off < offset) + .map(|(_, len)| ws_kind_width(len)) + .sum::(), + ); + } + self.index.line_col(offset) + } + + pub(crate) fn offset(&self, line_col: ide::LineCol) -> Option { + let mut offset = self.index.offset(line_col)?; + if !self.transform.insertions.is_empty() { + let mut iter = self.transform.insertions.iter(); + let overall_sub = TextSize::new(if line_col.line == 0 { + 0 + } else { + // collect all ws insertions until the line `line` starts + // we need to offset our range by this value + let mut nl_seen = 0; + iter.peeking_take_while(|&&(_p, ws)| { + let m = nl_seen != line_col.line; + if ws == PrettifyWsKind::Newline { + nl_seen += 1; + } + m + }) + .copied() + .map(|(_, len)| ws_kind_width(len)) + .sum::() + }); + offset -= overall_sub; + + for (pos, ws) in iter.copied().take_while(|&(_, ws)| ws != PrettifyWsKind::Newline) { + if offset < pos { + break; + } + offset -= TextSize::new(ws_kind_width(ws)); + } + } + Some(offset) + } + + #[allow(dead_code)] + pub(crate) fn line(&self, line: u32) -> Option { + let mut range = self.index.line(line)?; + if !self.transform.insertions.is_empty() { + let mut iter = self.transform.insertions.iter(); + let overall_sub = TextSize::new(if line == 0 { + 0 + } else { + // collect all ws insertions until the line `line` starts + // we need to offset our range by this value + let mut nl_seen = 0; + iter.peeking_take_while(|&&(_p, ws)| { + let m = nl_seen != line; + if ws == PrettifyWsKind::Newline { + nl_seen += 1; + } + m + }) + .copied() + .map(|(_, len)| ws_kind_width(len)) + .sum::() + }); + + // collect all ws insertions within the line `line` + // we need to deduct this from range end by this value + let end_sub = TextSize::new( + iter.copied() + .take_while(|&(_, ws)| ws != PrettifyWsKind::Newline) + .map(|(_, len)| ws_kind_width(len)) + .sum::(), + ); + range = + TextRange::new(range.start() - overall_sub, range.end() - overall_sub - end_sub); + } + Some(range) + } +} + +#[derive(Default)] +pub(crate) struct PositionTransform { + pub insertions: Vec<(TextSize, PrettifyWsKind)>, +} + +fn ws_kind_width(ws: PrettifyWsKind) -> u32 { + match ws { + PrettifyWsKind::Space => 1, + PrettifyWsKind::Indent(indent) => 4 * (indent as u32), + PrettifyWsKind::Newline => 1, + } } #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] diff --git a/crates/rust-analyzer/src/lsp/capabilities.rs b/crates/rust-analyzer/src/lsp/capabilities.rs index f94e7486ff8f..ff6ddfd68ce1 100644 --- a/crates/rust-analyzer/src/lsp/capabilities.rs +++ b/crates/rust-analyzer/src/lsp/capabilities.rs @@ -416,6 +416,10 @@ impl ClientCapabilities { == Some(true) } + pub fn virtual_macro_files(&self) -> bool { + self.experimental_bool("virtualMacroFiles") + } + pub fn code_action_group(&self) -> bool { self.experimental_bool("codeActionGroup") } diff --git a/crates/rust-analyzer/src/lsp/ext.rs 
b/crates/rust-analyzer/src/lsp/ext.rs index b132323bec5b..62e49056ea48 100644 --- a/crates/rust-analyzer/src/lsp/ext.rs +++ b/crates/rust-analyzer/src/lsp/ext.rs @@ -18,6 +18,14 @@ use paths::Utf8PathBuf; use rustc_hash::FxHashMap; use serde::{Deserialize, Serialize}; +pub enum MacroFileContent {} + +impl Request for MacroFileContent { + type Params = lsp_types::TextDocumentIdentifier; + type Result = String; + const METHOD: &'static str = "rust-analyzer/macroFileContent"; +} + pub enum InternalTestingFetchConfig {} #[derive(Deserialize, Serialize, Debug)] diff --git a/crates/rust-analyzer/src/lsp/from_proto.rs b/crates/rust-analyzer/src/lsp/from_proto.rs index 02757616d4ff..b67c7eb9691b 100644 --- a/crates/rust-analyzer/src/lsp/from_proto.rs +++ b/crates/rust-analyzer/src/lsp/from_proto.rs @@ -1,7 +1,8 @@ //! Conversion lsp_types types to rust-analyzer specific ones. use anyhow::format_err; +use hir::{HirFilePosition, HirFileRange, MacroCallId}; use ide::{Annotation, AnnotationKind, AssistKind, LineCol}; -use ide_db::{FileId, FilePosition, FileRange, line_index::WideLineCol}; +use ide_db::{FilePosition, FileRange, base_db::salsa, line_index::WideLineCol}; use paths::Utf8PathBuf; use syntax::{TextRange, TextSize}; use vfs::AbsPathBuf; @@ -12,13 +13,34 @@ use crate::{ lsp_ext, try_default, }; -pub(crate) fn abs_path(url: &lsp_types::Url) -> anyhow::Result { - let path = url.to_file_path().map_err(|()| anyhow::format_err!("url is not a file"))?; - Ok(AbsPathBuf::try_from(Utf8PathBuf::from_path_buf(path).unwrap()).unwrap()) +#[derive(Clone)] +pub(crate) enum VfsOrMacroPath { + Vfs(vfs::VfsPath), + Macro(MacroCallId), +} + +impl VfsOrMacroPath { + pub(crate) fn into_vfs(self) -> Option { + if let Self::Vfs(v) = self { Some(v) } else { None } + } } -pub(crate) fn vfs_path(url: &lsp_types::Url) -> anyhow::Result { - abs_path(url).map(vfs::VfsPath::from) +pub(crate) fn url_to_vfs_path(url: &lsp_types::Url) -> anyhow::Result { + if url.scheme() == "rust-macro-file" { + // rust-macro-file:/id.macro-file.rs + let macro_call = url + .path() + .strip_suffix(".macro-file.rs") + .and_then(|it| it.parse::().ok()) + .ok_or_else(|| format_err!("Invalid `rust-macro-file` url: {url:?}"))?; + + return Ok(VfsOrMacroPath::Macro(unsafe { + salsa::plumbing::FromId::from_id(salsa::Id::from_bits(macro_call)) + })); + } + let path = url.to_file_path().map_err(|()| anyhow::format_err!("url is not a file"))?; + let path = AbsPathBuf::try_from(Utf8PathBuf::from_path_buf(path).unwrap()).unwrap(); + Ok(VfsOrMacroPath::Vfs(vfs::VfsPath::from(path))) } pub(crate) fn offset( @@ -35,18 +57,9 @@ pub(crate) fn offset( .ok_or_else(|| format_err!("Invalid wide col offset"))? } }; - let line_range = line_index.index.line(line_col.line).ok_or_else(|| { + line_index.offset(line_col).ok_or_else(|| { format_err!("Invalid offset {line_col:?} (line index length: {:?})", line_index.index.len()) - })?; - let col = TextSize::from(line_col.col); - let clamped_len = col.min(line_range.len()); - if clamped_len < col { - tracing::error!( - "Position {line_col:?} column exceeds line length {}, clamping it", - u32::from(line_range.len()), - ); - } - Ok(line_range.start() + clamped_len) + }) } pub(crate) fn text_range( @@ -61,25 +74,28 @@ pub(crate) fn text_range( } } -/// Returns `None` if the file was excluded. -pub(crate) fn file_id( - snap: &GlobalStateSnapshot, - url: &lsp_types::Url, -) -> anyhow::Result> { - snap.url_to_file_id(url) -} - /// Returns `None` if the file was excluded. 
pub(crate) fn file_position( snap: &GlobalStateSnapshot, tdpp: lsp_types::TextDocumentPositionParams, ) -> anyhow::Result> { - let file_id = try_default!(file_id(snap, &tdpp.text_document.uri)?); + let file_id = try_default!(snap.url_to_file_id(&tdpp.text_document.uri)?); let line_index = snap.file_line_index(file_id)?; let offset = offset(&line_index, tdpp.position)?; Ok(Some(FilePosition { file_id, offset })) } +/// Returns `None` if the file was excluded. +pub(crate) fn hir_file_position( + snap: &GlobalStateSnapshot, + tdpp: lsp_types::TextDocumentPositionParams, +) -> anyhow::Result> { + let file_id = try_default!(snap.url_to_hir_file_id(&tdpp.text_document.uri)?); + let line_index = snap.hir_line_index(file_id)?; + let offset = offset(&line_index, tdpp.position)?; + Ok(Some(HirFilePosition { file_id, offset })) +} + /// Returns `None` if the file was excluded. pub(crate) fn file_range( snap: &GlobalStateSnapshot, @@ -90,12 +106,23 @@ } /// Returns `None` if the file was excluded. +pub(crate) fn hir_file_range( + snap: &GlobalStateSnapshot, + text_document_identifier: &lsp_types::TextDocumentIdentifier, + range: lsp_types::Range, +) -> anyhow::Result> { + let file_id = try_default!(snap.url_to_hir_file_id(&text_document_identifier.uri)?); + let line_index = snap.hir_line_index(file_id)?; + let range = text_range(&line_index, range)?; + Ok(Some(HirFileRange { file_id, range })) +} + pub(crate) fn file_range_uri( snap: &GlobalStateSnapshot, document: &lsp_types::Url, range: lsp_types::Range, ) -> anyhow::Result> { - let file_id = try_default!(file_id(snap, document)?); + let file_id = try_default!(snap.url_to_file_id(document)?); let line_index = snap.file_line_index(file_id)?; let range = text_range(&line_index, range)?; Ok(Some(FileRange { file_id, range })) @@ -128,9 +155,9 @@ pub(crate) fn annotation( { return Ok(None); } - let pos @ FilePosition { file_id, .. } = - try_default!(file_position(snap, params.text_document_position_params)?); - let line_index = snap.file_line_index(file_id)?; + let pos @ HirFilePosition { file_id, .. } = + try_default!(hir_file_position(snap, params.text_document_position_params)?); + let line_index = snap.hir_line_index(file_id)?; Ok(Annotation { range: text_range(&line_index, range)?, @@ -141,8 +168,9 @@ if snap.url_file_version(&params.text_document.uri) != Some(data.version) { return Ok(None); } - let pos @ FilePosition { file_id, .. } = try_default!(file_position(snap, params)?); - let line_index = snap.file_line_index(file_id)?; + let pos @ HirFilePosition { file_id, ..
} = + try_default!(hir_file_position(snap, params)?); + let line_index = snap.hir_line_index(file_id)?; Ok(Annotation { range: text_range(&line_index, range)?, diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs index 8a848fb848cc..dc7202ecf33a 100644 --- a/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/crates/rust-analyzer/src/lsp/to_proto.rs @@ -7,19 +7,22 @@ use std::{ }; use base64::{Engine, prelude::BASE64_STANDARD}; +use hir::{HirFileId, HirFileRange}; use ide::{ Annotation, AnnotationKind, Assist, AssistKind, Cancellable, CompletionFieldsToResolve, - CompletionItem, CompletionItemKind, CompletionRelevance, Documentation, FileId, FileRange, - FileSystemEdit, Fold, FoldKind, Highlight, HlMod, HlOperator, HlPunct, HlRange, HlTag, Indel, - InlayFieldsToResolve, InlayHint, InlayHintLabel, InlayHintLabelPart, InlayKind, LazyProperty, - Markup, NavigationTarget, ReferenceCategory, RenameError, Runnable, Severity, SignatureHelp, + CompletionItem, CompletionItemKind, CompletionRelevance, Documentation, FileId, FileSystemEdit, + Fold, FoldKind, Highlight, HirNavigationTarget, HlMod, HlOperator, HlPunct, HlRange, HlTag, + Indel, InlayFieldsToResolve, InlayHint, InlayHintLabel, InlayHintLabelPart, InlayKind, + LazyProperty, Markup, ReferenceCategory, RenameError, Runnable, Severity, SignatureHelp, SnippetEdit, SourceChange, StructureNodeKind, SymbolKind, TextEdit, TextRange, TextSize, UpdateTest, }; -use ide_db::{FxHasher, assists, rust_doc::format_docs, source_change::ChangeAnnotationId}; +use ide_db::{ + FxHasher, assists, base_db::SourceDatabase, rust_doc::format_docs, + source_change::ChangeAnnotationId, +}; use itertools::Itertools; use paths::{Utf8Component, Utf8Prefix}; -use semver::VersionReq; use serde_json::to_value; use vfs::AbsPath; @@ -38,7 +41,7 @@ use crate::{ }; pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position { - let line_col = line_index.index.line_col(offset); + let line_col = line_index.line_col(offset); match line_index.encoding { PositionEncoding::Utf8 => lsp_types::Position::new(line_col.line, line_col.col), PositionEncoding::Wide(enc) => { @@ -553,7 +556,7 @@ pub(crate) fn inlay_hint( snap: &GlobalStateSnapshot, fields_to_resolve: &InlayFieldsToResolve, line_index: &LineIndex, - file_id: FileId, + file_id: HirFileId, mut inlay_hint: InlayHint, ) -> Cancellable { let hint_needs_resolve = |hint: &InlayHint| -> Option { @@ -584,10 +587,7 @@ pub(crate) fn inlay_hint( LazyProperty::Computed(it) => Some(it), LazyProperty::Lazy => { something_to_resolve |= - snap.config.visual_studio_code_version().is_none_or(|version| { - VersionReq::parse(">=1.86.0").unwrap().matches(version) - }) && resolve_range_and_hash.is_some() - && fields_to_resolve.resolve_text_edits; + resolve_range_and_hash.is_some() && fields_to_resolve.resolve_text_edits; None } }) @@ -600,12 +600,12 @@ pub(crate) fn inlay_hint( inlay_hint.label, )?; - let data = match resolve_range_and_hash { - Some((resolve_range, hash)) if something_to_resolve => Some( + let data = match (resolve_range_and_hash, file_id.file_id()) { + (Some((resolve_range, hash)), Some(file_id)) if something_to_resolve => Some( to_value(lsp_ext::InlayHintResolveData { - file_id: file_id.index(), + file_id: file_id.file_id(snap.analysis.db()).index(), hash: hash.to_string(), - version: snap.file_version(file_id), + version: snap.file_version(snap.analysis.editioned_file_id_to_vfs(file_id)), resolve_range: range(line_index, resolve_range), }) .unwrap(), @@ -718,7 
+718,6 @@ fn inlay_hint_label( static TOKEN_RESULT_COUNTER: AtomicU32 = AtomicU32::new(1); pub(crate) fn semantic_tokens( - text: &str, line_index: &LineIndex, highlights: Vec, semantics_tokens_augments_syntax_tokens: bool, @@ -764,11 +763,7 @@ pub(crate) fn semantic_tokens( let token_index = semantic_tokens::type_index(ty); let modifier_bitset = mods.0; - for mut text_range in line_index.index.lines(highlight_range.range) { - if text[text_range].ends_with('\n') { - text_range = - TextRange::new(text_range.start(), text_range.end() - TextSize::of('\n')); - } + for text_range in line_index.index.lines(highlight_range.range) { let range = range(line_index, text_range); builder.push(range, token_index, modifier_bitset); } @@ -956,6 +951,10 @@ pub(crate) fn url(snap: &GlobalStateSnapshot, file_id: FileId) -> lsp_types::Url snap.file_id_to_url(file_id) } +pub(crate) fn url_hir(snap: &GlobalStateSnapshot, file_id: HirFileId) -> lsp_types::Url { + snap.hir_file_id_to_url(file_id) +} + /// Returns a `Url` object from a given path, will lowercase drive letters if present. /// This will only happen when processing windows paths. /// @@ -999,10 +998,10 @@ pub(crate) fn optional_versioned_text_document_identifier( pub(crate) fn location( snap: &GlobalStateSnapshot, - frange: FileRange, + frange: HirFileRange, ) -> Cancellable { - let url = url(snap, frange.file_id); - let line_index = snap.file_line_index(frange.file_id)?; + let url = url_hir(snap, frange.file_id); + let line_index = snap.hir_line_index(frange.file_id)?; let range = range(&line_index, frange.range); let loc = lsp_types::Location::new(url, range); Ok(loc) @@ -1011,10 +1010,10 @@ pub(crate) fn location( /// Prefer using `location_link`, if the client has the cap. pub(crate) fn location_from_nav( snap: &GlobalStateSnapshot, - nav: NavigationTarget, + nav: HirNavigationTarget, ) -> Cancellable { - let url = url(snap, nav.file_id); - let line_index = snap.file_line_index(nav.file_id)?; + let url = url_hir(snap, nav.file_id); + let line_index = snap.hir_line_index(nav.file_id)?; let range = range(&line_index, nav.focus_or_full_range()); let loc = lsp_types::Location::new(url, range); Ok(loc) @@ -1022,12 +1021,12 @@ pub(crate) fn location_from_nav( pub(crate) fn location_link( snap: &GlobalStateSnapshot, - src: Option, - target: NavigationTarget, + src: Option, + target: HirNavigationTarget, ) -> Cancellable { let origin_selection_range = match src { Some(src) => { - let line_index = snap.file_line_index(src.file_id)?; + let line_index = snap.hir_line_index(src.file_id)?; let range = range(&line_index, src.range); Some(range) } @@ -1045,11 +1044,11 @@ pub(crate) fn location_link( fn location_info( snap: &GlobalStateSnapshot, - target: NavigationTarget, + target: HirNavigationTarget, ) -> Cancellable<(lsp_types::Url, lsp_types::Range, lsp_types::Range)> { - let line_index = snap.file_line_index(target.file_id)?; + let line_index = snap.hir_line_index(target.file_id)?; - let target_uri = url(snap, target.file_id); + let target_uri = url_hir(snap, target.file_id); let target_range = range(&line_index, target.full_range); let target_selection_range = target.focus_range.map(|it| range(&line_index, it)).unwrap_or(target_range); @@ -1058,8 +1057,8 @@ fn location_info( pub(crate) fn goto_definition_response( snap: &GlobalStateSnapshot, - src: Option, - targets: Vec, + src: Option, + targets: Vec, ) -> Cancellable { if snap.config.location_link() { let links = targets @@ -1071,7 +1070,7 @@ pub(crate) fn goto_definition_response( } else { let 
locations = targets .into_iter() - .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }) + .map(|nav| HirFileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }) .unique() .map(|range| location(snap, range)) .collect::>>()?; @@ -1460,7 +1459,7 @@ impl From pub(crate) fn call_hierarchy_item( snap: &GlobalStateSnapshot, - target: NavigationTarget, + target: HirNavigationTarget, ) -> Cancellable { let name = target.name.to_string(); let detail = target.description.clone(); @@ -1538,9 +1537,7 @@ pub(crate) fn runnable( snap: &GlobalStateSnapshot, runnable: Runnable, ) -> Cancellable> { - let target_spec = TargetSpec::for_file(snap, runnable.nav.file_id)?; - let source_root = snap.analysis.source_root_id(runnable.nav.file_id).ok(); - let config = snap.config.runnables(source_root); + let target_spec = TargetSpec::for_hir_file(snap, runnable.nav.file_id)?; match target_spec { Some(TargetSpec::Cargo(spec)) => { @@ -1561,7 +1558,18 @@ pub(crate) fn runnable( }; let label = runnable.label(Some(&target)); + let source_root = snap.analysis.with_db(|db| { + db.file_source_root( + runnable + .nav + .file_id + .original_file_respecting_includes(db) + .file_id(snap.analysis.db()), + ) + .source_root_id(snap.analysis.db()) + })?; let location = location_link(snap, None, runnable.nav)?; + let config = snap.config.runnables(Some(source_root)); Ok(Some(lsp_ext::Runnable { label, @@ -1604,13 +1612,26 @@ pub(crate) fn runnable( } } None => { - let Some(path) = snap.file_id_to_file_path(runnable.nav.file_id).parent() else { + let Some(path) = runnable.nav.file_id.file_id().and_then(|file_id| { + snap.file_id_to_file_path(snap.analysis.editioned_file_id_to_vfs(file_id)).parent() + }) else { return Ok(None); }; let (cargo_args, executable_args) = CargoTargetSpec::runnable_args(snap, None, &runnable.kind, &runnable.cfg); let label = runnable.label(None); + let source_root = snap.analysis.with_db(|db| { + db.file_source_root( + runnable + .nav + .file_id + .original_file_respecting_includes(db) + .file_id(snap.analysis.db()), + ) + .source_root_id(snap.analysis.db()) + })?; + let config = snap.config.runnables(Some(source_root)); let location = location_link(snap, None, runnable.nav)?; Ok(Some(lsp_ext::Runnable { @@ -1638,7 +1659,7 @@ pub(crate) fn code_lens( let client_commands_config = snap.config.client_commands(); match annotation.kind { AnnotationKind::Runnable(run) => { - let line_index = snap.file_line_index(run.nav.file_id)?; + let line_index = snap.hir_line_index(run.nav.file_id)?; let annotation_range = range(&line_index, annotation.range); let update_test = run.update_test; @@ -1704,9 +1725,9 @@ pub(crate) fn code_lens( if !client_commands_config.show_reference { return Ok(()); } - let line_index = snap.file_line_index(pos.file_id)?; + let line_index = snap.hir_line_index(pos.file_id)?; let annotation_range = range(&line_index, annotation.range); - let url = url(snap, pos.file_id); + let url = url_hir(snap, pos.file_id); let pos = position(&line_index, pos.offset); let id = lsp_types::TextDocumentIdentifier { uri: url.clone() }; @@ -1725,7 +1746,7 @@ pub(crate) fn code_lens( .filter_map(|target| { location( snap, - FileRange { file_id: target.file_id, range: target.full_range }, + HirFileRange { file_id: target.file_id, range: target.full_range }, ) .ok() }) @@ -1758,9 +1779,9 @@ pub(crate) fn code_lens( if !client_commands_config.show_reference { return Ok(()); } - let line_index = snap.file_line_index(pos.file_id)?; + let line_index = 
snap.hir_line_index(pos.file_id)?; let annotation_range = range(&line_index, annotation.range); - let url = url(snap, pos.file_id); + let url = url_hir(snap, pos.file_id); let pos = position(&line_index, pos.offset); let id = lsp_types::TextDocumentIdentifier { uri: url.clone() }; @@ -1832,7 +1853,8 @@ pub(crate) fn test_item( } pub(crate) mod command { - use ide::{FileRange, NavigationTarget}; + use hir::HirFileRange; + use ide::HirNavigationTarget; use serde_json::to_value; use crate::{ @@ -1889,13 +1911,13 @@ pub(crate) mod command { pub(crate) fn goto_location( snap: &GlobalStateSnapshot, - nav: &NavigationTarget, + nav: &HirNavigationTarget, ) -> Option { let value = if snap.config.location_link() { let link = location_link(snap, None, nav.clone()).ok()?; to_value(link).ok()? } else { - let range = FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }; + let range = HirFileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }; let location = location(snap, range).ok()?; to_value(location).ok()? }; @@ -1997,13 +2019,14 @@ fn main() { }"#; let (analysis, file_id) = Analysis::from_single_file(text.to_owned()); - let folds = analysis.folding_ranges(file_id).unwrap(); + let folds = analysis.folding_ranges(file_id.into()).unwrap(); assert_eq!(folds.len(), 4); let line_index = LineIndex { index: Arc::new(ide::LineIndex::new(text)), endings: LineEndings::Unix, encoding: PositionEncoding::Utf8, + transform: Default::default(), }; let converted: Vec = folds.into_iter().map(|it| folding_range(text, &line_index, true, it)).collect(); @@ -2034,7 +2057,10 @@ fn bar(_: usize) {} let (offset, text) = extract_offset(text); let (analysis, file_id) = Analysis::from_single_file(text); let help = signature_help( - analysis.signature_help(FilePosition { file_id, offset }).unwrap().unwrap(), + analysis + .signature_help(FilePosition { file_id: file_id.file_id(analysis.db()), offset }) + .unwrap() + .unwrap(), CallInfoConfig { params_only: false, docs: true }, false, ); @@ -2069,6 +2095,7 @@ fn bar(_: usize) {} index: Arc::new(ide::LineIndex::new(&source)), endings, encoding: PositionEncoding::Utf8, + transform: Default::default(), }; let res = merge_text_and_snippet_edits(&line_index, edit, snippets, true); diff --git a/crates/rust-analyzer/src/lsp/utils.rs b/crates/rust-analyzer/src/lsp/utils.rs index 5bea7084fdb5..886905bd51a1 100644 --- a/crates/rust-analyzer/src/lsp/utils.rs +++ b/crates/rust-analyzer/src/lsp/utils.rs @@ -190,6 +190,7 @@ pub(crate) fn apply_document_changes( // We don't care about line endings here. 
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index 0c0438c4b8ff..7e86d1e47749 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -30,7 +30,8 @@ use crate::{
         request::empty_diagnostic_report,
     },
     lsp::{
-        from_proto, to_proto,
+        from_proto::{self, VfsOrMacroPath},
+        to_proto,
         utils::{Progress, notification_is},
     },
     lsp_ext,
@@ -491,9 +492,11 @@ impl GlobalState {
         if let Some(diagnostic_changes) = self.diagnostics.take_changes() {
             for file_id in diagnostic_changes {
                 let uri = file_id_to_url(&self.vfs.read().0, file_id);
-                let version = from_proto::vfs_path(&uri)
-                    .ok()
-                    .and_then(|path| self.mem_docs.get(&path).map(|it| it.version));
+                let version = from_proto::url_to_vfs_path(&uri).ok().and_then(|path| match path {
+                    VfsOrMacroPath::Vfs(path) => self.mem_docs.get(&path).map(|it| it.version),
+                    // FIXME MACRO DIAGNOSTICS SHOULD WORK!
+                    VfsOrMacroPath::Macro(..) => None,
+                });
                 let diagnostics =
                     self.diagnostics.diagnostics_for(file_id).cloned().collect::>();
 
@@ -904,17 +907,20 @@ impl GlobalState {
         self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, move |sender| {
             let _p = tracing::info_span!("GlobalState::check_if_indexed").entered();
             tracing::debug!(?uri, "handling uri");
-            let Some(id) = from_proto::file_id(&snap, &uri).expect("unable to get FileId")
-            else {
+            let Some(id) = snap.url_to_file_id(&uri).expect("unable to get FileId") else {
                 return;
             };
             if let Ok(crates) = &snap.analysis.crates_for(id) {
                 if crates.is_empty() {
                     if snap.config.discover_workspace_config().is_some() {
-                        let path =
-                            from_proto::abs_path(&uri).expect("Unable to get AbsPath");
-                        let arg = DiscoverProjectParam::Path(path);
-                        sender.send(Task::DiscoverLinkedProjects(arg)).unwrap();
+                        if let VfsOrMacroPath::Vfs(path) = from_proto::url_to_vfs_path(&uri)
+                            .expect("Unable to get AbsPath")
+                        {
+                            if let Some(path) = path.into_abs_path() {
+                                let arg = DiscoverProjectParam::Path(path);
+                                sender.send(Task::DiscoverLinkedProjects(arg)).unwrap();
+                            }
+                        }
                     }
                 } else {
                     tracing::debug!(?uri, "is indexed");
@@ -1195,6 +1201,7 @@ impl GlobalState {
             .on::(handlers::handle_open_docs)
             .on::(handlers::handle_open_cargo_toml)
             .on::(handlers::handle_move_item)
+            .on::(handlers::macro_file_content)
             // .on::(handlers::internal_testing_fetch_config)
             .finish();
 
diff --git a/crates/rust-analyzer/src/target_spec.rs b/crates/rust-analyzer/src/target_spec.rs
index 7132e09146eb..d9a03db15b34 100644
--- a/crates/rust-analyzer/src/target_spec.rs
+++ b/crates/rust-analyzer/src/target_spec.rs
@@ -3,7 +3,8 @@
 use std::mem;
 
 use cfg::{CfgAtom, CfgExpr};
-use hir::sym;
+use hir::db::ExpandDatabase;
+use hir::{HirFileId, sym};
 use ide::{Cancellable, Crate, FileId, RunnableKind, TestId};
 use project_model::project_json::Runnable;
 use project_model::{CargoFeatures, ManifestPath, TargetKind};
@@ -35,6 +36,28 @@ impl TargetSpec {
         Ok(global_state_snapshot.target_spec_for_crate(crate_id))
     }
 
+    pub(crate) fn for_hir_file(
+        global_state_snapshot: &GlobalStateSnapshot,
+        file_id: HirFileId,
+    ) -> Cancellable> {
+        let crate_id = match file_id {
+            HirFileId::FileId(editioned_file_id) => {
+                match &*global_state_snapshot
+                    .analysis
+                    .crates_for(editioned_file_id.file_id(global_state_snapshot.analysis.db()))?
+                {
+                    &[crate_id, ..] => crate_id,
+                    _ => return Ok(None),
+                }
+            }
+            HirFileId::MacroFile(macro_file_id) => global_state_snapshot
+                .analysis
+                .with_db(|db| db.lookup_intern_macro_call(macro_file_id).krate)?,
+        };
+
+        Ok(global_state_snapshot.target_spec_for_crate(crate_id))
+    }
+
     pub(crate) fn target_kind(&self) -> TargetKind {
         match self {
             TargetSpec::Cargo(cargo) => cargo.target_kind,
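The new `TargetSpec::for_hir_file` above picks the owning crate differently depending on whether the file is a real file (first crate from the crate graph) or a macro expansion (the crate of the originating macro call). The following is not part of the diff; it is a self-contained toy model of that branch, with every name invented rather than taken from the real API.

```rust
use std::collections::HashMap;

// Invented stand-ins for HirFileId, the crate graph, and the macro-call table.
enum ToyFileId {
    Real(u32),
    MacroExpansion(u32), // id of the macro call that produced it
}

struct ToyDb {
    /// crates that contain a given real file (a file can belong to several crates)
    crates_for_file: HashMap<u32, Vec<&'static str>>,
    /// the crate in which each macro call was written
    macro_call_crate: HashMap<u32, &'static str>,
}

/// Mirror of the `for_hir_file` branch: real files go through the crate graph,
/// macro expansions use the crate of their originating macro call.
fn owning_crate(db: &ToyDb, file: ToyFileId) -> Option<&'static str> {
    match file {
        ToyFileId::Real(id) => db.crates_for_file.get(&id)?.first().copied(),
        ToyFileId::MacroExpansion(call) => db.macro_call_crate.get(&call).copied(),
    }
}

fn main() {
    let db = ToyDb {
        crates_for_file: HashMap::from([(1, vec!["foo"])]),
        macro_call_crate: HashMap::from([(9, "bar")]),
    };
    assert_eq!(owning_crate(&db, ToyFileId::Real(1)), Some("foo"));
    assert_eq!(owning_crate(&db, ToyFileId::MacroExpansion(9)), Some("bar"));
}
```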
diff --git a/crates/stdx/src/lib.rs b/crates/stdx/src/lib.rs
index 978c50d807bc..9c878d5a3d8f 100644
--- a/crates/stdx/src/lib.rs
+++ b/crates/stdx/src/lib.rs
@@ -76,6 +76,17 @@ impl TupleExt for (T, U, V) {
     }
 }
 
+impl<'a, T, U> TupleExt for &'a (T, U) {
+    type Head = &'a T;
+    type Tail = &'a U;
+    fn head(self) -> Self::Head {
+        &self.0
+    }
+    fn tail(self) -> Self::Tail {
+        &self.1
+    }
+}
+
 pub fn to_lower_snake_case(s: &str) -> String {
     to_snake_case(s, char::to_lowercase)
 }
diff --git a/crates/syntax/src/ted.rs b/crates/syntax/src/ted.rs
index 6fcbdd006c24..0684fb287be0 100644
--- a/crates/syntax/src/ted.rs
+++ b/crates/syntax/src/ted.rs
@@ -75,6 +75,7 @@ impl Position {
         };
         Position { repr }
     }
+
     pub fn offset(&self) -> TextSize {
         match &self.repr {
             PositionRepr::FirstChild(node) => node.text_range().start(),
diff --git a/docs/book/src/contributing/lsp-extensions.md b/docs/book/src/contributing/lsp-extensions.md
index 1ada1cb24c2c..53e60a4da59c 100644
--- a/docs/book/src/contributing/lsp-extensions.md
+++ b/docs/book/src/contributing/lsp-extensions.md
@@ -1,5 +1,5 @@