diff --git a/tools/lsp/common/rename_component.rs b/tools/lsp/common/rename_component.rs index f2110295999..114c8bb09e6 100644 --- a/tools/lsp/common/rename_component.rs +++ b/tools/lsp/common/rename_component.rs @@ -1,59 +1,66 @@ // Copyright © SixtyFPS GmbH // SPDX-License-Identifier: GPL-3.0-only OR LicenseRef-Slint-Royalty-free-2.0 OR LicenseRef-Slint-Software-3.0 -use std::path::Path; +use std::path::{Path, PathBuf}; +use std::rc::Rc; use crate::{common, util}; use i_slint_compiler::diagnostics::Spanned; -use i_slint_compiler::parser::{ - syntax_nodes, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextSize, -}; +use i_slint_compiler::parser::{syntax_nodes, SyntaxKind, SyntaxNode, SyntaxToken, TextRange}; use lsp_types::Url; use smol_str::SmolStr; #[cfg(target_arch = "wasm32")] use crate::wasm_prelude::*; -fn main_identifier(input: &SyntaxNode) -> Option { +pub fn main_identifier(input: &SyntaxNode) -> Option { input.child_token(SyntaxKind::Identifier) } -fn is_symbol_name_exported(document_node: &syntax_nodes::Document, type_name: &SmolStr) -> bool { +fn is_symbol_name_exported( + document_cache: &common::DocumentCache, + document_node: &syntax_nodes::Document, + query: &DeclarationNodeQuery, +) -> bool { for export in document_node.ExportsList() { for specifier in export.ExportSpecifier() { - let export_name = specifier - .ExportName() - .as_ref() - .and_then(|sn| i_slint_compiler::parser::identifier_text(sn)); - let export_id = - i_slint_compiler::parser::identifier_text(&specifier.ExportIdentifier()); - if export_name.as_ref() == Some(type_name) - || (export_name.is_none() && export_id.as_ref() == Some(type_name)) - { - return true; + if let Some(export_name) = specifier.ExportName() { + if i_slint_compiler::parser::identifier_text(&export_name).as_ref() + == Some(&query.name) + && query.is_same_symbol(document_cache, main_identifier(&export_name).unwrap()) + { + return true; + } + } else { + let export_id = specifier.ExportIdentifier(); + if i_slint_compiler::parser::identifier_text(&export_id).as_ref() + == Some(&query.name) + && query.is_same_symbol(document_cache, main_identifier(&export_id).unwrap()) + { + return true; + } } } if let Some(component) = export.Component() { - if i_slint_compiler::parser::identifier_text(&component.DeclaredIdentifier()) - .unwrap_or_default() - == *type_name + let identifier = component.DeclaredIdentifier(); + if i_slint_compiler::parser::identifier_text(&identifier).as_ref() == Some(&query.name) + && query.is_same_symbol(document_cache, main_identifier(&identifier).unwrap()) { return true; } } for structs in export.StructDeclaration() { - if i_slint_compiler::parser::identifier_text(&structs.DeclaredIdentifier()) - .unwrap_or_default() - == *type_name + let identifier = structs.DeclaredIdentifier(); + if i_slint_compiler::parser::identifier_text(&identifier).as_ref() == Some(&query.name) + && query.is_same_symbol(document_cache, main_identifier(&identifier).unwrap()) { return true; } } for enums in export.EnumDeclaration() { - if i_slint_compiler::parser::identifier_text(&enums.DeclaredIdentifier()) - .unwrap_or_default() - == *type_name + if i_slint_compiler::parser::identifier_text(&enums.DeclaredIdentifier()).as_ref() + == Some(&query.name) { return true; } @@ -63,87 +70,11 @@ fn is_symbol_name_exported(document_node: &syntax_nodes::Document, type_name: &S false } -fn replace_in_all_elements( - document_cache: &common::DocumentCache, - element: &syntax_nodes::Element, - action: &mut dyn FnMut(&syntax_nodes::Element, &mut Vec), - edits: &mut 
Vec, -) { - // HACK: We inject an ignored component into the live preview. Do not - // Generate changes for that -- it does not really exist. - // - // The proper fix for both is to enhance the slint interpreter to accept - // the previewed component via API, so that the entire _SLINT_LivePreview - // hack becomes unnecessary. - if common::is_element_node_ignored(element) { - return; - } - - action(element, edits); - - for c in element.children() { - match c.kind() { - SyntaxKind::SubElement => { - let e: syntax_nodes::SubElement = c.into(); - replace_in_all_elements(document_cache, &e.Element(), action, edits); - } - SyntaxKind::RepeatedElement => { - let e: syntax_nodes::RepeatedElement = c.into(); - replace_in_all_elements(document_cache, &e.SubElement().Element(), action, edits); - } - SyntaxKind::ConditionalElement => { - let e: syntax_nodes::ConditionalElement = c.into(); - replace_in_all_elements(document_cache, &e.SubElement().Element(), action, edits); - } - _ => { /* do nothing */ } - } - } -} - -fn replace_element_types( - document_cache: &common::DocumentCache, - element: &syntax_nodes::Element, - old_type: &SmolStr, - new_type: &str, - edits: &mut Vec, -) { - replace_in_all_elements( - document_cache, - element, - &mut |element, edits| { - if let Some(name) = element.QualifiedName().and_then(|qn| main_identifier(&qn)) { - if i_slint_compiler::parser::normalize_identifier(name.text()) == *old_type { - edits.push( - common::SingleTextEdit::from_path( - document_cache, - element.source_file.path(), - lsp_types::TextEdit { - range: util::token_to_lsp_range(&name), - new_text: new_type.to_string(), - }, - ) - .expect("URL conversion can not fail here"), - ) - } - } - }, - edits, - ) -} - fn fix_imports( document_cache: &common::DocumentCache, + query: &DeclarationNodeQuery, exporter_path: &Path, - old_type: &SmolStr, new_type: &str, - fixup_local_use: &dyn Fn( - &common::DocumentCache, - &syntax_nodes::Document, - &TextRange, - &SmolStr, - &str, - &mut Vec, - ), edits: &mut Vec, ) { let Ok(exporter_url) = Url::from_file_path(exporter_path) else { @@ -154,32 +85,32 @@ fn fix_imports( continue; } - fix_import_in_document( - document_cache, - doc, - exporter_path, - old_type, - new_type, - fixup_local_use, - edits, - ); + fix_import_in_document(document_cache, query, doc, exporter_path, new_type, edits); + } +} + +fn import_path( + document_directory: &Path, + import_specifier: &syntax_nodes::ImportSpecifier, +) -> Option { + let import = import_specifier + .child_token(SyntaxKind::StringLiteral) + .map(|t| t.text().trim_matches('"').to_string())?; + + if import == "std-widgets.slint" || import.starts_with("@") { + return None; // No need to ever look at this! 
} + + // Do not bother with the TypeLoader: It will check the FS, which we do not use:-/ + Some(i_slint_compiler::pathutils::clean_path(&document_directory.join(import))) } fn fix_import_in_document( document_cache: &common::DocumentCache, + query: &DeclarationNodeQuery, document_node: &syntax_nodes::Document, exporter_path: &Path, - old_type: &SmolStr, new_type: &str, - fixup_local_use: &dyn Fn( - &common::DocumentCache, - &syntax_nodes::Document, - &TextRange, - &SmolStr, - &str, - &mut Vec, - ), edits: &mut Vec, ) { let Some(document_directory) = @@ -189,13 +120,9 @@ fn fix_import_in_document( }; for import_specifier in document_node.ImportSpecifier() { - let import = import_specifier - .child_token(SyntaxKind::StringLiteral) - .map(|t| t.text().trim_matches('"').to_string()) - .unwrap_or_default(); - - // Do not bother with the TypeLoader: It will check the FS, which we do not use:-/ - let import_path = i_slint_compiler::pathutils::clean_path(&document_directory.join(import)); + let Some(import_path) = import_path(&document_directory, &import_specifier) else { + continue; + }; if import_path != exporter_path { continue; @@ -209,30 +136,20 @@ fn fix_import_in_document( let Some(external) = main_identifier(&identifier.ExternalName()) else { continue; }; - if i_slint_compiler::parser::normalize_identifier(external.text()) != *old_type { + if i_slint_compiler::parser::normalize_identifier(external.text()) != query.name { continue; } let Some(source_file) = external.source_file() else { continue; }; - edits.push( - common::SingleTextEdit::from_path( - document_cache, - source_file.path(), - lsp_types::TextEdit { - range: util::token_to_lsp_range(&external), - new_text: new_type.to_string(), - }, - ) - .expect("URL conversion can not fail here"), - ); - if let Some(internal) = identifier.InternalName().and_then(|i| main_identifier(&i)) { - if i_slint_compiler::parser::normalize_identifier(&internal.text()) == *new_type { - // remove " as Foo" part, no need to change anything else though! + if i_slint_compiler::parser::normalize_identifier(&internal.text()) + == i_slint_compiler::parser::normalize_identifier(new_type) + { + // `Old as New` => `New` let start_position = - util::text_size_to_lsp_position(source_file, external.text_range().end()); + util::text_size_to_lsp_position(source_file, external.text_range().start()); let end_position = util::text_size_to_lsp_position(source_file, identifier.text_range().end()); edits.push( @@ -241,7 +158,20 @@ fn fix_import_in_document( source_file.path(), lsp_types::TextEdit { range: lsp_types::Range::new(start_position, end_position), - new_text: String::new(), + new_text: new_type.to_string(), + }, + ) + .expect("URL conversion can not fail here"), + ); + } else { + // `Old as New` => `New` + edits.push( + common::SingleTextEdit::from_path( + document_cache, + source_file.path(), + lsp_types::TextEdit { + range: util::token_to_lsp_range(&external), + new_text: new_type.to_string(), }, ) .expect("URL conversion can not fail here"), @@ -249,20 +179,29 @@ fn fix_import_in_document( } // Nothing else to change: We still use the old internal name. 
continue; + } else { + edits.push( + common::SingleTextEdit::from_path( + document_cache, + source_file.path(), + lsp_types::TextEdit { + range: util::token_to_lsp_range(&external), + new_text: new_type.to_string(), + }, + ) + .expect("URL conversion can not fail here"), + ); } + let Some(sub_query) = query.sub_query(external) else { + continue; + }; + // Change exports - fix_exports(document_cache, document_node, old_type, new_type, fixup_local_use, edits); + fix_exports(document_cache, document_node, &sub_query, new_type, edits); // Change all local usages: - fixup_local_use( - document_cache, - document_node, - &document_node.text_range(), - old_type, - new_type, - edits, - ); + rename_local_symbols(document_cache, document_node, &sub_query, new_type, edits); } } } @@ -270,26 +209,22 @@ fn fix_import_in_document( fn fix_exports( document_cache: &common::DocumentCache, document_node: &syntax_nodes::Document, - old_type: &SmolStr, + query: &DeclarationNodeQuery, new_type: &str, - fixup_local_use: &dyn Fn( - &common::DocumentCache, - &syntax_nodes::Document, - &TextRange, - &SmolStr, - &str, - &mut Vec, - ), edits: &mut Vec, ) { + let normalized_new_type = i_slint_compiler::parser::normalize_identifier(new_type); + for export in document_node.ExportsList() { for specifier in export.ExportSpecifier() { let Some(identifier) = main_identifier(&specifier.ExportIdentifier()) else { continue; }; - if i_slint_compiler::parser::normalize_identifier(identifier.text()) == *old_type { - let Some(source_file) = identifier.source_file() else { + if i_slint_compiler::parser::normalize_identifier(identifier.text()) == query.name + && query.is_same_symbol(document_cache, identifier.clone()) + { + let Some(source_file) = identifier.source_file().cloned() else { continue; }; @@ -305,17 +240,21 @@ fn fix_exports( .expect("URL conversion can not fail here"), ); - let update_imports = if let Some(export_name) = - specifier.ExportName().and_then(|en| main_identifier(&en)) - { + let sub_query = if let Some(export_name) = specifier.ExportName() { // Remove "as Foo" - if export_name.text().to_string() == new_type { + if i_slint_compiler::parser::identifier_text(&export_name).as_ref() + == Some(&normalized_new_type) + { let start_position = util::text_size_to_lsp_position( - source_file, - identifier.text_range().end(), + &source_file, + identifier + .text_range() + .end() + .checked_add(1.into()) + .expect("There are more tokens"), ); let end_position = util::text_size_to_lsp_position( - source_file, + &source_file, export_name.text_range().end(), ); edits.push( @@ -329,310 +268,471 @@ fn fix_exports( ) .expect("URL conversion can not fail here"), ); - true + query.sub_query(identifier) } else { - false + None } } else { - true + query.sub_query(identifier) }; - if update_imports { + if let Some(sub_query) = sub_query { let my_path = document_node.source_file.path(); - fix_imports( - document_cache, - my_path, - old_type, - new_type, - fixup_local_use, - edits, - ); + fix_imports(document_cache, &sub_query, my_path, new_type, edits); } } } } } -fn visit_document_components( +/// Rename all local non import/export related identifiers +fn rename_local_symbols( + document_cache: &common::DocumentCache, document_node: &syntax_nodes::Document, - action: &mut impl FnMut(&syntax_nodes::Component), + query: &DeclarationNodeQuery, + new_type: &str, + edits: &mut Vec, ) { - for component in document_node.Component() { - action(&component); - } - for exported in document_node.ExportsList() { - if let Some(component) = 
exported.Component() { - action(&component); + let mut current_token = document_node.first_token(); + while let Some(current) = current_token { + if current.kind() == SyntaxKind::Identifier + && i_slint_compiler::parser::normalize_identifier(current.text()) == query.name + { + if ![ + SyntaxKind::ExternalName, + SyntaxKind::InternalName, + SyntaxKind::ExportIdentifier, + SyntaxKind::ExportName, + ] + .contains(&current.parent().kind()) + && query.is_same_symbol(document_cache, current.clone()) + { + edits.push( + common::SingleTextEdit::from_path( + document_cache, + current.source_file.path(), + lsp_types::TextEdit { + range: util::token_to_lsp_range(&current), + new_text: new_type.to_string(), + }, + ) + .expect("URL conversion can not fail here"), + ) + } } + + current_token = current.next_token(); } } -fn visit_document_structs( - document_node: &syntax_nodes::Document, - action: &mut impl FnMut(&syntax_nodes::StructDeclaration), -) { - for struct_decl in document_node.StructDeclaration() { - action(&struct_decl); +/// Rename an InternalName in an import statement +/// +/// The ExternalName is different from our name, which is why we ended up here. +/// +/// Change the InternalName, fix up local usage and then fix up exports. If exports +/// change something, also fix all the necessary imports. +fn rename_internal_name( + document_cache: &common::DocumentCache, + query: &DeclarationNodeQuery, + internal_name: &syntax_nodes::InternalName, + new_type: &str, +) -> lsp_types::WorkspaceEdit { + let Some(old_type) = i_slint_compiler::parser::identifier_text(&internal_name) else { + return Default::default(); + }; + let Some(document) = document_cache.get_document_for_source_file(&internal_name.source_file) + else { + return Default::default(); + }; + let Some(document_node) = &document.node else { + return Default::default(); + }; + + let mut edits = vec![]; + + let parent: syntax_nodes::ImportIdentifier = internal_name.parent().unwrap().into(); + let external_name = parent.ExternalName(); + let external_name_token = main_identifier(&external_name).unwrap(); + let external_str = i_slint_compiler::parser::normalize_identifier(external_name_token.text()); + + let normalized_new_type = i_slint_compiler::parser::normalize_identifier(new_type); + + if external_str == normalized_new_type { + // `New as Old` -> `New` + edits.push( + common::SingleTextEdit::from_path( + document_cache, + query.token.source_file.path(), + lsp_types::TextEdit { + range: util::text_range_to_lsp_range( + &external_name_token.source_file, + TextRange::new( + external_name_token.next_token().unwrap().text_range().start(), + query.token.text_range().end(), + ), + ), + new_text: String::new(), + }, + ) + .expect("URL conversion can not fail here"), + ) + } else if old_type != normalized_new_type { + // `Some as Old` -> `Some as New` + edits.push( + common::SingleTextEdit::from_path( + document_cache, + query.token.source_file.path(), + lsp_types::TextEdit { + range: util::token_to_lsp_range(&main_identifier(&internal_name).unwrap()), + new_text: new_type.to_string(), + }, + ) + .expect("URL conversion can not fail here"), + ); } - - for exported in document_node.ExportsList() { - for struct_decl in exported.StructDeclaration() { - action(&struct_decl); - } + + // Change exports + fix_exports(document_cache, document_node, &query, new_type, &mut edits); + + // Change all local usages: + rename_local_symbols(document_cache, document_node, &query, new_type, &mut edits); + + common::create_workspace_edit_from_single_text_edits(edits) +} + 
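// A minimal sketch (not part of the patch itself) of the branch logic in
// `rename_internal_name` above, reduced to plain strings: `external` is the imported
// (exported) name, `internal` is the local alias being renamed, and `new_name` is the
// requested name. Identifier normalization and the actual text-edit generation are
// left out; the helper name is purely illustrative.
fn sketch_new_import_clause(external: &str, internal: &str, new_name: &str) -> String {
    if external == new_name {
        // `Foo as Local` renamed to `Foo`: the alias becomes redundant, keep a plain `Foo`.
        external.to_string()
    } else if internal != new_name {
        // `Foo as Local` renamed to `New`: only the alias changes, giving `Foo as New`.
        format!("{external} as {new_name}")
    } else {
        // The alias already carries the requested name; nothing changes.
        format!("{external} as {internal}")
    }
}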
+/// We ended up in an ExportName that we need to rename. +/// +/// The internal name is different, otherwise we would not have ended up here:-) +/// So we need to rename the export itself and then fix up imports. +fn rename_export_name( + document_cache: &common::DocumentCache, + query: &DeclarationNodeQuery, + export_name: &syntax_nodes::ExportName, + new_type: &str, +) -> lsp_types::WorkspaceEdit { + let mut edits = vec![]; + + let specifier: syntax_nodes::ExportSpecifier = export_name.parent().unwrap().into(); + let internal_name = specifier.ExportIdentifier(); + if i_slint_compiler::parser::identifier_text(&internal_name).as_ref() + == Some(&i_slint_compiler::parser::normalize_identifier(new_type)) + { + edits.push( + common::SingleTextEdit::from_path( + document_cache, + export_name.source_file.path(), + lsp_types::TextEdit { + range: util::node_to_lsp_range(&specifier), + new_text: new_type.to_string(), + }, + ) + .expect("URL conversion can not fail here"), + ); + } else { + edits.push( + common::SingleTextEdit::from_path( + document_cache, + export_name.source_file.path(), + lsp_types::TextEdit { + range: util::token_to_lsp_range(&main_identifier(export_name).unwrap()), + new_text: new_type.to_string(), + }, + ) + .expect("URL conversion can not fail here"), + ); } + + // Change exports + fix_imports(document_cache, &query, export_name.source_file.path(), new_type, &mut edits); + + common::create_workspace_edit_from_single_text_edits(edits) } -fn declaration_validity_range( - document_node: &syntax_nodes::Document, - identifier: &syntax_nodes::DeclaredIdentifier, -) -> TextRange { - let parent = identifier.parent().unwrap(); - let start = parent.last_token().unwrap().text_range().end() + TextSize::new(1); +#[derive(Clone, Debug)] +pub enum DeclarationNodeKind { + DeclaredIdentifier(syntax_nodes::DeclaredIdentifier), + InternalName(syntax_nodes::InternalName), + ExportName(syntax_nodes::ExportName), +} + +#[derive(Clone, Debug)] +pub struct DeclarationNode { + kind: DeclarationNodeKind, + query: DeclarationNodeQuery, +} - let mut token = parent.last_token().unwrap().next_token(); - let identifier_text = i_slint_compiler::parser::identifier_text(identifier).unwrap_or_default(); +pub fn find_declaration_node( + document_cache: &common::DocumentCache, + token: &SyntaxToken, +) -> Option { + if token.kind() != SyntaxKind::Identifier { + return None; + } + + DeclarationNodeQuery::new(document_cache, token.clone())?.find_declaration_node(document_cache) +} + +impl DeclarationNode { + pub fn rename( + &self, + document_cache: &common::DocumentCache, + new_type: &str, + ) -> crate::Result { + match &self.kind { + DeclarationNodeKind::DeclaredIdentifier(id) => { + rename_declared_identifier(document_cache, &self.query, &id, &new_type) + } + DeclarationNodeKind::InternalName(internal) => { + Ok(rename_internal_name(document_cache, &self.query, &internal, &new_type)) + } + DeclarationNodeKind::ExportName(export) => { + Ok(rename_export_name(document_cache, &self.query, &export, &new_type)) + } + } + } +} + +fn find_last_declared_identifier_at_or_before( + token: SyntaxToken, + type_name: &SmolStr, +) -> Option { + let mut token = Some(token); - while let Some(t) = &token { + while let Some(t) = token { if t.kind() == SyntaxKind::Identifier { - let new_parent = t.parent(); - if new_parent.kind() == SyntaxKind::DeclaredIdentifier - && i_slint_compiler::parser::identifier_text(&new_parent).unwrap_or_default() - == identifier_text + let node = t.parent(); + if node.kind() == 
SyntaxKind::DeclaredIdentifier + && i_slint_compiler::parser::identifier_text(&node).as_ref() == Some(type_name) { - let new_grand_parent = new_parent.parent().unwrap(); - match parent.kind() { - SyntaxKind::Component => { - if new_grand_parent.kind() == SyntaxKind::Component { - return TextRange::new( - start, - new_grand_parent.last_token().unwrap().text_range().end(), - ); - } - } - SyntaxKind::EnumDeclaration | SyntaxKind::StructDeclaration => { - if [SyntaxKind::EnumDeclaration, SyntaxKind::StructDeclaration] - .contains(&new_grand_parent.kind()) - { - return TextRange::new( - start, - new_grand_parent.last_token().unwrap().text_range().end(), - ); - } - } - _ => unreachable!(), - } + return Some(node.into()); } } - token = t.next_token(); + token = t.prev_token(); } - TextRange::new(start, document_node.text_range().end()) + None } -/// Rename the `DeclaredIdentifier` in a struct/component declaration -pub fn rename_identifier_from_declaration( - document_cache: &common::DocumentCache, - identifier: &syntax_nodes::DeclaredIdentifier, - new_type: &str, -) -> crate::Result { - fn change_local_element_type( - document_cache: &common::DocumentCache, - document_node: &syntax_nodes::Document, - validity_range: &TextRange, - old_type: &SmolStr, - new_type: &str, - edits: &mut Vec, - ) { - visit_document_components(document_node, &mut move |component| { - if validity_range.contains_range(component.text_range()) { - replace_element_types( - document_cache, - &component.Element(), - old_type, - new_type, - edits, - ); +#[derive(Clone, Debug)] +struct DeclarationNodeQuery { + info: common::token_info::TokenInfo, + name: SmolStr, + token: SyntaxToken, +} + +impl DeclarationNodeQuery { + fn new(document_cache: &common::DocumentCache, token: SyntaxToken) -> Option { + let info = common::token_info::token_info(document_cache, token.clone())?; + let name = i_slint_compiler::parser::normalize_identifier(token.text()); + + Some(DeclarationNodeQuery { info, name, token }) + } + + fn sub_query(&self, token: SyntaxToken) -> Option { + let name = i_slint_compiler::parser::normalize_identifier(token.text()); + + Some(DeclarationNodeQuery { info: self.info.clone(), name, token }) + } + + fn is_export_identifier_or_external_name(&self) -> bool { + self.token.kind() == SyntaxKind::Identifier + && [SyntaxKind::ExportIdentifier, SyntaxKind::ExternalName] + .contains(&self.token.parent().kind()) + } + + fn start_token(&self) -> Option { + if self.is_export_identifier_or_external_name() { + None + } else { + Some(self.token.clone()) + } + } + + fn is_same_symbol(&self, document_cache: &common::DocumentCache, token: SyntaxToken) -> bool { + let Some(info) = common::token_info::token_info(document_cache, token) else { + return false; + }; + + match (&self.info, &info) { + (common::token_info::TokenInfo::Type(s), common::token_info::TokenInfo::Type(o)) => { + s == o } - }); + ( + common::token_info::TokenInfo::ElementType(s), + common::token_info::TokenInfo::ElementType(o), + ) => s == o, + ( + common::token_info::TokenInfo::ElementRc(s), + common::token_info::TokenInfo::ElementRc(o), + ) => Rc::ptr_eq(s, o), + (_, _) => false, + } } - fn change_local_data_type( + /// Find the declaration node we should rename + fn find_declaration_node( + self: Self, document_cache: &common::DocumentCache, - document_node: &syntax_nodes::Document, - validity_range: &TextRange, - old_type: &SmolStr, - new_type: &str, - edits: &mut Vec, - ) { - visit_document_components(document_node, &mut |component| { - if 
validity_range.contains_range(component.text_range()) { - for qualified_name in - component.descendants().filter(|node| node.kind() == SyntaxKind::QualifiedName) - { - if let Some(first_identifier) = main_identifier(&qualified_name) { - if i_slint_compiler::parser::normalize_identifier(first_identifier.text()) - == *old_type - { - edits.push( - common::SingleTextEdit::from_path( - document_cache, - qualified_name.source_file.path(), - lsp_types::TextEdit { - range: util::token_to_lsp_range(&first_identifier), - new_text: new_type.to_string(), - }, - ) - .expect("URL conversion can not fail here"), - ) - } - } - } + ) -> Option { + let node = self.token.parent(); + + match node.kind() { + SyntaxKind::DeclaredIdentifier => Some(DeclarationNode { + kind: DeclarationNodeKind::DeclaredIdentifier(node.into()), + query: self, + }), + SyntaxKind::InternalName => Some(DeclarationNode { + kind: DeclarationNodeKind::InternalName(node.into()), + query: self, + }), + SyntaxKind::ExportName => Some(DeclarationNode { + kind: DeclarationNodeKind::ExportName(node.into()), + query: self, + }), + _ => { + let document = + document_cache.get_document_by_path(self.token.source_file.path())?; + let document_node = document.node.clone()?; + let start_token = self.start_token(); + + find_declaration_node_impl(document_cache, &document_node, start_token, self) } - }); - visit_document_structs(document_node, &mut |struct_decl| { - if validity_range.contains_range(struct_decl.text_range()) { - for qualified_name in struct_decl - .descendants() - .filter(|d| d.kind() == SyntaxKind::QualifiedName) - .map(|d| Into::::into(d)) - { - let identifier = main_identifier(&qualified_name).unwrap(); - if i_slint_compiler::parser::normalize_identifier(identifier.text()) - == *old_type + } + } +} + +fn find_declaration_node_impl( + document_cache: &common::DocumentCache, + document_node: &syntax_nodes::Document, + start_token: Option, + query: DeclarationNodeQuery, +) -> Option { + // Exported under a custom name? 
+ if start_token.is_none() { + for export_item in document_node.ExportsList() { + for specifier in export_item.ExportSpecifier() { + if let Some(export_name) = specifier.ExportName() { + if i_slint_compiler::parser::identifier_text(&export_name).as_ref() + == Some(&query.name) { - edits.push( - common::SingleTextEdit::from_path( - document_cache, - qualified_name.source_file.path(), - lsp_types::TextEdit { - range: util::token_to_lsp_range(&identifier), - new_text: new_type.to_string(), - }, - ) - .expect("URL conversion can not fail here"), - ) + return Some(DeclarationNode { + kind: DeclarationNodeKind::ExportName(export_name), + query, + }); } } } - }); - } - - let action: Option< - &dyn Fn( - &common::DocumentCache, - &syntax_nodes::Document, - &TextRange, - &SmolStr, - &str, - &mut Vec, - ), - > = match identifier.parent().map(|p| p.kind()).unwrap_or(SyntaxKind::Error) { - SyntaxKind::Component => Some(&change_local_element_type), - SyntaxKind::EnumDeclaration | SyntaxKind::StructDeclaration => { - Some(&change_local_data_type) } - _ => None, - }; + } - if let Some(action) = action { - rename_declared_identifier(document_cache, identifier, new_type, action) - } else { - Err("Can not rename this identifier".into()) + let mut token = document_node.last_token(); + + while let Some(t) = token { + if let Some(declared_identifier) = + find_last_declared_identifier_at_or_before(t.clone(), &query.name) + { + if query.is_same_symbol(document_cache, main_identifier(&declared_identifier).unwrap()) + { + return Some(DeclarationNode { + kind: DeclarationNodeKind::DeclaredIdentifier(declared_identifier.into()), + query, + }); + } + + token = declared_identifier.first_token().and_then(|t| t.prev_token()); + } else { + token = None; + } + } + + // Imported? + for import_spec in document_node.ImportSpecifier() { + if let Some(import_id) = import_spec.ImportIdentifierList() { + for id in import_id.ImportIdentifier() { + let external = i_slint_compiler::parser::identifier_text(&id.ExternalName()); + let internal = + id.InternalName().and_then(|i| i_slint_compiler::parser::identifier_text(&i)); + + if internal.as_ref() == Some(&query.name) { + return Some(DeclarationNode { + kind: DeclarationNodeKind::InternalName(id.InternalName().unwrap()), + query, + }); + } + + if external.as_ref() == Some(&query.name) { + let document_path = document_node.source_file.path(); + let document_dir = document_path.parent()?; + let path = import_path(document_dir, &import_spec)?; + let import_doc = document_cache.get_document_by_path(&path)?; + let import_doc_node = import_doc.node.as_ref()?; + + return find_declaration_node_impl( + document_cache, + import_doc_node, + None, + query, + ); + } + } + } } + + None } -/// Helper function to rename a `DeclaredIdentifier`. +/// Rename a `DeclaredIdentifier`. +/// +/// This is a locally defined thing. 
+/// +/// Fix up local usages, fix exports and any imports elsewhere if the exports changed fn rename_declared_identifier( document_cache: &common::DocumentCache, - identifier: &syntax_nodes::DeclaredIdentifier, + query: &DeclarationNodeQuery, + declared_identifier: &syntax_nodes::DeclaredIdentifier, new_type: &str, - fixup_local_use: &dyn Fn( - &common::DocumentCache, - &syntax_nodes::Document, - &TextRange, - &SmolStr, - &str, - &mut Vec, - ), ) -> crate::Result { - let source_file = identifier.source_file().expect("Identifier had no source file"); + let source_file = &declared_identifier.source_file; let document = document_cache .get_document_for_source_file(source_file) .expect("Identifier is in unknown document"); - let parent = identifier.parent().unwrap(); + let Some(document_node) = &document.node else { + return Err("No document found".into()); + }; + + let parent = declared_identifier.parent().unwrap(); + + let normalized_new_type = i_slint_compiler::parser::normalize_identifier(new_type); if parent.kind() != SyntaxKind::Component - && document.local_registry.lookup(new_type) != i_slint_compiler::langtype::Type::Invalid + && document.local_registry.lookup(normalized_new_type.as_str()) + != i_slint_compiler::langtype::Type::Invalid { return Err(format!("{new_type} is already a registered type").into()); } if parent.kind() == SyntaxKind::Component - && document.local_registry.lookup_element(new_type).is_ok() + && document.local_registry.lookup_element(&normalized_new_type.as_str()).is_ok() { return Err(format!("{new_type} is already a registered element").into()); } - let old_type = i_slint_compiler::parser::identifier_text(&identifier).unwrap(); - let normalized_new_type = i_slint_compiler::parser::normalize_identifier(new_type); + let old_type = &query.name; - if old_type == normalized_new_type { + if *old_type == normalized_new_type { return Ok(lsp_types::WorkspaceEdit::default()); } - let parent = identifier.parent().expect("Identifier had no parent"); - debug_assert!([ - SyntaxKind::Component, - SyntaxKind::EnumDeclaration, - SyntaxKind::StructDeclaration - ] - .contains(&parent.kind())); - - let Some(document_node) = &document.node else { - return Err("No document found".into()); - }; - - let validity_range = declaration_validity_range(document_node, identifier); let mut edits = vec![]; - // Replace the identifier itself - edits.push( - common::SingleTextEdit::from_path( - document_cache, - source_file.path(), - lsp_types::TextEdit { - range: util::node_to_lsp_range(identifier), - new_text: new_type.to_string(), - }, - ) - .expect("URL conversion can not fail here"), - ); - // Change all local usages: - fixup_local_use( - document_cache, - document_node, - &validity_range, - &old_type, - new_type, - &mut edits, - ); + rename_local_symbols(document_cache, document_node, query, new_type, &mut edits); // Change exports (if the type lives till the end of the document!) 
- if validity_range.end() == document_node.text_range().end() { - fix_exports( - document_cache, - document_node, - &old_type, - new_type, - fixup_local_use, - &mut edits, - ); - - if is_symbol_name_exported(document_node, &old_type) { - let my_path = source_file.path(); + fix_exports(document_cache, document_node, query, new_type, &mut edits); - fix_imports(document_cache, my_path, &old_type, new_type, fixup_local_use, &mut edits); - } + if is_symbol_name_exported(document_cache, document_node, query) { + fix_imports(document_cache, query, source_file.path(), new_type, &mut edits); } Ok(common::create_workspace_edit_from_single_text_edits(edits)) @@ -650,11 +750,11 @@ mod tests { use crate::common::text_edit; #[track_caller] - fn find_node_by_comment( + fn find_token_by_comment( document_cache: &common::DocumentCache, document_path: &Path, suffix: &str, - ) -> i_slint_compiler::parser::SyntaxNode { + ) -> SyntaxToken { let document = document_cache.get_document_by_path(document_path).unwrap(); let document = document.node.as_ref().unwrap(); @@ -663,20 +763,29 @@ mod tests { let comment = document.token_at_offset(offset.into()).next().unwrap(); assert_eq!(comment.kind(), SyntaxKind::Comment); let mut token = comment.prev_token(); + while let Some(t) = &token { - if ![SyntaxKind::Comment, SyntaxKind::Whitespace].contains(&t.kind()) { + if ![SyntaxKind::Comment, SyntaxKind::Eof, SyntaxKind::Whitespace].contains(&t.kind()) { break; } token = t.prev_token(); } - token.map(|t| t.parent()).unwrap() + token.unwrap() } #[track_caller] - fn compile_test_changes( + fn find_node_by_comment( + document_cache: &common::DocumentCache, + document_path: &Path, + suffix: &str, + ) -> SyntaxNode { + find_token_by_comment(document_cache, document_path, suffix).parent() + } + + #[track_caller] + fn apply_text_changes( document_cache: &common::DocumentCache, edit: &lsp_types::WorkspaceEdit, - allow_warnings: bool, ) -> Vec { eprintln!("Edit:"); for it in text_edit::EditIterator::new(edit) { @@ -697,12 +806,23 @@ mod tests { } eprintln!("*** All changes reported ***"); - let code = { - let mut map: HashMap = document_cache - .all_url_documents() - .map(|(url, dn)| (url, dn.source_file.as_ref())) - .map(|(url, sf)| (url, sf.source().unwrap().to_string())) - .collect(); + changed_text + } + + #[track_caller] + fn compile_test_changes( + document_cache: &common::DocumentCache, + edit: &lsp_types::WorkspaceEdit, + allow_warnings: bool, + ) -> Vec { + let changed_text = apply_text_changes(document_cache, edit); + + let code = { + let mut map: HashMap = document_cache + .all_url_documents() + .map(|(url, dn)| (url, dn.source_file.as_ref())) + .map(|(url, sf)| (url, sf.source().unwrap().to_string())) + .collect(); for ct in &changed_text { map.insert(ct.url.clone(), ct.contents.clone()); } @@ -715,49 +835,237 @@ mod tests { changed_text } + #[track_caller] + pub fn rename_tester_with_new_name( + document_cache: &common::DocumentCache, + document_path: &Path, + suffix: &str, + new_name: &str, + ) -> Vec { + let edit = find_declaration_node( + &document_cache, + &find_token_by_comment(&document_cache, document_path, suffix), + ) + .unwrap() + .rename(&document_cache, new_name) + .unwrap(); + compile_test_changes(&document_cache, &edit, false) + } + + #[track_caller] + pub fn rename_tester( + document_cache: &common::DocumentCache, + document_path: &Path, + suffix: &str, + ) -> Vec { + rename_tester_with_new_name(document_cache, document_path, suffix, "XxxYyyZzz") + } + #[test] - fn 
test_rename_component_from_definition() { + fn test_rename_redefined_component() { + let document_cache = test::compile_test_with_sources( + "fluent", + HashMap::from([( + Url::from_file_path(test::main_test_file_name()).unwrap(), + r#" +component Foo /* <- TEST_ME_1 */ { @children } + +export { Foo /* 2 */ } + +component Bar { + Foo /* 1.1 */ { } +} + +component Foo /* <- TEST_ME_2 */ inherits Foo /* 1.2 */ { + Foo /* 1.3 */ { } +} + "# + .to_string(), + )]), + true, // Component `Foo` is replacing a component with the same name + ); + + // Can not rename the first one... + assert!(find_declaration_node( + &document_cache, + &find_token_by_comment(&document_cache, &test::main_test_file_name(), "_1"), + ) + .is_none(),); + + let edit = find_declaration_node( + &document_cache, + &find_token_by_comment(&document_cache, &test::main_test_file_name(), "_2"), + ) + .unwrap() + .rename(&document_cache, "XxxYyyZzz") + .unwrap(); + + let edited_text = apply_text_changes(&document_cache, &edit); // DO NOT COMPILE, THAT WILL FAIL! + + assert_eq!(edited_text.len(), 1); + + assert!(edited_text[0].contents.contains("component Foo /* <- TEST_ME_1 ")); + // The *last* Foo gets exported + assert!(edited_text[0].contents.contains("export { XxxYyyZzz /* 2 */ }")); + + // All the following are wrong: + assert!(edited_text[0].contents.contains("XxxYyyZzz /* 1.1 ")); + assert!(edited_text[0].contents.contains("inherits XxxYyyZzz /* 1.2 ")); + assert!(edited_text[0].contents.contains("XxxYyyZzz /* 1.3 ")); + } + + #[test] + fn test_rename_redefined_enum() { + let document_cache = test::compile_test_with_sources( + "fluent", + HashMap::from([( + Url::from_file_path(test::main_test_file_name()).unwrap(), + r#" +enum Foo /* <- TEST_ME_1 */ { test1 } + +export { Foo /* 2 */ } + +struct Bar { + bar_test: Foo +} + +enum Foo /* <- TEST_ME_2 */ { + test2 +} + +export struct Baz { + baz_test: Foo +} + "# + .to_string(), + )]), + true, // Component `Foo` is replacing a component with the same name + ); + + // Can not rename the first one... + assert!(find_declaration_node( + &document_cache, + &find_token_by_comment(&document_cache, &test::main_test_file_name(), "_1"), + ) + .is_none(),); + + let edit = find_declaration_node( + &document_cache, + &find_token_by_comment(&document_cache, &test::main_test_file_name(), "_2"), + ) + .unwrap() + .rename(&document_cache, "XxxYyyZzz") + .unwrap(); + + let edited_text = apply_text_changes(&document_cache, &edit); // DO NOT COMPILE, THAT WILL FAIL! + + assert_eq!(edited_text.len(), 1); + + assert!(edited_text[0].contents.contains("enum Foo /* <- TEST_ME_1 ")); + // The *last* Foo gets exported! + assert!(edited_text[0].contents.contains("export { XxxYyyZzz /* 2 */ }")); + assert!(edited_text[0].contents.contains("baz_test: XxxYyyZzz")); + + // All the following are wrong: + assert!(edited_text[0].contents.contains("bar_test: XxxYyyZzz")); + } + + #[test] + fn test_rename_redefined_struct() { let document_cache = test::compile_test_with_sources( "fluent", HashMap::from([( Url::from_file_path(test::main_test_file_name()).unwrap(), r#" -component Foo /* <- TEST_ME_1 */ { @children } +struct Foo /* <- TEST_ME_1 */ { test: bool } + +export { Foo /* 2 */ } + +struct Bar { + bar_test: Foo +} + +struct Foo /* <- TEST_ME_2 */ { + foo_test: Foo +} + "# + .to_string(), + )]), + true, // Component `Foo` is replacing a component with the same name + ); + + // Can not rename the first one... 
+ assert!(find_declaration_node( + &document_cache, + &find_token_by_comment(&document_cache, &test::main_test_file_name(), "_1"), + ) + .is_none(),); + + let edit = find_declaration_node( + &document_cache, + &find_token_by_comment(&document_cache, &test::main_test_file_name(), "_2"), + ) + .unwrap() + .rename(&document_cache, "XxxYyyZzz") + .unwrap(); + + let edited_text = apply_text_changes(&document_cache, &edit); // DO NOT COMPILE, THAT WILL FAIL! + + assert_eq!(edited_text.len(), 1); + + assert!(edited_text[0].contents.contains("struct Foo /* <- TEST_ME_1 ")); + // The *last* Foo gets exported! + assert!(edited_text[0].contents.contains("export { XxxYyyZzz /* 2 */ }")); + + // All the following are wrong: + assert!(edited_text[0].contents.contains("bar_test: XxxYyyZzz")); + assert!(edited_text[0].contents.contains("foo_test: XxxYyyZzz")); + } + #[test] + fn test_rename_component_from_definition() { + let document_cache = test::compile_test_with_sources( + "fluent", + HashMap::from([( + Url::from_file_path(test::main_test_file_name()).unwrap(), + r#" export { Foo } enum Xyz { Foo, Bar } struct Abc { Foo: Xyz } +component Foo /* <- TEST_ME_1 */ inherits Rectangle { + @children +} + component Baz { - Foo /* Baz */ { } + Foo /* <- TEST_ME_2 */ { } } -component Foo /* <- TEST_ME_2 */ inherits Foo { - Foo /* 1 */ { } - @children +struct Foo { + bar: bool, } -export component Bar inherits Foo { - Foo /* <- TEST_ME_3 */ { } +export component Bar inherits Foo /* <- TEST_ME_3 */ { + Foo /* <- TEST_ME_4 */ { } Rectangle { - Foo /* <- TEST_ME_4 */ { } + Foo /* <- TEST_ME_5 */ { } Foo := Baz { } } if true: Rectangle { - Foo /* <- TEST_ME_5 */ { } + Foo /* <- TEST_ME_6 */ { } } if false: Rectangle { - Foo /* <- TEST_ME_6 */ { } + Foo /* <- TEST_ME_7 */ { } } function Foo(Foo: int) { Foo + 1; } function F() { self.Foo(42); } - for i in [1, 2, 3]: Foo /* <- TEST_ME_7 */ { } + for i in [1, 2, 3]: Foo /* <- TEST_ME_8 */ { } } "# .to_string(), @@ -765,55 +1073,28 @@ export component Bar inherits Foo { true, // Component `Foo` is replacing a component with the same name ); - let id = find_node_by_comment(&document_cache, &test::main_test_file_name(), "_1").into(); - let edit = rename_identifier_from_declaration(&document_cache, &id, "XxxYyyZzz").unwrap(); - let edited_text = compile_test_changes(&document_cache, &edit, false); - + let edited_text = rename_tester(&document_cache, &test::main_test_file_name(), "_1"); assert_eq!(edited_text.len(), 1); - assert!(edited_text[0].contents.contains("component XxxYyyZzz /* <- TEST_ME_1 ")); - // The *last* Foo gets exported! 
- assert!(edited_text[0].contents.contains("export { Foo }")); - assert!(edited_text[0].contents.contains("enum Xyz { Foo,")); - assert!(edited_text[0].contents.contains("struct Abc { Foo:")); - assert!(edited_text[0].contents.contains("XxxYyyZzz /* Baz ")); - assert!(edited_text[0] - .contents - .contains("component Foo /* <- TEST_ME_2 */ inherits XxxYyyZzz ")); - assert!(edited_text[0].contents.contains("XxxYyyZzz /* 1 */")); - assert!(edited_text[0].contents.contains("export component Bar inherits Foo {")); - assert!(edited_text[0].contents.contains("Foo /* <- TEST_ME_3 ")); - assert!(edited_text[0].contents.contains("Foo /* <- TEST_ME_4 ")); - assert!(edited_text[0].contents.contains("Foo := Baz {")); - assert!(edited_text[0].contents.contains("Foo /* <- TEST_ME_5 ")); - assert!(edited_text[0].contents.contains("Foo /* <- TEST_ME_6 ")); - assert!(edited_text[0].contents.contains("function Foo(Foo: int) { Foo + 1; }")); - assert!(edited_text[0].contents.contains("function F() { self.Foo(42); }")); - assert!(edited_text[0].contents.contains("Foo /* <- TEST_ME_7 ")); - - let id = find_node_by_comment(&document_cache, &test::main_test_file_name(), "_2").into(); - let edit = rename_identifier_from_declaration(&document_cache, &id, "XxxYyyZzz").unwrap(); - let edited_text = compile_test_changes(&document_cache, &edit, false); - assert_eq!(edited_text.len(), 1); - assert!(edited_text[0].contents.contains("component Foo /* <- TEST_ME_1 ")); assert!(edited_text[0].contents.contains("export { XxxYyyZzz }")); assert!(edited_text[0].contents.contains("enum Xyz { Foo,")); assert!(edited_text[0].contents.contains("struct Abc { Foo:")); - assert!(edited_text[0].contents.contains("Foo /* Baz ")); + assert!(edited_text[0].contents.contains("component XxxYyyZzz /* <- TEST_ME_1 ")); + assert!(edited_text[0].contents.contains("TEST_ME_1 */ inherits Rectangle ")); + assert!(edited_text[0].contents.contains("XxxYyyZzz /* <- TEST_ME_2 ")); + assert!(edited_text[0].contents.contains("struct Foo {")); assert!(edited_text[0] .contents - .contains("component XxxYyyZzz /* <- TEST_ME_2 */ inherits Foo ")); - assert!(edited_text[0].contents.contains("Foo /* 1 */")); - assert!(edited_text[0].contents.contains("export component Bar inherits XxxYyyZzz {")); - assert!(edited_text[0].contents.contains("XxxYyyZzz /* <- TEST_ME_3 ")); + .contains("component Bar inherits XxxYyyZzz /* <- TEST_ME_3 ")); assert!(edited_text[0].contents.contains("XxxYyyZzz /* <- TEST_ME_4 ")); - assert!(edited_text[0].contents.contains("Foo := Baz {")); assert!(edited_text[0].contents.contains("XxxYyyZzz /* <- TEST_ME_5 ")); + assert!(edited_text[0].contents.contains("Foo := Baz ")); assert!(edited_text[0].contents.contains("XxxYyyZzz /* <- TEST_ME_6 ")); - assert!(edited_text[0].contents.contains("function Foo(Foo: int) { Foo + 1; }")); - assert!(edited_text[0].contents.contains("function F() { self.Foo(42); }")); assert!(edited_text[0].contents.contains("XxxYyyZzz /* <- TEST_ME_7 ")); + assert!(edited_text[0].contents.contains("function Foo(Foo:")); + assert!(edited_text[0].contents.contains("F() { self.Foo(")); + assert!(edited_text[0].contents.contains("XxxYyyZzz /* <- TEST_ME_8 ")); } #[test] @@ -827,8 +1108,13 @@ export component Bar inherits Foo { true, ); - let id = find_node_by_comment(&document_cache, &test::main_test_file_name(), "_1").into(); - let edit = rename_identifier_from_declaration(&document_cache, &id, "XxxYyyZzz").unwrap(); + let edit = find_declaration_node( + &document_cache, + &find_token_by_comment(&document_cache, 
&test::main_test_file_name(), "_1"), + ) + .unwrap() + .rename(&document_cache, "XxxYyyZzz") + .unwrap(); assert_eq!(text_edit::EditIterator::new(&edit).count(), 1); @@ -865,10 +1151,8 @@ export { Foo as FExport } false, ); - let id = find_node_by_comment(&document_cache, &test::test_file_name("source.slint"), "_1") - .into(); - let edit = rename_identifier_from_declaration(&document_cache, &id, "XxxYyyZzz").unwrap(); - let edited_text = compile_test_changes(&document_cache, &edit, false); + let edited_text = + rename_tester(&document_cache, &test::test_file_name("source.slint"), "_1"); assert_eq!(edited_text.len(), 1); assert_eq!( @@ -954,10 +1238,8 @@ export { Foo as User4Fxx } false, ); - let id = find_node_by_comment(&document_cache, &test::test_file_name("source.slint"), "_1") - .into(); - let edit = rename_identifier_from_declaration(&document_cache, &id, "XxxYyyZzz").unwrap(); - let edited_text = compile_test_changes(&document_cache, &edit, false); + let edited_text = + rename_tester(&document_cache, &test::test_file_name("source.slint"), "_1"); for ed in &edited_text { let ed_path = ed.url.to_file_path().unwrap(); @@ -1064,11 +1346,8 @@ export { Foo as User4Fxx } false, ); - let id = - find_node_by_comment(&document_cache, &test::test_file_name("s/source.slint"), "_1") - .into(); - let edit = rename_identifier_from_declaration(&document_cache, &id, "XxxYyyZzz").unwrap(); - let edited_text = compile_test_changes(&document_cache, &edit, false); + let edited_text = + rename_tester(&document_cache, &test::test_file_name("s/source.slint"), "_1"); for ed in &edited_text { let ed_path = ed.url.to_file_path().unwrap(); @@ -1136,10 +1415,8 @@ export component Foo /* <- TEST_ME_2 */ { } false, ); - let id = find_node_by_comment(&document_cache, &test::test_file_name("user1.slint"), "_1") - .into(); - let edit = rename_identifier_from_declaration(&document_cache, &id, "XxxYyyZzz").unwrap(); - let edited_text = compile_test_changes(&document_cache, &edit, false); + let edited_text = + rename_tester(&document_cache, &test::test_file_name("user1.slint"), "_1"); for ed in &edited_text { let ed_path = ed.url.to_file_path().unwrap(); @@ -1154,10 +1431,8 @@ export component Foo /* <- TEST_ME_2 */ { } } } - let id = find_node_by_comment(&document_cache, &test::test_file_name("user2.slint"), "_2") - .into(); - let edit = rename_identifier_from_declaration(&document_cache, &id, "XxxYyyZzz").unwrap(); - let edited_text = compile_test_changes(&document_cache, &edit, false); + let edited_text = + rename_tester(&document_cache, &test::test_file_name("user2.slint"), "_2"); for ed in &edited_text { let ed_path = ed.url.to_file_path().unwrap(); @@ -1202,56 +1477,17 @@ export component Bar { false, ); - let id = find_node_by_comment(&document_cache, &test::main_test_file_name(), "_1").into(); - - assert!(rename_identifier_from_declaration(&document_cache, &id, "Foo").is_err()); - assert!(rename_identifier_from_declaration(&document_cache, &id, "UsedStruct").is_ok()); - assert!(rename_identifier_from_declaration(&document_cache, &id, "UsedEnum").is_ok()); - assert!(rename_identifier_from_declaration(&document_cache, &id, "Baz").is_err()); - assert!( - rename_identifier_from_declaration(&document_cache, &id, "HorizontalLayout").is_err() - ); - } - - #[test] - fn test_exported_type_names() { - let document_cache = test::compile_test_with_sources( - "fluent", - HashMap::from([( - Url::from_file_path(test::main_test_file_name()).unwrap(), - r#" -export component Foo {} -export component Baz {} - -component Bar {} 
-component Bat {} -component Cat {} - -export { Bat, Cat as Cat, Bar as RenamedBar, Baz as RenamedBaz, StructBar as RenamedStructBar } - -export struct StructBar { foo: int } - -export enum EnumBar { bar } - "# - .to_string(), - )]), - false, - ); - - let doc = document_cache.get_document_by_path(&test::main_test_file_name()).unwrap(); - let doc = doc.node.as_ref().unwrap(); + let dn = find_declaration_node( + &document_cache, + &find_token_by_comment(&document_cache, &test::main_test_file_name(), "_1"), + ) + .unwrap(); - assert!(!is_symbol_name_exported(doc, &SmolStr::from("Foobar"))); // does not exist - assert!(is_symbol_name_exported(doc, &SmolStr::from("Foo"))); - assert!(is_symbol_name_exported(doc, &SmolStr::from("Baz"))); - assert!(!is_symbol_name_exported(doc, &SmolStr::from("Bar"))); // not exported - assert!(is_symbol_name_exported(doc, &SmolStr::from("Bat"))); - assert!(is_symbol_name_exported(doc, &SmolStr::from("Cat"))); - assert!(is_symbol_name_exported(doc, &SmolStr::from("RenamedBar"))); - assert!(is_symbol_name_exported(doc, &SmolStr::from("RenamedBaz"))); - assert!(is_symbol_name_exported(doc, &SmolStr::from("RenamedStructBar"))); - assert!(is_symbol_name_exported(doc, &SmolStr::from("StructBar"))); - assert!(is_symbol_name_exported(doc, &SmolStr::from("EnumBar"))); + assert!(dn.rename(&document_cache, "Foo").is_err()); + assert!(dn.rename(&document_cache, "UsedStruct").is_ok()); + assert!(dn.rename(&document_cache, "UsedEnum").is_ok()); + assert!(dn.rename(&document_cache, "Baz").is_err()); + assert!(dn.rename(&document_cache, "HorizontalLayout").is_err()); } #[test] @@ -1280,9 +1516,7 @@ export component Bar { false, ); - let id = find_node_by_comment(&document_cache, &test::main_test_file_name(), "_1").into(); - let edit = rename_identifier_from_declaration(&document_cache, &id, "XxxYyyZzz").unwrap(); - let edited_text = compile_test_changes(&document_cache, &edit, false); + let edited_text = rename_tester(&document_cache, &test::main_test_file_name(), "_1"); assert_eq!(edited_text.len(), 1); assert!(edited_text[0].contents.contains("struct XxxYyyZzz /* <- TEST_ME_1 ")); @@ -1316,9 +1550,12 @@ export component Bar { false, ); - let id = find_node_by_comment(&document_cache, &test::main_test_file_name(), "_1").into(); - let edit = rename_identifier_from_declaration(&document_cache, &id, "Xxx_Yyy-Zzz").unwrap(); - let edited_text = compile_test_changes(&document_cache, &edit, false); + let edited_text = rename_tester_with_new_name( + &document_cache, + &test::main_test_file_name(), + "_1", + "Xxx_Yyy-Zzz", + ); assert_eq!(edited_text.len(), 1); assert!(edited_text[0].contents.contains("export struct Xxx_Yyy-Zzz /* <- TEST_ME_1 ")); @@ -1352,9 +1589,12 @@ export component Bar { false, ); - let id = find_node_by_comment(&document_cache, &test::main_test_file_name(), "_1").into(); - let edit = rename_identifier_from_declaration(&document_cache, &id, "Xxx_Yyy-Zzz").unwrap(); - let edited_text = compile_test_changes(&document_cache, &edit, false); + let edited_text = rename_tester_with_new_name( + &document_cache, + &test::main_test_file_name(), + "_1", + "Xxx_Yyy-Zzz", + ); assert_eq!(edited_text.len(), 1); assert!(edited_text[0].contents.contains("export struct Xxx_Yyy-Zzz /* <- TEST_ME_1 ")); @@ -1393,10 +1633,8 @@ export { Foo as FExport } false, ); - let id = find_node_by_comment(&document_cache, &test::test_file_name("source.slint"), "_1") - .into(); - let edit = rename_identifier_from_declaration(&document_cache, &id, "XxxYyyZzz").unwrap(); - let edited_text = 
compile_test_changes(&document_cache, &edit, false); + let edited_text = + rename_tester(&document_cache, &test::test_file_name("source.slint"), "_1"); assert_eq!(edited_text.len(), 1); assert_eq!( @@ -1485,10 +1723,8 @@ export { Foo as User4Fxx } false, ); - let id = find_node_by_comment(&document_cache, &test::test_file_name("source.slint"), "_1") - .into(); - let edit = rename_identifier_from_declaration(&document_cache, &id, "XxxYyyZzz").unwrap(); - let edited_text = compile_test_changes(&document_cache, &edit, false); + let edited_text = + rename_tester(&document_cache, &test::test_file_name("source.slint"), "_1"); for ed in &edited_text { let ed_path = ed.url.to_file_path().unwrap(); @@ -1530,10 +1766,6 @@ export { Foo as User4Fxx } export { Foo } enum Foo /* <- TEST_ME_1 */ { - M1, M2, - } - - enum Foo /* <- TEST_ME_2 */ { test, } @@ -1552,29 +1784,11 @@ export { Foo as User4Fxx } true, // redefinition of type warning ); - let id = find_node_by_comment(&document_cache, &test::main_test_file_name(), "_1").into(); - let edit = rename_identifier_from_declaration(&document_cache, &id, "XxxYyyZzz").unwrap(); - let edited_text = compile_test_changes(&document_cache, &edit, false); - - assert_eq!(edited_text.len(), 1); - assert!(edited_text[0].contents.contains("export { Foo }")); - assert!(edited_text[0].contents.contains("enum XxxYyyZzz /* <- TEST_ME_1 ")); - assert!(edited_text[0].contents.contains("M1, M2,")); - assert!(edited_text[0].contents.contains("enum Foo /* <- TEST_ME_2 ")); - assert!(edited_text[0].contents.contains("test,")); - assert!(edited_text[0].contents.contains("property baz-prop")); - assert!(edited_text[0].contents.contains("baz-prop: Foo.test;")); - assert!(edited_text[0].contents.contains("property bar-prop")); - - let id = find_node_by_comment(&document_cache, &test::main_test_file_name(), "_2").into(); - let edit = rename_identifier_from_declaration(&document_cache, &id, "XxxYyyZzz").unwrap(); - let edited_text = compile_test_changes(&document_cache, &edit, false); + let edited_text = rename_tester(&document_cache, &test::main_test_file_name(), "_1"); assert_eq!(edited_text.len(), 1); assert!(edited_text[0].contents.contains("export { XxxYyyZzz }")); - assert!(edited_text[0].contents.contains("enum Foo /* <- TEST_ME_1 ")); - assert!(edited_text[0].contents.contains("M1, M2,")); - assert!(edited_text[0].contents.contains("enum XxxYyyZzz /* <- TEST_ME_2 ")); + assert!(edited_text[0].contents.contains("enum XxxYyyZzz /* <- TEST_ME_1 ")); assert!(edited_text[0].contents.contains("test,")); assert!(edited_text[0].contents.contains("property baz-prop")); assert!(edited_text[0].contents.contains("baz-prop: XxxYyyZzz.test;")); @@ -1594,10 +1808,6 @@ export { Foo as User4Fxx } export { Foo } - struct Foo /* <- TEST_ME_2 */ { - test: Foo, - } - component Baz { in-out property baz-prop; } @@ -1613,27 +1823,11 @@ export { Foo as User4Fxx } true, // redefinition of type warning ); - let id = find_node_by_comment(&document_cache, &test::main_test_file_name(), "_1").into(); - let edit = rename_identifier_from_declaration(&document_cache, &id, "XxxYyyZzz").unwrap(); - let edited_text = compile_test_changes(&document_cache, &edit, false); + let edited_text = rename_tester(&document_cache, &test::main_test_file_name(), "_1"); assert_eq!(edited_text.len(), 1); assert!(edited_text[0].contents.contains("enum XxxYyyZzz /* <- TEST_ME_1 */ ")); - assert!(edited_text[0].contents.contains("struct Foo /* <- TEST_ME_2 */ ")); - assert!(edited_text[0].contents.contains("export { Foo }")); - 
assert!(edited_text[0].contents.contains("test: XxxYyyZzz,")); - assert!(edited_text[0].contents.contains("property baz-prop")); - assert!(edited_text[0].contents.contains("property bar-prop")); - - let id = find_node_by_comment(&document_cache, &test::main_test_file_name(), "_2").into(); - let edit = rename_identifier_from_declaration(&document_cache, &id, "XxxYyyZzz").unwrap(); - let edited_text = compile_test_changes(&document_cache, &edit, false); - - assert_eq!(edited_text.len(), 1); - assert!(edited_text[0].contents.contains("enum Foo /* <- TEST_ME_1 */ ")); assert!(edited_text[0].contents.contains("export { XxxYyyZzz }")); - assert!(edited_text[0].contents.contains("struct XxxYyyZzz /* <- TEST_ME_2 */ ")); - assert!(edited_text[0].contents.contains("test: Foo,")); assert!(edited_text[0].contents.contains("property baz-prop")); assert!(edited_text[0].contents.contains("property bar-prop")); } @@ -1669,10 +1863,8 @@ export { Foo as User4Fxx } false, ); - let id = find_node_by_comment(&document_cache, &test::test_file_name("source.slint"), "_1") - .into(); - let edit = rename_identifier_from_declaration(&document_cache, &id, "XxxYyyZzz").unwrap(); - let edited_text = compile_test_changes(&document_cache, &edit, false); + let edited_text = + rename_tester(&document_cache, &test::test_file_name("source.slint"), "_1"); assert_eq!(edited_text.len(), 1); assert_eq!( @@ -1762,10 +1954,8 @@ export { Foo as User4Fxx } false, ); - let id = find_node_by_comment(&document_cache, &test::test_file_name("source.slint"), "_1") - .into(); - let edit = rename_identifier_from_declaration(&document_cache, &id, "XxxYyyZzz").unwrap(); - let edited_text = compile_test_changes(&document_cache, &edit, false); + let edited_text = + rename_tester(&document_cache, &test::test_file_name("source.slint"), "_1"); for ed in &edited_text { let ed_path = ed.url.to_file_path().unwrap(); @@ -1806,4 +1996,655 @@ export { Foo as User4Fxx } } } } + + #[track_caller] + fn find_declaration_node_by_comment( + document_cache: &common::DocumentCache, + document_path: &Path, + suffix: &str, + ) -> DeclarationNode { + let name = find_node_by_comment(document_cache, document_path, suffix); + find_declaration_node(document_cache, &main_identifier(&name).unwrap()).unwrap() + } + + #[test] + fn test_rename_component_from_use() { + let document_cache = test::compile_test_with_sources( + "fluent", + HashMap::from([( + Url::from_file_path(test::main_test_file_name()).unwrap(), + r#" +export { Foo /* <- TEST_ME_1 */ } + +enum Xyz { Foo, Bar } + +struct Abc { Foo: Xyz } + +component Foo /* <- TEST_ME_TARGET */ inherits Rectangle { + @children +} + +component Baz { + Foo /* <- TEST_ME_2 */ { } +} + +struct Foo { + bar: bool, +} + +export component Bar inherits Foo /* <- TEST_ME_3 */ { + Foo /* <- TEST_ME_4 */ { } + Rectangle { + Foo /* <- TEST_ME_5 */ { } + Foo := Baz { } + } + + if true: Rectangle { + Foo /* <- TEST_ME_6 */ { } + } + + if false: Rectangle { + Foo /* <- TEST_ME_7 */ { } + } + + function Foo(Foo: int) { Foo + 1; } + function F() { self.Foo(42); } + + for i in [1, 2, 3]: Foo /* <- TEST_ME_8 */ { } +} + "# + .to_string(), + )]), + true, // Component `Foo` is replacing a component with the same name + ); + + let target = + find_token_by_comment(&document_cache, &test::main_test_file_name(), "_TARGET"); + + let id = + find_declaration_node_by_comment(&document_cache, &test::main_test_file_name(), "_1"); + id.query.is_same_symbol(&document_cache, target.clone()); + + let id = + find_declaration_node_by_comment(&document_cache, 
&test::main_test_file_name(), "_2"); + id.query.is_same_symbol(&document_cache, target.clone()); + + let id = + find_declaration_node_by_comment(&document_cache, &test::main_test_file_name(), "_3"); + id.query.is_same_symbol(&document_cache, target.clone()); + + let id = + find_declaration_node_by_comment(&document_cache, &test::main_test_file_name(), "_4"); + id.query.is_same_symbol(&document_cache, target.clone()); + + let id = + find_declaration_node_by_comment(&document_cache, &test::main_test_file_name(), "_5"); + id.query.is_same_symbol(&document_cache, target.clone()); + + let id = + find_declaration_node_by_comment(&document_cache, &test::main_test_file_name(), "_6"); + id.query.is_same_symbol(&document_cache, target.clone()); + + let id = + find_declaration_node_by_comment(&document_cache, &test::main_test_file_name(), "_7"); + id.query.is_same_symbol(&document_cache, target.clone()); + + let id = + find_declaration_node_by_comment(&document_cache, &test::main_test_file_name(), "_8"); + id.query.is_same_symbol(&document_cache, target); + } + + #[test] + fn test_rename_struct_from_use() { + let document_cache = test::compile_test_with_sources( + "fluent", + HashMap::from([( + Url::from_file_path(test::main_test_file_name()).unwrap(), + r#" +struct Foo /* <- TEST_ME_DECL */ { + test: bool, +} + +struct Bar { + bar-test: Foo /* <- TEST_ME_1 */, +} + +export component Bar { + property bar-prop: { test: false }; +} + "# + .to_string(), + )]), + false, + ); + + let declaration = find_declaration_node( + &document_cache, + &find_token_by_comment(&document_cache, &test::main_test_file_name(), "_DECL"), + ) + .unwrap() + .query + .token; + + let id = + find_declaration_node_by_comment(&document_cache, &test::main_test_file_name(), "_1"); + id.query.is_same_symbol(&document_cache, declaration.clone()); + + let id = + find_declaration_node_by_comment(&document_cache, &test::main_test_file_name(), "_2"); + id.query.is_same_symbol(&document_cache, declaration); + } + + #[test] + fn test_rename_component_from_use_with_export() { + let document_cache = test::compile_test_with_sources( + "fluent", + HashMap::from([ + ( + Url::from_file_path(test::main_test_file_name()).unwrap(), + r#" + import { F-o_o /* <- TEST_ME_IMPORT1 */ } from "source.slint"; + import { UserComponent } from "user.slint"; + import { User2Component } from "user2.slint"; + import { F-o-o /* <- TEST_ME_IMPORT2 */ as User3Fxx /* <- TEST_ME_IN1 */ } from "user3.slint"; + import { User4Fxx } from "user4.slint"; + + export component Main { + F_o_o /* <- TEST_ME_1 */ { } + UserComponent { } + User2Component { } + User3Fxx /* <- TEST_ME_2 */ { } + User4Fxx /* <- TEST_ME_3 */ { } + } + "# + .to_string(), + ), + ( + Url::from_file_path(test::test_file_name("source.slint")).unwrap(), + r#" + export component F_o-o /* <- TEST_ME_DEF1 */ { @children } + "# + .to_string(), + ), + ( + Url::from_file_path(test::test_file_name("user.slint")).unwrap(), + r#" + import { F-o-o /* <- TEST_ME_IMPORT3 */ as Bar } from "source.slint"; + + export component UserComponent { + Bar /* <- TEST_ME_4 */ { } + } + + export { Bar } + "# + .to_string(), + ), + ( + Url::from_file_path(test::test_file_name("user2.slint")).unwrap(), + r#" + import { F_o_o /* <- TEST_ME_IMPORT4 */ as XxxYyyZzz } from "source.slint"; + + export component User2Component { + XxxYyyZzz /* <- TEST_ME_5 */ { } + } + "# + .to_string(), + ), + ( + Url::from_file_path(test::test_file_name("user3.slint")).unwrap(), + r#" + import { F-o_o /* <- TEST_ME_IMPORT5 */ } from "source.slint"; + + 
export { F_o-o /* <- TEST_ME_EXPORT1 */ } + "# + .to_string(), + ), + ( + Url::from_file_path(test::test_file_name("user4.slint")).unwrap(), + r#" + import { F-o_o /* <- TEST_ME_IMPORT6 */ } from "source.slint"; + + export { F_o-o /* <- TEST_ME_EXPORT2 */ as User4Fxx /* <- TEST_ME_EXT1 */} + "# + .to_string(), + ), + ]), + false, + ); + + let declaration = + find_token_by_comment(&document_cache, &test::test_file_name("source.slint"), "_DEF1"); + + let id = + find_declaration_node_by_comment(&document_cache, &test::main_test_file_name(), "_1"); + id.query.is_same_symbol(&document_cache, declaration.clone()); + + let id = + find_declaration_node_by_comment(&document_cache, &test::main_test_file_name(), "_2"); + let internal_name = + find_token_by_comment(&document_cache, &test::main_test_file_name(), "_IN1"); + id.query.is_same_symbol(&document_cache, internal_name); + + let export_name = + find_token_by_comment(&document_cache, &test::test_file_name("user4.slint"), "_EXT1"); + + let id = + find_declaration_node_by_comment(&document_cache, &test::main_test_file_name(), "_3"); + id.query.is_same_symbol(&document_cache, export_name); + } + + #[test] + fn test_rename_struct_from_use_with_export() { + let document_cache = test::compile_test_with_sources( + "fluent", + HashMap::from([ + ( + Url::from_file_path(test::main_test_file_name()).unwrap(), + r#" +import { Foo /* <- TEST_ME_IMPORT1 */ } from "source.slint"; +import { UserComponent } from "user.slint"; +import { User2Struct } from "user2.slint"; +import { Foo /* <- TEST_ME_IMPORT2 */ as User3Fxx /* <- TEST_ME_IN1 */} from "user3.slint"; +import { User4Fxx } from "user4.slint"; + +export component Main { + property main-prop; + property main-prop2; + property main-prop3; + property main-prop4 <=> uc.user-component-prop; + + property test: main-prop3.member.test_me; + + uc := UserComponent { } +} + "# + .to_string(), + ), + ( + Url::from_file_path(test::test_file_name("source.slint")).unwrap(), + r#" +export struct Foo /* <- TEST_ME_DEF1 */ { test-me: bool, } + "# + .to_string(), + ), + ( + Url::from_file_path(test::test_file_name("user.slint")).unwrap(), + r#" +import { Foo /* <- TEST_ME_IMPORT1 */ as Bar } from "source.slint"; + + +export component UserComponent { + in-out property user-component-prop; +} + +export { Bar } + "# + .to_string(), + ), + ( + Url::from_file_path(test::test_file_name("user2.slint")).unwrap(), + r#" +import { Foo /* <- TEST_ME_IMPORT2 */ as XxxYyyZzz } from "source.slint"; + +export struct User2Struct { + member: XxxYyyZzz, +} + "# + .to_string(), + ), + ( + Url::from_file_path(test::test_file_name("user3.slint")).unwrap(), + r#" +import { Foo /* <- TEST_ME_IMPORT3 */} from "source.slint"; + +export { Foo /* <- TEST_ME_EXPORT1 */} + "# + .to_string(), + ), + ( + Url::from_file_path(test::test_file_name("user4.slint")).unwrap(), + r#" +import { Foo /* <- TEST_ME_IMPORT4 */ } from "source.slint"; + +export { Foo /* <- TEST_ME_EXPORT2 */ as User4Fxx /* <- TEST_ME_EN1 */} + "# + .to_string(), + ), + ]), + false, + ); + + let declaration = + find_token_by_comment(&document_cache, &test::test_file_name("source.slint"), "_DEF1"); + + let id = + find_declaration_node_by_comment(&document_cache, &test::main_test_file_name(), "_1"); + id.query.is_same_symbol(&document_cache, declaration); + + let id = + find_declaration_node_by_comment(&document_cache, &test::main_test_file_name(), "_2"); + let internal_name = + find_token_by_comment(&document_cache, &test::main_test_file_name(), "_IN1"); + 
id.query.is_same_symbol(&document_cache, internal_name); + + let export_name = + find_token_by_comment(&document_cache, &test::test_file_name("user4.slint"), "_EN1"); + + let id = + find_declaration_node_by_comment(&document_cache, &test::main_test_file_name(), "_3"); + id.query.is_same_symbol(&document_cache, export_name); + } + + #[test] + fn test_rename_enum_from_use_with_export() { + let document_cache = test::compile_test_with_sources( + "fluent", + HashMap::from([ + ( + Url::from_file_path(test::main_test_file_name()).unwrap(), + r#" +import { Foo } from "source.slint"; +import { UserComponent } from "user.slint"; +import { User2Struct } from "user2.slint"; +import { Foo as User3Fxx /* <- TEST_ME_IN1 */} from "user3.slint"; +import { User4Fxx } from "user4.slint"; + +export component Main { + property main-prop; + property main-prop2; + property main-prop3; + property main-prop4 <=> uc.user-component-prop; + + property test: main-prop3.member == Foo/* <- TEST_ME_4 */.test; + + uc := UserComponent { } +} + "# + .to_string(), + ), + ( + Url::from_file_path(test::test_file_name("source.slint")).unwrap(), + r#" +export enum Foo /* <- TEST_ME_DEF1 */ { test, } + "# + .to_string(), + ), + ( + Url::from_file_path(test::test_file_name("user.slint")).unwrap(), + r#" +import { Foo /* <- TEST_ME_IMPORT1 */ as Bar } from "source.slint"; + + +export component UserComponent { + in-out property user-component-prop; +} + +export { Bar } + "# + .to_string(), + ), + ( + Url::from_file_path(test::test_file_name("user2.slint")).unwrap(), + r#" +import { Foo /* <- TEST_ME_IMPORT2 */ as XxxYyyZzz } from "source.slint"; + +export struct User2Struct { + member: XxxYyyZzz, +} + "# + .to_string(), + ), + ( + Url::from_file_path(test::test_file_name("user3.slint")).unwrap(), + r#" +import { Foo /* <- TEST_ME_IMPORT3 */} from "source.slint"; + +export { Foo /* <- TEST_ME_EXPORT1 */} + "# + .to_string(), + ), + ( + Url::from_file_path(test::test_file_name("user4.slint")).unwrap(), + r#" +import { Foo /* <- TEST_ME_IMPORT4 */ } from "source.slint"; + +export { Foo /* <- TEST_ME_EXPORT2 */ as User4Fxx /* <- TEST_ME_EN1 */} + "# + .to_string(), + ), + ]), + false, + ); + + let declaration = + find_token_by_comment(&document_cache, &test::test_file_name("source.slint"), "_DEF1"); + + let internal_name = + find_token_by_comment(&document_cache, &test::main_test_file_name(), "_IN1"); + + let export_name = + find_token_by_comment(&document_cache, &test::test_file_name("user4.slint"), "_EN1"); + + let id = + find_declaration_node_by_comment(&document_cache, &test::main_test_file_name(), "_1"); + id.query.is_same_symbol(&document_cache, declaration); + + let id = + find_declaration_node_by_comment(&document_cache, &test::main_test_file_name(), "_2"); + id.query.is_same_symbol(&document_cache, internal_name); + + let id = + find_declaration_node_by_comment(&document_cache, &test::main_test_file_name(), "_3"); + id.query.is_same_symbol(&document_cache, export_name.clone()); + + let id = + find_declaration_node_by_comment(&document_cache, &test::main_test_file_name(), "_4"); + id.query.is_same_symbol(&document_cache, export_name); + } + + #[test] + fn test_rename_import_from_internal_name() { + let document_cache = test::compile_test_with_sources( + "fluent", + HashMap::from([ + ( + Url::from_file_path(test::main_test_file_name()).unwrap(), + r#" +import { Foo as Bar /* <- TEST_ME_1 */ } from "source.slint"; + +export component Main { + Bar { } +} + "# + .to_string(), + ), + ( + 
Url::from_file_path(test::test_file_name("source.slint")).unwrap(), + r#" +export component Foo { } + "# + .to_string(), + ), + ]), + false, + ); + + let edited_text = + rename_tester_with_new_name(&document_cache, &test::main_test_file_name(), "_1", "Baz"); + + assert_eq!(edited_text.len(), 1); + assert!(edited_text[0] + .contents + .contains("import { Foo as Baz /* <- TEST_ME_1 */ } from \"source.slint\";")); + assert!(edited_text[0].contents.contains("component Main {")); + assert!(edited_text[0].contents.contains(" Baz { ")); + + let edited_text = + rename_tester_with_new_name(&document_cache, &test::main_test_file_name(), "_1", "Foo"); + + assert_eq!(edited_text.len(), 1); + assert!(edited_text[0] + .contents + .contains("import { Foo /* <- TEST_ME_1 */ } from \"source.slint\";")); + assert!(edited_text[0].contents.contains("component Main {")); + assert!(edited_text[0].contents.contains(" Foo { ")); + } + + #[test] + fn test_rename_import_from_external_name() { + let document_cache = test::compile_test_with_sources( + "fluent", + HashMap::from([ + ( + Url::from_file_path(test::main_test_file_name()).unwrap(), + r#" +import { Foo /* <- TEST_ME_1 */ as Bar } from "source.slint"; + +export component Main { + Bar { } +} + "# + .to_string(), + ), + ( + Url::from_file_path(test::test_file_name("source.slint")).unwrap(), + r#" +export component Foo { } + "# + .to_string(), + ), + ]), + false, + ); + + let edited_text = + rename_tester_with_new_name(&document_cache, &test::main_test_file_name(), "_1", "Baz"); + + assert_eq!(edited_text.len(), 2); + + for ed in &edited_text { + let ed_path = ed.url.to_file_path().unwrap(); + if ed_path == test::main_test_file_name() { + assert!(ed + .contents + .contains("import { Baz /* <- TEST_ME_1 */ as Bar } from \"source.slint\";")); + assert!(ed.contents.contains("component Main {")); + assert!(ed.contents.contains(" Bar { ")); + } else if ed_path == test::test_file_name("source.slint") { + assert!(ed.contents.contains("export component Baz { }")); + } else { + panic!("Unexpected file!"); + } + } + + let edited_text = + rename_tester_with_new_name(&document_cache, &test::main_test_file_name(), "_1", "Bar"); + + assert_eq!(edited_text.len(), 2); + for ed in &edited_text { + let ed_path = ed.url.to_file_path().unwrap(); + if ed_path == test::main_test_file_name() { + assert!(ed.contents.contains("import { Bar } from \"source.slint\";")); + assert!(ed.contents.contains("component Main {")); + assert!(ed.contents.contains(" Bar { ")); + } else if ed_path == test::test_file_name("source.slint") { + assert!(ed.contents.contains("export component Bar { }")); + } else { + panic!("Unexpected file!"); + } + } + } + + #[test] + fn test_rename_import_from_external_name_with_export_renaming() { + let document_cache = test::compile_test_with_sources( + "fluent", + HashMap::from([ + ( + Url::from_file_path(test::main_test_file_name()).unwrap(), + r#" +import { Foo /* <- TEST_ME_1 */ as Bar } from "source.slint"; + +export component Main { + Bar { } +} + "# + .to_string(), + ), + ( + Url::from_file_path(test::test_file_name("source.slint")).unwrap(), + r#" +component XxxYyyZzz { } + +export { XxxYyyZzz as Foo } + "# + .to_string(), + ), + ]), + false, + ); + + let edited_text = rename_tester_with_new_name( + &document_cache, + &test::main_test_file_name(), + "_1", + "XFooX", + ); + + assert_eq!(edited_text.len(), 2); + + for ed in &edited_text { + let ed_path = ed.url.to_file_path().unwrap(); + if ed_path == test::main_test_file_name() { + assert!(ed + .contents + 
.contains("import { XFooX /* <- TEST_ME_1 */ as Bar } from \"source.slint\";")); + assert!(ed.contents.contains("component Main {")); + assert!(ed.contents.contains(" Bar { ")); + } else if ed_path == test::test_file_name("source.slint") { + assert!(ed.contents.contains("component XxxYyyZzz { }")); + assert!(ed.contents.contains("export { XxxYyyZzz as XFooX }")); + } else { + panic!("Unexpected file!"); + } + } + + let edited_text = + rename_tester_with_new_name(&document_cache, &test::main_test_file_name(), "_1", "Bar"); + + assert_eq!(edited_text.len(), 2); + + for ed in &edited_text { + let ed_path = ed.url.to_file_path().unwrap(); + if ed_path == test::main_test_file_name() { + assert!(ed.contents.contains("import { Bar } from \"source.slint\";")); + assert!(ed.contents.contains("component Main {")); + assert!(ed.contents.contains(" Bar { ")); + } else if ed_path == test::test_file_name("source.slint") { + assert!(ed.contents.contains("component XxxYyyZzz { }")); + assert!(ed.contents.contains("export { XxxYyyZzz as Bar }")); + } else { + panic!("Unexpected file!"); + } + } + + let edited_text = rename_tester(&document_cache, &test::main_test_file_name(), "_1"); + + assert_eq!(edited_text.len(), 2); + for ed in &edited_text { + let ed_path = ed.url.to_file_path().unwrap(); + if ed_path == test::main_test_file_name() { + assert!(ed.contents.contains( + "import { XxxYyyZzz /* <- TEST_ME_1 */ as Bar } from \"source.slint\";" + )); + assert!(ed.contents.contains("component Main {")); + assert!(ed.contents.contains(" Bar { ")); + } else if ed_path == test::test_file_name("source.slint") { + assert!(ed.contents.contains("component XxxYyyZzz { }")); + assert!(ed.contents.contains("export { XxxYyyZzz }")); + } else { + panic!("Unexpected file!"); + } + } + } } diff --git a/tools/lsp/common/token_info.rs b/tools/lsp/common/token_info.rs index 77094bb4008..c5792eebfa3 100644 --- a/tools/lsp/common/token_info.rs +++ b/tools/lsp/common/token_info.rs @@ -13,6 +13,7 @@ use i_slint_compiler::pathutils::clean_path; use smol_str::{SmolStr, ToSmolStr}; use std::path::Path; +#[derive(Clone, Debug)] pub enum TokenInfo { Type(Type), ElementType(ElementType), @@ -47,11 +48,15 @@ pub fn token_info(document_cache: &common::DocumentCache, token: SyntaxToken) -> Some(TokenInfo::Type(doc.local_registry.lookup_qualified(&qual.members))) } SyntaxKind::Element => { - let qual = i_slint_compiler::object_tree::QualifiedTypeName::from_node(n); - let doc = document_cache.get_document_for_source_file(&node.source_file)?; - Some(TokenInfo::ElementType( - doc.local_registry.lookup_element(&qual.to_string()).ok()?, - )) + if !crate::common::is_element_node_ignored(&parent.into()) { + let qual = i_slint_compiler::object_tree::QualifiedTypeName::from_node(n); + let doc = document_cache.get_document_for_source_file(&node.source_file)?; + Some(TokenInfo::ElementType( + doc.local_registry.lookup_element(&qual.to_string()).ok()?, + )) + } else { + None + } } SyntaxKind::Expression => { if token.kind() != SyntaxKind::Identifier { @@ -170,11 +175,11 @@ pub fn token_info(document_cache: &common::DocumentCache, token: SyntaxToken) -> } return find_property_declaration_in_base(document_cache, element, &prop_name); } else if node.kind() == SyntaxKind::DeclaredIdentifier { + if token.kind() != SyntaxKind::Identifier { + return None; + } let parent = node.parent()?; if parent.kind() == SyntaxKind::PropertyChangedCallback { - if token.kind() != SyntaxKind::Identifier { - return None; - } let prop_name = 
i_slint_compiler::parser::normalize_identifier(token.text()); let element = syntax_nodes::Element::new(parent.parent()?)?; if let Some(p) = element.PropertyDeclaration().find_map(|p| { @@ -186,6 +191,60 @@ pub fn token_info(document_cache: &common::DocumentCache, token: SyntaxToken) -> } return find_property_declaration_in_base(document_cache, element, &prop_name); } + if parent.kind() == SyntaxKind::Component { + let doc = document_cache.get_document_for_source_file(&node.source_file)?; + let element_type = doc + .local_registry + .lookup_element( + i_slint_compiler::parser::normalize_identifier(token.text()).as_str(), + ) + .ok()?; + if let ElementType::Component(component) = &element_type { + if component + .node + .as_ref() + .map(|n| n.text_range().contains_range(token.text_range())) + .unwrap_or_default() + { + return Some(TokenInfo::ElementType(element_type)); + } + } + } + if parent.kind() == SyntaxKind::StructDeclaration { + let doc = document_cache.get_document_for_source_file(&node.source_file)?; + let ty = doc + .local_registry + .lookup(i_slint_compiler::parser::normalize_identifier(token.text()).as_str()); + match &ty { + Type::Struct(s) + if s.node + .as_ref() + .and_then(|n| n.parent()) + .map(|n| n.text_range().contains_range(token.text_range())) + .unwrap_or_default() => + { + return Some(TokenInfo::Type(ty)); + } + _ => { /* nothing to do */ } + } + } + if parent.kind() == SyntaxKind::EnumDeclaration { + let doc = document_cache.get_document_for_source_file(&node.source_file)?; + let ty = doc + .local_registry + .lookup(i_slint_compiler::parser::normalize_identifier(token.text()).as_str()); + match &ty { + Type::Enumeration(e) + if e.node + .as_ref() + .map(|n| n.text_range().contains_range(token.text_range())) + .unwrap_or_default() => + { + return Some(TokenInfo::Type(ty)); + } + _ => { /* nothing to do */ } + } + } } node = node.parent()?; } diff --git a/tools/lsp/language.rs b/tools/lsp/language.rs index 2e57954c15e..55b7c3cd5ba 100644 --- a/tools/lsp/language.rs +++ b/tools/lsp/language.rs @@ -498,51 +498,33 @@ pub fn register_request_handlers(rh: &mut RequestHandler) { .collect(); return Ok(Some(common::create_workspace_edit(uri, version, edits))); } - match p.kind() { - SyntaxKind::DeclaredIdentifier => { - common::rename_component::rename_identifier_from_declaration( - &document_cache, - &p.into(), - ¶ms.new_name, - ) + if let Some(declaration_node) = + common::rename_component::find_declaration_node(&document_cache, &tk) + { + return declaration_node + .rename(&document_cache, ¶ms.new_name) .map(Some) .map_err(|e| LspError { code: LspErrorCode::RequestFailed, message: e.to_string(), - }) - } - _ => Err(LspError { - code: LspErrorCode::RequestFailed, - message: "This symbol cannot be renamed.".into(), - }), + }); } - } else { - Err(LspError { - code: LspErrorCode::RequestFailed, - message: "This symbol cannot be renamed.".into(), - }) } + + Err(LspError { + code: LspErrorCode::RequestFailed, + message: "This symbol cannot be renamed.".into(), + }) }); rh.register::(|params, ctx| async move { let mut document_cache = ctx.document_cache.borrow_mut(); let uri = params.text_document.uri; - if let Some((tk, _off)) = token_descr(&mut document_cache, &uri, ¶ms.position) { + if let Some((tk, _)) = token_descr(&mut document_cache, &uri, ¶ms.position) { if find_element_id_for_highlight(&tk, &tk.parent()).is_some() { return Ok(Some(PrepareRenameResponse::Range(util::token_to_lsp_range(&tk)))); } - let p = tk.parent(); - if matches!(p.kind(), SyntaxKind::DeclaredIdentifier) 
{ - if let Some(gp) = p.parent() { - if [ - SyntaxKind::Component, - SyntaxKind::EnumDeclaration, - SyntaxKind::StructDeclaration, - ] - .contains(&gp.kind()) - { - return Ok(Some(PrepareRenameResponse::Range(util::node_to_lsp_range(&p)))); - } - } + if common::rename_component::find_declaration_node(&document_cache, &tk).is_some() { + return Ok(Some(PrepareRenameResponse::Range(util::token_to_lsp_range(&tk)))); } } Ok(None) diff --git a/tools/lsp/preview.rs b/tools/lsp/preview.rs index 62cc96b1130..483d39cdcb1 100644 --- a/tools/lsp/preview.rs +++ b/tools/lsp/preview.rs @@ -301,7 +301,7 @@ fn add_new_component() { } /// Find the identifier that belongs to a component of the given `name` in the `document` -pub fn find_component_identifiers( +fn find_component_identifiers( document: &syntax_nodes::Document, name: &str, ) -> Vec { @@ -381,11 +381,17 @@ fn rename_component( return; }; - if let Ok(edit) = rename_component::rename_identifier_from_declaration( + if let Ok(edit) = rename_component::find_declaration_node( &document_cache, - &identifiers.last().unwrap(), - &new_name, - ) { + &identifiers + .get(0) + .unwrap() + .child_token(i_slint_compiler::parser::SyntaxKind::Identifier) + .unwrap(), + ) + .unwrap() + .rename(&document_cache, &new_name) + { // Update which component to show after refresh from the editor. let mut cache = CONTENT_CACHE.get_or_init(Default::default).lock().unwrap(); cache.rename_current_component(&old_url, &old_name, &new_name);