diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs
index 035c6a2909..f342129b5c 100644
--- a/cli/lsp/documents.rs
+++ b/cli/lsp/documents.rs
@@ -49,7 +49,6 @@ use std::borrow::Cow;
 use std::collections::BTreeSet;
 use std::collections::HashMap;
 use std::collections::HashSet;
-use std::collections::VecDeque;
 use std::fs;
 use std::ops::Range;
 use std::str::FromStr;
@@ -643,26 +642,6 @@ pub fn to_lsp_range(range: &deno_graph::Range) -> lsp::Range {
   }
 }
 
-/// Recurse and collect specifiers that appear in the dependent map.
-fn recurse_dependents(
-  specifier: &ModuleSpecifier,
-  map: &HashMap<ModuleSpecifier, HashSet<ModuleSpecifier>>,
-) -> Vec<ModuleSpecifier> {
-  let mut dependents = HashSet::new();
-  let mut pending = VecDeque::new();
-  pending.push_front(specifier);
-  while let Some(specifier) = pending.pop_front() {
-    if let Some(deps) = map.get(specifier) {
-      for dep in deps {
-        if dependents.insert(dep) {
-          pending.push_front(dep);
-        }
-      }
-    }
-  }
-  dependents.into_iter().cloned().collect()
-}
-
 #[derive(Debug)]
 struct RedirectResolver {
   cache: Arc<dyn HttpCache>,
@@ -851,9 +830,6 @@ pub struct Documents {
   /// A flag that indicates that stated data is potentially invalid and needs to
   /// be recalculated before being considered valid.
   dirty: bool,
-  /// A map where the key is a specifier and the value is a set of specifiers
-  /// that depend on the key.
-  dependents_map: Arc<HashMap<ModuleSpecifier, HashSet<ModuleSpecifier>>>,
   /// A map of documents that are "open" in the language server.
   open_docs: HashMap<ModuleSpecifier, Arc<Document>>,
   /// Documents stored on the file system.
@@ -883,7 +859,6 @@ impl Documents {
     Self {
       cache: cache.clone(),
       dirty: true,
-      dependents_map: Default::default(),
       open_docs: HashMap::default(),
       file_system_docs: Default::default(),
       imports: Default::default(),
@@ -1115,24 +1090,9 @@ impl Documents {
     false
   }
 
-  /// Return an array of specifiers, if any, that are dependent upon the
-  /// supplied specifier. This is used to determine invalidation of diagnostics
-  /// when a module has been changed.
-  pub fn dependents(
-    &mut self,
-    specifier: &ModuleSpecifier,
-  ) -> Vec<ModuleSpecifier> {
-    self.calculate_dependents_if_dirty();
-    if let Some(specifier) = self.resolve_specifier(specifier) {
-      recurse_dependents(&specifier, &self.dependents_map)
-    } else {
-      vec![]
-    }
-  }
-
   /// Returns a collection of npm package requirements.
   pub fn npm_package_reqs(&mut self) -> Arc<Vec<PackageReq>> {
-    self.calculate_dependents_if_dirty();
+    self.calculate_npm_reqs_if_dirty();
     self.npm_specifier_reqs.clone()
   }
 
@@ -1439,93 +1399,46 @@ impl Documents {
       fs_docs.set_dirty(true);
     }
     self.dirty = true;
-    self.calculate_dependents_if_dirty();
   }
 
   /// Iterate through the documents, building a map where the key is a unique
   /// document and the value is a set of specifiers that depend on that
   /// document.
-  fn calculate_dependents_if_dirty(&mut self) {
-    #[derive(Default)]
-    struct DocAnalyzer {
-      dependents_map: HashMap<ModuleSpecifier, HashSet<ModuleSpecifier>>,
-      analyzed_specifiers: HashSet<ModuleSpecifier>,
-      pending_specifiers: VecDeque<ModuleSpecifier>,
-      npm_reqs: HashSet<PackageReq>,
-      has_node_builtin_specifier: bool,
-    }
-
-    impl DocAnalyzer {
-      fn add(&mut self, dep: &ModuleSpecifier, specifier: &ModuleSpecifier) {
-        if !self.analyzed_specifiers.contains(dep) {
-          self.analyzed_specifiers.insert(dep.clone());
-          // perf: ensure this is not added to unless this specifier has never
-          // been analyzed in order to not cause an extra file system lookup
-          self.pending_specifiers.push_back(dep.clone());
-          if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
-            self.npm_reqs.insert(reference.into_inner().req);
-          }
-        }
-
-        self
-          .dependents_map
-          .entry(dep.clone())
-          .or_default()
-          .insert(specifier.clone());
-      }
-
-      fn analyze_doc(&mut self, specifier: &ModuleSpecifier, doc: &Document) {
-        self.analyzed_specifiers.insert(specifier.clone());
-        for dependency in doc.dependencies().values() {
-          if let Some(dep) = dependency.get_code() {
-            if !self.has_node_builtin_specifier && dep.scheme() == "node" {
-              self.has_node_builtin_specifier = true;
-            }
-            self.add(dep, specifier);
-          }
-          if let Some(dep) = dependency.get_type() {
-            self.add(dep, specifier);
-          }
-        }
-        if let Some(dep) = doc.maybe_types_dependency().maybe_specifier() {
-          self.add(dep, specifier);
-        }
-      }
-    }
-
+  fn calculate_npm_reqs_if_dirty(&mut self) {
+    let mut npm_reqs = HashSet::new();
+    let mut has_node_builtin_specifier = false;
     let is_fs_docs_dirty = self.file_system_docs.set_dirty(false);
-
     if !is_fs_docs_dirty && !self.dirty {
       return;
     }
-
-    let mut doc_analyzer = DocAnalyzer::default();
-    // favor documents that are open in case a document exists in both collections
-    for entry in self.file_system_docs.docs.iter() {
-      let specifier = entry.key();
-      let doc = entry.value();
-      doc_analyzer.analyze_doc(specifier, doc);
-    }
-    for (specifier, doc) in self.open_docs.iter() {
-      doc_analyzer.analyze_doc(specifier, doc);
-    }
-
-    let resolver = self.get_resolver();
-    let npm_resolver = self.get_npm_resolver();
-    while let Some(specifier) = doc_analyzer.pending_specifiers.pop_front() {
-      if let Some(doc) = self.open_docs.get(&specifier) {
-        doc_analyzer.analyze_doc(&specifier, doc);
-      } else if let Some(doc) = self.file_system_docs.get(
-        &self.cache,
-        resolver,
-        &specifier,
-        npm_resolver,
-      ) {
-        doc_analyzer.analyze_doc(&specifier, &doc);
+    let mut visit_doc = |doc: &Arc<Document>| {
+      for dependency in doc.dependencies().values() {
+        if let Some(dep) = dependency.get_code() {
+          if dep.scheme() == "node" {
+            has_node_builtin_specifier = true;
+          }
+          if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
+            npm_reqs.insert(reference.into_inner().req);
+          }
+        }
+        if let Some(dep) = dependency.get_type() {
+          if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
+            npm_reqs.insert(reference.into_inner().req);
+          }
+        }
       }
+      if let Some(dep) = doc.maybe_types_dependency().maybe_specifier() {
+        if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
+          npm_reqs.insert(reference.into_inner().req);
+        }
+      }
+    };
+    for entry in self.file_system_docs.docs.iter() {
+      visit_doc(entry.value())
+    }
+    for doc in self.open_docs.values() {
+      visit_doc(doc);
     }
-
-    let mut npm_reqs = doc_analyzer.npm_reqs;
 
     // fill the reqs from the lockfile
     if let Some(lockfile) = self.lockfile.as_ref() {
@@ -1542,14 +1455,12 @@ impl Documents {
     // Ensure a @types/node package exists when any module uses a node: specifier.
     // Unlike on the command line, here we just add @types/node to the npm package
     // requirements since this won't end up in the lockfile.
-    self.has_injected_types_node_package = doc_analyzer
-      .has_node_builtin_specifier
+    self.has_injected_types_node_package = has_node_builtin_specifier
       && !npm_reqs.iter().any(|r| r.name == "@types/node");
     if self.has_injected_types_node_package {
       npm_reqs.insert(PackageReq::from_str("@types/node").unwrap());
     }
 
-    self.dependents_map = Arc::new(doc_analyzer.dependents_map);
     self.npm_specifier_reqs = Arc::new({
       let mut reqs = npm_reqs.into_iter().collect::<Vec<_>>();
       reqs.sort();
diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs
index acb06e711f..17c8cc5ba0 100644
--- a/cli/lsp/language_server.rs
+++ b/cli/lsp/language_server.rs
@@ -1235,9 +1235,7 @@ impl Inner {
       Ok(document) => {
         if document.is_diagnosable() {
           self.refresh_npm_specifiers().await;
-          self
-            .diagnostics_server
-            .invalidate(&self.documents.dependents(&specifier));
+          self.diagnostics_server.invalidate(&[specifier]);
           self.send_diagnostics_update();
           self.send_testing_update();
         }
@@ -1279,9 +1277,7 @@
       .normalize_url(&params.text_document.uri, LspUrlKind::File);
     if self.is_diagnosable(&specifier) {
       self.refresh_npm_specifiers().await;
-      let mut specifiers = self.documents.dependents(&specifier);
-      specifiers.push(specifier.clone());
-      self.diagnostics_server.invalidate(&specifiers);
+      self.diagnostics_server.invalidate(&[specifier.clone()]);
       self.send_diagnostics_update();
       self.send_testing_update();
     }
@@ -3181,8 +3177,7 @@ impl tower_lsp::LanguageServer for LanguageServer {
     let document = inner.did_open(&specifier, params);
     if document.is_diagnosable() {
       inner.refresh_npm_specifiers().await;
-      let specifiers = inner.documents.dependents(&specifier);
-      inner.diagnostics_server.invalidate(&specifiers);
+      inner.diagnostics_server.invalidate(&[specifier]);
       inner.send_diagnostics_update();
       inner.send_testing_update();
     }
diff --git a/tests/integration/lsp_tests.rs b/tests/integration/lsp_tests.rs
index 6821eb8da5..96fb9507ab 100644
--- a/tests/integration/lsp_tests.rs
+++ b/tests/integration/lsp_tests.rs
@@ -7862,18 +7862,18 @@ fn lsp_npm_specifier_unopened_file() {
     .use_http_server()
     .use_temp_cwd()
     .build();
-  let mut client = context.new_lsp_command().build();
-  client.initialize_default();
-
+  let temp_dir = context.temp_dir();
   // create other.ts, which re-exports an npm specifier
-  client.deno_dir().write(
+  temp_dir.write(
     "other.ts",
     "export { default as chalk } from 'npm:chalk@5';",
   );
+  let mut client = context.new_lsp_command().build();
+  client.initialize_default();
 
   // cache the other.ts file to the DENO_DIR
   let deno = deno_cmd_with_deno_dir(client.deno_dir())
-    .current_dir(client.deno_dir().path())
+    .current_dir(temp_dir.path())
     .arg("cache")
     .arg("--quiet")
     .arg("other.ts")
@@ -7891,12 +7891,9 @@ fn lsp_npm_specifier_unopened_file() {
   assert!(stderr.is_empty());
 
   // open main.ts, which imports other.ts (unopened)
-  let main_url =
-    ModuleSpecifier::from_file_path(client.deno_dir().path().join("main.ts"))
-      .unwrap();
   client.did_open(json!({
     "textDocument": {
-      "uri": main_url,
+      "uri": temp_dir.uri().join("main.ts").unwrap(),
       "languageId": "typescript",
       "version": 1,
       "text": "import { chalk } from './other.ts';\n\n",
@@ -7907,7 +7904,7 @@ fn lsp_npm_specifier_unopened_file() {
     "textDocument/didChange",
     json!({
       "textDocument": {
-        "uri": main_url,
+        "uri": temp_dir.uri().join("main.ts").unwrap(),
         "version": 2
      },
      "contentChanges": [
@@ -7925,7 +7922,7 @@ fn lsp_npm_specifier_unopened_file() {
 
   // now ensure completions work
   let list = client.get_completion_list(
-    main_url,
+    temp_dir.uri().join("main.ts").unwrap(),
     (2, 6),
     json!({
       "triggerKind": 2,
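
Note (not part of the patch): the sketch below is an illustrative, standalone rendering of the npm requirement collection that `calculate_npm_reqs_if_dirty` now performs per dependency specifier. The helper name `collect_npm_reqs` and the `deno_core`/`deno_semver` import paths are assumptions; `NpmPackageReqReference::from_specifier`, `.into_inner().req`, the `node:` scheme check, and the sorted result come from the diff itself.

// Illustrative sketch only, assuming the deno_core and deno_semver crates.
use std::collections::HashSet;

use deno_core::ModuleSpecifier;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq;

// Hypothetical helper mirroring the per-dependency logic in
// `calculate_npm_reqs_if_dirty`.
fn collect_npm_reqs<'a>(
  deps: impl IntoIterator<Item = &'a ModuleSpecifier>,
) -> (Vec<PackageReq>, bool) {
  let mut npm_reqs = HashSet::new();
  let mut has_node_builtin_specifier = false;
  for dep in deps {
    // A `node:` specifier means @types/node may need to be injected later.
    if dep.scheme() == "node" {
      has_node_builtin_specifier = true;
    }
    // e.g. `npm:chalk@5` parses into the package requirement `chalk@5`.
    if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
      npm_reqs.insert(reference.into_inner().req);
    }
  }
  // As in the patch: return a sorted, de-duplicated list of requirements.
  let mut reqs = npm_reqs.into_iter().collect::<Vec<_>>();
  reqs.sort();
  (reqs, has_node_builtin_specifier)
}

Dropping the dependents map means the language server no longer walks reverse dependencies at all: diagnostics are invalidated only for the changed specifier (see the language_server.rs hunks), trading exhaustive invalidation for a much cheaper update path.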