diff --git a/cli/file_fetcher.rs b/cli/file_fetcher.rs
index 5b530cf6dd..a74a14a3fa 100644
--- a/cli/file_fetcher.rs
+++ b/cli/file_fetcher.rs
@@ -186,10 +186,6 @@ impl FileFetcher {
     }
   }
 
-  pub fn http_cache(&self) -> &Arc<dyn HttpCache> {
-    &self.http_cache
-  }
-
   pub fn cache_setting(&self) -> &CacheSetting {
     &self.cache_setting
   }
diff --git a/cli/jsr.rs b/cli/jsr.rs
new file mode 100644
index 0000000000..9897a11935
--- /dev/null
+++ b/cli/jsr.rs
@@ -0,0 +1,306 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+use crate::args::jsr_url;
+use crate::file_fetcher::FileFetcher;
+use dashmap::DashMap;
+use deno_cache_dir::HttpCache;
+use deno_core::parking_lot::Mutex;
+use deno_core::serde_json;
+use deno_core::ModuleSpecifier;
+use deno_graph::packages::JsrPackageInfo;
+use deno_graph::packages::JsrPackageVersionInfo;
+use deno_lockfile::Lockfile;
+use deno_runtime::permissions::PermissionsContainer;
+use deno_semver::jsr::JsrPackageReqReference;
+use deno_semver::package::PackageNv;
+use deno_semver::package::PackageReq;
+use std::borrow::Cow;
+use std::sync::Arc;
+
+/// Keep in sync with `JsrFetchResolver`!
+#[derive(Debug)]
+pub struct JsrCacheResolver {
+  nv_by_req: DashMap<PackageReq, Option<PackageNv>>,
+  /// The `module_graph` field of the version infos should be forcibly absent.
+  /// It can be large and we don't want to store it.
+  info_by_nv: DashMap<PackageNv, Option<Arc<JsrPackageVersionInfo>>>,
+  info_by_name: DashMap<String, Option<Arc<JsrPackageInfo>>>,
+  cache: Arc<dyn HttpCache>,
+}
+
+impl JsrCacheResolver {
+  pub fn new(
+    cache: Arc<dyn HttpCache>,
+    lockfile: Option<Arc<Mutex<Lockfile>>>,
+  ) -> Self {
+    let nv_by_req = DashMap::new();
+    if let Some(lockfile) = lockfile {
+      for (req_url, nv_url) in &lockfile.lock().content.packages.specifiers {
+        let Some(req) = req_url.strip_prefix("jsr:") else {
+          continue;
+        };
+        let Some(nv) = nv_url.strip_prefix("jsr:") else {
+          continue;
+        };
+        let Ok(req) = PackageReq::from_str(req) else {
+          continue;
+        };
+        let Ok(nv) = PackageNv::from_str(nv) else {
+          continue;
+        };
+        nv_by_req.insert(req, Some(nv));
+      }
+    }
+    Self {
+      nv_by_req,
+      info_by_nv: Default::default(),
+      info_by_name: Default::default(),
+      cache: cache.clone(),
+    }
+  }
+
+  pub fn req_to_nv(&self, req: &PackageReq) -> Option<PackageNv> {
+    if let Some(nv) = self.nv_by_req.get(req) {
+      return nv.value().clone();
+    }
+    let maybe_get_nv = || {
+      let name = req.name.clone();
+      let package_info = self.package_info(&name)?;
+      // Find the first matching version of the package which is cached.
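+      // (Only versions whose metadata is already present in the local cache
+      // are considered, so this can return `None` until the package is cached.)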
+      let mut versions = package_info.versions.keys().collect::<Vec<_>>();
+      versions.sort();
+      let version = versions
+        .into_iter()
+        .rev()
+        .find(|v| {
+          if req.version_req.tag().is_some() || !req.version_req.matches(v) {
+            return false;
+          }
+          let nv = PackageNv {
+            name: name.clone(),
+            version: (*v).clone(),
+          };
+          self.package_version_info(&nv).is_some()
+        })
+        .cloned()?;
+      Some(PackageNv { name, version })
+    };
+    let nv = maybe_get_nv();
+    self.nv_by_req.insert(req.clone(), nv.clone());
+    nv
+  }
+
+  pub fn jsr_to_registry_url(
+    &self,
+    specifier: &ModuleSpecifier,
+  ) -> Option<ModuleSpecifier> {
+    let req_ref = JsrPackageReqReference::from_str(specifier.as_str()).ok()?;
+    let req = req_ref.req().clone();
+    let maybe_nv = self.req_to_nv(&req);
+    let nv = maybe_nv.as_ref()?;
+    let info = self.package_version_info(nv)?;
+    let path = info.export(&normalize_export_name(req_ref.sub_path()))?;
+    jsr_url()
+      .join(&format!("{}/{}/{}", &nv.name, &nv.version, &path))
+      .ok()
+  }
+
+  pub fn lookup_export_for_path(
+    &self,
+    nv: &PackageNv,
+    path: &str,
+  ) -> Option<String> {
+    let info = self.package_version_info(nv)?;
+    let path = path.strip_prefix("./").unwrap_or(path);
+    for (export, path_) in info.exports() {
+      if path_.strip_prefix("./").unwrap_or(path_) == path {
+        return Some(export.strip_prefix("./").unwrap_or(export).to_string());
+      }
+    }
+    None
+  }
+
+  pub fn lookup_req_for_nv(&self, nv: &PackageNv) -> Option<PackageReq> {
+    for entry in self.nv_by_req.iter() {
+      let Some(nv_) = entry.value() else {
+        continue;
+      };
+      if nv_ == nv {
+        return Some(entry.key().clone());
+      }
+    }
+    None
+  }
+
+  pub fn package_info(&self, name: &str) -> Option<Arc<JsrPackageInfo>> {
+    if let Some(info) = self.info_by_name.get(name) {
+      return info.value().clone();
+    }
+    let read_cached_package_info = || {
+      let meta_url = jsr_url().join(&format!("{}/meta.json", name)).ok()?;
+      let meta_bytes = read_cached_url(&meta_url, &self.cache)?;
+      serde_json::from_slice::<JsrPackageInfo>(&meta_bytes).ok()
+    };
+    let info = read_cached_package_info().map(Arc::new);
+    self.info_by_name.insert(name.to_string(), info.clone());
+    info
+  }
+
+  pub fn package_version_info(
+    &self,
+    nv: &PackageNv,
+  ) -> Option<Arc<JsrPackageVersionInfo>> {
+    if let Some(info) = self.info_by_nv.get(nv) {
+      return info.value().clone();
+    }
+    let read_cached_package_version_info = || {
+      let meta_url = jsr_url()
+        .join(&format!("{}/{}_meta.json", &nv.name, &nv.version))
+        .ok()?;
+      let meta_bytes = read_cached_url(&meta_url, &self.cache)?;
+      partial_jsr_package_version_info_from_slice(&meta_bytes).ok()
+    };
+    let info = read_cached_package_version_info().map(Arc::new);
+    self.info_by_nv.insert(nv.clone(), info.clone());
+    info
+  }
+}
+
+fn read_cached_url(
+  url: &ModuleSpecifier,
+  cache: &Arc<dyn HttpCache>,
+) -> Option<Vec<u8>> {
+  cache
+    .read_file_bytes(
+      &cache.cache_item_key(url).ok()?,
+      None,
+      deno_cache_dir::GlobalToLocalCopy::Disallow,
+    )
+    .ok()?
+}
+
+/// This is similar to a subset of `JsrCacheResolver` which fetches rather than
+/// just reads the cache. Keep in sync!
+#[derive(Debug)]
+pub struct JsrFetchResolver {
+  nv_by_req: DashMap<PackageReq, Option<PackageNv>>,
+  /// The `module_graph` field of the version infos should be forcibly absent.
+  /// It can be large and we don't want to store it.
+  info_by_nv: DashMap<PackageNv, Option<Arc<JsrPackageVersionInfo>>>,
+  info_by_name: DashMap<String, Option<Arc<JsrPackageInfo>>>,
+  file_fetcher: FileFetcher,
+}
+
+impl JsrFetchResolver {
+  pub fn new(file_fetcher: FileFetcher) -> Self {
+    Self {
+      nv_by_req: Default::default(),
+      info_by_nv: Default::default(),
+      info_by_name: Default::default(),
+      file_fetcher,
+    }
+  }
+
+  pub async fn req_to_nv(&self, req: &PackageReq) -> Option<PackageNv> {
+    if let Some(nv) = self.nv_by_req.get(req) {
+      return nv.value().clone();
+    }
+    let maybe_get_nv = || async {
+      let name = req.name.clone();
+      let package_info = self.package_info(&name).await?;
+      // Find the latest version of the package that matches the requirement.
+      let mut versions = package_info.versions.keys().collect::<Vec<_>>();
+      versions.sort();
+      let version = versions
+        .into_iter()
+        .rev()
+        .find(|v| req.version_req.tag().is_none() && req.version_req.matches(v))
+        .cloned()?;
+      Some(PackageNv { name, version })
+    };
+    let nv = maybe_get_nv().await;
+    self.nv_by_req.insert(req.clone(), nv.clone());
+    nv
+  }
+
+  pub async fn package_info(&self, name: &str) -> Option<Arc<JsrPackageInfo>> {
+    if let Some(info) = self.info_by_name.get(name) {
+      return info.value().clone();
+    }
+    let read_cached_package_info = || async {
+      let meta_url = jsr_url().join(&format!("{}/meta.json", name)).ok()?;
+      let file = self
+        .file_fetcher
+        .fetch(&meta_url, PermissionsContainer::allow_all())
+        .await
+        .ok()?;
+      serde_json::from_slice::<JsrPackageInfo>(&file.source).ok()
+    };
+    let info = read_cached_package_info().await.map(Arc::new);
+    self.info_by_name.insert(name.to_string(), info.clone());
+    info
+  }
+
+  pub async fn package_version_info(
+    &self,
+    nv: &PackageNv,
+  ) -> Option<Arc<JsrPackageVersionInfo>> {
+    if let Some(info) = self.info_by_nv.get(nv) {
+      return info.value().clone();
+    }
+    let read_cached_package_version_info = || async {
+      let meta_url = jsr_url()
+        .join(&format!("{}/{}_meta.json", &nv.name, &nv.version))
+        .ok()?;
+      let file = self
+        .file_fetcher
+        .fetch(&meta_url, PermissionsContainer::allow_all())
+        .await
+        .ok()?;
+      partial_jsr_package_version_info_from_slice(&file.source).ok()
+    };
+    let info = read_cached_package_version_info().await.map(Arc::new);
+    self.info_by_nv.insert(nv.clone(), info.clone());
+    info
+  }
+}
+
+// TODO(nayeemrmn): This is duplicated from a private function in deno_graph
+// 0.65.1. Make it public or cleanup otherwise.
+fn normalize_export_name(sub_path: Option<&str>) -> Cow<str> {
+  let Some(sub_path) = sub_path else {
+    return Cow::Borrowed(".");
+  };
+  if sub_path.is_empty() || matches!(sub_path, "/" | ".") {
+    Cow::Borrowed(".")
+  } else {
+    let sub_path = if sub_path.starts_with('/') {
+      Cow::Owned(format!(".{}", sub_path))
+    } else if !sub_path.starts_with("./") {
+      Cow::Owned(format!("./{}", sub_path))
+    } else {
+      Cow::Borrowed(sub_path)
+    };
+    if let Some(prefix) = sub_path.strip_suffix('/') {
+      Cow::Owned(prefix.to_string())
+    } else {
+      sub_path
+    }
+  }
+}
+
+/// This is a roundabout way of deserializing `JsrPackageVersionInfo`,
+/// because we only want the `exports` field and `module_graph` is large.
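+/// (It first deserializes into a generic `serde_json::Value`, then keeps only
+/// the `exports` field; `manifest` and `module_graph` are left empty.)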
+fn partial_jsr_package_version_info_from_slice(
+  slice: &[u8],
+) -> serde_json::Result<JsrPackageVersionInfo> {
+  let mut info = serde_json::from_slice::<serde_json::Value>(slice)?;
+  Ok(JsrPackageVersionInfo {
+    manifest: Default::default(), // not used by the LSP (only caching checks this in deno_graph)
+    exports: info
+      .as_object_mut()
+      .and_then(|o| o.remove("exports"))
+      .unwrap_or_default(),
+    module_graph: None,
+  })
+}
diff --git a/cli/lsp/completions.rs b/cli/lsp/completions.rs
index 074d913c5e..2186949fb9 100644
--- a/cli/lsp/completions.rs
+++ b/cli/lsp/completions.rs
@@ -6,13 +6,13 @@ use super::config::WorkspaceSettings;
 use super::documents::Documents;
 use super::documents::DocumentsFilter;
 use super::jsr::CliJsrSearchApi;
-use super::jsr::JsrResolver;
 use super::lsp_custom;
 use super::npm::CliNpmSearchApi;
 use super::registries::ModuleRegistry;
 use super::search::PackageSearchApi;
 use super::tsc;
 
+use crate::jsr::JsrFetchResolver;
 use crate::util::path::is_importable_ext;
 use crate::util::path::relative_specifier;
 use crate::util::path::specifier_to_file_path;
@@ -181,7 +181,7 @@ pub async fn get_import_completions(
       &text,
       &range,
       jsr_search_api,
-      jsr_search_api.get_resolver(),
+      Some(jsr_search_api.get_resolver()),
     )
     .await?;
     Some(lsp::CompletionResponse::List(lsp::CompletionList {
@@ -518,7 +518,7 @@ async fn get_jsr_completions(
   specifier: &str,
   range: &lsp::Range,
   jsr_search_api: &impl PackageSearchApi,
-  jsr_resolver: &JsrResolver,
+  jsr_resolver: Option<&JsrFetchResolver>,
 ) -> Option<Vec<lsp::CompletionItem>> {
   // First try to match `jsr:some-package@some-version/`.
   if let Ok(req_ref) = JsrPackageReqReference::from_str(specifier) {
@@ -526,7 +526,10 @@ async fn get_jsr_completions(
     if sub_path.is_some() || specifier.ends_with('/') {
       let export_prefix = sub_path.unwrap_or("");
       let req = req_ref.req();
-      let nv = jsr_resolver.req_to_nv(req);
+      let nv = match jsr_resolver {
+        Some(jsr_resolver) => jsr_resolver.req_to_nv(req).await,
+        None => None,
+      };
       let nv = nv.or_else(|| PackageNv::from_str(&req.to_string()).ok())?;
       let exports = jsr_search_api.exports(&nv).await.ok()?;
       let items = exports
@@ -799,7 +802,6 @@ mod tests {
   use super::*;
   use crate::cache::GlobalHttpCache;
   use crate::cache::HttpCache;
-  use crate::cache::RealDenoCacheEnv;
   use crate::lsp::documents::Documents;
   use crate::lsp::documents::LanguageId;
   use crate::lsp::search::tests::TestPackageSearchApi;
@@ -1007,14 +1009,6 @@ mod tests {
 
   #[tokio::test]
   async fn test_get_jsr_completions() {
-    let temp_dir = TempDir::default();
-    let jsr_resolver = JsrResolver::from_cache_and_lockfile(
-      Arc::new(GlobalHttpCache::new(
-        temp_dir.path().to_path_buf(),
-        RealDenoCacheEnv,
-      )),
-      None,
-    );
     let jsr_search_api = TestPackageSearchApi::default()
       .with_package_version("@std/archive", "1.0.0", &[])
       .with_package_version("@std/assert", "1.0.0", &[])
@@ -1031,15 +1025,10 @@ mod tests {
       },
     };
     let referrer = ModuleSpecifier::parse("file:///referrer.ts").unwrap();
-    let actual = get_jsr_completions(
-      &referrer,
-      "jsr:as",
-      &range,
-      &jsr_search_api,
-      &jsr_resolver,
-    )
-    .await
-    .unwrap();
+    let actual =
+      get_jsr_completions(&referrer, "jsr:as", &range, &jsr_search_api, None)
+        .await
+        .unwrap();
     assert_eq!(
       actual,
       vec![
@@ -1095,14 +1084,6 @@ mod tests {
 
   #[tokio::test]
   async fn test_get_jsr_completions_for_versions() {
-    let temp_dir = TempDir::default();
-    let jsr_resolver = JsrResolver::from_cache_and_lockfile(
-      Arc::new(GlobalHttpCache::new(
-        temp_dir.path().to_path_buf(),
-        RealDenoCacheEnv,
-      )),
-      None,
-    );
     let jsr_search_api = TestPackageSearchApi::default()
.with_package_version("@std/assert", "0.3.0", &[]) .with_package_version("@std/assert", "0.4.0", &[]) @@ -1123,7 +1104,7 @@ mod tests { "jsr:@std/assert@", &range, &jsr_search_api, - &jsr_resolver, + None, ) .await .unwrap(); @@ -1205,14 +1186,6 @@ mod tests { #[tokio::test] async fn test_get_jsr_completions_for_exports() { - let temp_dir = TempDir::default(); - let jsr_resolver = JsrResolver::from_cache_and_lockfile( - Arc::new(GlobalHttpCache::new( - temp_dir.path().to_path_buf(), - RealDenoCacheEnv, - )), - None, - ); let jsr_search_api = TestPackageSearchApi::default().with_package_version( "@std/path", "0.1.0", @@ -1234,7 +1207,7 @@ mod tests { "jsr:@std/path@0.1.0/co", &range, &jsr_search_api, - &jsr_resolver, + None, ) .await .unwrap(); diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs index 276cae0a23..722cd77f67 100644 --- a/cli/lsp/documents.rs +++ b/cli/lsp/documents.rs @@ -3,7 +3,6 @@ use super::cache::calculate_fs_version; use super::cache::calculate_fs_version_at_path; use super::cache::LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY; -use super::jsr::JsrResolver; use super::language_server::StateNpmSnapshot; use super::text::LineIndex; use super::tsc; @@ -15,6 +14,7 @@ use crate::args::ConfigFile; use crate::args::JsxImportSourceConfig; use crate::cache::FastInsecureHasher; use crate::cache::HttpCache; +use crate::jsr::JsrCacheResolver; use crate::lsp::logging::lsp_warn; use crate::npm::CliNpmResolver; use crate::resolver::CliGraphResolver; @@ -893,7 +893,7 @@ pub struct Documents { /// A resolver that takes into account currently loaded import map and JSX /// settings. resolver: Arc, - jsr_resolver: Arc, + jsr_resolver: Arc, /// The npm package requirements found in npm specifiers. npm_specifier_reqs: Arc>, /// Gets if any document had a node: specifier such that a @types/node package @@ -928,10 +928,7 @@ impl Documents { bare_node_builtins_enabled: false, sloppy_imports_resolver: None, })), - jsr_resolver: Arc::new(JsrResolver::from_cache_and_lockfile( - cache.clone(), - None, - )), + jsr_resolver: Arc::new(JsrCacheResolver::new(cache.clone(), None)), npm_specifier_reqs: Default::default(), has_injected_types_node_package: false, redirect_resolver: Arc::new(RedirectResolver::new(cache)), @@ -1336,7 +1333,7 @@ impl Documents { Ok(()) } - pub fn get_jsr_resolver(&self) -> &Arc { + pub fn get_jsr_resolver(&self) -> &Arc { &self.jsr_resolver } @@ -1344,10 +1341,8 @@ impl Documents { &mut self, lockfile: Option>>, ) { - self.jsr_resolver = Arc::new(JsrResolver::from_cache_and_lockfile( - self.cache.clone(), - lockfile, - )); + self.jsr_resolver = + Arc::new(JsrCacheResolver::new(self.cache.clone(), lockfile)); } pub fn update_config(&mut self, options: UpdateDocumentConfigOptions) { @@ -1452,7 +1447,7 @@ impl Documents { // specifier for free. sloppy_imports_resolver: None, })); - self.jsr_resolver = Arc::new(JsrResolver::from_cache_and_lockfile( + self.jsr_resolver = Arc::new(JsrCacheResolver::new( self.cache.clone(), options.maybe_lockfile, )); diff --git a/cli/lsp/jsr.rs b/cli/lsp/jsr.rs index 47a2c1e84f..29ecec60bd 100644 --- a/cli/lsp/jsr.rs +++ b/cli/lsp/jsr.rs @@ -1,198 +1,26 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
 use crate::args::jsr_api_url;
-use crate::args::jsr_url;
 use crate::file_fetcher::FileFetcher;
+use crate::jsr::JsrFetchResolver;
 use dashmap::DashMap;
-use deno_cache_dir::HttpCache;
 use deno_core::anyhow::anyhow;
 use deno_core::error::AnyError;
-use deno_core::parking_lot::Mutex;
 use deno_core::serde_json;
-use deno_core::ModuleSpecifier;
-use deno_graph::packages::JsrPackageInfo;
-use deno_graph::packages::JsrPackageVersionInfo;
-use deno_lockfile::Lockfile;
 use deno_runtime::permissions::PermissionsContainer;
-use deno_semver::jsr::JsrPackageReqReference;
 use deno_semver::package::PackageNv;
-use deno_semver::package::PackageReq;
 use deno_semver::Version;
 use serde::Deserialize;
-use std::borrow::Cow;
 use std::sync::Arc;
 
-use super::cache::LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY;
 use super::search::PackageSearchApi;
 
-#[derive(Debug)]
-pub struct JsrResolver {
-  nv_by_req: DashMap<PackageReq, Option<PackageNv>>,
-  /// The `module_graph` field of the version infos should be forcibly absent.
-  /// It can be large and we don't want to store it.
-  info_by_nv: DashMap<PackageNv, Option<JsrPackageVersionInfo>>,
-  info_by_name: DashMap<String, Option<JsrPackageInfo>>,
-  cache: Arc<dyn HttpCache>,
-}
-
-impl JsrResolver {
-  pub fn from_cache_and_lockfile(
-    cache: Arc<dyn HttpCache>,
-    lockfile: Option<Arc<Mutex<Lockfile>>>,
-  ) -> Self {
-    let nv_by_req = DashMap::new();
-    if let Some(lockfile) = lockfile {
-      for (req_url, nv_url) in &lockfile.lock().content.packages.specifiers {
-        let Some(req) = req_url.strip_prefix("jsr:") else {
-          continue;
-        };
-        let Some(nv) = nv_url.strip_prefix("jsr:") else {
-          continue;
-        };
-        let Ok(req) = PackageReq::from_str(req) else {
-          continue;
-        };
-        let Ok(nv) = PackageNv::from_str(nv) else {
-          continue;
-        };
-        nv_by_req.insert(req, Some(nv));
-      }
-    }
-    Self {
-      nv_by_req,
-      info_by_nv: Default::default(),
-      info_by_name: Default::default(),
-      cache: cache.clone(),
-    }
-  }
-
-  pub fn req_to_nv(&self, req: &PackageReq) -> Option<PackageNv> {
-    let nv = self.nv_by_req.entry(req.clone()).or_insert_with(|| {
-      let name = req.name.clone();
-      let maybe_package_info = self
-        .info_by_name
-        .entry(name.clone())
-        .or_insert_with(|| read_cached_package_info(&name, &self.cache));
-      let package_info = maybe_package_info.as_ref()?;
-      // Find the first matching version of the package which is cached.
-      let mut versions = package_info.versions.keys().collect::<Vec<_>>();
-      versions.sort();
-      let version = versions
-        .into_iter()
-        .rev()
-        .find(|v| {
-          if req.version_req.tag().is_some() || !req.version_req.matches(v) {
-            return false;
-          }
-          let nv = PackageNv {
-            name: name.clone(),
-            version: (*v).clone(),
-          };
-          self
-            .info_by_nv
-            .entry(nv.clone())
-            .or_insert_with(|| {
-              read_cached_package_version_info(&nv, &self.cache)
-            })
-            .is_some()
-        })
-        .cloned()?;
-      Some(PackageNv { name, version })
-    });
-    nv.value().clone()
-  }
-
-  pub fn jsr_to_registry_url(
-    &self,
-    specifier: &ModuleSpecifier,
-  ) -> Option<ModuleSpecifier> {
-    let req_ref = JsrPackageReqReference::from_str(specifier.as_str()).ok()?;
-    let req = req_ref.req().clone();
-    let maybe_nv = self.req_to_nv(&req);
-    let nv = maybe_nv.as_ref()?;
-    let maybe_info = self
-      .info_by_nv
-      .entry(nv.clone())
-      .or_insert_with(|| read_cached_package_version_info(nv, &self.cache));
-    let info = maybe_info.as_ref()?;
-    let path = info.export(&normalize_export_name(req_ref.sub_path()))?;
-    jsr_url()
-      .join(&format!("{}/{}/{}", &nv.name, &nv.version, &path))
-      .ok()
-  }
-
-  pub fn lookup_export_for_path(
-    &self,
-    nv: &PackageNv,
-    path: &str,
-  ) -> Option<String> {
-    let maybe_info = self
-      .info_by_nv
-      .entry(nv.clone())
-      .or_insert_with(|| read_cached_package_version_info(nv, &self.cache));
-    let info = maybe_info.as_ref()?;
-    let path = path.strip_prefix("./").unwrap_or(path);
-    for (export, path_) in info.exports() {
-      if path_.strip_prefix("./").unwrap_or(path_) == path {
-        return Some(export.strip_prefix("./").unwrap_or(export).to_string());
-      }
-    }
-    None
-  }
-
-  pub fn lookup_req_for_nv(&self, nv: &PackageNv) -> Option<PackageReq> {
-    for entry in self.nv_by_req.iter() {
-      let Some(nv_) = entry.value() else {
-        continue;
-      };
-      if nv_ == nv {
-        return Some(entry.key().clone());
-      }
-    }
-    None
-  }
-}
-
-fn read_cached_package_info(
-  name: &str,
-  cache: &Arc<dyn HttpCache>,
-) -> Option<JsrPackageInfo> {
-  let meta_url = jsr_url().join(&format!("{}/meta.json", name)).ok()?;
-  let meta_cache_item_key = cache.cache_item_key(&meta_url).ok()?;
-  let meta_bytes = cache
-    .read_file_bytes(
-      &meta_cache_item_key,
-      None,
-      LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY,
-    )
-    .ok()??;
-  serde_json::from_slice::<JsrPackageInfo>(&meta_bytes).ok()
-}
-
-fn read_cached_package_version_info(
-  nv: &PackageNv,
-  cache: &Arc<dyn HttpCache>,
-) -> Option<JsrPackageVersionInfo> {
-  let meta_url = jsr_url()
-    .join(&format!("{}/{}_meta.json", &nv.name, &nv.version))
-    .ok()?;
-  let meta_cache_item_key = cache.cache_item_key(&meta_url).ok()?;
-  let meta_bytes = cache
-    .read_file_bytes(
-      &meta_cache_item_key,
-      None,
-      LSP_DISALLOW_GLOBAL_TO_LOCAL_COPY,
-    )
-    .ok()??;
-  partial_jsr_package_version_info_from_slice(&meta_bytes).ok()
-}
-
 #[derive(Debug, Clone)]
 pub struct CliJsrSearchApi {
   file_fetcher: FileFetcher,
   /// We only store this here so the completion system has access to a resolver
   /// that always uses the global cache.
-  resolver: Arc<JsrResolver>,
+  resolver: Arc<JsrFetchResolver>,
   search_cache: Arc<DashMap<String, Arc<Vec<String>>>>,
   versions_cache: Arc<DashMap<String, Arc<Vec<Version>>>>,
   exports_cache: Arc<DashMap<PackageNv, Arc<Vec<String>>>>,
@@ -200,10 +28,7 @@ pub struct CliJsrSearchApi {
 
 impl CliJsrSearchApi {
   pub fn new(file_fetcher: FileFetcher) -> Self {
-    let resolver = Arc::new(JsrResolver::from_cache_and_lockfile(
-      file_fetcher.http_cache().clone(),
-      None,
-    ));
+    let resolver = Arc::new(JsrFetchResolver::new(file_fetcher.clone()));
     Self {
       file_fetcher,
       resolver,
@@ -213,7 +38,7 @@ impl CliJsrSearchApi {
     }
   }
 
-  pub fn get_resolver(&self) -> &Arc<JsrResolver> {
+  pub fn get_resolver(&self) -> &Arc<JsrFetchResolver> {
     &self.resolver
   }
 }
@@ -245,19 +70,12 @@ impl PackageSearchApi for CliJsrSearchApi {
     if let Some(versions) = self.versions_cache.get(name) {
       return Ok(versions.clone());
     }
-    let mut meta_url = jsr_url().clone();
-    meta_url
-      .path_segments_mut()
-      .map_err(|_| anyhow!("Custom jsr URL cannot be a base."))?
-      .pop_if_empty()
-      .push(name)
-      .push("meta.json");
-    let file = self
-      .file_fetcher
-      .fetch(&meta_url, PermissionsContainer::allow_all())
-      .await?;
-    let info = serde_json::from_slice::<JsrPackageInfo>(&file.source)?;
-    let mut versions = info.versions.into_keys().collect::<Vec<_>>();
+    let info = self
+      .resolver
+      .package_info(name)
+      .await
+      .ok_or_else(|| anyhow!("JSR package info not found: {}", name))?;
+    let mut versions = info.versions.keys().cloned().collect::<Vec<_>>();
     versions.sort();
     versions.reverse();
     let versions = Arc::new(versions);
@@ -274,18 +92,11 @@ impl PackageSearchApi for CliJsrSearchApi {
     if let Some(exports) = self.exports_cache.get(nv) {
       return Ok(exports.clone());
     }
-    let mut meta_url = jsr_url().clone();
-    meta_url
-      .path_segments_mut()
-      .map_err(|_| anyhow!("Custom jsr URL cannot be a base."))?
-      .pop_if_empty()
-      .push(&nv.name)
-      .push(&format!("{}_meta.json", &nv.version));
-    let file = self
-      .file_fetcher
-      .fetch(&meta_url, PermissionsContainer::allow_all())
-      .await?;
-    let info = partial_jsr_package_version_info_from_slice(&file.source)?;
+    let info = self
+      .resolver
+      .package_version_info(nv)
+      .await
+      .ok_or_else(|| anyhow!("JSR package version info not found: {}", nv))?;
     let mut exports = info
       .exports()
       .map(|(n, _)| n.to_string())
@@ -297,46 +108,6 @@ impl PackageSearchApi for CliJsrSearchApi {
   }
 }
 
-// TODO(nayeemrmn): This is duplicated from a private function in deno_graph
-// 0.65.1. Make it public or cleanup otherwise.
-fn normalize_export_name(sub_path: Option<&str>) -> Cow<str> {
-  let Some(sub_path) = sub_path else {
-    return Cow::Borrowed(".");
-  };
-  if sub_path.is_empty() || matches!(sub_path, "/" | ".") {
-    Cow::Borrowed(".")
-  } else {
-    let sub_path = if sub_path.starts_with('/') {
-      Cow::Owned(format!(".{}", sub_path))
-    } else if !sub_path.starts_with("./") {
-      Cow::Owned(format!("./{}", sub_path))
-    } else {
-      Cow::Borrowed(sub_path)
-    };
-    if let Some(prefix) = sub_path.strip_suffix('/') {
-      Cow::Owned(prefix.to_string())
-    } else {
-      sub_path
-    }
-  }
-}
-
-/// This is a roundabout way of deserializing `JsrPackageVersionInfo`,
-/// because we only want the `exports` field and `module_graph` is large.
-fn partial_jsr_package_version_info_from_slice(
-  slice: &[u8],
-) -> serde_json::Result<JsrPackageVersionInfo> {
-  let mut info = serde_json::from_slice::<serde_json::Value>(slice)?;
-  Ok(JsrPackageVersionInfo {
-    manifest: Default::default(), // not used by the LSP (only caching checks this in deno_graph)
-    exports: info
-      .as_object_mut()
-      .and_then(|o| o.remove("exports"))
-      .unwrap_or_default(),
-    module_graph: None,
-  })
-}
-
 fn parse_jsr_search_response(source: &str) -> Result<Vec<String>, AnyError> {
   #[derive(Debug, Deserialize)]
   #[serde(rename_all = "camelCase")]
diff --git a/cli/lsp/mod.rs b/cli/lsp/mod.rs
index a2d0854642..f15d2a3658 100644
--- a/cli/lsp/mod.rs
+++ b/cli/lsp/mod.rs
@@ -21,7 +21,7 @@ mod completions;
 mod config;
 mod diagnostics;
 mod documents;
-pub mod jsr;
+mod jsr;
 pub mod language_server;
 mod logging;
 mod lsp_custom;
@@ -32,7 +32,7 @@ mod performance;
 mod refactor;
 mod registries;
 mod repl;
-pub mod search;
+mod search;
 mod semantic_tokens;
 mod testing;
 mod text;
diff --git a/cli/main.rs b/cli/main.rs
index 60d10badcb..c39547daa0 100644
--- a/cli/main.rs
+++ b/cli/main.rs
@@ -12,6 +12,7 @@ mod file_fetcher;
 mod graph_util;
 mod http_util;
 mod js;
+mod jsr;
 mod lsp;
 mod module_loader;
 mod napi;
diff --git a/cli/tools/registry/pm.rs b/cli/tools/registry/pm.rs
index a3fa8a0f3f..0c10c4993b 100644
--- a/cli/tools/registry/pm.rs
+++ b/cli/tools/registry/pm.rs
@@ -2,6 +2,7 @@
 
 use std::collections::HashMap;
 use std::path::PathBuf;
+use std::sync::Arc;
 
 use deno_ast::TextChange;
 use deno_config::FmtOptionsConfig;
@@ -13,7 +14,6 @@ use deno_core::futures::StreamExt;
 use deno_core::serde_json;
 use deno_semver::jsr::JsrPackageReqReference;
 use deno_semver::npm::NpmPackageReqReference;
-use deno_semver::package::PackageReq;
 
 use jsonc_parser::ast::ObjectProp;
 use jsonc_parser::ast::Value;
@@ -22,8 +22,7 @@ use crate::args::CacheSetting;
 use crate::args::Flags;
 use crate::factory::CliFactory;
 use crate::file_fetcher::FileFetcher;
-use crate::lsp::jsr::CliJsrSearchApi;
-use crate::lsp::search::PackageSearchApi;
+use crate::jsr::JsrFetchResolver;
 
 pub async fn add(flags: Flags, add_flags: AddFlags) -> Result<(), AnyError> {
   let cli_factory = CliFactory::from_flags(flags.clone()).await?;
@@ -78,16 +77,13 @@ pub async fn add(flags: Flags, add_flags: AddFlags) -> Result<(), AnyError> {
     None,
   );
   deps_file_fetcher.set_download_log_level(log::Level::Trace);
-  let jsr_search_api = CliJsrSearchApi::new(deps_file_fetcher);
+  let jsr_resolver = Arc::new(JsrFetchResolver::new(deps_file_fetcher));
 
   let package_futures = package_reqs
    .into_iter()
-    .map(|package_req| {
-      find_package_and_select_version_for_req(
-        jsr_search_api.clone(),
-        package_req,
-      )
-      .boxed_local()
+    .map(move |package_req| {
+      find_package_and_select_version_for_req(jsr_resolver.clone(), package_req)
+        .boxed_local()
     })
    .collect::<Vec<_>>();
 
@@ -185,42 +181,27 @@ enum PackageAndVersion {
   Selected(SelectedPackage),
 }
 
-async fn jsr_find_package_and_select_version(
-  jsr_search_api: CliJsrSearchApi,
-  req: &PackageReq,
-) -> Result<PackageAndVersion, AnyError> {
-  let jsr_prefixed_name = format!("jsr:{}", req.name);
-
-  // TODO(bartlomieju): Need to do semver as well - @luca/flag@^0.14 should use to
-  // highest possible `0.14.x` version.
-  let version_req = req.version_req.version_text();
-  if version_req != "*" {
-    bail!("Specifying version constraints is currently not supported. Package: {}@{}", jsr_prefixed_name, version_req);
-  }
-
-  let Ok(versions) = jsr_search_api.versions(&req.name).await else {
-    return Ok(PackageAndVersion::NotFound(jsr_prefixed_name));
-  };
-
-  let Some(latest_version) = versions.first() else {
-    return Ok(PackageAndVersion::NotFound(jsr_prefixed_name));
-  };
-
-  Ok(PackageAndVersion::Selected(SelectedPackage {
-    import_name: req.name.to_string(),
-    package_name: jsr_prefixed_name,
-    // TODO(bartlomieju): fix it, it should not always be caret
-    version_req: format!("^{}", latest_version),
-  }))
-}
-
 async fn find_package_and_select_version_for_req(
-  jsr_search_api: CliJsrSearchApi,
+  jsr_resolver: Arc<JsrFetchResolver>,
   add_package_req: AddPackageReq,
 ) -> Result<PackageAndVersion, AnyError> {
   match add_package_req {
     AddPackageReq::Jsr(pkg_ref) => {
-      jsr_find_package_and_select_version(jsr_search_api, pkg_ref.req()).await
+      let req = pkg_ref.req();
+      let jsr_prefixed_name = format!("jsr:{}", &req.name);
+      let Some(nv) = jsr_resolver.req_to_nv(req).await else {
+        return Ok(PackageAndVersion::NotFound(jsr_prefixed_name));
+      };
+      let range_symbol = if req.version_req.version_text().starts_with('~') {
+        '~'
+      } else {
+        '^'
+      };
+      Ok(PackageAndVersion::Selected(SelectedPackage {
+        import_name: req.name.to_string(),
+        package_name: jsr_prefixed_name,
+        version_req: format!("{}{}", range_symbol, &nv.version),
+      }))
     }
     AddPackageReq::Npm(pkg_req) => {
       bail!(
diff --git a/tests/integration/pm_tests.rs b/tests/integration/pm_tests.rs
index 4e0345331d..cc5527c40a 100644
--- a/tests/integration/pm_tests.rs
+++ b/tests/integration/pm_tests.rs
@@ -48,6 +48,38 @@ fn add_basic_no_deno_json() {
   }));
 }
 
+#[test]
+fn add_version_constraint() {
+  let context = pm_context_builder().build();
+  let temp_dir = context.temp_dir().path();
+
+  let output = context.new_command().args("add @denotest/add@1").run();
+  output.assert_exit_code(0);
+  let output = output.combined_output();
+  assert_contains!(output, "Add @denotest/add");
+  temp_dir.join("deno.json").assert_matches_json(json!({
+    "imports": {
+      "@denotest/add": "jsr:@denotest/add@^1.0.0"
+    }
+  }));
+}
+
+#[test]
+fn add_tilde() {
+  let context = pm_context_builder().build();
+  let temp_dir = context.temp_dir().path();
+
+  let output = context.new_command().args("add @denotest/add@~1").run();
+  output.assert_exit_code(0);
+  let output = output.combined_output();
+  assert_contains!(output, "Add @denotest/add");
+  temp_dir.join("deno.json").assert_matches_json(json!({
+    "imports": {
+      "@denotest/add": "jsr:@denotest/add@~1.0.0"
+    }
+  }));
+}
+
 #[test]
 fn add_multiple() {
   let starting_deno_json = json!({
@@ -90,16 +122,6 @@ fn add_not_supported_npm() {
   assert_contains!(output, "error: Adding npm: packages is currently not supported. Package: npm:express");
 }
 
-#[test]
-fn add_not_supported_version_constraint() {
-  let context = pm_context_builder().build();
-
-  let output = context.new_command().args("add @denotest/add@1").run();
-  output.assert_exit_code(1);
-  let output = output.combined_output();
-  assert_contains!(output, "error: Specifying version constraints is currently not supported. Package: jsr:@denotest/add@1");
-}
-
 fn pm_context_builder() -> TestContextBuilder {
   TestContextBuilder::new()
     .use_http_server()