mirror of https://github.com/denoland/deno.git
refactor: don't share reqwest::HttpClient across tokio runtimes (#24092)
This also fixes several issues where we weren't properly creating http clients with the user's settings.
parent 72088f2f52
commit 3341c50b6a
32 changed files with 1387 additions and 1224 deletions
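
Background for the diff below: a reqwest::Client owns connection-pool state that is tied to the tokio runtime driving it, so reusing one client from a different runtime can hang or panic. The commit therefore stops passing a single HttpClient around and instead passes an HttpClientProvider that remembers the user's settings (root cert store, certificate-error overrides, and so on) and hands out a client created for the runtime that actually performs the request. A rough standalone sketch of that shape (hypothetical names and fields, not the actual Deno implementation):

    use std::cell::RefCell;

    #[derive(Clone, Default)]
    struct HttpClientSettings {
      user_agent: String,
      // in the real code: root cert store, unsafely_ignore_certificate_errors, ...
    }

    struct HttpClientProvider {
      settings: HttpClientSettings,
    }

    thread_local! {
      // One client per thread here for simplicity; the point is only that a
      // client created on one runtime is never handed to another runtime.
      static CLIENT: RefCell<Option<reqwest::Client>> = RefCell::new(None);
    }

    impl HttpClientProvider {
      fn new(settings: HttpClientSettings) -> Self {
        Self { settings }
      }

      // Create (or reuse) a client for the calling thread, applying the
      // user's settings whenever a new one is built.
      fn get_or_create(&self) -> Result<reqwest::Client, reqwest::Error> {
        CLIENT.with(|slot| {
          if let Some(client) = slot.borrow().as_ref() {
            return Ok(client.clone());
          }
          let client = reqwest::Client::builder()
            .user_agent(self.settings.user_agent.clone())
            .build()?;
          *slot.borrow_mut() = Some(client.clone());
          Ok(client)
        })
      }
    }

Call sites then ask the provider for a client right where the request is made, e.g. provider.get_or_create()?.get(url), instead of holding a long-lived client.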
2  cli/cache/mod.rs (vendored)
@@ -286,7 +286,7 @@ impl Loader for FetchCacher {
         }
       })
     }
-    .boxed()
+    .boxed_local()
   }

   fn cache_module_info(
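Context for this hunk: FutureExt::boxed() from the futures crate requires the future to be Send, while boxed_local() does not, and the loader future stops being Send once it drives a client created on the current runtime. A standalone illustration of the difference (not code from this diff):

    use std::rc::Rc;
    use futures::future::{BoxFuture, FutureExt, LocalBoxFuture};

    // The async block captures an Rc, so its future is !Send and can only be
    // boxed with `.boxed_local()`.
    fn local_only() -> LocalBoxFuture<'static, usize> {
      let rc = Rc::new(1usize);
      async move { *rc + 1 }.boxed_local()
    }

    // A Send future may use `.boxed()`, producing a BoxFuture that can move
    // across threads.
    fn send_ok() -> BoxFuture<'static, usize> {
      async move { 2usize }.boxed()
    }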
6  cli/clippy.toml (new file)

@@ -0,0 +1,6 @@
+disallowed-methods = [
+  { path = "reqwest::Client::new", reason = "create an HttpClient via an HttpClientProvider instead" },
+]
+disallowed-types = [
+  { path = "reqwest::Client", reason = "use crate::http_util::HttpClient instead" },
+]
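With this configuration, Clippy rejects direct construction or use of reqwest::Client inside the cli crate, steering code toward the provider instead. For example, cargo clippy would flag the following (illustrative snippet, not part of the diff):

    // Triggers `clippy::disallowed_methods` (and `disallowed_types` via the
    // return type) under the cli/clippy.toml rules above.
    fn make_raw_client() -> reqwest::Client {
      reqwest::Client::new()
    }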
@@ -25,7 +25,7 @@ use crate::graph_container::MainModuleGraphContainer;
 use crate::graph_util::FileWatcherReporter;
 use crate::graph_util::ModuleGraphBuilder;
 use crate::graph_util::ModuleGraphCreator;
-use crate::http_util::HttpClient;
+use crate::http_util::HttpClientProvider;
 use crate::module_loader::CliModuleLoaderFactory;
 use crate::module_loader::ModuleLoadPreparer;
 use crate::node::CliCjsCodeAnalyzer;
@@ -152,7 +152,7 @@ struct CliFactoryServices {
   file_fetcher: Deferred<Arc<FileFetcher>>,
   global_http_cache: Deferred<Arc<GlobalHttpCache>>,
   http_cache: Deferred<Arc<dyn HttpCache>>,
-  http_client: Deferred<Arc<HttpClient>>,
+  http_client_provider: Deferred<Arc<HttpClientProvider>>,
   emit_cache: Deferred<EmitCache>,
   emitter: Deferred<Arc<Emitter>>,
   fs: Deferred<Arc<dyn deno_fs::FileSystem>>,
@@ -279,9 +279,9 @@ impl CliFactory {
     })
   }

-  pub fn http_client(&self) -> &Arc<HttpClient> {
-    self.services.http_client.get_or_init(|| {
-      Arc::new(HttpClient::new(
+  pub fn http_client_provider(&self) -> &Arc<HttpClientProvider> {
+    self.services.http_client_provider.get_or_init(|| {
+      Arc::new(HttpClientProvider::new(
         Some(self.root_cert_store_provider().clone()),
         self.options.unsafely_ignore_certificate_errors().clone(),
       ))
@@ -294,7 +294,7 @@ impl CliFactory {
       self.http_cache()?.clone(),
       self.options.cache_setting(),
       !self.options.no_remote(),
-      self.http_client().clone(),
+      self.http_client_provider().clone(),
       self.blob_store().clone(),
       Some(self.text_only_progress_bar().clone()),
     )))
@@ -436,7 +436,7 @@ impl CliFactory {
       },
       maybe_lockfile: self.maybe_lockfile().as_ref().cloned(),
       fs: fs.clone(),
-      http_client: self.http_client().clone(),
+      http_client_provider: self.http_client_provider().clone(),
       npm_global_cache_dir: self.deno_dir()?.npm_folder_path(),
       cache_setting: self.options.cache_setting(),
       text_only_progress_bar: self.text_only_progress_bar().clone(),
@@ -760,9 +760,9 @@ impl CliFactory {
     &self,
   ) -> Result<DenoCompileBinaryWriter, AnyError> {
     Ok(DenoCompileBinaryWriter::new(
-      self.file_fetcher()?,
-      self.http_client(),
       self.deno_dir()?,
+      self.file_fetcher()?,
+      self.http_client_provider(),
       self.npm_resolver().await?.as_ref(),
       self.options.npm_system_info(),
       self.package_json_deps_provider(),
@@ -1,17 +1,14 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

 use crate::args::CacheSetting;
-use crate::auth_tokens::AuthToken;
 use crate::auth_tokens::AuthTokens;
 use crate::cache::HttpCache;
-use crate::colors;
-use crate::http_util;
-use crate::http_util::resolve_redirect_from_response;
 use crate::http_util::CacheSemantics;
 use crate::http_util::HeadersMap;
-use crate::http_util::HttpClient;
+use crate::http_util::FetchOnceArgs;
+use crate::http_util::FetchOnceResult;
+use crate::http_util::HttpClientProvider;
 use crate::util::progress_bar::ProgressBar;
 use crate::util::progress_bar::UpdateGuard;
-
 use deno_ast::MediaType;
 use deno_core::anyhow::bail;
@@ -24,11 +21,7 @@ use deno_core::parking_lot::Mutex;
 use deno_core::url::Url;
 use deno_core::ModuleSpecifier;
 use deno_graph::source::LoaderChecksum;
-use deno_runtime::deno_fetch::reqwest::header::HeaderValue;
-use deno_runtime::deno_fetch::reqwest::header::ACCEPT;
-use deno_runtime::deno_fetch::reqwest::header::AUTHORIZATION;
-use deno_runtime::deno_fetch::reqwest::header::IF_NONE_MATCH;
 use deno_runtime::deno_fetch::reqwest::StatusCode;
 use deno_runtime::deno_web::BlobStore;
 use deno_runtime::permissions::PermissionsContainer;
 use log::debug;
@@ -165,14 +158,14 @@ pub struct FetchNoFollowOptions<'a> {
 }

 /// A structure for resolving, fetching and caching source files.
-#[derive(Debug, Clone)]
+#[derive(Debug)]
 pub struct FileFetcher {
   auth_tokens: AuthTokens,
   allow_remote: bool,
   memory_files: MemoryFiles,
   cache_setting: CacheSetting,
   http_cache: Arc<dyn HttpCache>,
-  http_client: Arc<HttpClient>,
+  http_client_provider: Arc<HttpClientProvider>,
   blob_store: Arc<BlobStore>,
   download_log_level: log::Level,
   progress_bar: Option<ProgressBar>,
@@ -183,7 +176,7 @@ impl FileFetcher {
     http_cache: Arc<dyn HttpCache>,
     cache_setting: CacheSetting,
     allow_remote: bool,
-    http_client: Arc<HttpClient>,
+    http_client_provider: Arc<HttpClientProvider>,
     blob_store: Arc<BlobStore>,
     progress_bar: Option<ProgressBar>,
   ) -> Self {
@@ -193,7 +186,7 @@ impl FileFetcher {
       memory_files: Default::default(),
       cache_setting,
       http_cache,
-      http_client,
+      http_client_provider,
       blob_store,
       download_log_level: log::Level::Info,
       progress_bar,
@@ -400,16 +393,16 @@ impl FileFetcher {
     let mut maybe_etag = maybe_etag;
     let mut retried = false; // retry intermittent failures
     let result = loop {
-      let result = match fetch_no_follow(
-        &self.http_client,
-        FetchOnceArgs {
+      let result = match self
+        .http_client_provider
+        .get_or_create()?
+        .fetch_no_follow(FetchOnceArgs {
           url: specifier.clone(),
           maybe_accept: maybe_accept.map(ToOwned::to_owned),
           maybe_etag: maybe_etag.clone(),
           maybe_auth_token: maybe_auth_token.clone(),
           maybe_progress_guard: maybe_progress_guard.as_ref(),
-        },
-      )
+        })
        .await?
      {
        FetchOnceResult::NotModified => {
@ -641,140 +634,17 @@ impl FileFetcher {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Eq, PartialEq)]
|
||||
enum FetchOnceResult {
|
||||
Code(Vec<u8>, HeadersMap),
|
||||
NotModified,
|
||||
Redirect(Url, HeadersMap),
|
||||
RequestError(String),
|
||||
ServerError(StatusCode),
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct FetchOnceArgs<'a> {
|
||||
pub url: Url,
|
||||
pub maybe_accept: Option<String>,
|
||||
pub maybe_etag: Option<String>,
|
||||
pub maybe_auth_token: Option<AuthToken>,
|
||||
pub maybe_progress_guard: Option<&'a UpdateGuard>,
|
||||
}
|
||||
|
||||
/// Asynchronously fetches the given HTTP URL one pass only.
|
||||
/// If no redirect is present and no error occurs,
|
||||
/// yields Code(ResultPayload).
|
||||
/// If redirect occurs, does not follow and
|
||||
/// yields Redirect(url).
|
||||
async fn fetch_no_follow<'a>(
|
||||
http_client: &HttpClient,
|
||||
args: FetchOnceArgs<'a>,
|
||||
) -> Result<FetchOnceResult, AnyError> {
|
||||
let mut request = http_client.get_no_redirect(args.url.clone())?;
|
||||
|
||||
if let Some(etag) = args.maybe_etag {
|
||||
let if_none_match_val = HeaderValue::from_str(&etag)?;
|
||||
request = request.header(IF_NONE_MATCH, if_none_match_val);
|
||||
}
|
||||
if let Some(auth_token) = args.maybe_auth_token {
|
||||
let authorization_val = HeaderValue::from_str(&auth_token.to_string())?;
|
||||
request = request.header(AUTHORIZATION, authorization_val);
|
||||
}
|
||||
if let Some(accept) = args.maybe_accept {
|
||||
let accepts_val = HeaderValue::from_str(&accept)?;
|
||||
request = request.header(ACCEPT, accepts_val);
|
||||
}
|
||||
let response = match request.send().await {
|
||||
Ok(resp) => resp,
|
||||
Err(err) => {
|
||||
if err.is_connect() || err.is_timeout() {
|
||||
return Ok(FetchOnceResult::RequestError(err.to_string()));
|
||||
}
|
||||
return Err(err.into());
|
||||
}
|
||||
};
|
||||
|
||||
if response.status() == StatusCode::NOT_MODIFIED {
|
||||
return Ok(FetchOnceResult::NotModified);
|
||||
}
|
||||
|
||||
let mut result_headers = HashMap::new();
|
||||
let response_headers = response.headers();
|
||||
|
||||
if let Some(warning) = response_headers.get("X-Deno-Warning") {
|
||||
log::warn!(
|
||||
"{} {}",
|
||||
crate::colors::yellow("Warning"),
|
||||
warning.to_str().unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
for key in response_headers.keys() {
|
||||
let key_str = key.to_string();
|
||||
let values = response_headers.get_all(key);
|
||||
let values_str = values
|
||||
.iter()
|
||||
.map(|e| e.to_str().unwrap().to_string())
|
||||
.collect::<Vec<String>>()
|
||||
.join(",");
|
||||
result_headers.insert(key_str, values_str);
|
||||
}
|
||||
|
||||
if response.status().is_redirection() {
|
||||
let new_url = resolve_redirect_from_response(&args.url, &response)?;
|
||||
return Ok(FetchOnceResult::Redirect(new_url, result_headers));
|
||||
}
|
||||
|
||||
let status = response.status();
|
||||
|
||||
if status.is_server_error() {
|
||||
return Ok(FetchOnceResult::ServerError(status));
|
||||
}
|
||||
|
||||
if status.is_client_error() {
|
||||
let err = if response.status() == StatusCode::NOT_FOUND {
|
||||
custom_error(
|
||||
"NotFound",
|
||||
format!("Import '{}' failed, not found.", args.url),
|
||||
)
|
||||
} else {
|
||||
generic_error(format!(
|
||||
"Import '{}' failed: {}",
|
||||
args.url,
|
||||
response.status()
|
||||
))
|
||||
};
|
||||
return Err(err);
|
||||
}
|
||||
|
||||
let body = http_util::get_response_body_with_progress(
|
||||
response,
|
||||
args.maybe_progress_guard,
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(FetchOnceResult::Code(body, result_headers))
|
||||
}
|
||||
|
||||
#[allow(clippy::print_stdout)]
|
||||
#[allow(clippy::print_stderr)]
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::cache::GlobalHttpCache;
|
||||
use crate::cache::RealDenoCacheEnv;
|
||||
use crate::http_util::HttpClient;
|
||||
use crate::version;
|
||||
use crate::http_util::HttpClientProvider;
|
||||
|
||||
use super::*;
|
||||
use deno_core::error::get_custom_error_class;
|
||||
use deno_core::resolve_url;
|
||||
use deno_core::url::Url;
|
||||
use deno_runtime::deno_fetch::create_http_client;
|
||||
use deno_runtime::deno_fetch::CreateHttpClientOptions;
|
||||
use deno_runtime::deno_tls::rustls::RootCertStore;
|
||||
use deno_runtime::deno_web::Blob;
|
||||
use deno_runtime::deno_web::InMemoryBlobPart;
|
||||
use std::collections::hash_map::RandomState;
|
||||
use std::collections::HashSet;
|
||||
use std::fs::read;
|
||||
use test_util::TempDir;
|
||||
|
||||
fn setup(
|
||||
|
@ -797,7 +667,7 @@ mod tests {
|
|||
Arc::new(GlobalHttpCache::new(location, RealDenoCacheEnv)),
|
||||
cache_setting,
|
||||
true,
|
||||
Arc::new(HttpClient::new(None, None)),
|
||||
Arc::new(HttpClientProvider::new(None, None)),
|
||||
blob_store.clone(),
|
||||
None,
|
||||
);
|
||||
|
@ -1051,7 +921,7 @@ mod tests {
|
|||
)),
|
||||
CacheSetting::ReloadAll,
|
||||
true,
|
||||
Arc::new(HttpClient::new(None, None)),
|
||||
Arc::new(HttpClientProvider::new(None, None)),
|
||||
Default::default(),
|
||||
None,
|
||||
);
|
||||
|
@ -1083,7 +953,7 @@ mod tests {
|
|||
)),
|
||||
CacheSetting::Use,
|
||||
true,
|
||||
Arc::new(HttpClient::new(None, None)),
|
||||
Arc::new(HttpClientProvider::new(None, None)),
|
||||
Default::default(),
|
||||
None,
|
||||
);
|
||||
|
@ -1120,7 +990,7 @@ mod tests {
|
|||
)),
|
||||
CacheSetting::Use,
|
||||
true,
|
||||
Arc::new(HttpClient::new(None, None)),
|
||||
Arc::new(HttpClientProvider::new(None, None)),
|
||||
Default::default(),
|
||||
None,
|
||||
);
|
||||
|
@ -1259,7 +1129,7 @@ mod tests {
|
|||
)),
|
||||
CacheSetting::Use,
|
||||
true,
|
||||
Arc::new(HttpClient::new(None, None)),
|
||||
Arc::new(HttpClientProvider::new(None, None)),
|
||||
Default::default(),
|
||||
None,
|
||||
);
|
||||
|
@ -1299,7 +1169,7 @@ mod tests {
|
|||
)),
|
||||
CacheSetting::Use,
|
||||
true,
|
||||
Arc::new(HttpClient::new(None, None)),
|
||||
Arc::new(HttpClientProvider::new(None, None)),
|
||||
Default::default(),
|
||||
None,
|
||||
);
|
||||
|
@ -1425,7 +1295,7 @@ mod tests {
|
|||
)),
|
||||
CacheSetting::Use,
|
||||
false,
|
||||
Arc::new(HttpClient::new(None, None)),
|
||||
Arc::new(HttpClientProvider::new(None, None)),
|
||||
Default::default(),
|
||||
None,
|
||||
);
|
||||
|
@ -1450,7 +1320,7 @@ mod tests {
|
|||
Arc::new(GlobalHttpCache::new(location.clone(), RealDenoCacheEnv)),
|
||||
CacheSetting::Only,
|
||||
true,
|
||||
Arc::new(HttpClient::new(None, None)),
|
||||
Arc::new(HttpClientProvider::new(None, None)),
|
||||
Default::default(),
|
||||
None,
|
||||
);
|
||||
|
@ -1458,7 +1328,7 @@ mod tests {
|
|||
Arc::new(GlobalHttpCache::new(location, RealDenoCacheEnv)),
|
||||
CacheSetting::Use,
|
||||
true,
|
||||
Arc::new(HttpClient::new(None, None)),
|
||||
Arc::new(HttpClientProvider::new(None, None)),
|
||||
Default::default(),
|
||||
None,
|
||||
);
|
||||
|
@ -1602,580 +1472,6 @@ mod tests {
|
|||
test_fetch_remote_encoded("windows-1255", "windows-1255", expected).await;
|
||||
}
|
||||
|
||||
fn create_test_client() -> HttpClient {
|
||||
HttpClient::from_client(
|
||||
create_http_client("test_client", CreateHttpClientOptions::default())
|
||||
.unwrap(),
|
||||
)
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_fetch_string() {
|
||||
let _http_server_guard = test_util::http_server();
|
||||
// Relies on external http server. See target/debug/test_server
|
||||
let url = Url::parse("http://127.0.0.1:4545/assets/fixture.json").unwrap();
|
||||
let client = create_test_client();
|
||||
let result = fetch_no_follow(
|
||||
&client,
|
||||
FetchOnceArgs {
|
||||
url,
|
||||
maybe_accept: None,
|
||||
maybe_etag: None,
|
||||
maybe_auth_token: None,
|
||||
maybe_progress_guard: None,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
if let Ok(FetchOnceResult::Code(body, headers)) = result {
|
||||
assert!(!body.is_empty());
|
||||
assert_eq!(headers.get("content-type").unwrap(), "application/json");
|
||||
assert_eq!(headers.get("etag"), None);
|
||||
assert_eq!(headers.get("x-typescript-types"), None);
|
||||
} else {
|
||||
panic!();
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_fetch_gzip() {
|
||||
let _http_server_guard = test_util::http_server();
|
||||
// Relies on external http server. See target/debug/test_server
|
||||
let url = Url::parse("http://127.0.0.1:4545/run/import_compression/gziped")
|
||||
.unwrap();
|
||||
let client = create_test_client();
|
||||
let result = fetch_no_follow(
|
||||
&client,
|
||||
FetchOnceArgs {
|
||||
url,
|
||||
maybe_accept: None,
|
||||
maybe_etag: None,
|
||||
maybe_auth_token: None,
|
||||
maybe_progress_guard: None,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
if let Ok(FetchOnceResult::Code(body, headers)) = result {
|
||||
assert_eq!(String::from_utf8(body).unwrap(), "console.log('gzip')");
|
||||
assert_eq!(
|
||||
headers.get("content-type").unwrap(),
|
||||
"application/javascript"
|
||||
);
|
||||
assert_eq!(headers.get("etag"), None);
|
||||
assert_eq!(headers.get("x-typescript-types"), None);
|
||||
} else {
|
||||
panic!();
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_fetch_with_etag() {
|
||||
let _http_server_guard = test_util::http_server();
|
||||
let url = Url::parse("http://127.0.0.1:4545/etag_script.ts").unwrap();
|
||||
let client = create_test_client();
|
||||
let result = fetch_no_follow(
|
||||
&client,
|
||||
FetchOnceArgs {
|
||||
url: url.clone(),
|
||||
maybe_accept: None,
|
||||
maybe_etag: None,
|
||||
maybe_auth_token: None,
|
||||
maybe_progress_guard: None,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
if let Ok(FetchOnceResult::Code(body, headers)) = result {
|
||||
assert!(!body.is_empty());
|
||||
assert_eq!(String::from_utf8(body).unwrap(), "console.log('etag')");
|
||||
assert_eq!(
|
||||
headers.get("content-type").unwrap(),
|
||||
"application/typescript"
|
||||
);
|
||||
assert_eq!(headers.get("etag").unwrap(), "33a64df551425fcc55e");
|
||||
} else {
|
||||
panic!();
|
||||
}
|
||||
|
||||
let res = fetch_no_follow(
|
||||
&client,
|
||||
FetchOnceArgs {
|
||||
url,
|
||||
maybe_accept: None,
|
||||
maybe_etag: Some("33a64df551425fcc55e".to_string()),
|
||||
maybe_auth_token: None,
|
||||
maybe_progress_guard: None,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
assert_eq!(res.unwrap(), FetchOnceResult::NotModified);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_fetch_brotli() {
|
||||
let _http_server_guard = test_util::http_server();
|
||||
// Relies on external http server. See target/debug/test_server
|
||||
let url = Url::parse("http://127.0.0.1:4545/run/import_compression/brotli")
|
||||
.unwrap();
|
||||
let client = create_test_client();
|
||||
let result = fetch_no_follow(
|
||||
&client,
|
||||
FetchOnceArgs {
|
||||
url,
|
||||
maybe_accept: None,
|
||||
maybe_etag: None,
|
||||
maybe_auth_token: None,
|
||||
maybe_progress_guard: None,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
if let Ok(FetchOnceResult::Code(body, headers)) = result {
|
||||
assert!(!body.is_empty());
|
||||
assert_eq!(String::from_utf8(body).unwrap(), "console.log('brotli');");
|
||||
assert_eq!(
|
||||
headers.get("content-type").unwrap(),
|
||||
"application/javascript"
|
||||
);
|
||||
assert_eq!(headers.get("etag"), None);
|
||||
assert_eq!(headers.get("x-typescript-types"), None);
|
||||
} else {
|
||||
panic!();
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_fetch_accept() {
|
||||
let _http_server_guard = test_util::http_server();
|
||||
// Relies on external http server. See target/debug/test_server
|
||||
let url = Url::parse("http://127.0.0.1:4545/echo_accept").unwrap();
|
||||
let client = create_test_client();
|
||||
let result = fetch_no_follow(
|
||||
&client,
|
||||
FetchOnceArgs {
|
||||
url,
|
||||
maybe_accept: Some("application/json".to_string()),
|
||||
maybe_etag: None,
|
||||
maybe_auth_token: None,
|
||||
maybe_progress_guard: None,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
if let Ok(FetchOnceResult::Code(body, _)) = result {
|
||||
assert_eq!(body, r#"{"accept":"application/json"}"#.as_bytes());
|
||||
} else {
|
||||
panic!();
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_fetch_no_follow_with_redirect() {
|
||||
let _http_server_guard = test_util::http_server();
|
||||
// Relies on external http server. See target/debug/test_server
|
||||
let url = Url::parse("http://127.0.0.1:4546/assets/fixture.json").unwrap();
|
||||
// Dns resolver substitutes `127.0.0.1` with `localhost`
|
||||
let target_url =
|
||||
Url::parse("http://localhost:4545/assets/fixture.json").unwrap();
|
||||
let client = create_test_client();
|
||||
let result = fetch_no_follow(
|
||||
&client,
|
||||
FetchOnceArgs {
|
||||
url,
|
||||
maybe_accept: None,
|
||||
maybe_etag: None,
|
||||
maybe_auth_token: None,
|
||||
maybe_progress_guard: None,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
if let Ok(FetchOnceResult::Redirect(url, _)) = result {
|
||||
assert_eq!(url, target_url);
|
||||
} else {
|
||||
panic!();
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_fetch_with_cafile_string() {
|
||||
let _http_server_guard = test_util::http_server();
|
||||
// Relies on external http server. See target/debug/test_server
|
||||
let url = Url::parse("https://localhost:5545/assets/fixture.json").unwrap();
|
||||
|
||||
let client = HttpClient::from_client(
|
||||
create_http_client(
|
||||
version::get_user_agent(),
|
||||
CreateHttpClientOptions {
|
||||
ca_certs: vec![read(
|
||||
test_util::testdata_path().join("tls/RootCA.pem"),
|
||||
)
|
||||
.unwrap()],
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.unwrap(),
|
||||
);
|
||||
let result = fetch_no_follow(
|
||||
&client,
|
||||
FetchOnceArgs {
|
||||
url,
|
||||
maybe_accept: None,
|
||||
maybe_etag: None,
|
||||
maybe_auth_token: None,
|
||||
maybe_progress_guard: None,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
if let Ok(FetchOnceResult::Code(body, headers)) = result {
|
||||
assert!(!body.is_empty());
|
||||
assert_eq!(headers.get("content-type").unwrap(), "application/json");
|
||||
assert_eq!(headers.get("etag"), None);
|
||||
assert_eq!(headers.get("x-typescript-types"), None);
|
||||
} else {
|
||||
panic!();
|
||||
}
|
||||
}
|
||||
|
||||
static PUBLIC_HTTPS_URLS: &[&str] = &[
|
||||
"https://deno.com/",
|
||||
"https://example.com/",
|
||||
"https://github.com/",
|
||||
"https://www.w3.org/",
|
||||
];
|
||||
|
||||
/// This test depends on external servers, so we need to be careful to avoid mistaking an offline machine with a
|
||||
/// test failure.
|
||||
#[tokio::test]
|
||||
async fn test_fetch_with_default_certificate_store() {
|
||||
let urls: HashSet<_, RandomState> =
|
||||
HashSet::from_iter(PUBLIC_HTTPS_URLS.iter());
|
||||
|
||||
// Rely on the randomization of hashset iteration
|
||||
for url in urls {
|
||||
// Relies on external http server with a valid mozilla root CA cert.
|
||||
let url = Url::parse(url).unwrap();
|
||||
eprintln!("Attempting to fetch {url}...");
|
||||
|
||||
let client = HttpClient::from_client(
|
||||
create_http_client(
|
||||
version::get_user_agent(),
|
||||
CreateHttpClientOptions::default(),
|
||||
)
|
||||
.unwrap(),
|
||||
);
|
||||
|
||||
let result = fetch_no_follow(
|
||||
&client,
|
||||
FetchOnceArgs {
|
||||
url,
|
||||
maybe_accept: None,
|
||||
maybe_etag: None,
|
||||
maybe_auth_token: None,
|
||||
maybe_progress_guard: None,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
|
||||
match result {
|
||||
Err(_) => {
|
||||
eprintln!("Fetch error: {result:?}");
|
||||
continue;
|
||||
}
|
||||
Ok(
|
||||
FetchOnceResult::Code(..)
|
||||
| FetchOnceResult::NotModified
|
||||
| FetchOnceResult::Redirect(..),
|
||||
) => return,
|
||||
Ok(
|
||||
FetchOnceResult::RequestError(_) | FetchOnceResult::ServerError(_),
|
||||
) => {
|
||||
eprintln!("HTTP error: {result:?}");
|
||||
continue;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// Use 1.1.1.1 and 8.8.8.8 as our last-ditch internet check
|
||||
if std::net::TcpStream::connect("8.8.8.8:80").is_err()
|
||||
&& std::net::TcpStream::connect("1.1.1.1:80").is_err()
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
panic!("None of the expected public URLs were available but internet appears to be available");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_fetch_with_empty_certificate_store() {
|
||||
let root_cert_store = RootCertStore::empty();
|
||||
let urls: HashSet<_, RandomState> =
|
||||
HashSet::from_iter(PUBLIC_HTTPS_URLS.iter());
|
||||
|
||||
// Rely on the randomization of hashset iteration
|
||||
let url = urls.into_iter().next().unwrap();
|
||||
// Relies on external http server with a valid mozilla root CA cert.
|
||||
let url = Url::parse(url).unwrap();
|
||||
eprintln!("Attempting to fetch {url}...");
|
||||
|
||||
let client = HttpClient::from_client(
|
||||
create_http_client(
|
||||
version::get_user_agent(),
|
||||
CreateHttpClientOptions {
|
||||
root_cert_store: Some(root_cert_store),
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.unwrap(),
|
||||
);
|
||||
|
||||
let result = fetch_no_follow(
|
||||
&client,
|
||||
FetchOnceArgs {
|
||||
url,
|
||||
maybe_accept: None,
|
||||
maybe_etag: None,
|
||||
maybe_auth_token: None,
|
||||
maybe_progress_guard: None,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
|
||||
match result {
|
||||
Err(_) => {
|
||||
eprintln!("Fetch error (expected): {result:?}");
|
||||
return;
|
||||
}
|
||||
Ok(
|
||||
FetchOnceResult::Code(..)
|
||||
| FetchOnceResult::NotModified
|
||||
| FetchOnceResult::Redirect(..),
|
||||
) => {
|
||||
panic!("Should not have successfully fetched a URL");
|
||||
}
|
||||
Ok(
|
||||
FetchOnceResult::RequestError(_) | FetchOnceResult::ServerError(_),
|
||||
) => {
|
||||
eprintln!("HTTP error (expected): {result:?}");
|
||||
return;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_fetch_with_cafile_gzip() {
|
||||
let _http_server_guard = test_util::http_server();
|
||||
// Relies on external http server. See target/debug/test_server
|
||||
let url =
|
||||
Url::parse("https://localhost:5545/run/import_compression/gziped")
|
||||
.unwrap();
|
||||
let client = HttpClient::from_client(
|
||||
create_http_client(
|
||||
version::get_user_agent(),
|
||||
CreateHttpClientOptions {
|
||||
ca_certs: vec![read(
|
||||
test_util::testdata_path()
|
||||
.join("tls/RootCA.pem")
|
||||
.to_string(),
|
||||
)
|
||||
.unwrap()],
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.unwrap(),
|
||||
);
|
||||
let result = fetch_no_follow(
|
||||
&client,
|
||||
FetchOnceArgs {
|
||||
url,
|
||||
maybe_accept: None,
|
||||
maybe_etag: None,
|
||||
maybe_auth_token: None,
|
||||
maybe_progress_guard: None,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
if let Ok(FetchOnceResult::Code(body, headers)) = result {
|
||||
assert_eq!(String::from_utf8(body).unwrap(), "console.log('gzip')");
|
||||
assert_eq!(
|
||||
headers.get("content-type").unwrap(),
|
||||
"application/javascript"
|
||||
);
|
||||
assert_eq!(headers.get("etag"), None);
|
||||
assert_eq!(headers.get("x-typescript-types"), None);
|
||||
} else {
|
||||
panic!();
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_fetch_with_cafile_with_etag() {
|
||||
let _http_server_guard = test_util::http_server();
|
||||
let url = Url::parse("https://localhost:5545/etag_script.ts").unwrap();
|
||||
let client = HttpClient::from_client(
|
||||
create_http_client(
|
||||
version::get_user_agent(),
|
||||
CreateHttpClientOptions {
|
||||
ca_certs: vec![read(
|
||||
test_util::testdata_path()
|
||||
.join("tls/RootCA.pem")
|
||||
.to_string(),
|
||||
)
|
||||
.unwrap()],
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.unwrap(),
|
||||
);
|
||||
let result = fetch_no_follow(
|
||||
&client,
|
||||
FetchOnceArgs {
|
||||
url: url.clone(),
|
||||
maybe_accept: None,
|
||||
maybe_etag: None,
|
||||
maybe_auth_token: None,
|
||||
maybe_progress_guard: None,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
if let Ok(FetchOnceResult::Code(body, headers)) = result {
|
||||
assert!(!body.is_empty());
|
||||
assert_eq!(String::from_utf8(body).unwrap(), "console.log('etag')");
|
||||
assert_eq!(
|
||||
headers.get("content-type").unwrap(),
|
||||
"application/typescript"
|
||||
);
|
||||
assert_eq!(headers.get("etag").unwrap(), "33a64df551425fcc55e");
|
||||
assert_eq!(headers.get("x-typescript-types"), None);
|
||||
} else {
|
||||
panic!();
|
||||
}
|
||||
|
||||
let res = fetch_no_follow(
|
||||
&client,
|
||||
FetchOnceArgs {
|
||||
url,
|
||||
maybe_accept: None,
|
||||
maybe_etag: Some("33a64df551425fcc55e".to_string()),
|
||||
maybe_auth_token: None,
|
||||
maybe_progress_guard: None,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
assert_eq!(res.unwrap(), FetchOnceResult::NotModified);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_fetch_with_cafile_brotli() {
|
||||
let _http_server_guard = test_util::http_server();
|
||||
// Relies on external http server. See target/debug/test_server
|
||||
let url =
|
||||
Url::parse("https://localhost:5545/run/import_compression/brotli")
|
||||
.unwrap();
|
||||
let client = HttpClient::from_client(
|
||||
create_http_client(
|
||||
version::get_user_agent(),
|
||||
CreateHttpClientOptions {
|
||||
ca_certs: vec![read(
|
||||
test_util::testdata_path()
|
||||
.join("tls/RootCA.pem")
|
||||
.to_string(),
|
||||
)
|
||||
.unwrap()],
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.unwrap(),
|
||||
);
|
||||
let result = fetch_no_follow(
|
||||
&client,
|
||||
FetchOnceArgs {
|
||||
url,
|
||||
maybe_accept: None,
|
||||
maybe_etag: None,
|
||||
maybe_auth_token: None,
|
||||
maybe_progress_guard: None,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
if let Ok(FetchOnceResult::Code(body, headers)) = result {
|
||||
assert!(!body.is_empty());
|
||||
assert_eq!(String::from_utf8(body).unwrap(), "console.log('brotli');");
|
||||
assert_eq!(
|
||||
headers.get("content-type").unwrap(),
|
||||
"application/javascript"
|
||||
);
|
||||
assert_eq!(headers.get("etag"), None);
|
||||
assert_eq!(headers.get("x-typescript-types"), None);
|
||||
} else {
|
||||
panic!();
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn bad_redirect() {
|
||||
let _g = test_util::http_server();
|
||||
let url_str = "http://127.0.0.1:4545/bad_redirect";
|
||||
let url = Url::parse(url_str).unwrap();
|
||||
let client = create_test_client();
|
||||
let result = fetch_no_follow(
|
||||
&client,
|
||||
FetchOnceArgs {
|
||||
url,
|
||||
maybe_accept: None,
|
||||
maybe_etag: None,
|
||||
maybe_auth_token: None,
|
||||
maybe_progress_guard: None,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
assert!(result.is_err());
|
||||
let err = result.unwrap_err();
|
||||
// Check that the error message contains the original URL
|
||||
assert!(err.to_string().contains(url_str));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn server_error() {
|
||||
let _g = test_util::http_server();
|
||||
let url_str = "http://127.0.0.1:4545/server_error";
|
||||
let url = Url::parse(url_str).unwrap();
|
||||
let client = create_test_client();
|
||||
let result = fetch_no_follow(
|
||||
&client,
|
||||
FetchOnceArgs {
|
||||
url,
|
||||
maybe_accept: None,
|
||||
maybe_etag: None,
|
||||
maybe_auth_token: None,
|
||||
maybe_progress_guard: None,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
|
||||
if let Ok(FetchOnceResult::ServerError(status)) = result {
|
||||
assert_eq!(status, 500);
|
||||
} else {
|
||||
panic!();
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn request_error() {
|
||||
let _g = test_util::http_server();
|
||||
let url_str = "http://127.0.0.1:9999/";
|
||||
let url = Url::parse(url_str).unwrap();
|
||||
let client = create_test_client();
|
||||
let result = fetch_no_follow(
|
||||
&client,
|
||||
FetchOnceArgs {
|
||||
url,
|
||||
maybe_accept: None,
|
||||
maybe_etag: None,
|
||||
maybe_auth_token: None,
|
||||
maybe_progress_guard: None,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
|
||||
assert!(matches!(result, Ok(FetchOnceResult::RequestError(_))));
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn get_text_from_cache(
|
||||
file_fetcher: &FileFetcher,
|
||||
|
|
888  cli/http_util.rs (diff suppressed because it is too large)
26  cli/jsr.rs
@@ -213,11 +213,11 @@ pub struct JsrFetchResolver {
   /// It can be large and we don't want to store it.
   info_by_nv: DashMap<PackageNv, Option<Arc<JsrPackageVersionInfo>>>,
   info_by_name: DashMap<String, Option<Arc<JsrPackageInfo>>>,
-  file_fetcher: FileFetcher,
+  file_fetcher: Arc<FileFetcher>,
 }

 impl JsrFetchResolver {
-  pub fn new(file_fetcher: FileFetcher) -> Self {
+  pub fn new(file_fetcher: Arc<FileFetcher>) -> Self {
     Self {
       nv_by_req: Default::default(),
       info_by_nv: Default::default(),
@@ -258,11 +258,16 @@ impl JsrFetchResolver {
     }
     let fetch_package_info = || async {
       let meta_url = jsr_url().join(&format!("{}/meta.json", name)).ok()?;
-      let file = self
-        .file_fetcher
+      let file_fetcher = self.file_fetcher.clone();
+      // spawn due to the lsp's `Send` requirement
+      let file = deno_core::unsync::spawn(async move {
+        file_fetcher
          .fetch(&meta_url, &PermissionsContainer::allow_all())
          .await
-        .ok()?;
+          .ok()
+      })
+      .await
+      .ok()??;
       serde_json::from_slice::<JsrPackageInfo>(&file.source).ok()
     };
     let info = fetch_package_info().await.map(Arc::new);
@@ -281,11 +286,16 @@ impl JsrFetchResolver {
       let meta_url = jsr_url()
         .join(&format!("{}/{}_meta.json", &nv.name, &nv.version))
         .ok()?;
-      let file = self
-        .file_fetcher
+      let file_fetcher = self.file_fetcher.clone();
+      // spawn due to the lsp's `Send` requirement
+      let file = deno_core::unsync::spawn(async move {
+        file_fetcher
          .fetch(&meta_url, &PermissionsContainer::allow_all())
          .await
-        .ok()?;
+          .ok()
+      })
+      .await
+      .ok()??;
       partial_jsr_package_version_info_from_slice(&file.source).ok()
     };
     let info = fetch_package_version_info().await.map(Arc::new);
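The recurring "spawn due to the lsp's `Send` requirement" comments all describe the same pattern: the fetch future is no longer Send (it creates and drives a client on the current runtime), but the language server's handler futures must be Send, so the non-Send work is spawned onto the current thread and only the join handle, which is Send, is awaited. A standalone sketch of the idea using plain tokio (Deno uses deno_core::unsync::spawn; the names below are illustrative):

    use std::rc::Rc;

    // Captures an Rc, so this future is not Send.
    async fn not_send_work() -> usize {
      let local_only = Rc::new(41);
      tokio::task::yield_now().await;
      *local_only + 1
    }

    fn assert_send<T: Send>(_: &T) {}

    // This function's own future *is* Send: it holds only the JoinHandle,
    // never the !Send future. It has to run inside a tokio::task::LocalSet
    // for spawn_local to succeed.
    async fn send_caller() -> usize {
      let handle = tokio::task::spawn_local(not_send_work());
      assert_send(&handle); // JoinHandle<usize> is Send
      handle.await.unwrap()
    }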
@ -1112,7 +1112,7 @@ impl ConfigData {
|
|||
scope: &ModuleSpecifier,
|
||||
parent: Option<(&ModuleSpecifier, &ConfigData)>,
|
||||
settings: &Settings,
|
||||
file_fetcher: Option<&FileFetcher>,
|
||||
file_fetcher: Option<&Arc<FileFetcher>>,
|
||||
) -> Self {
|
||||
if let Some(specifier) = config_file_specifier {
|
||||
match ConfigFile::from_specifier(
|
||||
|
@ -1167,7 +1167,7 @@ impl ConfigData {
|
|||
scope: &ModuleSpecifier,
|
||||
parent: Option<(&ModuleSpecifier, &ConfigData)>,
|
||||
settings: &Settings,
|
||||
file_fetcher: Option<&FileFetcher>,
|
||||
file_fetcher: Option<&Arc<FileFetcher>>,
|
||||
) -> Self {
|
||||
let (settings, workspace_folder) = settings.get_for_specifier(scope);
|
||||
let mut watched_files = HashMap::with_capacity(6);
|
||||
|
@ -1411,9 +1411,18 @@ impl ConfigData {
|
|||
}
|
||||
if import_map_value.is_none() {
|
||||
if let Some(file_fetcher) = file_fetcher {
|
||||
let fetch_result = file_fetcher
|
||||
.fetch(specifier, &PermissionsContainer::allow_all())
|
||||
.await;
|
||||
// spawn due to the lsp's `Send` requirement
|
||||
let fetch_result = deno_core::unsync::spawn({
|
||||
let file_fetcher = file_fetcher.clone();
|
||||
let specifier = specifier.clone();
|
||||
async move {
|
||||
file_fetcher
|
||||
.fetch(&specifier, &PermissionsContainer::allow_all())
|
||||
.await
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
let value_result = fetch_result.and_then(|f| {
|
||||
serde_json::from_slice::<Value>(&f.source).map_err(|e| e.into())
|
||||
});
|
||||
|
@ -1601,7 +1610,7 @@ impl ConfigTree {
|
|||
&mut self,
|
||||
settings: &Settings,
|
||||
workspace_files: &BTreeSet<ModuleSpecifier>,
|
||||
file_fetcher: &FileFetcher,
|
||||
file_fetcher: &Arc<FileFetcher>,
|
||||
) {
|
||||
lsp_log!("Refreshing configuration tree...");
|
||||
let mut scopes = BTreeMap::new();
|
||||
|
|
|
@ -17,7 +17,7 @@ use super::search::PackageSearchApi;
|
|||
|
||||
#[derive(Debug)]
|
||||
pub struct CliJsrSearchApi {
|
||||
file_fetcher: FileFetcher,
|
||||
file_fetcher: Arc<FileFetcher>,
|
||||
resolver: JsrFetchResolver,
|
||||
search_cache: DashMap<String, Arc<Vec<String>>>,
|
||||
versions_cache: DashMap<String, Arc<Vec<Version>>>,
|
||||
|
@ -25,7 +25,7 @@ pub struct CliJsrSearchApi {
|
|||
}
|
||||
|
||||
impl CliJsrSearchApi {
|
||||
pub fn new(file_fetcher: FileFetcher) -> Self {
|
||||
pub fn new(file_fetcher: Arc<FileFetcher>) -> Self {
|
||||
let resolver = JsrFetchResolver::new(file_fetcher.clone());
|
||||
Self {
|
||||
file_fetcher,
|
||||
|
@ -56,11 +56,15 @@ impl PackageSearchApi for CliJsrSearchApi {
|
|||
}
|
||||
let mut search_url = jsr_api_url().join("packages")?;
|
||||
search_url.query_pairs_mut().append_pair("query", query);
|
||||
let file = self
|
||||
.file_fetcher
|
||||
let file_fetcher = self.file_fetcher.clone();
|
||||
// spawn due to the lsp's `Send` requirement
|
||||
let file = deno_core::unsync::spawn(async move {
|
||||
file_fetcher
|
||||
.fetch(&search_url, &PermissionsContainer::allow_all())
|
||||
.await?
|
||||
.into_text_decoded()?;
|
||||
.into_text_decoded()
|
||||
})
|
||||
.await??;
|
||||
let names = Arc::new(parse_jsr_search_response(&file.source)?);
|
||||
self.search_cache.insert(query.to_string(), names.clone());
|
||||
Ok(names)
|
||||
|
|
|
@ -92,7 +92,7 @@ use crate::args::Flags;
|
|||
use crate::factory::CliFactory;
|
||||
use crate::file_fetcher::FileFetcher;
|
||||
use crate::graph_util;
|
||||
use crate::http_util::HttpClient;
|
||||
use crate::http_util::HttpClientProvider;
|
||||
use crate::lsp::config::ConfigWatchedFileType;
|
||||
use crate::lsp::logging::init_log_file;
|
||||
use crate::lsp::tsc::file_text_changes_to_workspace_edit;
|
||||
|
@ -191,7 +191,7 @@ pub struct Inner {
|
|||
/// The collection of documents that the server is currently handling, either
|
||||
/// on disk or "open" within the client.
|
||||
pub documents: Documents,
|
||||
http_client: Arc<HttpClient>,
|
||||
http_client_provider: Arc<HttpClientProvider>,
|
||||
initial_cwd: PathBuf,
|
||||
jsr_search_api: CliJsrSearchApi,
|
||||
/// Handles module registries, which allow discovery of modules
|
||||
|
@ -475,10 +475,10 @@ impl LanguageServer {
|
|||
impl Inner {
|
||||
fn new(client: Client) -> Self {
|
||||
let cache = LspCache::default();
|
||||
let http_client = Arc::new(HttpClient::new(None, None));
|
||||
let http_client_provider = Arc::new(HttpClientProvider::new(None, None));
|
||||
let module_registry = ModuleRegistry::new(
|
||||
cache.deno_dir().registries_folder_path(),
|
||||
http_client.clone(),
|
||||
http_client_provider.clone(),
|
||||
);
|
||||
let jsr_search_api =
|
||||
CliJsrSearchApi::new(module_registry.file_fetcher.clone());
|
||||
|
@ -508,7 +508,7 @@ impl Inner {
|
|||
diagnostics_state,
|
||||
diagnostics_server,
|
||||
documents,
|
||||
http_client,
|
||||
http_client_provider,
|
||||
initial_cwd: initial_cwd.clone(),
|
||||
jsr_search_api,
|
||||
project_version: 0,
|
||||
|
@ -652,7 +652,7 @@ impl Inner {
|
|||
.unwrap_or_else(|_| RootCertStore::empty());
|
||||
let root_cert_store_provider =
|
||||
Arc::new(LspRootCertStoreProvider(root_cert_store));
|
||||
self.http_client = Arc::new(HttpClient::new(
|
||||
self.http_client_provider = Arc::new(HttpClientProvider::new(
|
||||
Some(root_cert_store_provider),
|
||||
workspace_settings
|
||||
.unsafely_ignore_certificate_errors
|
||||
|
@ -660,7 +660,7 @@ impl Inner {
|
|||
));
|
||||
self.module_registry = ModuleRegistry::new(
|
||||
deno_dir.registries_folder_path(),
|
||||
self.http_client.clone(),
|
||||
self.http_client_provider.clone(),
|
||||
);
|
||||
let workspace_settings = self.config.workspace_settings();
|
||||
for (registry, enabled) in workspace_settings.suggest.imports.hosts.iter() {
|
||||
|
@ -939,11 +939,12 @@ impl Inner {
|
|||
self.cache.global().clone(),
|
||||
CacheSetting::RespectHeaders,
|
||||
true,
|
||||
self.http_client.clone(),
|
||||
self.http_client_provider.clone(),
|
||||
Default::default(),
|
||||
None,
|
||||
);
|
||||
file_fetcher.set_download_log_level(super::logging::lsp_log_level());
|
||||
let file_fetcher = Arc::new(file_fetcher);
|
||||
self
|
||||
.config
|
||||
.tree
|
||||
|
@ -983,7 +984,7 @@ impl Inner {
|
|||
LspResolver::from_config(
|
||||
&self.config,
|
||||
&self.cache,
|
||||
Some(&self.http_client),
|
||||
Some(&self.http_client_provider),
|
||||
)
|
||||
.await,
|
||||
);
|
||||
|
@ -1108,7 +1109,7 @@ impl Inner {
|
|||
async fn refresh_npm_specifiers(&mut self) {
|
||||
let package_reqs = self.documents.npm_package_reqs();
|
||||
let resolver = self.resolver.clone();
|
||||
// spawn to avoid the LSP's Send requirements
|
||||
// spawn due to the lsp's `Send` requirement
|
||||
let handle =
|
||||
spawn(async move { resolver.set_npm_package_reqs(&package_reqs).await });
|
||||
if let Err(err) = handle.await.unwrap() {
|
||||
|
@ -2966,7 +2967,7 @@ impl tower_lsp::LanguageServer for LanguageServer {
|
|||
);
|
||||
}
|
||||
|
||||
(inner.client.clone(), inner.http_client.clone())
|
||||
(inner.client.clone(), inner.http_client_provider.clone())
|
||||
};
|
||||
|
||||
for registration in registrations {
|
||||
|
|
|
@ -18,14 +18,14 @@ use super::search::PackageSearchApi;
|
|||
|
||||
#[derive(Debug)]
|
||||
pub struct CliNpmSearchApi {
|
||||
file_fetcher: FileFetcher,
|
||||
file_fetcher: Arc<FileFetcher>,
|
||||
resolver: NpmFetchResolver,
|
||||
search_cache: DashMap<String, Arc<Vec<String>>>,
|
||||
versions_cache: DashMap<String, Arc<Vec<Version>>>,
|
||||
}
|
||||
|
||||
impl CliNpmSearchApi {
|
||||
pub fn new(file_fetcher: FileFetcher) -> Self {
|
||||
pub fn new(file_fetcher: Arc<FileFetcher>) -> Self {
|
||||
let resolver = NpmFetchResolver::new(file_fetcher.clone());
|
||||
Self {
|
||||
file_fetcher,
|
||||
|
@ -52,11 +52,14 @@ impl PackageSearchApi for CliNpmSearchApi {
|
|||
search_url
|
||||
.query_pairs_mut()
|
||||
.append_pair("text", &format!("{} boost-exact:false", query));
|
||||
let file = self
|
||||
.file_fetcher
|
||||
let file_fetcher = self.file_fetcher.clone();
|
||||
let file = deno_core::unsync::spawn(async move {
|
||||
file_fetcher
|
||||
.fetch(&search_url, &PermissionsContainer::allow_all())
|
||||
.await?
|
||||
.into_text_decoded()?;
|
||||
.into_text_decoded()
|
||||
})
|
||||
.await??;
|
||||
let names = Arc::new(parse_npm_search_response(&file.source)?);
|
||||
self.search_cache.insert(query.to_string(), names.clone());
|
||||
Ok(names)
|
||||
|
|
|
@ -17,7 +17,7 @@ use crate::cache::GlobalHttpCache;
|
|||
use crate::cache::HttpCache;
|
||||
use crate::file_fetcher::FetchOptions;
|
||||
use crate::file_fetcher::FileFetcher;
|
||||
use crate::http_util::HttpClient;
|
||||
use crate::http_util::HttpClientProvider;
|
||||
|
||||
use deno_core::anyhow::anyhow;
|
||||
use deno_core::error::AnyError;
|
||||
|
@ -417,12 +417,15 @@ enum VariableItems {
|
|||
pub struct ModuleRegistry {
|
||||
origins: HashMap<String, Vec<RegistryConfiguration>>,
|
||||
pub location: PathBuf,
|
||||
pub file_fetcher: FileFetcher,
|
||||
pub file_fetcher: Arc<FileFetcher>,
|
||||
http_cache: Arc<GlobalHttpCache>,
|
||||
}
|
||||
|
||||
impl ModuleRegistry {
|
||||
pub fn new(location: PathBuf, http_client: Arc<HttpClient>) -> Self {
|
||||
pub fn new(
|
||||
location: PathBuf,
|
||||
http_client_provider: Arc<HttpClientProvider>,
|
||||
) -> Self {
|
||||
// the http cache should always be the global one for registry completions
|
||||
let http_cache = Arc::new(GlobalHttpCache::new(
|
||||
location.clone(),
|
||||
|
@ -432,7 +435,7 @@ impl ModuleRegistry {
|
|||
http_cache.clone(),
|
||||
CacheSetting::RespectHeaders,
|
||||
true,
|
||||
http_client,
|
||||
http_client_provider,
|
||||
Default::default(),
|
||||
None,
|
||||
);
|
||||
|
@ -441,7 +444,7 @@ impl ModuleRegistry {
|
|||
Self {
|
||||
origins: HashMap::new(),
|
||||
location,
|
||||
file_fetcher,
|
||||
file_fetcher: Arc::new(file_fetcher),
|
||||
http_cache,
|
||||
}
|
||||
}
|
||||
|
@ -512,15 +515,21 @@ impl ModuleRegistry {
|
|||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
) -> Result<Vec<RegistryConfiguration>, AnyError> {
|
||||
let fetch_result = self
|
||||
.file_fetcher
|
||||
// spawn due to the lsp's `Send` requirement
|
||||
let fetch_result = deno_core::unsync::spawn({
|
||||
let file_fetcher = self.file_fetcher.clone();
|
||||
let specifier = specifier.clone();
|
||||
async move {
|
||||
file_fetcher
|
||||
.fetch_with_options(FetchOptions {
|
||||
specifier,
|
||||
specifier: &specifier,
|
||||
permissions: &PermissionsContainer::allow_all(),
|
||||
maybe_accept: Some("application/vnd.deno.reg.v2+json, application/vnd.deno.reg.v1+json;q=0.9, application/json;q=0.8"),
|
||||
maybe_cache_setting: None,
|
||||
})
|
||||
.await;
|
||||
.await
|
||||
}
|
||||
}).await?;
|
||||
// if there is an error fetching, we will cache an empty file, so that
|
||||
// subsequent requests they are just an empty doc which will error without
|
||||
// needing to connect to the remote URL. We will cache it for 1 week.
|
||||
|
@ -612,13 +621,20 @@ impl ModuleRegistry {
|
|||
None,
|
||||
)
|
||||
.ok()?;
|
||||
let file = self
|
||||
.file_fetcher
|
||||
let file_fetcher = self.file_fetcher.clone();
|
||||
// spawn due to the lsp's `Send` requirement
|
||||
let file = deno_core::unsync::spawn({
|
||||
async move {
|
||||
file_fetcher
|
||||
.fetch(&endpoint, &PermissionsContainer::allow_all())
|
||||
.await
|
||||
.ok()?
|
||||
.into_text_decoded()
|
||||
.ok()?;
|
||||
.ok()
|
||||
}
|
||||
})
|
||||
.await
|
||||
.ok()??;
|
||||
let documentation: lsp::Documentation =
|
||||
serde_json::from_str(&file.source).ok()?;
|
||||
return match documentation {
|
||||
|
@ -978,13 +994,18 @@ impl ModuleRegistry {
|
|||
url: &str,
|
||||
) -> Option<lsp::Documentation> {
|
||||
let specifier = Url::parse(url).ok()?;
|
||||
let file = self
|
||||
.file_fetcher
|
||||
let file_fetcher = self.file_fetcher.clone();
|
||||
// spawn due to the lsp's `Send` requirement
|
||||
let file = deno_core::unsync::spawn(async move {
|
||||
file_fetcher
|
||||
.fetch(&specifier, &PermissionsContainer::allow_all())
|
||||
.await
|
||||
.ok()?
|
||||
.into_text_decoded()
|
||||
.ok()?;
|
||||
.ok()
|
||||
})
|
||||
.await
|
||||
.ok()??;
|
||||
serde_json::from_str(&file.source).ok()
|
||||
}
|
||||
|
||||
|
@ -1037,8 +1058,12 @@ impl ModuleRegistry {
|
|||
|
||||
async fn get_items(&self, url: &str) -> Option<VariableItems> {
|
||||
let specifier = ModuleSpecifier::parse(url).ok()?;
|
||||
let file = self
|
||||
.file_fetcher
|
||||
// spawn due to the lsp's `Send` requirement
|
||||
let file = deno_core::unsync::spawn({
|
||||
let file_fetcher = self.file_fetcher.clone();
|
||||
let specifier = specifier.clone();
|
||||
async move {
|
||||
file_fetcher
|
||||
.fetch(&specifier, &PermissionsContainer::allow_all())
|
||||
.await
|
||||
.map_err(|err| {
|
||||
|
@ -1049,7 +1074,11 @@ impl ModuleRegistry {
|
|||
})
|
||||
.ok()?
|
||||
.into_text_decoded()
|
||||
.ok()?;
|
||||
.ok()
|
||||
}
|
||||
})
|
||||
.await
|
||||
.ok()??;
|
||||
let items: VariableItems = serde_json::from_str(&file.source)
|
||||
.map_err(|err| {
|
||||
error!(
|
||||
|
@ -1075,8 +1104,12 @@ impl ModuleRegistry {
|
|||
error!("Internal error mapping endpoint \"{}\". {}", url, err);
|
||||
})
|
||||
.ok()?;
|
||||
let file = self
|
||||
.file_fetcher
|
||||
// spawn due to the lsp's `Send` requirement
|
||||
let file = deno_core::unsync::spawn({
|
||||
let file_fetcher = self.file_fetcher.clone();
|
||||
let specifier = specifier.clone();
|
||||
async move {
|
||||
file_fetcher
|
||||
.fetch(&specifier, &PermissionsContainer::allow_all())
|
||||
.await
|
||||
.map_err(|err| {
|
||||
|
@ -1087,7 +1120,11 @@ impl ModuleRegistry {
|
|||
})
|
||||
.ok()?
|
||||
.into_text_decoded()
|
||||
.ok()?;
|
||||
.ok()
|
||||
}
|
||||
})
|
||||
.await
|
||||
.ok()??;
|
||||
let items: VariableItems = serde_json::from_str(&file.source)
|
||||
.map_err(|err| {
|
||||
error!(
|
||||
|
@ -1264,8 +1301,10 @@ mod tests {
|
|||
let _g = test_util::http_server();
|
||||
let temp_dir = TempDir::new();
|
||||
let location = temp_dir.path().join("registries").to_path_buf();
|
||||
let mut module_registry =
|
||||
ModuleRegistry::new(location, Arc::new(HttpClient::new(None, None)));
|
||||
let mut module_registry = ModuleRegistry::new(
|
||||
location,
|
||||
Arc::new(HttpClientProvider::new(None, None)),
|
||||
);
|
||||
module_registry.enable("http://localhost:4545/").await;
|
||||
let range = lsp::Range {
|
||||
start: lsp::Position {
|
||||
|
@ -1322,8 +1361,10 @@ mod tests {
|
|||
let _g = test_util::http_server();
|
||||
let temp_dir = TempDir::new();
|
||||
let location = temp_dir.path().join("registries").to_path_buf();
|
||||
let mut module_registry =
|
||||
ModuleRegistry::new(location, Arc::new(HttpClient::new(None, None)));
|
||||
let mut module_registry = ModuleRegistry::new(
|
||||
location,
|
||||
Arc::new(HttpClientProvider::new(None, None)),
|
||||
);
|
||||
module_registry.enable("http://localhost:4545/").await;
|
||||
let range = lsp::Range {
|
||||
start: lsp::Position {
|
||||
|
@ -1542,8 +1583,10 @@ mod tests {
|
|||
let _g = test_util::http_server();
|
||||
let temp_dir = TempDir::new();
|
||||
let location = temp_dir.path().join("registries").to_path_buf();
|
||||
let mut module_registry =
|
||||
ModuleRegistry::new(location, Arc::new(HttpClient::new(None, None)));
|
||||
let mut module_registry = ModuleRegistry::new(
|
||||
location,
|
||||
Arc::new(HttpClientProvider::new(None, None)),
|
||||
);
|
||||
module_registry
|
||||
.enable_custom("http://localhost:4545/lsp/registries/deno-import-intellisense-key-first.json")
|
||||
.await
|
||||
|
@ -1612,8 +1655,10 @@ mod tests {
|
|||
let _g = test_util::http_server();
|
||||
let temp_dir = TempDir::new();
|
||||
let location = temp_dir.path().join("registries").to_path_buf();
|
||||
let mut module_registry =
|
||||
ModuleRegistry::new(location, Arc::new(HttpClient::new(None, None)));
|
||||
let mut module_registry = ModuleRegistry::new(
|
||||
location,
|
||||
Arc::new(HttpClientProvider::new(None, None)),
|
||||
);
|
||||
module_registry
|
||||
.enable_custom("http://localhost:4545/lsp/registries/deno-import-intellisense-complex.json")
|
||||
.await
|
||||
|
@ -1663,8 +1708,10 @@ mod tests {
|
|||
let _g = test_util::http_server();
|
||||
let temp_dir = TempDir::new();
|
||||
let location = temp_dir.path().join("registries").to_path_buf();
|
||||
let module_registry =
|
||||
ModuleRegistry::new(location, Arc::new(HttpClient::new(None, None)));
|
||||
let module_registry = ModuleRegistry::new(
|
||||
location,
|
||||
Arc::new(HttpClientProvider::new(None, None)),
|
||||
);
|
||||
let result = module_registry.check_origin("http://localhost:4545").await;
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
|
@ -1674,8 +1721,10 @@ mod tests {
|
|||
let _g = test_util::http_server();
|
||||
let temp_dir = TempDir::new();
|
||||
let location = temp_dir.path().join("registries").to_path_buf();
|
||||
let module_registry =
|
||||
ModuleRegistry::new(location, Arc::new(HttpClient::new(None, None)));
|
||||
let module_registry = ModuleRegistry::new(
|
||||
location,
|
||||
Arc::new(HttpClientProvider::new(None, None)),
|
||||
);
|
||||
let result = module_registry.check_origin("https://example.com").await;
|
||||
assert!(result.is_err());
|
||||
let err = result.unwrap_err().to_string();
|
||||
|
|
|
@ -4,7 +4,7 @@ use crate::args::create_default_npmrc;
|
|||
use crate::args::package_json;
|
||||
use crate::args::CacheSetting;
|
||||
use crate::graph_util::CliJsrUrlProvider;
|
||||
use crate::http_util::HttpClient;
|
||||
use crate::http_util::HttpClientProvider;
|
||||
use crate::jsr::JsrCacheResolver;
|
||||
use crate::lsp::config::Config;
|
||||
use crate::lsp::config::ConfigData;
|
||||
|
@ -82,12 +82,14 @@ impl LspResolver {
|
|||
pub async fn from_config(
|
||||
config: &Config,
|
||||
cache: &LspCache,
|
||||
http_client: Option<&Arc<HttpClient>>,
|
||||
http_client_provider: Option<&Arc<HttpClientProvider>>,
|
||||
) -> Self {
|
||||
let config_data = config.tree.root_data();
|
||||
let mut npm_resolver = None;
|
||||
let mut node_resolver = None;
|
||||
if let (Some(http_client), Some(config_data)) = (http_client, config_data) {
|
||||
if let (Some(http_client), Some(config_data)) =
|
||||
(http_client_provider, config_data)
|
||||
{
|
||||
npm_resolver = create_npm_resolver(config_data, cache, http_client).await;
|
||||
node_resolver = create_node_resolver(npm_resolver.as_ref());
|
||||
}
|
||||
|
@ -313,7 +315,7 @@ impl LspResolver {
|
|||
async fn create_npm_resolver(
|
||||
config_data: &ConfigData,
|
||||
cache: &LspCache,
|
||||
http_client: &Arc<HttpClient>,
|
||||
http_client_provider: &Arc<HttpClientProvider>,
|
||||
) -> Option<Arc<dyn CliNpmResolver>> {
|
||||
let node_modules_dir = config_data
|
||||
.node_modules_dir
|
||||
|
@ -326,7 +328,7 @@ async fn create_npm_resolver(
|
|||
})
|
||||
} else {
|
||||
CliNpmResolverCreateOptions::Managed(CliNpmResolverManagedCreateOptions {
|
||||
http_client: http_client.clone(),
|
||||
http_client_provider: http_client_provider.clone(),
|
||||
snapshot: match config_data.lockfile.as_ref() {
|
||||
Some(lockfile) => {
|
||||
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(
|
||||
|
|
28  cli/npm/managed/cache/registry_info.rs (vendored)
|
@ -19,7 +19,7 @@ use deno_npm::npm_rc::ResolvedNpmRc;
|
|||
use deno_npm::registry::NpmPackageInfo;
|
||||
|
||||
use crate::args::CacheSetting;
|
||||
use crate::http_util::HttpClient;
|
||||
use crate::http_util::HttpClientProvider;
|
||||
use crate::npm::common::maybe_auth_header_for_npm_registry;
|
||||
use crate::util::progress_bar::ProgressBar;
|
||||
|
||||
|
@ -56,6 +56,7 @@ type PendingRegistryLoadFuture =
|
|||
#[derive(Debug)]
|
||||
pub struct RegistryInfoDownloader {
|
||||
cache: Arc<NpmCache>,
|
||||
http_client_provider: Arc<HttpClientProvider>,
|
||||
npmrc: Arc<ResolvedNpmRc>,
|
||||
progress_bar: ProgressBar,
|
||||
memory_cache: Mutex<HashMap<String, MemoryCacheItem>>,
|
||||
|
@ -64,11 +65,13 @@ pub struct RegistryInfoDownloader {
|
|||
impl RegistryInfoDownloader {
|
||||
pub fn new(
|
||||
cache: Arc<NpmCache>,
|
||||
http_client_provider: Arc<HttpClientProvider>,
|
||||
npmrc: Arc<ResolvedNpmRc>,
|
||||
progress_bar: ProgressBar,
|
||||
) -> Self {
|
||||
Self {
|
||||
cache,
|
||||
http_client_provider,
|
||||
npmrc,
|
||||
progress_bar,
|
||||
memory_cache: Default::default(),
|
||||
|
@ -78,18 +81,12 @@ impl RegistryInfoDownloader {
|
|||
pub async fn load_package_info(
|
||||
&self,
|
||||
name: &str,
|
||||
current_runtime_http_client: &Arc<HttpClient>,
|
||||
) -> Result<Option<Arc<NpmPackageInfo>>, AnyError> {
|
||||
let registry_url = self.npmrc.get_registry_url(name);
|
||||
let registry_config = self.npmrc.get_registry_config(name);
|
||||
|
||||
self
|
||||
.load_package_info_inner(
|
||||
name,
|
||||
registry_url,
|
||||
registry_config,
|
||||
current_runtime_http_client,
|
||||
)
|
||||
.load_package_info_inner(name, registry_url, registry_config)
|
||||
.await
|
||||
.with_context(|| {
|
||||
format!(
|
||||
|
@ -105,7 +102,6 @@ impl RegistryInfoDownloader {
|
|||
name: &str,
|
||||
registry_url: &Url,
|
||||
registry_config: &RegistryConfig,
|
||||
current_runtime_http_client: &Arc<HttpClient>,
|
||||
) -> Result<Option<Arc<NpmPackageInfo>>, AnyError> {
|
||||
if *self.cache.cache_setting() == CacheSetting::Only {
|
||||
return Err(custom_error(
|
||||
|
@ -121,12 +117,8 @@ impl RegistryInfoDownloader {
|
|||
if let Some(cache_item) = mem_cache.get(name) {
|
||||
(false, cache_item.clone())
|
||||
} else {
|
||||
let future = self.create_load_future(
|
||||
name,
|
||||
registry_url,
|
||||
registry_config,
|
||||
current_runtime_http_client,
|
||||
);
|
||||
let future =
|
||||
self.create_load_future(name, registry_url, registry_config);
|
||||
let cache_item = MemoryCacheItem::PendingFuture(future);
|
||||
mem_cache.insert(name.to_string(), cache_item.clone());
|
||||
(true, cache_item)
|
||||
|
@@ -215,20 +207,20 @@ impl RegistryInfoDownloader {
     name: &str,
     registry_url: &Url,
     registry_config: &RegistryConfig,
-    current_runtime_http_client: &Arc<HttpClient>,
   ) -> Shared<PendingRegistryLoadFuture> {
     let package_url = self.get_package_url(name, registry_url);
     let maybe_auth_header = maybe_auth_header_for_npm_registry(registry_config);
     let guard = self.progress_bar.update(package_url.as_str());
     let cache = self.cache.clone();
-    let http_client = current_runtime_http_client.clone();
+    let http_client_provider = self.http_client_provider.clone();
     let name = name.to_string();
     // force this future to be polled on the current runtime because it's not
     // safe to share `HttpClient`s across runtimes and because a restart of
     // npm resolution might cause this package not to be resolved again
     // causing the future to never be polled
     deno_core::unsync::spawn(async move {
-      let maybe_bytes = http_client
+      let maybe_bytes = http_client_provider
+        .get_or_create()?
         .download_with_progress(package_url, maybe_auth_header, &guard)
         .await?;
       match maybe_bytes {
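The comment in the hunk above captures the two constraints behind the refactor: the HTTP client has to be created and polled on the runtime that uses it, and the cached Shared future has to be driven eagerly so it still completes if the original caller stops polling. A standalone sketch of that shape (hypothetical types; it uses tokio::spawn for brevity where Deno spawns onto the current runtime's local task set):

    use futures::future::{BoxFuture, FutureExt, Shared};

    type LoadFuture = Shared<BoxFuture<'static, Result<String, String>>>;

    // Start the download immediately in its own task, then hand out a Shared
    // handle: the work finishes even if one caller drops its copy, and later
    // callers observe the same cached result.
    fn create_load_future(url: String) -> LoadFuture {
      let handle = tokio::spawn(async move {
        // placeholder for the real download via a per-runtime client
        Ok::<String, String>(format!("contents of {url}"))
      });
      async move { handle.await.map_err(|e| e.to_string())? }
        .boxed()
        .shared()
    }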
20
cli/npm/managed/cache/tarball.rs
vendored
20
cli/npm/managed/cache/tarball.rs
vendored
|
@ -18,7 +18,7 @@ use deno_runtime::deno_fs::FileSystem;
|
|||
use deno_semver::package::PackageNv;
|
||||
|
||||
use crate::args::CacheSetting;
|
||||
use crate::http_util::HttpClient;
|
||||
use crate::http_util::HttpClientProvider;
|
||||
use crate::npm::common::maybe_auth_header_for_npm_registry;
|
||||
use crate::util::progress_bar::ProgressBar;
|
||||
|
||||
|
@ -46,6 +46,7 @@ enum MemoryCacheItem {
|
|||
pub struct TarballCache {
|
||||
cache: Arc<NpmCache>,
|
||||
fs: Arc<dyn FileSystem>,
|
||||
http_client_provider: Arc<HttpClientProvider>,
|
||||
npmrc: Arc<ResolvedNpmRc>,
|
||||
progress_bar: ProgressBar,
|
||||
memory_cache: Mutex<HashMap<PackageNv, MemoryCacheItem>>,
|
||||
|
@ -55,12 +56,14 @@ impl TarballCache {
|
|||
pub fn new(
|
||||
cache: Arc<NpmCache>,
|
||||
fs: Arc<dyn FileSystem>,
|
||||
http_client_provider: Arc<HttpClientProvider>,
|
||||
npmrc: Arc<ResolvedNpmRc>,
|
||||
progress_bar: ProgressBar,
|
||||
) -> Self {
|
||||
Self {
|
||||
cache,
|
||||
fs,
|
||||
http_client_provider,
|
||||
npmrc,
|
||||
progress_bar,
|
||||
memory_cache: Default::default(),
|
||||
|
@ -71,11 +74,9 @@ impl TarballCache {
|
|||
&self,
|
||||
package: &PackageNv,
|
||||
dist: &NpmPackageVersionDistInfo,
|
||||
// it's not safe to share these across runtimes
|
||||
http_client_for_runtime: &Arc<HttpClient>,
|
||||
) -> Result<(), AnyError> {
|
||||
self
|
||||
.ensure_package_inner(package, dist, http_client_for_runtime)
|
||||
.ensure_package_inner(package, dist)
|
||||
.await
|
||||
.with_context(|| format!("Failed caching npm package '{}'.", package))
|
||||
}
|
||||
|
@ -84,18 +85,13 @@ impl TarballCache {
|
|||
&self,
|
||||
package_nv: &PackageNv,
|
||||
dist: &NpmPackageVersionDistInfo,
|
||||
http_client_for_runtime: &Arc<HttpClient>,
|
||||
) -> Result<(), AnyError> {
|
||||
let (created, cache_item) = {
|
||||
let mut mem_cache = self.memory_cache.lock();
|
||||
if let Some(cache_item) = mem_cache.get(package_nv) {
|
||||
(false, cache_item.clone())
|
||||
} else {
|
||||
let future = self.create_setup_future(
|
||||
package_nv.clone(),
|
||||
dist.clone(),
|
||||
http_client_for_runtime.clone(),
|
||||
);
|
||||
let future = self.create_setup_future(package_nv.clone(), dist.clone());
|
||||
let cache_item = MemoryCacheItem::PendingFuture(future);
|
||||
mem_cache.insert(package_nv.clone(), cache_item.clone());
|
||||
(true, cache_item)
|
||||
|
@ -131,7 +127,6 @@ impl TarballCache {
|
|||
&self,
|
||||
package_nv: PackageNv,
|
||||
dist: NpmPackageVersionDistInfo,
|
||||
http_client_for_runtime: Arc<HttpClient>,
|
||||
) -> Shared<BoxFuture<'static, Result<(), Arc<AnyError>>>> {
|
||||
let registry_url = self.npmrc.get_registry_url(&package_nv.name);
|
||||
let registry_config =
|
||||
|
@ -142,6 +137,7 @@ impl TarballCache {
|
|||
let progress_bar = self.progress_bar.clone();
|
||||
let package_folder =
|
||||
cache.package_folder_for_nv_and_url(&package_nv, registry_url);
|
||||
let http_client_provider = self.http_client_provider.clone();
|
||||
|
||||
deno_core::unsync::spawn(async move {
|
||||
let should_use_cache = cache.should_use_cache_for_package(&package_nv);
|
||||
|
@ -167,7 +163,7 @@ impl TarballCache {
|
|||
maybe_auth_header_for_npm_registry(®istry_config);
|
||||
|
||||
let guard = progress_bar.update(&dist.tarball);
|
||||
let maybe_bytes = http_client_for_runtime
|
||||
let maybe_bytes = http_client_provider.get_or_create()?
|
||||
.download_with_progress(&dist.tarball, maybe_auth_header, &guard)
|
||||
.await?;
|
||||
match maybe_bytes {
|
||||
|
|
|
@ -5,6 +5,7 @@ use std::path::PathBuf;
|
|||
use std::sync::Arc;
|
||||
|
||||
use cache::RegistryInfoDownloader;
|
||||
use cache::TarballCache;
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
|
@ -31,7 +32,7 @@ use crate::args::NpmProcessState;
|
|||
use crate::args::NpmProcessStateKind;
|
||||
use crate::args::PackageJsonDepsProvider;
|
||||
use crate::cache::FastInsecureHasher;
|
||||
use crate::http_util::HttpClient;
|
||||
use crate::http_util::HttpClientProvider;
|
||||
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
|
||||
use crate::util::progress_bar::ProgressBar;
|
||||
|
||||
|
@ -66,7 +67,7 @@ pub struct CliNpmResolverManagedCreateOptions {
|
|||
pub snapshot: CliNpmResolverManagedSnapshotOption,
|
||||
pub maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
|
||||
pub fs: Arc<dyn deno_runtime::deno_fs::FileSystem>,
|
||||
pub http_client: Arc<crate::http_util::HttpClient>,
|
||||
pub http_client_provider: Arc<crate::http_util::HttpClientProvider>,
|
||||
pub npm_global_cache_dir: PathBuf,
|
||||
pub cache_setting: crate::args::CacheSetting,
|
||||
pub text_only_progress_bar: crate::util::progress_bar::ProgressBar,
|
||||
|
@ -90,7 +91,7 @@ pub async fn create_managed_npm_resolver_for_lsp(
|
|||
};
|
||||
create_inner(
|
||||
options.fs,
|
||||
options.http_client,
|
||||
options.http_client_provider,
|
||||
options.maybe_lockfile,
|
||||
npm_api,
|
||||
npm_cache,
|
||||
|
@ -111,7 +112,7 @@ pub async fn create_managed_npm_resolver(
|
|||
let snapshot = resolve_snapshot(&npm_api, options.snapshot).await?;
|
||||
Ok(create_inner(
|
||||
options.fs,
|
||||
options.http_client,
|
||||
options.http_client_provider,
|
||||
options.maybe_lockfile,
|
||||
npm_api,
|
||||
npm_cache,
|
||||
|
@ -127,7 +128,7 @@ pub async fn create_managed_npm_resolver(
|
|||
#[allow(clippy::too_many_arguments)]
|
||||
fn create_inner(
|
||||
fs: Arc<dyn deno_runtime::deno_fs::FileSystem>,
|
||||
http_client: Arc<HttpClient>,
|
||||
http_client_provider: Arc<HttpClientProvider>,
|
||||
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
|
||||
npm_api: Arc<CliNpmRegistryApi>,
|
||||
npm_cache: Arc<NpmCache>,
|
||||
|
@ -143,12 +144,19 @@ fn create_inner(
|
|||
snapshot,
|
||||
maybe_lockfile.clone(),
|
||||
));
|
||||
let tarball_cache = Arc::new(TarballCache::new(
|
||||
npm_cache.clone(),
|
||||
fs.clone(),
|
||||
http_client_provider.clone(),
|
||||
npm_rc.clone(),
|
||||
text_only_progress_bar.clone(),
|
||||
));
|
||||
let fs_resolver = create_npm_fs_resolver(
|
||||
fs.clone(),
|
||||
npm_cache.clone(),
|
||||
npm_rc.clone(),
|
||||
&text_only_progress_bar,
|
||||
resolution.clone(),
|
||||
tarball_cache.clone(),
|
||||
node_modules_dir_path,
|
||||
npm_system_info.clone(),
|
||||
);
|
||||
|
@ -167,13 +175,12 @@ fn create_inner(
|
|||
Arc::new(ManagedCliNpmResolver::new(
|
||||
fs,
|
||||
fs_resolver,
|
||||
http_client,
|
||||
maybe_lockfile,
|
||||
npm_api,
|
||||
npm_cache,
|
||||
npm_rc,
|
||||
package_json_deps_installer,
|
||||
resolution,
|
||||
tarball_cache,
|
||||
text_only_progress_bar,
|
||||
npm_system_info,
|
||||
))
|
||||
|
@ -196,9 +203,9 @@ fn create_api(
|
|||
) -> Arc<CliNpmRegistryApi> {
|
||||
Arc::new(CliNpmRegistryApi::new(
|
||||
npm_cache.clone(),
|
||||
options.http_client.clone(),
|
||||
RegistryInfoDownloader::new(
|
||||
npm_cache,
|
||||
options.http_client_provider.clone(),
|
||||
options.npmrc.clone(),
|
||||
options.text_only_progress_bar.clone(),
|
||||
),
|
||||
|
@ -256,13 +263,12 @@ async fn snapshot_from_lockfile(
|
|||
pub struct ManagedCliNpmResolver {
|
||||
fs: Arc<dyn FileSystem>,
|
||||
fs_resolver: Arc<dyn NpmPackageFsResolver>,
|
||||
http_client: Arc<HttpClient>,
|
||||
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
|
||||
npm_api: Arc<CliNpmRegistryApi>,
|
||||
npm_cache: Arc<NpmCache>,
|
||||
npm_rc: Arc<ResolvedNpmRc>,
|
||||
package_json_deps_installer: Arc<PackageJsonDepsInstaller>,
|
||||
resolution: Arc<NpmResolution>,
|
||||
tarball_cache: Arc<TarballCache>,
|
||||
text_only_progress_bar: ProgressBar,
|
||||
npm_system_info: NpmSystemInfo,
|
||||
}
|
||||
|
@ -280,27 +286,25 @@ impl ManagedCliNpmResolver {
|
|||
pub fn new(
|
||||
fs: Arc<dyn FileSystem>,
|
||||
fs_resolver: Arc<dyn NpmPackageFsResolver>,
|
||||
http_client: Arc<HttpClient>,
|
||||
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
|
||||
npm_api: Arc<CliNpmRegistryApi>,
|
||||
npm_cache: Arc<NpmCache>,
|
||||
npm_rc: Arc<ResolvedNpmRc>,
|
||||
package_json_deps_installer: Arc<PackageJsonDepsInstaller>,
|
||||
resolution: Arc<NpmResolution>,
|
||||
tarball_cache: Arc<TarballCache>,
|
||||
text_only_progress_bar: ProgressBar,
|
||||
npm_system_info: NpmSystemInfo,
|
||||
) -> Self {
|
||||
Self {
|
||||
fs,
|
||||
fs_resolver,
|
||||
http_client,
|
||||
maybe_lockfile,
|
||||
npm_api,
|
||||
npm_cache,
|
||||
npm_rc,
|
||||
package_json_deps_installer,
|
||||
text_only_progress_bar,
|
||||
resolution,
|
||||
tarball_cache,
|
||||
npm_system_info,
|
||||
}
|
||||
}
|
||||
|
@ -381,7 +385,7 @@ impl ManagedCliNpmResolver {
|
|||
}
|
||||
|
||||
self.resolution.add_package_reqs(packages).await?;
|
||||
self.fs_resolver.cache_packages(&self.http_client).await?;
|
||||
self.fs_resolver.cache_packages().await?;
|
||||
|
||||
// If there's a lock file, update it with all discovered npm packages
|
||||
if let Some(lockfile) = &self.maybe_lockfile {
|
||||
|
@ -435,7 +439,7 @@ impl ManagedCliNpmResolver {
|
|||
}
|
||||
|
||||
pub async fn cache_packages(&self) -> Result<(), AnyError> {
|
||||
self.fs_resolver.cache_packages(&self.http_client).await
|
||||
self.fs_resolver.cache_packages().await
|
||||
}
|
||||
|
||||
/// Resolves a package requirement for deno graph. This should only be
|
||||
|
@ -567,19 +571,18 @@ impl CliNpmResolver for ManagedCliNpmResolver {
|
|||
create_npm_fs_resolver(
|
||||
self.fs.clone(),
|
||||
self.npm_cache.clone(),
|
||||
self.npm_rc.clone(),
|
||||
&self.text_only_progress_bar,
|
||||
npm_resolution.clone(),
|
||||
self.tarball_cache.clone(),
|
||||
self.root_node_modules_path().map(ToOwned::to_owned),
|
||||
self.npm_system_info.clone(),
|
||||
),
|
||||
self.http_client.clone(),
|
||||
self.maybe_lockfile.clone(),
|
||||
self.npm_api.clone(),
|
||||
self.npm_cache.clone(),
|
||||
self.npm_rc.clone(),
|
||||
self.package_json_deps_installer.clone(),
|
||||
npm_resolution,
|
||||
self.tarball_cache.clone(),
|
||||
self.text_only_progress_bar.clone(),
|
||||
self.npm_system_info.clone(),
|
||||
))
|
||||
|
|
|
@ -16,20 +16,18 @@ use deno_npm::registry::NpmRegistryApi;
|
|||
use deno_npm::registry::NpmRegistryPackageInfoLoadError;
|
||||
|
||||
use crate::args::CacheSetting;
|
||||
use crate::http_util::HttpClient;
|
||||
use crate::util::sync::AtomicFlag;
|
||||
|
||||
use super::cache::NpmCache;
|
||||
use super::cache::RegistryInfoDownloader;
|
||||
|
||||
// todo(dsherret): make this per worker and make HttpClient an Rc<HttpClient>
|
||||
// todo(dsherret): make this per worker
|
||||
#[derive(Debug)]
|
||||
pub struct CliNpmRegistryApi(Option<Arc<CliNpmRegistryApiInner>>);
|
||||
|
||||
impl CliNpmRegistryApi {
|
||||
pub fn new(
|
||||
cache: Arc<NpmCache>,
|
||||
http_client: Arc<HttpClient>,
|
||||
registry_info_downloader: RegistryInfoDownloader,
|
||||
) -> Self {
|
||||
Self(Some(Arc::new(CliNpmRegistryApiInner {
|
||||
|
@ -37,7 +35,6 @@ impl CliNpmRegistryApi {
|
|||
force_reload_flag: Default::default(),
|
||||
mem_cache: Default::default(),
|
||||
previously_reloaded_packages: Default::default(),
|
||||
http_client,
|
||||
registry_info_downloader,
|
||||
})))
|
||||
}
|
||||
|
@ -111,7 +108,6 @@ struct CliNpmRegistryApiInner {
|
|||
force_reload_flag: AtomicFlag,
|
||||
mem_cache: Mutex<HashMap<String, CacheItem>>,
|
||||
previously_reloaded_packages: Mutex<HashSet<String>>,
|
||||
http_client: Arc<HttpClient>,
|
||||
registry_info_downloader: RegistryInfoDownloader,
|
||||
}
|
||||
|
||||
|
@ -144,7 +140,7 @@ impl CliNpmRegistryApiInner {
|
|||
}
|
||||
}
|
||||
api.registry_info_downloader
|
||||
.load_package_info(&name, &api.http_client)
|
||||
.load_package_info(&name)
|
||||
.await
|
||||
.map_err(Arc::new)
|
||||
}
|
||||
|
|
|
@ -21,7 +21,6 @@ use deno_runtime::deno_fs::FileSystem;
|
|||
use deno_runtime::deno_node::NodePermissions;
|
||||
use deno_runtime::deno_node::NodeResolutionMode;
|
||||
|
||||
use crate::http_util::HttpClient;
|
||||
use crate::npm::managed::cache::TarballCache;
|
||||
|
||||
/// Part of the resolution that interacts with the file system.
|
||||
|
@ -50,10 +49,7 @@ pub trait NpmPackageFsResolver: Send + Sync {
|
|||
specifier: &ModuleSpecifier,
|
||||
) -> Result<Option<NpmPackageCacheFolderId>, AnyError>;
|
||||
|
||||
async fn cache_packages(
|
||||
&self,
|
||||
http_client: &Arc<HttpClient>,
|
||||
) -> Result<(), AnyError>;
|
||||
async fn cache_packages(&self) -> Result<(), AnyError>;
|
||||
|
||||
fn ensure_read_permission(
|
||||
&self,
|
||||
|
@ -131,13 +127,12 @@ impl RegistryReadPermissionChecker {
|
|||
pub async fn cache_packages(
|
||||
packages: Vec<NpmResolutionPackage>,
|
||||
tarball_cache: &Arc<TarballCache>,
|
||||
http_client: &Arc<HttpClient>,
|
||||
) -> Result<(), AnyError> {
|
||||
let mut futures_unordered = futures::stream::FuturesUnordered::new();
|
||||
for package in packages {
|
||||
futures_unordered.push(async move {
|
||||
tarball_cache
|
||||
.ensure_package(&package.id.nv, &package.dist, http_client)
|
||||
.ensure_package(&package.id.nv, &package.dist)
|
||||
.await
|
||||
});
|
||||
}
|
||||
|
|
|
@ -20,8 +20,6 @@ use deno_runtime::deno_fs::FileSystem;
|
|||
use deno_runtime::deno_node::NodePermissions;
|
||||
use deno_runtime::deno_node::NodeResolutionMode;
|
||||
|
||||
use crate::http_util::HttpClient;
|
||||
|
||||
use super::super::super::common::types_package_name;
|
||||
use super::super::cache::NpmCache;
|
||||
use super::super::cache::TarballCache;
|
||||
|
@ -129,20 +127,12 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
|
|||
)
|
||||
}
|
||||
|
||||
async fn cache_packages(
|
||||
&self,
|
||||
http_client: &Arc<HttpClient>,
|
||||
) -> Result<(), AnyError> {
|
||||
async fn cache_packages(&self) -> Result<(), AnyError> {
|
||||
let package_partitions = self
|
||||
.resolution
|
||||
.all_system_packages_partitioned(&self.system_info);
|
||||
|
||||
cache_packages(
|
||||
package_partitions.packages,
|
||||
&self.tarball_cache,
|
||||
http_client,
|
||||
)
|
||||
.await?;
|
||||
cache_packages(package_partitions.packages, &self.tarball_cache).await?;
|
||||
|
||||
// create the copy package folders
|
||||
for copy in package_partitions.copy_packages {
|
||||
|
|
|
@ -14,7 +14,6 @@ use std::path::PathBuf;
|
|||
use std::sync::Arc;
|
||||
|
||||
use crate::cache::CACHE_PERM;
|
||||
use crate::http_util::HttpClient;
|
||||
use crate::npm::cache_dir::mixed_case_package_name_decode;
|
||||
use crate::util::fs::atomic_write_file;
|
||||
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
|
||||
|
@ -229,14 +228,10 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
|
|||
Ok(get_package_folder_id_from_folder_name(&folder_name))
|
||||
}
|
||||
|
||||
async fn cache_packages(
|
||||
&self,
|
||||
http_client: &Arc<HttpClient>,
|
||||
) -> Result<(), AnyError> {
|
||||
async fn cache_packages(&self) -> Result<(), AnyError> {
|
||||
sync_resolution_with_fs(
|
||||
&self.resolution.snapshot(),
|
||||
&self.cache,
|
||||
http_client,
|
||||
&self.progress_bar,
|
||||
&self.tarball_cache,
|
||||
&self.root_node_modules_path,
|
||||
|
@ -260,7 +255,6 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
|
|||
async fn sync_resolution_with_fs(
|
||||
snapshot: &NpmResolutionSnapshot,
|
||||
cache: &Arc<NpmCache>,
|
||||
http_client: &Arc<HttpClient>,
|
||||
progress_bar: &ProgressBar,
|
||||
tarball_cache: &Arc<TarballCache>,
|
||||
root_node_modules_dir_path: &Path,
|
||||
|
@ -330,7 +324,7 @@ async fn sync_resolution_with_fs(
|
|||
let bin_entries_to_setup = bin_entries.clone();
|
||||
cache_futures.push(async move {
|
||||
tarball_cache
|
||||
.ensure_package(&package.id.nv, &package.dist, http_client)
|
||||
.ensure_package(&package.id.nv, &package.dist)
|
||||
.await?;
|
||||
let pb_guard = progress_bar.update_with_prompt(
|
||||
ProgressMessagePrompt::Initialize,
|
||||
|
|
|
@ -7,7 +7,6 @@ mod local;
|
|||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_npm::npm_rc::ResolvedNpmRc;
|
||||
use deno_npm::NpmSystemInfo;
|
||||
use deno_runtime::deno_fs::FileSystem;
|
||||
|
||||
|
@ -25,18 +24,12 @@ use super::resolution::NpmResolution;
|
|||
pub fn create_npm_fs_resolver(
|
||||
fs: Arc<dyn FileSystem>,
|
||||
npm_cache: Arc<NpmCache>,
|
||||
npm_rc: Arc<ResolvedNpmRc>,
|
||||
progress_bar: &ProgressBar,
|
||||
resolution: Arc<NpmResolution>,
|
||||
tarball_cache: Arc<TarballCache>,
|
||||
maybe_node_modules_path: Option<PathBuf>,
|
||||
system_info: NpmSystemInfo,
|
||||
) -> Arc<dyn NpmPackageFsResolver> {
|
||||
let tarball_cache = Arc::new(TarballCache::new(
|
||||
npm_cache.clone(),
|
||||
fs.clone(),
|
||||
npm_rc,
|
||||
progress_bar.clone(),
|
||||
));
|
||||
match maybe_node_modules_path {
|
||||
Some(node_modules_folder) => Arc::new(LocalNpmPackageResolver::new(
|
||||
npm_cache,
|
||||
|
|
|
@ -100,11 +100,11 @@ pub trait CliNpmResolver: NpmResolver {
|
|||
pub struct NpmFetchResolver {
|
||||
nv_by_req: DashMap<PackageReq, Option<PackageNv>>,
|
||||
info_by_name: DashMap<String, Option<Arc<NpmPackageInfo>>>,
|
||||
file_fetcher: FileFetcher,
|
||||
file_fetcher: Arc<FileFetcher>,
|
||||
}
|
||||
|
||||
impl NpmFetchResolver {
|
||||
pub fn new(file_fetcher: FileFetcher) -> Self {
|
||||
pub fn new(file_fetcher: Arc<FileFetcher>) -> Self {
|
||||
Self {
|
||||
nv_by_req: Default::default(),
|
||||
info_by_name: Default::default(),
|
||||
|
@ -140,11 +140,16 @@ impl NpmFetchResolver {
|
|||
}
|
||||
let fetch_package_info = || async {
|
||||
let info_url = npm_registry_url().join(name).ok()?;
|
||||
let file = self
|
||||
.file_fetcher
|
||||
let file_fetcher = self.file_fetcher.clone();
|
||||
// spawn due to the lsp's `Send` requirement
|
||||
let file = deno_core::unsync::spawn(async move {
|
||||
file_fetcher
|
||||
.fetch(&info_url, &PermissionsContainer::allow_all())
|
||||
.await
|
||||
.ok()?;
|
||||
.ok()
|
||||
})
|
||||
.await
|
||||
.ok()??;
|
||||
serde_json::from_slice::<NpmPackageInfo>(&file.source).ok()
|
||||
};
|
||||
let info = fetch_package_info().await.map(Arc::new);
|
||||
|
|
|
@ -40,7 +40,7 @@ use crate::args::PermissionFlags;
|
|||
use crate::args::UnstableConfig;
|
||||
use crate::cache::DenoDir;
|
||||
use crate::file_fetcher::FileFetcher;
|
||||
use crate::http_util::HttpClient;
|
||||
use crate::http_util::HttpClientProvider;
|
||||
use crate::npm::CliNpmResolver;
|
||||
use crate::npm::InnerCliNpmResolverRef;
|
||||
use crate::util::progress_bar::ProgressBar;
|
||||
|
@ -417,9 +417,9 @@ pub fn unpack_into_dir(
|
|||
Ok(exe_path)
|
||||
}
|
||||
pub struct DenoCompileBinaryWriter<'a> {
|
||||
file_fetcher: &'a FileFetcher,
|
||||
client: &'a HttpClient,
|
||||
deno_dir: &'a DenoDir,
|
||||
file_fetcher: &'a FileFetcher,
|
||||
http_client_provider: &'a HttpClientProvider,
|
||||
npm_resolver: &'a dyn CliNpmResolver,
|
||||
npm_system_info: NpmSystemInfo,
|
||||
package_json_deps_provider: &'a PackageJsonDepsProvider,
|
||||
|
@ -428,17 +428,17 @@ pub struct DenoCompileBinaryWriter<'a> {
|
|||
impl<'a> DenoCompileBinaryWriter<'a> {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new(
|
||||
file_fetcher: &'a FileFetcher,
|
||||
client: &'a HttpClient,
|
||||
deno_dir: &'a DenoDir,
|
||||
file_fetcher: &'a FileFetcher,
|
||||
http_client_provider: &'a HttpClientProvider,
|
||||
npm_resolver: &'a dyn CliNpmResolver,
|
||||
npm_system_info: NpmSystemInfo,
|
||||
package_json_deps_provider: &'a PackageJsonDepsProvider,
|
||||
) -> Self {
|
||||
Self {
|
||||
file_fetcher,
|
||||
client,
|
||||
deno_dir,
|
||||
file_fetcher,
|
||||
http_client_provider,
|
||||
npm_resolver,
|
||||
npm_system_info,
|
||||
package_json_deps_provider,
|
||||
|
@ -536,7 +536,8 @@ impl<'a> DenoCompileBinaryWriter<'a> {
|
|||
let progress = progress_bars.update(&download_url);
|
||||
|
||||
self
|
||||
.client
|
||||
.http_client_provider
|
||||
.get_or_create()?
|
||||
.download_with_progress(download_url, None, &progress)
|
||||
.await?
|
||||
};
|
||||
|
|
|
@ -15,7 +15,7 @@ use crate::args::StorageKeyResolver;
|
|||
use crate::cache::Caches;
|
||||
use crate::cache::DenoDirProvider;
|
||||
use crate::cache::NodeAnalysisCache;
|
||||
use crate::http_util::HttpClient;
|
||||
use crate::http_util::HttpClientProvider;
|
||||
use crate::node::CliCjsCodeAnalyzer;
|
||||
use crate::npm::create_cli_npm_resolver;
|
||||
use crate::npm::CliNpmResolverByonmCreateOptions;
|
||||
|
@ -346,7 +346,7 @@ pub async fn run(
|
|||
cell: Default::default(),
|
||||
});
|
||||
let progress_bar = ProgressBar::new(ProgressBarStyle::TextOnly);
|
||||
let http_client = Arc::new(HttpClient::new(
|
||||
let http_client_provider = Arc::new(HttpClientProvider::new(
|
||||
Some(root_cert_store_provider.clone()),
|
||||
metadata.unsafely_ignore_certificate_errors.clone(),
|
||||
));
|
||||
|
@ -390,7 +390,7 @@ pub async fn run(
|
|||
snapshot: CliNpmResolverManagedSnapshotOption::Specified(Some(snapshot)),
|
||||
maybe_lockfile: None,
|
||||
fs: fs.clone(),
|
||||
http_client: http_client.clone(),
|
||||
http_client_provider: http_client_provider.clone(),
|
||||
npm_global_cache_dir,
|
||||
cache_setting,
|
||||
text_only_progress_bar: progress_bar,
|
||||
|
@ -448,7 +448,7 @@ pub async fn run(
|
|||
snapshot: CliNpmResolverManagedSnapshotOption::Specified(None),
|
||||
maybe_lockfile: None,
|
||||
fs: fs.clone(),
|
||||
http_client: http_client.clone(),
|
||||
http_client_provider: http_client_provider.clone(),
|
||||
npm_global_cache_dir,
|
||||
cache_setting,
|
||||
text_only_progress_bar: progress_bar,
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
use crate::args::CompileFlags;
|
||||
use crate::args::Flags;
|
||||
use crate::factory::CliFactory;
|
||||
use crate::http_util::HttpClientProvider;
|
||||
use crate::standalone::is_standalone_binary;
|
||||
use deno_core::anyhow::bail;
|
||||
use deno_core::anyhow::Context;
|
||||
|
@ -26,6 +27,7 @@ pub async fn compile(
|
|||
let module_graph_creator = factory.module_graph_creator().await?;
|
||||
let parsed_source_cache = factory.parsed_source_cache();
|
||||
let binary_writer = factory.create_compile_binary_writer().await?;
|
||||
let http_client = factory.http_client_provider();
|
||||
let module_specifier = cli_options.resolve_main_module()?;
|
||||
let module_roots = {
|
||||
let mut vec = Vec::with_capacity(compile_flags.include.len() + 1);
|
||||
|
@ -49,6 +51,7 @@ pub async fn compile(
|
|||
}
|
||||
|
||||
let output_path = resolve_compile_executable_output_path(
|
||||
http_client,
|
||||
&compile_flags,
|
||||
cli_options.initial_cwd(),
|
||||
)
|
||||
|
@ -174,6 +177,7 @@ fn validate_output_path(output_path: &Path) -> Result<(), AnyError> {
|
|||
}
|
||||
|
||||
async fn resolve_compile_executable_output_path(
|
||||
http_client_provider: &HttpClientProvider,
|
||||
compile_flags: &CompileFlags,
|
||||
current_dir: &Path,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
|
@ -184,7 +188,8 @@ async fn resolve_compile_executable_output_path(
|
|||
let mut output_path = if let Some(out) = output_flag.as_ref() {
|
||||
let mut out_path = PathBuf::from(out);
|
||||
if out.ends_with('/') || out.ends_with('\\') {
|
||||
if let Some(infer_file_name) = infer_name_from_url(&module_specifier)
|
||||
if let Some(infer_file_name) =
|
||||
infer_name_from_url(http_client_provider, &module_specifier)
|
||||
.await
|
||||
.map(PathBuf::from)
|
||||
{
|
||||
|
@ -199,7 +204,7 @@ async fn resolve_compile_executable_output_path(
|
|||
};
|
||||
|
||||
if output_flag.is_none() {
|
||||
output_path = infer_name_from_url(&module_specifier)
|
||||
output_path = infer_name_from_url(http_client_provider, &module_specifier)
|
||||
.await
|
||||
.map(PathBuf::from)
|
||||
}
|
||||
|
@ -237,7 +242,9 @@ mod test {
|
|||
|
||||
#[tokio::test]
|
||||
async fn resolve_compile_executable_output_path_target_linux() {
|
||||
let http_client = HttpClientProvider::new(None, None);
|
||||
let path = resolve_compile_executable_output_path(
|
||||
&http_client,
|
||||
&CompileFlags {
|
||||
source_file: "mod.ts".to_string(),
|
||||
output: Some(String::from("./file")),
|
||||
|
@ -259,7 +266,9 @@ mod test {
|
|||
|
||||
#[tokio::test]
|
||||
async fn resolve_compile_executable_output_path_target_windows() {
|
||||
let http_client = HttpClientProvider::new(None, None);
|
||||
let path = resolve_compile_executable_output_path(
|
||||
&http_client,
|
||||
&CompileFlags {
|
||||
source_file: "mod.ts".to_string(),
|
||||
output: Some(String::from("./file")),
|
||||
|
|
|
@ -12,7 +12,7 @@ use crate::args::TypeCheckMode;
|
|||
use crate::args::UninstallFlags;
|
||||
use crate::args::UninstallKind;
|
||||
use crate::factory::CliFactory;
|
||||
use crate::http_util::HttpClient;
|
||||
use crate::http_util::HttpClientProvider;
|
||||
use crate::util::fs::canonicalize_path_maybe_not_exists;
|
||||
|
||||
use deno_config::ConfigFlag;
|
||||
|
@ -133,15 +133,21 @@ fn get_installer_root() -> Result<PathBuf, io::Error> {
|
|||
Ok(home_path)
|
||||
}
|
||||
|
||||
pub async fn infer_name_from_url(url: &Url) -> Option<String> {
|
||||
pub async fn infer_name_from_url(
|
||||
http_client_provider: &HttpClientProvider,
|
||||
url: &Url,
|
||||
) -> Option<String> {
|
||||
// If there's an absolute url with no path, eg. https://my-cli.com
|
||||
// perform a request, and see if it redirects another file instead.
|
||||
let mut url = url.clone();
|
||||
|
||||
if url.path() == "/" {
|
||||
let client = HttpClient::new(None, None);
|
||||
if let Ok(res) = client.get_redirected_response(url.clone(), None).await {
|
||||
url = res.url().clone();
|
||||
if let Ok(client) = http_client_provider.get_or_create() {
|
||||
if let Ok(redirected_url) =
|
||||
client.get_redirected_url(url.clone(), None).await
|
||||
{
|
||||
url = redirected_url;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -295,16 +301,20 @@ pub async fn install_command(
|
|||
.await?
|
||||
.load_and_type_check_files(&[install_flags_global.module_url.clone()])
|
||||
.await?;
|
||||
let http_client = factory.http_client_provider();
|
||||
|
||||
// create the install shim
|
||||
create_install_shim(flags, install_flags_global).await
|
||||
create_install_shim(http_client, flags, install_flags_global).await
|
||||
}
|
||||
|
||||
async fn create_install_shim(
|
||||
http_client_provider: &HttpClientProvider,
|
||||
flags: Flags,
|
||||
install_flags_global: InstallFlagsGlobal,
|
||||
) -> Result<(), AnyError> {
|
||||
let shim_data = resolve_shim_data(&flags, &install_flags_global).await?;
|
||||
let shim_data =
|
||||
resolve_shim_data(http_client_provider, &flags, &install_flags_global)
|
||||
.await?;
|
||||
|
||||
// ensure directory exists
|
||||
if let Ok(metadata) = fs::metadata(&shim_data.installation_dir) {
|
||||
|
@ -355,6 +365,7 @@ struct ShimData {
|
|||
}
|
||||
|
||||
async fn resolve_shim_data(
|
||||
http_client_provider: &HttpClientProvider,
|
||||
flags: &Flags,
|
||||
install_flags_global: &InstallFlagsGlobal,
|
||||
) -> Result<ShimData, AnyError> {
|
||||
|
@ -372,7 +383,7 @@ async fn resolve_shim_data(
|
|||
let name = if install_flags_global.name.is_some() {
|
||||
install_flags_global.name.clone()
|
||||
} else {
|
||||
infer_name_from_url(&module_url).await
|
||||
infer_name_from_url(http_client_provider, &module_url).await
|
||||
};
|
||||
|
||||
let name = match name {
|
||||
|
@ -561,8 +572,10 @@ mod tests {
|
|||
|
||||
#[tokio::test]
|
||||
async fn install_infer_name_from_url() {
|
||||
let http_client = HttpClientProvider::new(None, None);
|
||||
assert_eq!(
|
||||
infer_name_from_url(
|
||||
&http_client,
|
||||
&Url::parse("https://example.com/abc/server.ts").unwrap()
|
||||
)
|
||||
.await,
|
||||
|
@ -570,6 +583,7 @@ mod tests {
|
|||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(
|
||||
&http_client,
|
||||
&Url::parse("https://example.com/abc/main.ts").unwrap()
|
||||
)
|
||||
.await,
|
||||
|
@ -577,6 +591,7 @@ mod tests {
|
|||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(
|
||||
&http_client,
|
||||
&Url::parse("https://example.com/abc/mod.ts").unwrap()
|
||||
)
|
||||
.await,
|
||||
|
@ -584,6 +599,7 @@ mod tests {
|
|||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(
|
||||
&http_client,
|
||||
&Url::parse("https://example.com/ab%20c/mod.ts").unwrap()
|
||||
)
|
||||
.await,
|
||||
|
@ -591,6 +607,7 @@ mod tests {
|
|||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(
|
||||
&http_client,
|
||||
&Url::parse("https://example.com/abc/index.ts").unwrap()
|
||||
)
|
||||
.await,
|
||||
|
@ -598,42 +615,67 @@ mod tests {
|
|||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(
|
||||
&http_client,
|
||||
&Url::parse("https://example.com/abc/cli.ts").unwrap()
|
||||
)
|
||||
.await,
|
||||
Some("abc".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(&Url::parse("https://example.com/main.ts").unwrap())
|
||||
infer_name_from_url(
|
||||
&http_client,
|
||||
&Url::parse("https://example.com/main.ts").unwrap()
|
||||
)
|
||||
.await,
|
||||
Some("main".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(&Url::parse("https://example.com").unwrap()).await,
|
||||
None
|
||||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(&Url::parse("file:///abc/server.ts").unwrap()).await,
|
||||
Some("server".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(&Url::parse("file:///abc/main.ts").unwrap()).await,
|
||||
Some("abc".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(&Url::parse("file:///ab%20c/main.ts").unwrap()).await,
|
||||
Some("ab c".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(&Url::parse("file:///main.ts").unwrap()).await,
|
||||
Some("main".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(&Url::parse("file:///").unwrap()).await,
|
||||
infer_name_from_url(
|
||||
&http_client,
|
||||
&Url::parse("https://example.com").unwrap()
|
||||
)
|
||||
.await,
|
||||
None
|
||||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(
|
||||
&http_client,
|
||||
&Url::parse("file:///abc/server.ts").unwrap()
|
||||
)
|
||||
.await,
|
||||
Some("server".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(
|
||||
&http_client,
|
||||
&Url::parse("file:///abc/main.ts").unwrap()
|
||||
)
|
||||
.await,
|
||||
Some("abc".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(
|
||||
&http_client,
|
||||
&Url::parse("file:///ab%20c/main.ts").unwrap()
|
||||
)
|
||||
.await,
|
||||
Some("ab c".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(
|
||||
&http_client,
|
||||
&Url::parse("file:///main.ts").unwrap()
|
||||
)
|
||||
.await,
|
||||
Some("main".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(&http_client, &Url::parse("file:///").unwrap()).await,
|
||||
None
|
||||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(
|
||||
&http_client,
|
||||
&Url::parse("https://example.com/abc@0.1.0").unwrap()
|
||||
)
|
||||
.await,
|
||||
|
@ -641,6 +683,7 @@ mod tests {
|
|||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(
|
||||
&http_client,
|
||||
&Url::parse("https://example.com/abc@0.1.0/main.ts").unwrap()
|
||||
)
|
||||
.await,
|
||||
|
@ -648,47 +691,71 @@ mod tests {
|
|||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(
|
||||
&http_client,
|
||||
&Url::parse("https://example.com/abc@def@ghi").unwrap()
|
||||
)
|
||||
.await,
|
||||
Some("abc".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(&Url::parse("https://example.com/@abc.ts").unwrap())
|
||||
infer_name_from_url(
|
||||
&http_client,
|
||||
&Url::parse("https://example.com/@abc.ts").unwrap()
|
||||
)
|
||||
.await,
|
||||
Some("@abc".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(
|
||||
&http_client,
|
||||
&Url::parse("https://example.com/@abc/mod.ts").unwrap()
|
||||
)
|
||||
.await,
|
||||
Some("@abc".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(&Url::parse("file:///@abc.ts").unwrap()).await,
|
||||
infer_name_from_url(
|
||||
&http_client,
|
||||
&Url::parse("file:///@abc.ts").unwrap()
|
||||
)
|
||||
.await,
|
||||
Some("@abc".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(&Url::parse("file:///@abc/cli.ts").unwrap()).await,
|
||||
infer_name_from_url(
|
||||
&http_client,
|
||||
&Url::parse("file:///@abc/cli.ts").unwrap()
|
||||
)
|
||||
.await,
|
||||
Some("@abc".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(&Url::parse("npm:cowsay@1.2/cowthink").unwrap())
|
||||
infer_name_from_url(
|
||||
&http_client,
|
||||
&Url::parse("npm:cowsay@1.2/cowthink").unwrap()
|
||||
)
|
||||
.await,
|
||||
Some("cowthink".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(&Url::parse("npm:cowsay@1.2/cowthink/test").unwrap())
|
||||
infer_name_from_url(
|
||||
&http_client,
|
||||
&Url::parse("npm:cowsay@1.2/cowthink/test").unwrap()
|
||||
)
|
||||
.await,
|
||||
Some("cowsay".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(&Url::parse("npm:cowsay@1.2").unwrap()).await,
|
||||
infer_name_from_url(&http_client, &Url::parse("npm:cowsay@1.2").unwrap())
|
||||
.await,
|
||||
Some("cowsay".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
infer_name_from_url(&Url::parse("npm:@types/node@1.2").unwrap()).await,
|
||||
infer_name_from_url(
|
||||
&http_client,
|
||||
&Url::parse("npm:@types/node@1.2").unwrap()
|
||||
)
|
||||
.await,
|
||||
None
|
||||
);
|
||||
}
|
||||
|
@ -700,6 +767,7 @@ mod tests {
|
|||
std::fs::create_dir(&bin_dir).unwrap();
|
||||
|
||||
create_install_shim(
|
||||
&HttpClientProvider::new(None, None),
|
||||
Flags {
|
||||
unstable_config: UnstableConfig {
|
||||
legacy_flag_enabled: true,
|
||||
|
@ -740,6 +808,7 @@ mod tests {
|
|||
#[tokio::test]
|
||||
async fn install_inferred_name() {
|
||||
let shim_data = resolve_shim_data(
|
||||
&HttpClientProvider::new(None, None),
|
||||
&Flags::default(),
|
||||
&InstallFlagsGlobal {
|
||||
module_url: "http://localhost:4545/echo_server.ts".to_string(),
|
||||
|
@ -762,6 +831,7 @@ mod tests {
|
|||
#[tokio::test]
|
||||
async fn install_unstable_legacy() {
|
||||
let shim_data = resolve_shim_data(
|
||||
&HttpClientProvider::new(None, None),
|
||||
&Flags {
|
||||
unstable_config: UnstableConfig {
|
||||
legacy_flag_enabled: true,
|
||||
|
@ -795,6 +865,7 @@ mod tests {
|
|||
#[tokio::test]
|
||||
async fn install_unstable_features() {
|
||||
let shim_data = resolve_shim_data(
|
||||
&HttpClientProvider::new(None, None),
|
||||
&Flags {
|
||||
unstable_config: UnstableConfig {
|
||||
features: vec!["kv".to_string(), "cron".to_string()],
|
||||
|
@ -829,6 +900,7 @@ mod tests {
|
|||
#[tokio::test]
|
||||
async fn install_inferred_name_from_parent() {
|
||||
let shim_data = resolve_shim_data(
|
||||
&HttpClientProvider::new(None, None),
|
||||
&Flags::default(),
|
||||
&InstallFlagsGlobal {
|
||||
module_url: "http://localhost:4545/subdir/main.ts".to_string(),
|
||||
|
@ -852,6 +924,7 @@ mod tests {
|
|||
async fn install_inferred_name_after_redirect_for_no_path_url() {
|
||||
let _http_server_guard = test_util::http_server();
|
||||
let shim_data = resolve_shim_data(
|
||||
&HttpClientProvider::new(None, None),
|
||||
&Flags::default(),
|
||||
&InstallFlagsGlobal {
|
||||
module_url: "http://localhost:4550/?redirect_to=/subdir/redirects/a.ts"
|
||||
|
@ -879,6 +952,7 @@ mod tests {
|
|||
#[tokio::test]
|
||||
async fn install_custom_dir_option() {
|
||||
let shim_data = resolve_shim_data(
|
||||
&HttpClientProvider::new(None, None),
|
||||
&Flags::default(),
|
||||
&InstallFlagsGlobal {
|
||||
module_url: "http://localhost:4545/echo_server.ts".to_string(),
|
||||
|
@ -901,6 +975,7 @@ mod tests {
|
|||
#[tokio::test]
|
||||
async fn install_with_flags() {
|
||||
let shim_data = resolve_shim_data(
|
||||
&HttpClientProvider::new(None, None),
|
||||
&Flags {
|
||||
permissions: PermissionFlags {
|
||||
allow_net: Some(vec![]),
|
||||
|
@ -940,6 +1015,7 @@ mod tests {
|
|||
#[tokio::test]
|
||||
async fn install_prompt() {
|
||||
let shim_data = resolve_shim_data(
|
||||
&HttpClientProvider::new(None, None),
|
||||
&Flags {
|
||||
permissions: PermissionFlags {
|
||||
no_prompt: true,
|
||||
|
@ -972,6 +1048,7 @@ mod tests {
|
|||
#[tokio::test]
|
||||
async fn install_allow_all() {
|
||||
let shim_data = resolve_shim_data(
|
||||
&HttpClientProvider::new(None, None),
|
||||
&Flags {
|
||||
permissions: PermissionFlags {
|
||||
allow_all: true,
|
||||
|
@ -1005,6 +1082,7 @@ mod tests {
|
|||
async fn install_npm_lockfile_default() {
|
||||
let temp_dir = canonicalize_path(&env::temp_dir()).unwrap();
|
||||
let shim_data = resolve_shim_data(
|
||||
&HttpClientProvider::new(None, None),
|
||||
&Flags {
|
||||
permissions: PermissionFlags {
|
||||
allow_all: true,
|
||||
|
@ -1041,6 +1119,7 @@ mod tests {
|
|||
#[tokio::test]
|
||||
async fn install_npm_no_lock() {
|
||||
let shim_data = resolve_shim_data(
|
||||
&HttpClientProvider::new(None, None),
|
||||
&Flags {
|
||||
permissions: PermissionFlags {
|
||||
allow_all: true,
|
||||
|
@ -1083,6 +1162,7 @@ mod tests {
|
|||
let local_module_str = local_module.to_string_lossy();
|
||||
|
||||
create_install_shim(
|
||||
&HttpClientProvider::new(None, None),
|
||||
Flags::default(),
|
||||
InstallFlagsGlobal {
|
||||
module_url: local_module_str.to_string(),
|
||||
|
@ -1112,6 +1192,7 @@ mod tests {
|
|||
std::fs::create_dir(&bin_dir).unwrap();
|
||||
|
||||
create_install_shim(
|
||||
&HttpClientProvider::new(None, None),
|
||||
Flags::default(),
|
||||
InstallFlagsGlobal {
|
||||
module_url: "http://localhost:4545/echo_server.ts".to_string(),
|
||||
|
@ -1132,6 +1213,7 @@ mod tests {
|
|||
|
||||
// No force. Install failed.
|
||||
let no_force_result = create_install_shim(
|
||||
&HttpClientProvider::new(None, None),
|
||||
Flags::default(),
|
||||
InstallFlagsGlobal {
|
||||
module_url: "http://localhost:4545/cat.ts".to_string(), // using a different URL
|
||||
|
@ -1153,6 +1235,7 @@ mod tests {
|
|||
|
||||
// Force. Install success.
|
||||
let force_result = create_install_shim(
|
||||
&HttpClientProvider::new(None, None),
|
||||
Flags::default(),
|
||||
InstallFlagsGlobal {
|
||||
module_url: "http://localhost:4545/cat.ts".to_string(), // using a different URL
|
||||
|
@ -1180,6 +1263,7 @@ mod tests {
|
|||
assert!(result.is_ok());
|
||||
|
||||
let result = create_install_shim(
|
||||
&HttpClientProvider::new(None, None),
|
||||
Flags {
|
||||
config_flag: ConfigFlag::Path(config_file_path.to_string()),
|
||||
..Flags::default()
|
||||
|
@ -1212,6 +1296,7 @@ mod tests {
|
|||
std::fs::create_dir(&bin_dir).unwrap();
|
||||
|
||||
create_install_shim(
|
||||
&HttpClientProvider::new(None, None),
|
||||
Flags::default(),
|
||||
InstallFlagsGlobal {
|
||||
module_url: "http://localhost:4545/echo_server.ts".to_string(),
|
||||
|
@ -1252,6 +1337,7 @@ mod tests {
|
|||
std::fs::write(&local_module, "// Some JavaScript I guess").unwrap();
|
||||
|
||||
create_install_shim(
|
||||
&HttpClientProvider::new(None, None),
|
||||
Flags::default(),
|
||||
InstallFlagsGlobal {
|
||||
module_url: local_module_str.to_string(),
|
||||
|
@ -1293,6 +1379,7 @@ mod tests {
|
|||
assert!(result.is_ok());
|
||||
|
||||
let result = create_install_shim(
|
||||
&HttpClientProvider::new(None, None),
|
||||
Flags {
|
||||
import_map_path: Some(import_map_path.to_string()),
|
||||
..Flags::default()
|
||||
|
@ -1338,6 +1425,7 @@ mod tests {
|
|||
assert!(file_module_string.starts_with("file:///"));
|
||||
|
||||
let result = create_install_shim(
|
||||
&HttpClientProvider::new(None, None),
|
||||
Flags::default(),
|
||||
InstallFlagsGlobal {
|
||||
module_url: file_module_string.to_string(),
|
||||
|
|
|
@ -6,6 +6,8 @@ use deno_runtime::deno_fetch::reqwest;
|
|||
use lsp_types::Url;
|
||||
use serde::de::DeserializeOwned;
|
||||
|
||||
use crate::http_util::HttpClient;
|
||||
|
||||
#[derive(serde::Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct CreateAuthorizationResponse {
|
||||
|
@ -116,8 +118,8 @@ pub async fn parse_response<T: DeserializeOwned>(
|
|||
}
|
||||
|
||||
pub async fn get_scope(
|
||||
client: &reqwest::Client,
|
||||
registry_api_url: &str,
|
||||
client: &HttpClient,
|
||||
registry_api_url: &Url,
|
||||
scope: &str,
|
||||
) -> Result<reqwest::Response, AnyError> {
|
||||
let scope_url = format!("{}scopes/{}", registry_api_url, scope);
|
||||
|
@ -126,7 +128,7 @@ pub async fn get_scope(
|
|||
}
|
||||
|
||||
pub fn get_package_api_url(
|
||||
registry_api_url: &str,
|
||||
registry_api_url: &Url,
|
||||
scope: &str,
|
||||
package: &str,
|
||||
) -> String {
|
||||
|
@ -134,8 +136,8 @@ pub fn get_package_api_url(
|
|||
}
|
||||
|
||||
pub async fn get_package(
|
||||
client: &reqwest::Client,
|
||||
registry_api_url: &str,
|
||||
client: &HttpClient,
|
||||
registry_api_url: &Url,
|
||||
scope: &str,
|
||||
package: &str,
|
||||
) -> Result<reqwest::Response, AnyError> {
|
||||
|
|
|
@ -17,11 +17,13 @@ use deno_config::WorkspaceMemberConfig;
|
|||
use deno_core::anyhow::bail;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures::future::LocalBoxFuture;
|
||||
use deno_core::futures::stream::FuturesUnordered;
|
||||
use deno_core::futures::FutureExt;
|
||||
use deno_core::futures::StreamExt;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::serde_json::json;
|
||||
use deno_core::serde_json::Value;
|
||||
use deno_core::unsync::JoinSet;
|
||||
use deno_runtime::deno_fetch::reqwest;
|
||||
use deno_runtime::deno_fs::FileSystem;
|
||||
use deno_terminal::colors;
|
||||
|
@ -154,7 +156,7 @@ pub async fn publish(
|
|||
}
|
||||
|
||||
perform_publish(
|
||||
cli_factory.http_client(),
|
||||
&cli_factory.http_client_provider().get_or_create()?,
|
||||
prepared_data.publish_order_graph,
|
||||
prepared_data.package_by_name,
|
||||
auth_method,
|
||||
|
@ -523,9 +525,9 @@ pub enum Permission<'s> {
|
|||
}
|
||||
|
||||
async fn get_auth_headers(
|
||||
client: &reqwest::Client,
|
||||
registry_url: String,
|
||||
packages: Vec<Rc<PreparedPublishPackage>>,
|
||||
client: &HttpClient,
|
||||
registry_url: &Url,
|
||||
packages: &[Rc<PreparedPublishPackage>],
|
||||
auth_method: AuthMethod,
|
||||
) -> Result<HashMap<(String, String, String), Rc<str>>, AnyError> {
|
||||
let permissions = packages
|
||||
|
@ -600,7 +602,7 @@ async fn get_auth_headers(
|
|||
colors::cyan(res.user.name)
|
||||
);
|
||||
let authorization: Rc<str> = format!("Bearer {}", res.token).into();
|
||||
for pkg in &packages {
|
||||
for pkg in packages {
|
||||
authorizations.insert(
|
||||
(pkg.scope.clone(), pkg.package.clone(), pkg.version.clone()),
|
||||
authorization.clone(),
|
||||
|
@ -620,7 +622,7 @@ async fn get_auth_headers(
|
|||
}
|
||||
AuthMethod::Token(token) => {
|
||||
let authorization: Rc<str> = format!("Bearer {}", token).into();
|
||||
for pkg in &packages {
|
||||
for pkg in packages {
|
||||
authorizations.insert(
|
||||
(pkg.scope.clone(), pkg.package.clone(), pkg.version.clone()),
|
||||
authorization.clone(),
|
||||
|
@ -682,9 +684,9 @@ async fn get_auth_headers(
|
|||
/// Check if both `scope` and `package` already exist, if not return
|
||||
/// a URL to the management panel to create them.
|
||||
async fn check_if_scope_and_package_exist(
|
||||
client: &reqwest::Client,
|
||||
registry_api_url: &str,
|
||||
registry_manage_url: &str,
|
||||
client: &HttpClient,
|
||||
registry_api_url: &Url,
|
||||
registry_manage_url: &Url,
|
||||
scope: &str,
|
||||
package: &str,
|
||||
) -> Result<Option<String>, AnyError> {
|
||||
|
@ -714,18 +716,18 @@ async fn check_if_scope_and_package_exist(
|
|||
}
|
||||
|
||||
async fn ensure_scopes_and_packages_exist(
|
||||
client: &reqwest::Client,
|
||||
registry_api_url: String,
|
||||
registry_manage_url: String,
|
||||
packages: Vec<Rc<PreparedPublishPackage>>,
|
||||
client: &HttpClient,
|
||||
registry_api_url: &Url,
|
||||
registry_manage_url: &Url,
|
||||
packages: &[Rc<PreparedPublishPackage>],
|
||||
) -> Result<(), AnyError> {
|
||||
if !std::io::stdin().is_terminal() {
|
||||
let mut missing_packages_lines = vec![];
|
||||
for package in packages {
|
||||
let maybe_create_package_url = check_if_scope_and_package_exist(
|
||||
client,
|
||||
®istry_api_url,
|
||||
®istry_manage_url,
|
||||
registry_api_url,
|
||||
registry_manage_url,
|
||||
&package.scope,
|
||||
&package.package,
|
||||
)
|
||||
|
@ -748,8 +750,8 @@ async fn ensure_scopes_and_packages_exist(
|
|||
for package in packages {
|
||||
let maybe_create_package_url = check_if_scope_and_package_exist(
|
||||
client,
|
||||
®istry_api_url,
|
||||
®istry_manage_url,
|
||||
registry_api_url,
|
||||
registry_manage_url,
|
||||
&package.scope,
|
||||
&package.package,
|
||||
)
|
||||
|
@ -770,7 +772,7 @@ async fn ensure_scopes_and_packages_exist(
|
|||
let _ = open::that_detached(&create_package_url);
|
||||
|
||||
let package_api_url = api::get_package_api_url(
|
||||
®istry_api_url,
|
||||
registry_api_url,
|
||||
&package.scope,
|
||||
&package.package,
|
||||
);
|
||||
|
@ -790,15 +792,14 @@ async fn ensure_scopes_and_packages_exist(
|
|||
}
|
||||
|
||||
async fn perform_publish(
|
||||
http_client: &Arc<HttpClient>,
|
||||
http_client: &HttpClient,
|
||||
mut publish_order_graph: PublishOrderGraph,
|
||||
mut prepared_package_by_name: HashMap<String, Rc<PreparedPublishPackage>>,
|
||||
auth_method: AuthMethod,
|
||||
provenance: bool,
|
||||
) -> Result<(), AnyError> {
|
||||
let client = http_client.client()?;
|
||||
let registry_api_url = jsr_api_url().to_string();
|
||||
let registry_url = jsr_url().to_string();
|
||||
let registry_api_url = jsr_api_url();
|
||||
let registry_url = jsr_url();
|
||||
|
||||
let packages = prepared_package_by_name
|
||||
.values()
|
||||
|
@ -806,19 +807,20 @@ async fn perform_publish(
|
|||
.collect::<Vec<_>>();
|
||||
|
||||
ensure_scopes_and_packages_exist(
|
||||
client,
|
||||
registry_api_url.clone(),
|
||||
registry_url.clone(),
|
||||
packages.clone(),
|
||||
http_client,
|
||||
registry_api_url,
|
||||
registry_url,
|
||||
&packages,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let mut authorizations =
|
||||
get_auth_headers(client, registry_api_url.clone(), packages, auth_method)
|
||||
get_auth_headers(http_client, registry_api_url, &packages, auth_method)
|
||||
.await?;
|
||||
|
||||
assert_eq!(prepared_package_by_name.len(), authorizations.len());
|
||||
let mut futures: JoinSet<Result<String, AnyError>> = JoinSet::default();
|
||||
let mut futures: FuturesUnordered<LocalBoxFuture<Result<String, AnyError>>> =
|
||||
Default::default();
|
||||
loop {
|
||||
let next_batch = publish_order_graph.next();
|
||||
|
||||
|
@ -844,32 +846,32 @@ async fn perform_publish(
|
|||
package.version.clone(),
|
||||
))
|
||||
.unwrap();
|
||||
let registry_api_url = registry_api_url.clone();
|
||||
let registry_url = registry_url.clone();
|
||||
let http_client = http_client.clone();
|
||||
futures.spawn(async move {
|
||||
futures.push(
|
||||
async move {
|
||||
let display_name = package.display_name();
|
||||
publish_package(
|
||||
&http_client,
|
||||
http_client,
|
||||
package,
|
||||
®istry_api_url,
|
||||
®istry_url,
|
||||
registry_api_url,
|
||||
registry_url,
|
||||
&authorization,
|
||||
provenance,
|
||||
)
|
||||
.await
|
||||
.with_context(|| format!("Failed to publish {}", display_name))?;
|
||||
Ok(package_name)
|
||||
});
|
||||
}
|
||||
.boxed_local(),
|
||||
);
|
||||
}
|
||||
|
||||
let Some(result) = futures.join_next().await else {
|
||||
let Some(result) = futures.next().await else {
|
||||
// done, ensure no circular dependency
|
||||
publish_order_graph.ensure_no_pending()?;
|
||||
break;
|
||||
};
|
||||
|
||||
let package_name = result??;
|
||||
let package_name = result?;
|
||||
publish_order_graph.finish_package(&package_name);
|
||||
}
|
||||
|
||||
|
@ -879,12 +881,11 @@ async fn perform_publish(
|
|||
async fn publish_package(
|
||||
http_client: &HttpClient,
|
||||
package: Rc<PreparedPublishPackage>,
|
||||
registry_api_url: &str,
|
||||
registry_url: &str,
|
||||
registry_api_url: &Url,
|
||||
registry_url: &Url,
|
||||
authorization: &str,
|
||||
provenance: bool,
|
||||
) -> Result<(), AnyError> {
|
||||
let client = http_client.client()?;
|
||||
log::info!(
|
||||
"{} @{}/{}@{} ...",
|
||||
colors::intense_blue("Publishing"),
|
||||
|
@ -902,7 +903,7 @@ async fn publish_package(
|
|||
package.config
|
||||
);
|
||||
|
||||
let response = client
|
||||
let response = http_client
|
||||
.post(url)
|
||||
.header(reqwest::header::AUTHORIZATION, authorization)
|
||||
.header(reqwest::header::CONTENT_ENCODING, "gzip")
|
||||
|
@ -950,7 +951,7 @@ async fn publish_package(
|
|||
let interval = std::time::Duration::from_secs(2);
|
||||
while task.status != "success" && task.status != "failure" {
|
||||
tokio::time::sleep(interval).await;
|
||||
let resp = client
|
||||
let resp = http_client
|
||||
.get(format!("{}publish_status/{}", registry_api_url, task.id))
|
||||
.send()
|
||||
.await
|
||||
|
@ -1000,7 +1001,7 @@ async fn publish_package(
|
|||
package.scope, package.package, package.version
|
||||
))?;
|
||||
|
||||
let meta_bytes = client.get(meta_url).send().await?.bytes().await?;
|
||||
let meta_bytes = http_client.get(meta_url).send().await?.bytes().await?;
|
||||
|
||||
if std::env::var("DISABLE_JSR_MANIFEST_VERIFICATION_FOR_TESTING").is_err() {
|
||||
verify_version_manifest(&meta_bytes, &package)?;
|
||||
|
@ -1015,7 +1016,7 @@ async fn publish_package(
|
|||
sha256: faster_hex::hex_string(&sha2::Sha256::digest(&meta_bytes)),
|
||||
},
|
||||
};
|
||||
let bundle = provenance::generate_provenance(subject).await?;
|
||||
let bundle = provenance::generate_provenance(http_client, subject).await?;
|
||||
|
||||
let tlog_entry = &bundle.verification_material.tlog_entries[0];
|
||||
log::info!("{}",
|
||||
|
@ -1030,7 +1031,7 @@ async fn publish_package(
|
|||
"{}scopes/{}/packages/{}/versions/{}/provenance",
|
||||
registry_api_url, package.scope, package.package, package.version
|
||||
);
|
||||
client
|
||||
http_client
|
||||
.post(provenance_url)
|
||||
.header(reqwest::header::AUTHORIZATION, authorization)
|
||||
.json(&json!({ "bundle": bundle }))
|
||||
|
|
|
@ -188,7 +188,7 @@ pub async fn add(flags: Flags, add_flags: AddFlags) -> Result<(), AnyError> {
|
|||
}
|
||||
let config_file_path = config_specifier.to_file_path().unwrap();
|
||||
|
||||
let http_client = cli_factory.http_client();
|
||||
let http_client = cli_factory.http_client_provider();
|
||||
|
||||
let mut selected_packages = Vec::with_capacity(add_flags.packages.len());
|
||||
let mut package_reqs = Vec::with_capacity(add_flags.packages.len());
|
||||
|
@ -227,6 +227,7 @@ pub async fn add(flags: Flags, add_flags: AddFlags) -> Result<(), AnyError> {
|
|||
None,
|
||||
);
|
||||
deps_file_fetcher.set_download_log_level(log::Level::Trace);
|
||||
let deps_file_fetcher = Arc::new(deps_file_fetcher);
|
||||
let jsr_resolver = Arc::new(JsrFetchResolver::new(deps_file_fetcher.clone()));
|
||||
let npm_resolver = Arc::new(NpmFetchResolver::new(deps_file_fetcher));
|
||||
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use crate::http_util::HttpClient;
|
||||
|
||||
use super::api::OidcTokenResponse;
|
||||
use super::auth::gha_oidc_token;
|
||||
use super::auth::is_gha;
|
||||
|
@ -13,7 +15,6 @@ use deno_core::serde_json;
|
|||
use once_cell::sync::Lazy;
|
||||
use p256::elliptic_curve;
|
||||
use p256::pkcs8::AssociatedOid;
|
||||
use reqwest::Client;
|
||||
use ring::rand::SystemRandom;
|
||||
use ring::signature::EcdsaKeyPair;
|
||||
use ring::signature::KeyPair;
|
||||
|
@ -291,6 +292,7 @@ pub struct ProvenanceBundle {
|
|||
}
|
||||
|
||||
pub async fn generate_provenance(
|
||||
http_client: &HttpClient,
|
||||
subject: Subject,
|
||||
) -> Result<ProvenanceBundle, AnyError> {
|
||||
if !is_gha() {
|
||||
|
@ -306,19 +308,20 @@ pub async fn generate_provenance(
|
|||
let slsa = ProvenanceAttestation::new_github_actions(subject);
|
||||
|
||||
let attestation = serde_json::to_string(&slsa)?;
|
||||
let bundle = attest(&attestation, INTOTO_PAYLOAD_TYPE).await?;
|
||||
let bundle = attest(http_client, &attestation, INTOTO_PAYLOAD_TYPE).await?;
|
||||
|
||||
Ok(bundle)
|
||||
}
|
||||
|
||||
pub async fn attest(
|
||||
http_client: &HttpClient,
|
||||
data: &str,
|
||||
type_: &str,
|
||||
) -> Result<ProvenanceBundle, AnyError> {
|
||||
// DSSE Pre-Auth Encoding (PAE) payload
|
||||
let pae = pre_auth_encoding(type_, data);
|
||||
|
||||
let signer = FulcioSigner::new()?;
|
||||
let signer = FulcioSigner::new(http_client)?;
|
||||
let (signature, key_material) = signer.sign(&pae).await?;
|
||||
|
||||
let content = SignatureBundle {
|
||||
|
@ -332,7 +335,8 @@ pub async fn attest(
|
|||
}],
|
||||
},
|
||||
};
|
||||
let transparency_logs = testify(&content, &key_material.certificate).await?;
|
||||
let transparency_logs =
|
||||
testify(http_client, &content, &key_material.certificate).await?;
|
||||
|
||||
// First log entry is the one we're interested in
|
||||
let (_, log_entry) = transparency_logs.iter().next().unwrap();
|
||||
|
@ -363,13 +367,6 @@ static DEFAULT_FULCIO_URL: Lazy<String> = Lazy::new(|| {
|
|||
.unwrap_or_else(|_| "https://fulcio.sigstore.dev".to_string())
|
||||
});
|
||||
|
||||
struct FulcioSigner {
|
||||
// The ephemeral key pair used to sign.
|
||||
ephemeral_signer: EcdsaKeyPair,
|
||||
rng: SystemRandom,
|
||||
client: Client,
|
||||
}
|
||||
|
||||
static ALGORITHM: &ring::signature::EcdsaSigningAlgorithm =
|
||||
&ring::signature::ECDSA_P256_SHA256_ASN1_SIGNING;
|
||||
|
||||
|
@ -424,8 +421,15 @@ struct SigningCertificateResponse {
|
|||
signed_certificate_detached_sct: Option<SignedCertificate>,
|
||||
}
|
||||
|
||||
impl FulcioSigner {
|
||||
pub fn new() -> Result<Self, AnyError> {
|
||||
struct FulcioSigner<'a> {
|
||||
// The ephemeral key pair used to sign.
|
||||
ephemeral_signer: EcdsaKeyPair,
|
||||
rng: SystemRandom,
|
||||
http_client: &'a HttpClient,
|
||||
}
|
||||
|
||||
impl<'a> FulcioSigner<'a> {
|
||||
pub fn new(http_client: &'a HttpClient) -> Result<Self, AnyError> {
|
||||
let rng = SystemRandom::new();
|
||||
let document = EcdsaKeyPair::generate_pkcs8(ALGORITHM, &rng)?;
|
||||
let ephemeral_signer =
|
||||
|
@ -434,7 +438,7 @@ impl FulcioSigner {
|
|||
Ok(Self {
|
||||
ephemeral_signer,
|
||||
rng,
|
||||
client: Client::new(),
|
||||
http_client,
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -443,7 +447,7 @@ impl FulcioSigner {
|
|||
data: &[u8],
|
||||
) -> Result<(ring::signature::Signature, KeyMaterial), AnyError> {
|
||||
// Request token from GitHub Actions for audience "sigstore"
|
||||
let token = gha_request_token("sigstore").await?;
|
||||
let token = self.gha_request_token("sigstore").await?;
|
||||
// Extract the subject from the token
|
||||
let subject = extract_jwt_subject(&token)?;
|
||||
|
||||
|
@ -498,7 +502,12 @@ impl FulcioSigner {
|
|||
},
|
||||
};
|
||||
|
||||
let response = self.client.post(url).json(&request_body).send().await?;
|
||||
let response = self
|
||||
.http_client
|
||||
.post(url)
|
||||
.json(&request_body)
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
let body: SigningCertificateResponse = response.json().await?;
|
||||
|
||||
|
@ -508,6 +517,27 @@ impl FulcioSigner {
|
|||
.ok_or_else(|| anyhow::anyhow!("No certificate chain returned"))?;
|
||||
Ok(key.chain.certificates)
|
||||
}
|
||||
|
||||
async fn gha_request_token(&self, aud: &str) -> Result<String, AnyError> {
|
||||
let Ok(req_url) = env::var("ACTIONS_ID_TOKEN_REQUEST_URL") else {
|
||||
bail!("Not running in GitHub Actions");
|
||||
};
|
||||
|
||||
let Some(token) = gha_oidc_token() else {
|
||||
bail!("No OIDC token available");
|
||||
};
|
||||
|
||||
let res = self
|
||||
.http_client
|
||||
.get(&req_url)
|
||||
.bearer_auth(token)
|
||||
.query(&[("audience", aud)])
|
||||
.send()
|
||||
.await?
|
||||
.json::<OidcTokenResponse>()
|
||||
.await?;
|
||||
Ok(res.value)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
|
@ -532,27 +562,6 @@ fn extract_jwt_subject(token: &str) -> Result<String, AnyError> {
|
|||
}
|
||||
}
|
||||
|
||||
async fn gha_request_token(aud: &str) -> Result<String, AnyError> {
|
||||
let Ok(req_url) = env::var("ACTIONS_ID_TOKEN_REQUEST_URL") else {
|
||||
bail!("Not running in GitHub Actions");
|
||||
};
|
||||
|
||||
let Some(token) = gha_oidc_token() else {
|
||||
bail!("No OIDC token available");
|
||||
};
|
||||
|
||||
let client = Client::new();
|
||||
let res = client
|
||||
.get(&req_url)
|
||||
.bearer_auth(token)
|
||||
.query(&[("audience", aud)])
|
||||
.send()
|
||||
.await?
|
||||
.json::<OidcTokenResponse>()
|
||||
.await?;
|
||||
Ok(res.value)
|
||||
}
|
||||
|
||||
static DEFAULT_REKOR_URL: Lazy<String> = Lazy::new(|| {
|
||||
env::var("REKOR_URL")
|
||||
.unwrap_or_else(|_| "https://rekor.sigstore.dev".to_string())
|
||||
|
@ -616,6 +625,7 @@ struct ProposedIntotoEntryHash {
|
|||
|
||||
// Rekor witness
|
||||
async fn testify(
|
||||
http_client: &HttpClient,
|
||||
content: &SignatureBundle,
|
||||
public_key: &str,
|
||||
) -> Result<RekorEntry, AnyError> {
|
||||
|
@ -672,9 +682,8 @@ async fn testify(
},
};

let client = Client::new();
let url = format!("{}/api/v1/log/entries", *DEFAULT_REKOR_URL);
let res = client
let res = http_client
.post(&url)
.json(&proposed_intoto_entry)
.send()

@ -42,7 +42,7 @@ To grant permissions, set them before the script argument. For example:
// map specified and bare specifier is used on the command line
let factory = CliFactory::from_flags(flags)?;
let deno_dir = factory.deno_dir()?;
let http_client = factory.http_client();
let http_client = factory.http_client_provider();
let cli_options = factory.cli_options();

if cli_options.unstable_sloppy_imports() {

@ -881,6 +881,7 @@ async fn run_tests_for_worker_inner(
// failing. If we don't do this, a connection to a test server we just tore down might be re-used in
// the next test.
// TODO(mmastrac): this should be some sort of callback that we can implement for any subsystem
#[allow(clippy::disallowed_types)] // allow using reqwest::Client here
worker
.js_runtime
.op_state()

@ -7,6 +7,7 @@ use crate::args::UpgradeFlags;
use crate::colors;
use crate::factory::CliFactory;
use crate::http_util::HttpClient;
use crate::http_util::HttpClientProvider;
use crate::standalone::binary::unpack_into_dir;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;

@ -101,17 +102,17 @@ trait VersionProvider: Clone {

#[derive(Clone)]
struct RealVersionProvider {
http_client: Arc<HttpClient>,
http_client_provider: Arc<HttpClientProvider>,
check_kind: UpgradeCheckKind,
}

impl RealVersionProvider {
pub fn new(
http_client: Arc<HttpClient>,
http_client_provider: Arc<HttpClientProvider>,
check_kind: UpgradeCheckKind,
) -> Self {
Self {
http_client,
http_client_provider,
check_kind,
}
}

@ -124,7 +125,11 @@ impl VersionProvider for RealVersionProvider {
}

async fn latest_version(&self) -> Result<String, AnyError> {
get_latest_version(&self.http_client, self.release_kind(), self.check_kind)
get_latest_version(
&self.http_client_provider.get_or_create()?,
self.release_kind(),
self.check_kind,
)
.await
}

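latest_version now asks the provider for a client at call time instead of holding an Arc<HttpClient>. Roughly, the provider shape looks like the sketch below; this is illustrative only, since Deno's real HttpClientProvider also applies root certificates, proxy settings, and per-runtime caching, all omitted here, and the URL is a placeholder:

// Illustrative provider: hands out a client on demand so each tokio runtime
// gets its own connection pool instead of sharing one reqwest::Client.
use anyhow::Error as AnyError;

#[derive(Default)]
struct HttpClientProvider;

impl HttpClientProvider {
    fn get_or_create(&self) -> Result<reqwest::Client, AnyError> {
        Ok(reqwest::Client::builder().build()?)
    }
}

async fn latest_version(provider: &HttpClientProvider) -> Result<String, AnyError> {
    // Placeholder URL; the real code queries the Deno release endpoint.
    let client = provider.get_or_create()?;
    let text = client
        .get("https://releases.example.com/latest.txt")
        .send()
        .await?
        .text()
        .await?;
    Ok(text.trim().to_string())
}
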
@ -241,7 +246,7 @@ pub fn upgrade_check_enabled() -> bool {
}

pub fn check_for_upgrades(
http_client: Arc<HttpClient>,
http_client_provider: Arc<HttpClientProvider>,
cache_file_path: PathBuf,
) {
if !upgrade_check_enabled() {

@ -250,7 +255,7 @@ pub fn check_for_upgrades(

let env = RealUpdateCheckerEnvironment::new(cache_file_path);
let version_provider =
RealVersionProvider::new(http_client, UpgradeCheckKind::Execution);
RealVersionProvider::new(http_client_provider, UpgradeCheckKind::Execution);
let update_checker = UpdateChecker::new(env, version_provider);

if update_checker.should_check_for_new_version() {

@ -300,14 +305,14 @@ pub struct LspVersionUpgradeInfo {
}

pub async fn check_for_upgrades_for_lsp(
http_client: Arc<HttpClient>,
http_client_provider: Arc<HttpClientProvider>,
) -> Result<Option<LspVersionUpgradeInfo>, AnyError> {
if !upgrade_check_enabled() {
return Ok(None);
}

let version_provider =
RealVersionProvider::new(http_client, UpgradeCheckKind::Lsp);
RealVersionProvider::new(http_client_provider, UpgradeCheckKind::Lsp);
check_for_upgrades_for_lsp_with_provider(&version_provider).await
}

@ -370,7 +375,7 @@ pub async fn upgrade(
upgrade_flags: UpgradeFlags,
) -> Result<(), AnyError> {
let factory = CliFactory::from_flags(flags)?;
let client = factory.http_client();
let client = factory.http_client_provider().get_or_create()?;
let current_exe_path = std::env::current_exe()?;
let full_path_output_flag = upgrade_flags
.output

@ -445,7 +450,7 @@ pub async fn upgrade(
};

let latest_version =
get_latest_version(client, release_kind, UpgradeCheckKind::Execution)
get_latest_version(&client, release_kind, UpgradeCheckKind::Execution)
.await?;

let current_is_most_recent = if upgrade_flags.canary {

@ -491,7 +496,7 @@ pub async fn upgrade(
)
};

let archive_data = download_package(client, &download_url)
let archive_data = download_package(&client, &download_url)
.await
.with_context(|| format!("Failed downloading {download_url}. The version you requested may not have been built for the current architecture."))?;