mirror of https://github.com/denoland/deno.git (synced 2024-11-21 15:04:11 -05:00)
parent 36ae37604a
commit 1cefa831fd
35 changed files with 2043 additions and 707 deletions
@@ -673,6 +673,7 @@ pub struct ConfigFileJson {
pub lock: Option<Value>,
pub exclude: Option<Value>,
pub node_modules_dir: Option<bool>,
pub deno_modules_dir: Option<bool>,
}

#[derive(Clone, Debug)]

@@ -858,6 +859,26 @@ impl ConfigFile {
self.json.node_modules_dir
}

pub fn deno_modules_dir(&self) -> Option<bool> {
self.json.deno_modules_dir
}

pub fn deno_modules_dir_path(&self) -> Option<PathBuf> {
if self.json.deno_modules_dir == Some(true) {
Some(
self
.specifier
.to_file_path()
.unwrap()
.parent()
.unwrap()
.join("deno_modules"),
)
} else {
None
}
}

pub fn to_import_map_value(&self) -> Value {
let mut value = serde_json::Map::with_capacity(2);
if let Some(imports) = &self.json.imports {

@@ -874,13 +895,17 @@ impl ConfigFile {
}

pub fn to_files_config(&self) -> Result<Option<FilesConfig>, AnyError> {
let exclude: Vec<String> = if let Some(exclude) = self.json.exclude.clone()
{
serde_json::from_value(exclude)
.context("Failed to parse \"exclude\" configuration")?
} else {
Vec::new()
};
let mut exclude: Vec<String> =
if let Some(exclude) = self.json.exclude.clone() {
serde_json::from_value(exclude)
.context("Failed to parse \"exclude\" configuration")?
} else {
Vec::new()
};

if self.deno_modules_dir() == Some(true) {
exclude.push("deno_modules".to_string());
}

let raw_files_config = SerializedFilesConfig {
exclude,
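The new deno_modules_dir_path() helper above derives the cache folder from the config file's own location. The following is a minimal standalone sketch of that derivation, not part of the diff; it assumes the url crate, Unix-style paths, and a hypothetical deno_modules_dir_for name.

use std::path::PathBuf;
use url::Url;

// Given the config file specifier and the resolved setting, compute the
// deno_modules directory that sits next to the config file.
fn deno_modules_dir_for(config_specifier: &Url, enabled: Option<bool>) -> Option<PathBuf> {
  if enabled != Some(true) {
    return None;
  }
  let config_path = config_specifier.to_file_path().ok()?;
  Some(config_path.parent()?.join("deno_modules"))
}

fn main() {
  let specifier = Url::parse("file:///project/deno.json").unwrap();
  assert_eq!(
    deno_modules_dir_for(&specifier, Some(true)),
    Some(PathBuf::from("/project/deno_modules"))
  );
  assert_eq!(deno_modules_dir_for(&specifier, None), None);
}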
@@ -372,6 +372,7 @@ pub struct Flags {
pub type_check_mode: TypeCheckMode,
pub config_flag: ConfigFlag,
pub node_modules_dir: Option<bool>,
pub deno_modules_dir: Option<bool>,
pub enable_testing_features: bool,
pub ext: Option<String>,
pub ignore: Vec<PathBuf>,

@@ -1445,6 +1446,7 @@ TypeScript compiler cache: Subdirectory containing TS compiler output.",
.arg(config_arg())
.arg(import_map_arg())
.arg(node_modules_dir_arg())
.arg(deno_modules_dir_arg())
.arg(
Arg::new("json")
.long("json")

@@ -1988,6 +1990,7 @@ Remote modules and multiple modules may also be specified:
.arg(import_map_arg())
.arg(lock_arg())
.arg(node_modules_dir_arg())
.arg(deno_modules_dir_arg())
.arg(reload_arg())
.arg(ca_file_arg()))
}

@@ -2002,6 +2005,7 @@ fn compile_args_without_check_args(app: Command) -> Command {
.arg(no_remote_arg())
.arg(no_npm_arg())
.arg(node_modules_dir_arg())
.arg(deno_modules_dir_arg())
.arg(config_arg())
.arg(no_config_arg())
.arg(reload_arg())

@@ -2560,6 +2564,16 @@ fn node_modules_dir_arg() -> Arg {
.help("Enables or disables the use of a local node_modules folder for npm packages")
}

fn deno_modules_dir_arg() -> Arg {
Arg::new("deno-modules-dir")
.long("deno-modules-dir")
.num_args(0..=1)
.value_parser(value_parser!(bool))
.default_missing_value("true")
.require_equals(true)
.help("UNSTABLE: Enables or disables the use of a local deno_modules folder for remote modules")
}

fn unsafely_ignore_certificate_errors_arg() -> Arg {
Arg::new("unsafely-ignore-certificate-errors")
.long("unsafely-ignore-certificate-errors")

@@ -2847,7 +2861,7 @@ fn info_parse(flags: &mut Flags, matches: &mut ArgMatches) {
import_map_arg_parse(flags, matches);
location_arg_parse(flags, matches);
ca_file_arg_parse(flags, matches);
node_modules_dir_arg_parse(flags, matches);
node_and_deno_modules_dir_arg_parse(flags, matches);
lock_arg_parse(flags, matches);
no_lock_arg_parse(flags, matches);
no_remote_arg_parse(flags, matches);

@@ -3107,7 +3121,7 @@ fn vendor_parse(flags: &mut Flags, matches: &mut ArgMatches) {
config_args_parse(flags, matches);
import_map_arg_parse(flags, matches);
lock_arg_parse(flags, matches);
node_modules_dir_arg_parse(flags, matches);
node_and_deno_modules_dir_arg_parse(flags, matches);
reload_arg_parse(flags, matches);

flags.subcommand = DenoSubcommand::Vendor(VendorFlags {

@@ -3133,7 +3147,7 @@ fn compile_args_without_check_parse(
import_map_arg_parse(flags, matches);
no_remote_arg_parse(flags, matches);
no_npm_arg_parse(flags, matches);
node_modules_dir_arg_parse(flags, matches);
node_and_deno_modules_dir_arg_parse(flags, matches);
config_args_parse(flags, matches);
reload_arg_parse(flags, matches);
lock_args_parse(flags, matches);

@@ -3387,8 +3401,12 @@ fn no_npm_arg_parse(flags: &mut Flags, matches: &mut ArgMatches) {
}
}

fn node_modules_dir_arg_parse(flags: &mut Flags, matches: &mut ArgMatches) {
fn node_and_deno_modules_dir_arg_parse(
flags: &mut Flags,
matches: &mut ArgMatches,
) {
flags.node_modules_dir = matches.remove_one::<bool>("node-modules-dir");
flags.deno_modules_dir = matches.remove_one::<bool>("deno-modules-dir");
}

fn reload_arg_validate(urlstr: &str) -> Result<String, String> {

@@ -5723,6 +5741,41 @@ mod tests {
);
}

#[test]
fn deno_modules_dir() {
let r =
flags_from_vec(svec!["deno", "run", "--deno-modules-dir", "script.ts"]);
assert_eq!(
r.unwrap(),
Flags {
subcommand: DenoSubcommand::Run(RunFlags {
script: "script.ts".to_string(),
watch: Default::default(),
}),
deno_modules_dir: Some(true),
..Flags::default()
}
);

let r = flags_from_vec(svec![
"deno",
"run",
"--deno-modules-dir=false",
"script.ts"
]);
assert_eq!(
r.unwrap(),
Flags {
subcommand: DenoSubcommand::Run(RunFlags {
script: "script.ts".to_string(),
watch: Default::default(),
}),
deno_modules_dir: Some(false),
..Flags::default()
}
);
}

#[test]
fn cached_only() {
let r = flags_from_vec(svec!["deno", "run", "--cached-only", "script.ts"]);
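For reference, the optional boolean pattern used by deno_modules_dir_arg() above (a bare flag means true, =false turns it off, and omitting it leaves None) can be exercised in isolation. This is a hedged sketch assuming clap 4 with default features; the demo command and parse helper are illustrative names only.

use clap::{value_parser, Arg, Command};

fn demo_command() -> Command {
  Command::new("demo").arg(
    Arg::new("deno-modules-dir")
      .long("deno-modules-dir")
      .num_args(0..=1)
      .value_parser(value_parser!(bool))
      .default_missing_value("true")
      .require_equals(true),
  )
}

// Mirrors how the flag parser reads the value with remove_one::<bool>().
fn parse(args: &[&str]) -> Option<bool> {
  let mut matches = demo_command().get_matches_from(args.iter().copied());
  matches.remove_one::<bool>("deno-modules-dir")
}

fn main() {
  assert_eq!(parse(&["demo", "--deno-modules-dir"]), Some(true));
  assert_eq!(parse(&["demo", "--deno-modules-dir=false"]), Some(false));
  assert_eq!(parse(&["demo"]), None);
}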
@@ -539,6 +539,7 @@ pub struct CliOptions {
flags: Flags,
initial_cwd: PathBuf,
maybe_node_modules_folder: Option<PathBuf>,
maybe_deno_modules_folder: Option<PathBuf>,
maybe_config_file: Option<ConfigFile>,
maybe_package_json: Option<PackageJson>,
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,

@@ -567,13 +568,18 @@ impl CliOptions {
eprintln!("{}", colors::yellow(msg));
}

let maybe_node_modules_folder = resolve_local_node_modules_folder(
let maybe_node_modules_folder = resolve_node_modules_folder(
&initial_cwd,
&flags,
maybe_config_file.as_ref(),
maybe_package_json.as_ref(),
)
.with_context(|| "Resolving node_modules folder.")?;
let maybe_deno_modules_folder = resolve_deno_modules_folder(
&initial_cwd,
&flags,
maybe_config_file.as_ref(),
);

Ok(Self {
flags,

@@ -582,6 +588,7 @@ impl CliOptions {
maybe_lockfile,
maybe_package_json,
maybe_node_modules_folder,
maybe_deno_modules_folder,
overrides: Default::default(),
})
}

@@ -865,6 +872,10 @@ impl CliOptions {
.map(|path| ModuleSpecifier::from_directory_path(path).unwrap())
}

pub fn deno_modules_dir_path(&self) -> Option<&PathBuf> {
self.maybe_deno_modules_folder.as_ref()
}

pub fn resolve_root_cert_store_provider(
&self,
) -> Arc<dyn RootCertStoreProvider> {

@@ -1159,7 +1170,7 @@ impl CliOptions {
}

/// Resolves the path to use for a local node_modules folder.
fn resolve_local_node_modules_folder(
fn resolve_node_modules_folder(
cwd: &Path,
flags: &Flags,
maybe_config_file: Option<&ConfigFile>,

@@ -1188,6 +1199,31 @@ fn resolve_local_node_modules_folder(
Ok(Some(canonicalize_path_maybe_not_exists(&path)?))
}

fn resolve_deno_modules_folder(
cwd: &Path,
flags: &Flags,
maybe_config_file: Option<&ConfigFile>,
) -> Option<PathBuf> {
let use_deno_modules_dir = flags
.deno_modules_dir
.or_else(|| maybe_config_file.and_then(|c| c.deno_modules_dir()))
.unwrap_or(false);
// Unlike the node_modules directory, there is no need to canonicalize
// this directory because it's just used as a cache and the resolved
// specifier is not based on the canonicalized path (unlike the modules
// in the node_modules folder).
if !use_deno_modules_dir {
None
} else if let Some(config_path) = maybe_config_file
.as_ref()
.and_then(|c| c.specifier.to_file_path().ok())
{
Some(config_path.parent().unwrap().join("deno_modules"))
} else {
Some(cwd.join("deno_modules"))
}
}

fn resolve_import_map_specifier(
maybe_import_map_path: Option<&str>,
maybe_config_file: Option<&ConfigFile>,
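resolve_deno_modules_folder above gives the command-line flag priority over the config file setting and defaults to disabled. A tiny illustration of that precedence follows; it is not from the diff, uses only std, and use_deno_modules is a made-up name.

fn use_deno_modules(flag: Option<bool>, config: Option<bool>) -> bool {
  // Flag wins over config; absence of both means the feature is off.
  flag.or(config).unwrap_or(false)
}

fn main() {
  assert!(use_deno_modules(Some(true), None)); // --deno-modules-dir on the CLI
  assert!(!use_deno_modules(Some(false), Some(true))); // flag overrides config
  assert!(use_deno_modules(None, Some(true))); // config file only
  assert!(!use_deno_modules(None, None)); // default: disabled
}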
1 cli/cache/deno_dir.rs (vendored)

@@ -71,7 +71,6 @@ impl DenoDir {
root,
gen_cache: DiskCache::new(&gen_path),
};
deno_dir.gen_cache.ensure_dir_exists(&gen_path)?;

Ok(deno_dir)
}
63 cli/cache/disk_cache.rs (vendored)

@@ -8,7 +8,6 @@ use deno_core::url::Host;
use deno_core::url::Url;
use std::ffi::OsStr;
use std::fs;
use std::io;
use std::path::Component;
use std::path::Path;
use std::path::PathBuf;

@@ -20,13 +19,6 @@ pub struct DiskCache {
pub location: PathBuf,
}

fn with_io_context<T: AsRef<str>>(
e: &std::io::Error,
context: T,
) -> std::io::Error {
std::io::Error::new(e.kind(), format!("{} (for '{}')", e, context.as_ref()))
}

impl DiskCache {
/// `location` must be an absolute path.
pub fn new(location: &Path) -> Self {

@@ -36,27 +28,6 @@ impl DiskCache {
}
}

/// Ensures the location of the cache.
pub fn ensure_dir_exists(&self, path: &Path) -> io::Result<()> {
if path.is_dir() {
return Ok(());
}
fs::create_dir_all(path).map_err(|e| {
io::Error::new(
e.kind(),
format!(
concat!(
"Could not create TypeScript compiler cache location: {}\n",
"Check the permission of the directory.\n",
"{:#}",
),
path.display(),
e
),
)
})
}

fn get_cache_filename(&self, url: &Url) -> Option<PathBuf> {
let mut out = PathBuf::new();

@@ -78,7 +49,7 @@ impl DiskCache {
out.push(path_seg);
}
}
"http" | "https" | "data" | "blob" => out = url_to_filename(url)?,
"http" | "https" | "data" | "blob" => out = url_to_filename(url).ok()?,
"file" => {
let path = match url.to_file_path() {
Ok(path) => path,

@@ -149,12 +120,7 @@ impl DiskCache {

pub fn set(&self, filename: &Path, data: &[u8]) -> std::io::Result<()> {
let path = self.location.join(filename);
match path.parent() {
Some(parent) => self.ensure_dir_exists(parent),
None => Ok(()),
}?;
atomic_write_file(&path, data, CACHE_PERM)
.map_err(|e| with_io_context(&e, format!("{:#?}", &path)))
}
}

@@ -164,28 +130,13 @@ mod tests {
use test_util::TempDir;

#[test]
fn test_create_cache_if_dir_exits() {
let cache_location = TempDir::new();
let cache_path = cache_location.path().join("foo");
let cache = DiskCache::new(cache_path.as_path());
cache
.ensure_dir_exists(&cache.location)
.expect("Testing expect:");
assert!(cache_path.is_dir());
}

#[test]
fn test_create_cache_if_dir_not_exits() {
fn test_set_get_cache_file() {
let temp_dir = TempDir::new();
let cache_location = temp_dir.path();
cache_location.remove_dir_all();
let cache_location = cache_location.join("foo");
assert!(!cache_location.is_dir());
let cache = DiskCache::new(cache_location.as_path());
cache
.ensure_dir_exists(&cache.location)
.expect("Testing expect:");
assert!(cache_location.is_dir());
let sub_dir = temp_dir.path().join("sub_dir");
let cache = DiskCache::new(&sub_dir.to_path_buf());
let path = PathBuf::from("foo/bar.txt");
cache.set(&path, b"hello").unwrap();
assert_eq!(cache.get(&path).unwrap(), b"hello");
}

#[test]
330 cli/cache/http_cache.rs (vendored)

@@ -1,330 +0,0 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
//! This module is meant to eventually implement HTTP cache
|
||||
//! as defined in RFC 7234 (<https://tools.ietf.org/html/rfc7234>).
|
||||
//! Currently it's a very simplified version to fulfill Deno needs
|
||||
//! at hand.
|
||||
use crate::http_util::HeadersMap;
|
||||
use crate::util;
|
||||
use deno_core::error::generic_error;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::serde::Deserialize;
|
||||
use deno_core::serde::Serialize;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::url::Url;
|
||||
use std::fs;
|
||||
use std::io;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::time::SystemTime;
|
||||
|
||||
use super::CACHE_PERM;
|
||||
|
||||
/// Turn base of url (scheme, hostname, port) into a valid filename.
|
||||
/// This method replaces port part with a special string token (because
|
||||
/// ":" cannot be used in filename on some platforms).
|
||||
/// Ex: $DENO_DIR/deps/https/deno.land/
|
||||
fn base_url_to_filename(url: &Url) -> Option<PathBuf> {
|
||||
let mut out = PathBuf::new();
|
||||
|
||||
let scheme = url.scheme();
|
||||
out.push(scheme);
|
||||
|
||||
match scheme {
|
||||
"http" | "https" => {
|
||||
let host = url.host_str().unwrap();
|
||||
let host_port = match url.port() {
|
||||
Some(port) => format!("{host}_PORT{port}"),
|
||||
None => host.to_string(),
|
||||
};
|
||||
out.push(host_port);
|
||||
}
|
||||
"data" | "blob" => (),
|
||||
scheme => {
|
||||
log::debug!("Don't know how to create cache name for scheme: {}", scheme);
|
||||
return None;
|
||||
}
|
||||
};
|
||||
|
||||
Some(out)
|
||||
}
|
||||
|
||||
/// Turn provided `url` into a hashed filename.
|
||||
/// URLs can contain a lot of characters that cannot be used
|
||||
/// in filenames (like "?", "#", ":"), so in order to cache
|
||||
/// them properly they are deterministically hashed into ASCII
|
||||
/// strings.
|
||||
///
|
||||
/// NOTE: this method is `pub` because it's used in integration_tests
|
||||
pub fn url_to_filename(url: &Url) -> Option<PathBuf> {
|
||||
let mut cache_filename = base_url_to_filename(url)?;
|
||||
|
||||
let mut rest_str = url.path().to_string();
|
||||
if let Some(query) = url.query() {
|
||||
rest_str.push('?');
|
||||
rest_str.push_str(query);
|
||||
}
|
||||
// NOTE: fragment is omitted on purpose - it's not taken into
|
||||
// account when caching - it denotes parts of webpage, which
|
||||
// in case of static resources doesn't make much sense
|
||||
let hashed_filename = util::checksum::gen(&[rest_str.as_bytes()]);
|
||||
cache_filename.push(hashed_filename);
|
||||
Some(cache_filename)
|
||||
}
|
||||
|
||||
/// Cached metadata about a url.
|
||||
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
|
||||
pub struct CachedUrlMetadata {
|
||||
pub headers: HeadersMap,
|
||||
pub url: String,
|
||||
#[serde(default = "SystemTime::now", rename = "now")]
|
||||
pub time: SystemTime,
|
||||
}
|
||||
|
||||
// DO NOT make the path public. The fact that this is stored in a file
|
||||
// is an implementation detail.
|
||||
pub struct MaybeHttpCacheItem(PathBuf);
|
||||
|
||||
impl MaybeHttpCacheItem {
|
||||
#[cfg(test)]
|
||||
pub fn read_to_string(&self) -> Result<Option<String>, AnyError> {
|
||||
let Some(bytes) = self.read_to_bytes()? else {
|
||||
return Ok(None);
|
||||
};
|
||||
Ok(Some(String::from_utf8(bytes)?))
|
||||
}
|
||||
|
||||
pub fn read_to_bytes(&self) -> Result<Option<Vec<u8>>, AnyError> {
|
||||
match std::fs::read(&self.0) {
|
||||
Ok(s) => Ok(Some(s)),
|
||||
Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
|
||||
Err(err) => Err(err.into()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn read_metadata(&self) -> Result<Option<CachedUrlMetadata>, AnyError> {
|
||||
let metadata_filepath = self.0.with_extension("metadata.json");
|
||||
match fs::read_to_string(metadata_filepath) {
|
||||
Ok(metadata) => Ok(Some(serde_json::from_str(&metadata)?)),
|
||||
Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
|
||||
Err(err) => Err(err.into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct HttpCache {
|
||||
pub location: PathBuf,
|
||||
}
|
||||
|
||||
impl HttpCache {
|
||||
/// Returns a new instance.
|
||||
///
|
||||
/// `location` must be an absolute path.
|
||||
pub fn new(location: PathBuf) -> Self {
|
||||
assert!(location.is_absolute());
|
||||
Self { location }
|
||||
}
|
||||
|
||||
/// Ensures the location of the cache.
|
||||
fn ensure_dir_exists(&self, path: &Path) -> io::Result<()> {
|
||||
if path.is_dir() {
|
||||
return Ok(());
|
||||
}
|
||||
fs::create_dir_all(path).map_err(|e| {
|
||||
io::Error::new(
|
||||
e.kind(),
|
||||
format!(
|
||||
"Could not create remote modules cache location: {path:?}\nCheck the permission of the directory."
|
||||
),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn get_modified_time(
|
||||
&self,
|
||||
url: &Url,
|
||||
) -> Result<Option<SystemTime>, AnyError> {
|
||||
let filepath = self.get_cache_filepath_internal(url)?;
|
||||
match fs::metadata(filepath) {
|
||||
Ok(metadata) => Ok(Some(metadata.modified()?)),
|
||||
Err(err) if err.kind() == io::ErrorKind::NotFound => Ok(None),
|
||||
Err(err) => Err(err.into()),
|
||||
}
|
||||
}
|
||||
|
||||
// DEPRECATED: Where the file is stored and how it's stored should be an implementation
|
||||
// detail of the cache.
|
||||
#[deprecated(note = "Do not assume the cache will be stored at a file path.")]
|
||||
pub fn get_cache_filepath(&self, url: &Url) -> Result<PathBuf, AnyError> {
|
||||
self.get_cache_filepath_internal(url)
|
||||
}
|
||||
|
||||
fn get_cache_filepath_internal(
|
||||
&self,
|
||||
url: &Url,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
Ok(
|
||||
self.location.join(
|
||||
url_to_filename(url)
|
||||
.ok_or_else(|| generic_error("Can't convert url to filename."))?,
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn write_metadata(
|
||||
&self,
|
||||
url: &Url,
|
||||
meta_data: &CachedUrlMetadata,
|
||||
) -> Result<(), AnyError> {
|
||||
let cache_path = self.get_cache_filepath_internal(url)?;
|
||||
self.write_metadata_at_path(&cache_path, meta_data)
|
||||
}
|
||||
|
||||
fn write_metadata_at_path(
|
||||
&self,
|
||||
path: &Path,
|
||||
meta_data: &CachedUrlMetadata,
|
||||
) -> Result<(), AnyError> {
|
||||
let cache_path = path.with_extension("metadata.json");
|
||||
let json = serde_json::to_string_pretty(meta_data)?;
|
||||
util::fs::atomic_write_file(&cache_path, json, CACHE_PERM)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// TODO(bartlomieju): this method should check headers file
|
||||
// and validate against ETAG/Last-modified-as headers.
|
||||
// ETAG check is currently done in `cli/file_fetcher.rs`.
|
||||
pub fn get(&self, url: &Url) -> Result<MaybeHttpCacheItem, AnyError> {
|
||||
let cache_filepath = self.get_cache_filepath_internal(url)?;
|
||||
Ok(MaybeHttpCacheItem(cache_filepath))
|
||||
}
|
||||
|
||||
pub fn set(
|
||||
&self,
|
||||
url: &Url,
|
||||
headers_map: HeadersMap,
|
||||
content: &[u8],
|
||||
) -> Result<(), AnyError> {
|
||||
let cache_filepath = self.get_cache_filepath_internal(url)?;
|
||||
// Create parent directory
|
||||
let parent_filename = cache_filepath
|
||||
.parent()
|
||||
.expect("Cache filename should have a parent dir");
|
||||
self.ensure_dir_exists(parent_filename)?;
|
||||
// Cache content
|
||||
util::fs::atomic_write_file(&cache_filepath, content, CACHE_PERM)?;
|
||||
|
||||
let metadata = CachedUrlMetadata {
|
||||
time: SystemTime::now(),
|
||||
url: url.to_string(),
|
||||
headers: headers_map,
|
||||
};
|
||||
self.write_metadata_at_path(&cache_filepath, &metadata)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn contains(&self, url: &Url) -> bool {
|
||||
let Ok(cache_filepath) = self.get_cache_filepath_internal(url) else {
|
||||
return false
|
||||
};
|
||||
cache_filepath.is_file()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::collections::HashMap;
|
||||
use test_util::TempDir;
|
||||
|
||||
#[test]
|
||||
fn test_create_cache() {
|
||||
let dir = TempDir::new();
|
||||
let cache_path = dir.path().join("foobar");
|
||||
// HttpCache should be created lazily on first use:
|
||||
// when zipping up a local project with no external dependencies
|
||||
// "$DENO_DIR/deps" is empty. When unzipping such project
|
||||
// "$DENO_DIR/deps" might not get restored and in situation
|
||||
// when directory is owned by root we might not be able
|
||||
// to create that directory. However if it's not needed it
|
||||
// doesn't make sense to return error in such specific scenarios.
|
||||
// For more details check issue:
|
||||
// https://github.com/denoland/deno/issues/5688
|
||||
let cache = HttpCache::new(cache_path.to_path_buf());
|
||||
assert!(!cache.location.exists());
|
||||
cache
|
||||
.set(
|
||||
&Url::parse("http://example.com/foo/bar.js").unwrap(),
|
||||
HeadersMap::new(),
|
||||
b"hello world",
|
||||
)
|
||||
.expect("Failed to add to cache");
|
||||
assert!(cache.ensure_dir_exists(&cache.location).is_ok());
|
||||
assert!(cache_path.is_dir());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_set() {
|
||||
let dir = TempDir::new();
|
||||
let cache = HttpCache::new(dir.path().to_path_buf());
|
||||
let url = Url::parse("https://deno.land/x/welcome.ts").unwrap();
|
||||
let mut headers = HashMap::new();
|
||||
headers.insert(
|
||||
"content-type".to_string(),
|
||||
"application/javascript".to_string(),
|
||||
);
|
||||
headers.insert("etag".to_string(), "as5625rqdsfb".to_string());
|
||||
let content = b"Hello world";
|
||||
let r = cache.set(&url, headers, content);
|
||||
eprintln!("result {r:?}");
|
||||
assert!(r.is_ok());
|
||||
let cache_item = cache.get(&url).unwrap();
|
||||
let content = cache_item.read_to_string().unwrap().unwrap();
|
||||
let headers = cache_item.read_metadata().unwrap().unwrap().headers;
|
||||
assert_eq!(content, "Hello world");
|
||||
assert_eq!(
|
||||
headers.get("content-type").unwrap(),
|
||||
"application/javascript"
|
||||
);
|
||||
assert_eq!(headers.get("etag").unwrap(), "as5625rqdsfb");
|
||||
assert_eq!(headers.get("foobar"), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_url_to_filename() {
|
||||
let test_cases = [
|
||||
("https://deno.land/x/foo.ts", "https/deno.land/2c0a064891b9e3fbe386f5d4a833bce5076543f5404613656042107213a7bbc8"),
|
||||
(
|
||||
"https://deno.land:8080/x/foo.ts",
|
||||
"https/deno.land_PORT8080/2c0a064891b9e3fbe386f5d4a833bce5076543f5404613656042107213a7bbc8",
|
||||
),
|
||||
("https://deno.land/", "https/deno.land/8a5edab282632443219e051e4ade2d1d5bbc671c781051bf1437897cbdfea0f1"),
|
||||
(
|
||||
"https://deno.land/?asdf=qwer",
|
||||
"https/deno.land/e4edd1f433165141015db6a823094e6bd8f24dd16fe33f2abd99d34a0a21a3c0",
|
||||
),
|
||||
// should be the same as case above, fragment (#qwer) is ignored
|
||||
// when hashing
|
||||
(
|
||||
"https://deno.land/?asdf=qwer#qwer",
|
||||
"https/deno.land/e4edd1f433165141015db6a823094e6bd8f24dd16fe33f2abd99d34a0a21a3c0",
|
||||
),
|
||||
(
|
||||
"data:application/typescript;base64,ZXhwb3J0IGNvbnN0IGEgPSAiYSI7CgpleHBvcnQgZW51bSBBIHsKICBBLAogIEIsCiAgQywKfQo=",
|
||||
"data/c21c7fc382b2b0553dc0864aa81a3acacfb7b3d1285ab5ae76da6abec213fb37",
|
||||
),
|
||||
(
|
||||
"data:text/plain,Hello%2C%20Deno!",
|
||||
"data/967374e3561d6741234131e342bf5c6848b70b13758adfe23ee1a813a8131818",
|
||||
)
|
||||
];
|
||||
|
||||
for (url, expected) in test_cases.iter() {
|
||||
let u = Url::parse(url).unwrap();
|
||||
let p = url_to_filename(&u).unwrap();
|
||||
assert_eq!(p, PathBuf::from(expected));
|
||||
}
|
||||
}
|
||||
}
|
42 cli/cache/http_cache/common.rs (vendored, Normal file)

@@ -0,0 +1,42 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.

use std::path::Path;

use deno_core::url::Url;

pub fn base_url_to_filename_parts(
url: &Url,
port_separator: &str,
) -> Option<Vec<String>> {
let mut out = Vec::with_capacity(2);

let scheme = url.scheme();
out.push(scheme.to_string());

match scheme {
"http" | "https" => {
let host = url.host_str().unwrap();
let host_port = match url.port() {
// underscores are not allowed in domains, so adding one here is fine
Some(port) => format!("{host}{port_separator}{port}"),
None => host.to_string(),
};
out.push(host_port);
}
"data" | "blob" => (),
scheme => {
log::debug!("Don't know how to create cache name for scheme: {}", scheme);
return None;
}
};

Some(out)
}

pub fn read_file_bytes(path: &Path) -> std::io::Result<Option<Vec<u8>>> {
match std::fs::read(path) {
Ok(s) => Ok(Some(s)),
Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
Err(err) => Err(err),
}
}
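base_url_to_filename_parts takes the port separator as a parameter because the two cache layouts introduced later in this commit pass different values: "_PORT" for the global cache and a plain "_" for the local deno_modules layout. The sketch below shows only the host and port naming and is a simplified illustration, not code from the diff; host_dir_name is a made-up helper.

fn host_dir_name(host: &str, port: Option<u16>, port_separator: &str) -> String {
  // ":" cannot appear in file names on some platforms, so the port is joined
  // with a separator string instead.
  match port {
    Some(port) => format!("{host}{port_separator}{port}"),
    None => host.to_string(),
  }
}

fn main() {
  // Global cache style ("_PORT" separator).
  assert_eq!(host_dir_name("deno.land", Some(8080), "_PORT"), "deno.land_PORT8080");
  // Local deno_modules style (plain "_" separator).
  assert_eq!(host_dir_name("deno.land", Some(8080), "_"), "deno.land_8080");
  assert_eq!(host_dir_name("deno.land", None, "_PORT"), "deno.land");
}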
296 cli/cache/http_cache/global.rs (vendored, Normal file)

@@ -0,0 +1,296 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::io;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::time::SystemTime;
|
||||
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::url::Url;
|
||||
use thiserror::Error;
|
||||
|
||||
use crate::cache::CACHE_PERM;
|
||||
use crate::http_util::HeadersMap;
|
||||
use crate::util;
|
||||
use crate::util::fs::atomic_write_file;
|
||||
|
||||
use super::common::base_url_to_filename_parts;
|
||||
use super::common::read_file_bytes;
|
||||
use super::CachedUrlMetadata;
|
||||
use super::HttpCache;
|
||||
use super::HttpCacheItemKey;
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
#[error("Can't convert url (\"{}\") to filename.", .url)]
|
||||
pub struct UrlToFilenameConversionError {
|
||||
pub(super) url: String,
|
||||
}
|
||||
|
||||
/// Turn provided `url` into a hashed filename.
|
||||
/// URLs can contain a lot of characters that cannot be used
|
||||
/// in filenames (like "?", "#", ":"), so in order to cache
|
||||
/// them properly they are deterministically hashed into ASCII
|
||||
/// strings.
|
||||
pub fn url_to_filename(
|
||||
url: &Url,
|
||||
) -> Result<PathBuf, UrlToFilenameConversionError> {
|
||||
let Some(mut cache_filename) = base_url_to_filename(url) else {
|
||||
return Err(UrlToFilenameConversionError { url: url.to_string() });
|
||||
};
|
||||
|
||||
let mut rest_str = url.path().to_string();
|
||||
if let Some(query) = url.query() {
|
||||
rest_str.push('?');
|
||||
rest_str.push_str(query);
|
||||
}
|
||||
// NOTE: fragment is omitted on purpose - it's not taken into
|
||||
// account when caching - it denotes parts of webpage, which
|
||||
// in case of static resources doesn't make much sense
|
||||
let hashed_filename = util::checksum::gen(&[rest_str.as_bytes()]);
|
||||
cache_filename.push(hashed_filename);
|
||||
Ok(cache_filename)
|
||||
}
|
||||
|
||||
// Turn base of url (scheme, hostname, port) into a valid filename.
|
||||
/// This method replaces port part with a special string token (because
|
||||
/// ":" cannot be used in filename on some platforms).
|
||||
/// Ex: $DENO_DIR/deps/https/deno.land/
|
||||
fn base_url_to_filename(url: &Url) -> Option<PathBuf> {
|
||||
base_url_to_filename_parts(url, "_PORT").map(|parts| {
|
||||
let mut out = PathBuf::new();
|
||||
for part in parts {
|
||||
out.push(part);
|
||||
}
|
||||
out
|
||||
})
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct GlobalHttpCache(PathBuf);
|
||||
|
||||
impl GlobalHttpCache {
|
||||
pub fn new(path: PathBuf) -> Self {
|
||||
assert!(path.is_absolute());
|
||||
Self(path)
|
||||
}
|
||||
|
||||
// Deprecated to discourage using this as where the file is stored and
|
||||
// how it's stored should be an implementation detail of the cache.
|
||||
#[deprecated(note = "Should only be used for deno info.")]
|
||||
pub fn get_global_cache_location(&self) -> &PathBuf {
|
||||
&self.0
|
||||
}
|
||||
|
||||
// DEPRECATED: Where the file is stored and how it's stored should be an implementation
|
||||
// detail of the cache.
|
||||
#[deprecated(note = "Do not assume the cache will be stored at a file path.")]
|
||||
pub fn get_global_cache_filepath(
|
||||
&self,
|
||||
url: &Url,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
Ok(self.0.join(url_to_filename(url)?))
|
||||
}
|
||||
|
||||
fn get_cache_filepath(&self, url: &Url) -> Result<PathBuf, AnyError> {
|
||||
Ok(self.0.join(url_to_filename(url)?))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn key_file_path<'a>(&self, key: &'a HttpCacheItemKey) -> &'a PathBuf {
|
||||
// The key file path is always set for the global cache because
|
||||
// the file will always exist, unlike the local cache, which won't
|
||||
// have this for redirects.
|
||||
key.file_path.as_ref().unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
impl HttpCache for GlobalHttpCache {
|
||||
fn cache_item_key<'a>(
|
||||
&self,
|
||||
url: &'a Url,
|
||||
) -> Result<HttpCacheItemKey<'a>, AnyError> {
|
||||
Ok(HttpCacheItemKey {
|
||||
#[cfg(debug_assertions)]
|
||||
is_local_key: false,
|
||||
url,
|
||||
file_path: Some(self.get_cache_filepath(url)?),
|
||||
})
|
||||
}
|
||||
|
||||
fn contains(&self, url: &Url) -> bool {
|
||||
let Ok(cache_filepath) = self.get_cache_filepath(url) else {
|
||||
return false
|
||||
};
|
||||
cache_filepath.is_file()
|
||||
}
|
||||
|
||||
fn read_modified_time(
|
||||
&self,
|
||||
key: &HttpCacheItemKey,
|
||||
) -> Result<Option<SystemTime>, AnyError> {
|
||||
#[cfg(debug_assertions)]
|
||||
debug_assert!(!key.is_local_key);
|
||||
|
||||
match std::fs::metadata(self.key_file_path(key)) {
|
||||
Ok(metadata) => Ok(Some(metadata.modified()?)),
|
||||
Err(err) if err.kind() == io::ErrorKind::NotFound => Ok(None),
|
||||
Err(err) => Err(err.into()),
|
||||
}
|
||||
}
|
||||
|
||||
fn set(
|
||||
&self,
|
||||
url: &Url,
|
||||
headers: HeadersMap,
|
||||
content: &[u8],
|
||||
) -> Result<(), AnyError> {
|
||||
let cache_filepath = self.get_cache_filepath(url)?;
|
||||
// Cache content
|
||||
atomic_write_file(&cache_filepath, content, CACHE_PERM)?;
|
||||
|
||||
let metadata = CachedUrlMetadata {
|
||||
time: SystemTime::now(),
|
||||
url: url.to_string(),
|
||||
headers,
|
||||
};
|
||||
write_metadata(&cache_filepath, &metadata)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn read_file_bytes(
|
||||
&self,
|
||||
key: &HttpCacheItemKey,
|
||||
) -> Result<Option<Vec<u8>>, AnyError> {
|
||||
#[cfg(debug_assertions)]
|
||||
debug_assert!(!key.is_local_key);
|
||||
|
||||
Ok(read_file_bytes(self.key_file_path(key))?)
|
||||
}
|
||||
|
||||
fn read_metadata(
|
||||
&self,
|
||||
key: &HttpCacheItemKey,
|
||||
) -> Result<Option<CachedUrlMetadata>, AnyError> {
|
||||
#[cfg(debug_assertions)]
|
||||
debug_assert!(!key.is_local_key);
|
||||
|
||||
match read_metadata(self.key_file_path(key))? {
|
||||
Some(metadata) => Ok(Some(metadata)),
|
||||
None => Ok(None),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn read_metadata(path: &Path) -> Result<Option<CachedUrlMetadata>, AnyError> {
|
||||
let path = path.with_extension("metadata.json");
|
||||
match read_file_bytes(&path)? {
|
||||
Some(metadata) => Ok(Some(serde_json::from_slice(&metadata)?)),
|
||||
None => Ok(None),
|
||||
}
|
||||
}
|
||||
|
||||
fn write_metadata(
|
||||
path: &Path,
|
||||
meta_data: &CachedUrlMetadata,
|
||||
) -> Result<(), AnyError> {
|
||||
let path = path.with_extension("metadata.json");
|
||||
let json = serde_json::to_string_pretty(meta_data)?;
|
||||
atomic_write_file(&path, json, CACHE_PERM)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use std::collections::HashMap;
|
||||
use test_util::TempDir;
|
||||
|
||||
#[test]
|
||||
fn test_url_to_filename() {
|
||||
let test_cases = [
|
||||
("https://deno.land/x/foo.ts", "https/deno.land/2c0a064891b9e3fbe386f5d4a833bce5076543f5404613656042107213a7bbc8"),
|
||||
(
|
||||
"https://deno.land:8080/x/foo.ts",
|
||||
"https/deno.land_PORT8080/2c0a064891b9e3fbe386f5d4a833bce5076543f5404613656042107213a7bbc8",
|
||||
),
|
||||
("https://deno.land/", "https/deno.land/8a5edab282632443219e051e4ade2d1d5bbc671c781051bf1437897cbdfea0f1"),
|
||||
(
|
||||
"https://deno.land/?asdf=qwer",
|
||||
"https/deno.land/e4edd1f433165141015db6a823094e6bd8f24dd16fe33f2abd99d34a0a21a3c0",
|
||||
),
|
||||
// should be the same as case above, fragment (#qwer) is ignored
|
||||
// when hashing
|
||||
(
|
||||
"https://deno.land/?asdf=qwer#qwer",
|
||||
"https/deno.land/e4edd1f433165141015db6a823094e6bd8f24dd16fe33f2abd99d34a0a21a3c0",
|
||||
),
|
||||
(
|
||||
"data:application/typescript;base64,ZXhwb3J0IGNvbnN0IGEgPSAiYSI7CgpleHBvcnQgZW51bSBBIHsKICBBLAogIEIsCiAgQywKfQo=",
|
||||
"data/c21c7fc382b2b0553dc0864aa81a3acacfb7b3d1285ab5ae76da6abec213fb37",
|
||||
),
|
||||
(
|
||||
"data:text/plain,Hello%2C%20Deno!",
|
||||
"data/967374e3561d6741234131e342bf5c6848b70b13758adfe23ee1a813a8131818",
|
||||
)
|
||||
];
|
||||
|
||||
for (url, expected) in test_cases.iter() {
|
||||
let u = Url::parse(url).unwrap();
|
||||
let p = url_to_filename(&u).unwrap();
|
||||
assert_eq!(p, PathBuf::from(expected));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_create_cache() {
|
||||
let dir = TempDir::new();
|
||||
let cache_path = dir.path().join("foobar");
|
||||
// HttpCache should be created lazily on first use:
|
||||
// when zipping up a local project with no external dependencies
|
||||
// "$DENO_DIR/deps" is empty. When unzipping such project
|
||||
// "$DENO_DIR/deps" might not get restored and in situation
|
||||
// when directory is owned by root we might not be able
|
||||
// to create that directory. However if it's not needed it
|
||||
// doesn't make sense to return error in such specific scenarios.
|
||||
// For more details check issue:
|
||||
// https://github.com/denoland/deno/issues/5688
|
||||
let cache = GlobalHttpCache::new(cache_path.to_path_buf());
|
||||
assert!(!cache.0.exists());
|
||||
let url = Url::parse("http://example.com/foo/bar.js").unwrap();
|
||||
cache
|
||||
.set(&url, HeadersMap::new(), b"hello world")
|
||||
.expect("Failed to add to cache");
|
||||
assert!(cache_path.is_dir());
|
||||
assert!(cache.get_cache_filepath(&url).unwrap().is_file());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_set() {
|
||||
let dir = TempDir::new();
|
||||
let cache = GlobalHttpCache::new(dir.path().to_path_buf());
|
||||
let url = Url::parse("https://deno.land/x/welcome.ts").unwrap();
|
||||
let mut headers = HashMap::new();
|
||||
headers.insert(
|
||||
"content-type".to_string(),
|
||||
"application/javascript".to_string(),
|
||||
);
|
||||
headers.insert("etag".to_string(), "as5625rqdsfb".to_string());
|
||||
let content = b"Hello world";
|
||||
let r = cache.set(&url, headers, content);
|
||||
eprintln!("result {r:?}");
|
||||
assert!(r.is_ok());
|
||||
let key = cache.cache_item_key(&url).unwrap();
|
||||
let content =
|
||||
String::from_utf8(cache.read_file_bytes(&key).unwrap().unwrap()).unwrap();
|
||||
let headers = cache.read_metadata(&key).unwrap().unwrap().headers;
|
||||
assert_eq!(content, "Hello world");
|
||||
assert_eq!(
|
||||
headers.get("content-type").unwrap(),
|
||||
"application/javascript"
|
||||
);
|
||||
assert_eq!(headers.get("etag").unwrap(), "as5625rqdsfb");
|
||||
assert_eq!(headers.get("foobar"), None);
|
||||
}
|
||||
}
|
872 cli/cache/http_cache/local.rs (vendored, Normal file)

@@ -0,0 +1,872 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use std::time::SystemTime;
|
||||
|
||||
use deno_ast::MediaType;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::parking_lot::RwLock;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::url::Url;
|
||||
use indexmap::IndexMap;
|
||||
use once_cell::sync::Lazy;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::cache::CACHE_PERM;
|
||||
use crate::http_util::HeadersMap;
|
||||
use crate::util;
|
||||
use crate::util::fs::atomic_write_file;
|
||||
|
||||
use super::common::base_url_to_filename_parts;
|
||||
use super::common::read_file_bytes;
|
||||
use super::global::GlobalHttpCache;
|
||||
use super::global::UrlToFilenameConversionError;
|
||||
use super::CachedUrlMetadata;
|
||||
use super::HttpCache;
|
||||
use super::HttpCacheItemKey;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct LocalHttpCache {
|
||||
path: PathBuf,
|
||||
manifest: LocalCacheManifest,
|
||||
global_cache: Arc<GlobalHttpCache>,
|
||||
}
|
||||
|
||||
impl LocalHttpCache {
|
||||
pub fn new(path: PathBuf, global_cache: Arc<GlobalHttpCache>) -> Self {
|
||||
assert!(path.is_absolute());
|
||||
let manifest = LocalCacheManifest::new(path.join("manifest.json"));
|
||||
Self {
|
||||
path,
|
||||
manifest,
|
||||
global_cache,
|
||||
}
|
||||
}
|
||||
|
||||
fn get_cache_filepath(
|
||||
&self,
|
||||
url: &Url,
|
||||
headers: &HeadersMap,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
Ok(url_to_local_sub_path(url, headers)?.as_path_from_root(&self.path))
|
||||
}
|
||||
|
||||
/// Copies the file from the global cache to the local cache returning
|
||||
/// if the data was successfully copied to the local cache.
|
||||
fn check_copy_global_to_local(&self, url: &Url) -> Result<bool, AnyError> {
|
||||
let global_key = self.global_cache.cache_item_key(url)?;
|
||||
let Some(metadata) = self.global_cache.read_metadata(&global_key)? else {
|
||||
return Ok(false);
|
||||
};
|
||||
|
||||
if !metadata.is_redirect() {
|
||||
let Some(cached_bytes) = self.global_cache.read_file_bytes(&global_key)? else {
|
||||
return Ok(false);
|
||||
};
|
||||
|
||||
let local_file_path = self.get_cache_filepath(url, &metadata.headers)?;
|
||||
// if we're here, then this will be set
|
||||
atomic_write_file(&local_file_path, cached_bytes, CACHE_PERM)?;
|
||||
}
|
||||
self.manifest.insert_data(
|
||||
url_to_local_sub_path(url, &metadata.headers)?,
|
||||
url.clone(),
|
||||
metadata.headers,
|
||||
);
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
fn get_url_metadata_checking_global_cache(
|
||||
&self,
|
||||
url: &Url,
|
||||
) -> Result<Option<CachedUrlMetadata>, AnyError> {
|
||||
if let Some(metadata) = self.manifest.get_metadata(url) {
|
||||
Ok(Some(metadata))
|
||||
} else if self.check_copy_global_to_local(url)? {
|
||||
// try again now that it's saved
|
||||
Ok(self.manifest.get_metadata(url))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HttpCache for LocalHttpCache {
|
||||
fn cache_item_key<'a>(
|
||||
&self,
|
||||
url: &'a Url,
|
||||
) -> Result<HttpCacheItemKey<'a>, AnyError> {
|
||||
Ok(HttpCacheItemKey {
|
||||
#[cfg(debug_assertions)]
|
||||
is_local_key: true,
|
||||
url,
|
||||
file_path: None, // need to compute this every time
|
||||
})
|
||||
}
|
||||
|
||||
fn contains(&self, url: &Url) -> bool {
|
||||
self.manifest.get_metadata(url).is_some()
|
||||
}
|
||||
|
||||
fn read_modified_time(
|
||||
&self,
|
||||
key: &HttpCacheItemKey,
|
||||
) -> Result<Option<SystemTime>, AnyError> {
|
||||
#[cfg(debug_assertions)]
|
||||
debug_assert!(key.is_local_key);
|
||||
|
||||
self
|
||||
.get_url_metadata_checking_global_cache(key.url)
|
||||
.map(|m| m.map(|m| m.time))
|
||||
}
|
||||
|
||||
fn set(
|
||||
&self,
|
||||
url: &Url,
|
||||
headers: crate::http_util::HeadersMap,
|
||||
content: &[u8],
|
||||
) -> Result<(), AnyError> {
|
||||
let is_redirect = headers.contains_key("location");
|
||||
if !is_redirect {
|
||||
let cache_filepath = self.get_cache_filepath(url, &headers)?;
|
||||
// Cache content
|
||||
atomic_write_file(&cache_filepath, content, CACHE_PERM)?;
|
||||
}
|
||||
|
||||
let sub_path = url_to_local_sub_path(url, &headers)?;
|
||||
self.manifest.insert_data(sub_path, url.clone(), headers);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn read_file_bytes(
|
||||
&self,
|
||||
key: &HttpCacheItemKey,
|
||||
) -> Result<Option<Vec<u8>>, AnyError> {
|
||||
#[cfg(debug_assertions)]
|
||||
debug_assert!(key.is_local_key);
|
||||
|
||||
let metadata = self.get_url_metadata_checking_global_cache(key.url)?;
|
||||
match metadata {
|
||||
Some(data) => {
|
||||
if data.is_redirect() {
|
||||
// return back an empty file for redirect
|
||||
Ok(Some(Vec::new()))
|
||||
} else {
|
||||
// if it's not a redirect, then it should have a file path
|
||||
let cache_filepath =
|
||||
self.get_cache_filepath(key.url, &data.headers)?;
|
||||
Ok(read_file_bytes(&cache_filepath)?)
|
||||
}
|
||||
}
|
||||
None => Ok(None),
|
||||
}
|
||||
}
|
||||
|
||||
fn read_metadata(
|
||||
&self,
|
||||
key: &HttpCacheItemKey,
|
||||
) -> Result<Option<CachedUrlMetadata>, AnyError> {
|
||||
#[cfg(debug_assertions)]
|
||||
debug_assert!(key.is_local_key);
|
||||
|
||||
self.get_url_metadata_checking_global_cache(key.url)
|
||||
}
|
||||
}
|
||||
|
||||
struct LocalCacheSubPath {
|
||||
pub has_hash: bool,
|
||||
pub parts: Vec<String>,
|
||||
}
|
||||
|
||||
impl LocalCacheSubPath {
|
||||
pub fn as_path_from_root(&self, root_path: &Path) -> PathBuf {
|
||||
let mut path = root_path.to_path_buf();
|
||||
for part in &self.parts {
|
||||
path.push(part);
|
||||
}
|
||||
path
|
||||
}
|
||||
}
|
||||
|
||||
fn url_to_local_sub_path(
|
||||
url: &Url,
|
||||
headers: &HeadersMap,
|
||||
) -> Result<LocalCacheSubPath, UrlToFilenameConversionError> {
|
||||
// https://stackoverflow.com/a/31976060/188246
|
||||
static FORBIDDEN_CHARS: Lazy<HashSet<char>> = Lazy::new(|| {
|
||||
HashSet::from(['?', '<', '>', ':', '*', '|', '\\', ':', '"', '\'', '/'])
|
||||
});
|
||||
|
||||
fn has_forbidden_chars(segment: &str) -> bool {
|
||||
segment.chars().any(|c| {
|
||||
let is_uppercase = c.is_ascii_alphabetic() && !c.is_ascii_lowercase();
|
||||
FORBIDDEN_CHARS.contains(&c)
|
||||
// do not allow uppercase letters in order to make this work
|
||||
// the same on case insensitive file systems
|
||||
|| is_uppercase
|
||||
})
|
||||
}
|
||||
|
||||
fn has_known_extension(path: &str) -> bool {
|
||||
let path = path.to_lowercase();
|
||||
path.ends_with(".js")
|
||||
|| path.ends_with(".ts")
|
||||
|| path.ends_with(".jsx")
|
||||
|| path.ends_with(".tsx")
|
||||
|| path.ends_with(".mts")
|
||||
|| path.ends_with(".mjs")
|
||||
|| path.ends_with(".json")
|
||||
|| path.ends_with(".wasm")
|
||||
}
|
||||
|
||||
fn get_extension(url: &Url, headers: &HeadersMap) -> &'static str {
|
||||
MediaType::from_specifier_and_headers(url, Some(headers)).as_ts_extension()
|
||||
}
|
||||
|
||||
fn short_hash(data: &str, last_ext: Option<&str>) -> String {
|
||||
// This function is a bit of a balancing act between readability
|
||||
// and avoiding collisions.
|
||||
let hash = util::checksum::gen(&[data.as_bytes()]);
|
||||
// keep the paths short because of windows path limit
|
||||
const MAX_LENGTH: usize = 20;
|
||||
let mut sub = String::with_capacity(MAX_LENGTH);
|
||||
for c in data.chars().take(MAX_LENGTH) {
|
||||
// don't include the query string (only use it in the hash)
|
||||
if c == '?' {
|
||||
break;
|
||||
}
|
||||
if FORBIDDEN_CHARS.contains(&c) {
|
||||
sub.push('_');
|
||||
} else {
|
||||
sub.extend(c.to_lowercase());
|
||||
}
|
||||
}
|
||||
let sub = match last_ext {
|
||||
Some(ext) => sub.strip_suffix(ext).unwrap_or(&sub),
|
||||
None => &sub,
|
||||
};
|
||||
let ext = last_ext.unwrap_or("");
|
||||
if sub.is_empty() {
|
||||
format!("#{}{}", &hash[..7], ext)
|
||||
} else {
|
||||
format!("#{}_{}{}", &sub, &hash[..5], ext)
|
||||
}
|
||||
}
|
||||
|
||||
fn should_hash_part(part: &str, last_ext: Option<&str>) -> bool {
|
||||
if part.is_empty() || part.len() > 30 {
|
||||
// keep short due to windows path limit
|
||||
return true;
|
||||
}
|
||||
let hash_context_specific = if let Some(last_ext) = last_ext {
|
||||
// if the last part does not have a known extension, hash it in order to
|
||||
// prevent collisions with a directory of the same name
|
||||
!has_known_extension(part) || !part.ends_with(last_ext)
|
||||
} else {
|
||||
// if any non-ending path part has a known extension, hash it in order to
|
||||
// prevent collisions where a filename has the same name as a directory name
|
||||
has_known_extension(part)
|
||||
};
|
||||
|
||||
// the hash symbol at the start designates a hash for the url part
|
||||
hash_context_specific || part.starts_with('#') || has_forbidden_chars(part)
|
||||
}
|
||||
|
||||
// get the base url
|
||||
let port_separator = "_"; // make this shorter with just an underscore
|
||||
let Some(mut base_parts) = base_url_to_filename_parts(url, port_separator) else {
|
||||
return Err(UrlToFilenameConversionError { url: url.to_string() });
|
||||
};
|
||||
|
||||
if base_parts[0] == "https" {
|
||||
base_parts.remove(0);
|
||||
} else {
|
||||
let scheme = base_parts.remove(0);
|
||||
base_parts[0] = format!("{}_{}", scheme, base_parts[0]);
|
||||
}
|
||||
|
||||
// first, try to get the filename of the path
|
||||
let path_segments = url
|
||||
.path()
|
||||
.strip_prefix('/')
|
||||
.unwrap_or(url.path())
|
||||
.split('/');
|
||||
let mut parts = base_parts
|
||||
.into_iter()
|
||||
.chain(path_segments.map(|s| s.to_string()))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// push the query parameter onto the last part
|
||||
if let Some(query) = url.query() {
|
||||
let last_part = parts.last_mut().unwrap();
|
||||
last_part.push('?');
|
||||
last_part.push_str(query);
|
||||
}
|
||||
|
||||
let mut has_hash = false;
|
||||
let parts_len = parts.len();
|
||||
let parts = parts
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(i, part)| {
|
||||
let is_last = i == parts_len - 1;
|
||||
let last_ext = if is_last {
|
||||
Some(get_extension(url, headers))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
if should_hash_part(&part, last_ext) {
|
||||
has_hash = true;
|
||||
short_hash(&part, last_ext)
|
||||
} else {
|
||||
part
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok(LocalCacheSubPath { has_hash, parts })
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Clone)]
|
||||
struct LocalCacheManifestData {
|
||||
serialized: SerializedLocalCacheManifestData,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
|
||||
struct SerializedLocalCacheManifestDataModule {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub path: Option<String>,
|
||||
#[serde(
|
||||
default = "IndexMap::new",
|
||||
skip_serializing_if = "IndexMap::is_empty"
|
||||
)]
|
||||
pub headers: IndexMap<String, String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
|
||||
struct SerializedLocalCacheManifestData {
|
||||
pub modules: IndexMap<Url, SerializedLocalCacheManifestDataModule>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct LocalCacheManifest {
|
||||
file_path: PathBuf,
|
||||
data: RwLock<LocalCacheManifestData>,
|
||||
}
|
||||
|
||||
impl LocalCacheManifest {
|
||||
pub fn new(file_path: PathBuf) -> Self {
|
||||
let serialized: SerializedLocalCacheManifestData =
|
||||
std::fs::read(&file_path)
|
||||
.ok()
|
||||
.and_then(|data| match serde_json::from_slice(&data) {
|
||||
Ok(data) => Some(data),
|
||||
Err(err) => {
|
||||
log::debug!("Failed deserializing local cache manifest: {:#}", err);
|
||||
None
|
||||
}
|
||||
})
|
||||
.unwrap_or_default();
|
||||
Self {
|
||||
data: RwLock::new(LocalCacheManifestData { serialized }),
|
||||
file_path,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn insert_data(
|
||||
&self,
|
||||
sub_path: LocalCacheSubPath,
|
||||
url: Url,
|
||||
mut original_headers: HashMap<String, String>,
|
||||
) {
|
||||
fn should_keep_content_type_header(
|
||||
url: &Url,
|
||||
headers: &HashMap<String, String>,
|
||||
) -> bool {
|
||||
// only keep the location header if it can't be derived from the url
|
||||
MediaType::from_specifier(url)
|
||||
!= MediaType::from_specifier_and_headers(url, Some(headers))
|
||||
}
|
||||
|
||||
let mut headers_subset = IndexMap::new();
|
||||
|
||||
const HEADER_KEYS_TO_KEEP: [&str; 4] = [
|
||||
// keep alphabetical for cleanliness in the output
|
||||
"content-type",
|
||||
"location",
|
||||
"x-deno-warning",
|
||||
"x-typescript-types",
|
||||
];
|
||||
for key in HEADER_KEYS_TO_KEEP {
|
||||
if key == "content-type"
|
||||
&& !should_keep_content_type_header(&url, &original_headers)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
if let Some((k, v)) = original_headers.remove_entry(key) {
|
||||
headers_subset.insert(k, v);
|
||||
}
|
||||
}
|
||||
|
||||
let mut data = self.data.write();
|
||||
let is_empty = headers_subset.is_empty() && !sub_path.has_hash;
|
||||
let has_changed = if is_empty {
|
||||
data.serialized.modules.remove(&url).is_some()
|
||||
} else {
|
||||
let new_data = SerializedLocalCacheManifestDataModule {
|
||||
path: if headers_subset.contains_key("location") {
|
||||
None
|
||||
} else {
|
||||
Some(sub_path.parts.join("/"))
|
||||
},
|
||||
headers: headers_subset,
|
||||
};
|
||||
if data.serialized.modules.get(&url) == Some(&new_data) {
|
||||
false
|
||||
} else {
|
||||
data.serialized.modules.insert(url.clone(), new_data);
|
||||
true
|
||||
}
|
||||
};
|
||||
|
||||
if has_changed {
|
||||
// don't bother ensuring the directory here because it will
|
||||
// eventually be created by files being added to the cache
|
||||
let result = atomic_write_file(
|
||||
&self.file_path,
|
||||
serde_json::to_string_pretty(&data.serialized).unwrap(),
|
||||
CACHE_PERM,
|
||||
);
|
||||
if let Err(err) = result {
|
||||
log::debug!("Failed saving local cache manifest: {:#}", err);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_metadata(&self, url: &Url) -> Option<CachedUrlMetadata> {
|
||||
let data = self.data.read();
|
||||
match data.serialized.modules.get(url) {
|
||||
Some(module) => {
|
||||
let headers = module
|
||||
.headers
|
||||
.iter()
|
||||
.map(|(k, v)| (k.to_string(), v.to_string()))
|
||||
.collect::<HashMap<_, _>>();
|
||||
let sub_path = match &module.path {
|
||||
Some(sub_path) => {
|
||||
Cow::Owned(self.file_path.parent().unwrap().join(sub_path))
|
||||
}
|
||||
None => Cow::Borrowed(&self.file_path),
|
||||
};
|
||||
|
||||
let Ok(metadata) = sub_path.metadata() else {
|
||||
return None;
|
||||
};
|
||||
|
||||
Some(CachedUrlMetadata {
|
||||
headers,
|
||||
url: url.to_string(),
|
||||
time: metadata.modified().unwrap_or_else(|_| SystemTime::now()),
|
||||
})
|
||||
}
|
||||
None => {
|
||||
let folder_path = self.file_path.parent().unwrap();
|
||||
let sub_path = url_to_local_sub_path(url, &Default::default()).ok()?;
|
||||
if sub_path.has_hash {
|
||||
// only paths without a hash are considered as in the cache
|
||||
// when they don't have a metadata entry
|
||||
return None;
|
||||
}
|
||||
let file_path = sub_path.as_path_from_root(folder_path);
|
||||
if let Ok(metadata) = file_path.metadata() {
|
||||
Some(CachedUrlMetadata {
|
||||
headers: Default::default(),
|
||||
url: url.to_string(),
|
||||
time: metadata.modified().unwrap_or_else(|_| SystemTime::now()),
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
use deno_core::serde_json::json;
|
||||
  use pretty_assertions::assert_eq;
  use test_util::TempDir;

  #[test]
  fn test_url_to_local_sub_path() {
    run_test("https://deno.land/x/mod.ts", &[], "deno.land/x/mod.ts");
    run_test(
      "http://deno.land/x/mod.ts",
      &[],
      // http gets added to the folder name, but not https
      "http_deno.land/x/mod.ts",
    );
    run_test(
      // capital letter in filename
      "https://deno.land/x/MOD.ts",
      &[],
      "deno.land/x/#mod_fa860.ts",
    );
    run_test(
      // query string
      "https://deno.land/x/mod.ts?testing=1",
      &[],
      "deno.land/x/#mod_2eb80.ts",
    );
    run_test(
      // capital letter in directory
      "https://deno.land/OTHER/mod.ts",
      &[],
      "deno.land/#other_1c55d/mod.ts",
    );
    run_test(
      // under max of 30 chars
      "https://deno.land/x/012345678901234567890123456.js",
      &[],
      "deno.land/x/012345678901234567890123456.js",
    );
    run_test(
      // max 30 chars
      "https://deno.land/x/0123456789012345678901234567.js",
      &[],
      "deno.land/x/#01234567890123456789_836de.js",
    );
    run_test(
      // forbidden char
      "https://deno.land/x/mod's.js",
      &[],
      "deno.land/x/#mod_s_44fc8.js",
    );
    run_test(
      // no extension
      "https://deno.land/x/mod",
      &[("content-type", "application/typescript")],
      "deno.land/x/#mod_e55cf.ts",
    );
    run_test(
      // known extension in directory is not allowed
      // because it could conflict with a file of the same name
      "https://deno.land/x/mod.js/mod.js",
      &[],
      "deno.land/x/#mod.js_59c58/mod.js",
    );
    run_test(
      // slash slash in path
      "http://localhost//mod.js",
      &[],
      "http_localhost/#e3b0c44/mod.js",
    );
    run_test(
      // headers same extension
      "https://deno.land/x/mod.ts",
      &[("content-type", "application/typescript")],
      "deno.land/x/mod.ts",
    );
    run_test(
      // headers different extension... We hash this because
      // if someone deletes the manifest file, then we don't want
      // https://deno.land/x/mod.ts to resolve as a typescript file
      "https://deno.land/x/mod.ts",
      &[("content-type", "application/javascript")],
      "deno.land/x/#mod.ts_e8c36.js",
    );

    #[track_caller]
    fn run_test(url: &str, headers: &[(&str, &str)], expected: &str) {
      let url = Url::parse(url).unwrap();
      let headers = headers
        .iter()
        .map(|(k, v)| (k.to_string(), v.to_string()))
        .collect();
      let result = url_to_local_sub_path(&url, &headers).unwrap();
      let parts = result.parts.join("/");
      assert_eq!(parts, expected);
      assert_eq!(
        result.parts.iter().any(|p| p.starts_with('#')),
        result.has_hash
      )
    }
  }

  #[test]
  fn test_local_global_cache() {
    let temp_dir = TempDir::new();
    let global_cache_path = temp_dir.path().join("global");
    let local_cache_path = temp_dir.path().join("local");
    let global_cache =
      Arc::new(GlobalHttpCache::new(global_cache_path.to_path_buf()));
    let local_cache =
      LocalHttpCache::new(local_cache_path.to_path_buf(), global_cache.clone());

    let manifest_file = local_cache_path.join("manifest.json");
    // mapped url
    {
      let url = Url::parse("https://deno.land/x/mod.ts").unwrap();
      let content = "export const test = 5;";
      global_cache
        .set(
          &url,
          HashMap::from([(
            "content-type".to_string(),
            "application/typescript".to_string(),
          )]),
          content.as_bytes(),
        )
        .unwrap();
      let key = local_cache.cache_item_key(&url).unwrap();
      assert_eq!(
        String::from_utf8(local_cache.read_file_bytes(&key).unwrap().unwrap())
          .unwrap(),
        content
      );
      let metadata = local_cache.read_metadata(&key).unwrap().unwrap();
      // won't have any headers because the content-type is derivable from the url
      assert_eq!(metadata.headers, HashMap::new());
      assert_eq!(metadata.url, url.to_string());
      // no manifest file yet
      assert!(!manifest_file.exists());

      // now try deleting the global cache and we should still be able to load it
      global_cache_path.remove_dir_all();
      assert_eq!(
        String::from_utf8(local_cache.read_file_bytes(&key).unwrap().unwrap())
          .unwrap(),
        content
      );
    }

    // file that's directly mappable to a url
    {
      let content = "export const a = 1;";
      local_cache_path
        .join("deno.land")
        .join("main.js")
        .write(content);

      // now we should be able to read this file because it's directly mappable to a url
      let url = Url::parse("https://deno.land/main.js").unwrap();
      let key = local_cache.cache_item_key(&url).unwrap();
      assert_eq!(
        String::from_utf8(local_cache.read_file_bytes(&key).unwrap().unwrap())
          .unwrap(),
        content
      );
      let metadata = local_cache.read_metadata(&key).unwrap().unwrap();
      assert_eq!(metadata.headers, HashMap::new());
      assert_eq!(metadata.url, url.to_string());
    }

    // now try a file with a different content-type header
    {
      let url =
        Url::parse("https://deno.land/x/different_content_type.ts").unwrap();
      let content = "export const test = 5;";
      global_cache
        .set(
          &url,
          HashMap::from([(
            "content-type".to_string(),
            "application/javascript".to_string(),
          )]),
          content.as_bytes(),
        )
        .unwrap();
      let key = local_cache.cache_item_key(&url).unwrap();
      assert_eq!(
        String::from_utf8(local_cache.read_file_bytes(&key).unwrap().unwrap())
          .unwrap(),
        content
      );
      let metadata = local_cache.read_metadata(&key).unwrap().unwrap();
      assert_eq!(
        metadata.headers,
        HashMap::from([(
          "content-type".to_string(),
          "application/javascript".to_string(),
        )])
      );
      assert_eq!(metadata.url, url.to_string());
      assert_eq!(
        manifest_file.read_json_value(),
        json!({
          "modules": {
            "https://deno.land/x/different_content_type.ts": {
              "path": "deno.land/x/#different_content_ty_f15dc.js",
              "headers": {
                "content-type": "application/javascript"
              }
            }
          }
        })
      );
      // delete the manifest file
      manifest_file.remove_file();

      // Now try resolving the key again and the content type should still be application/javascript.
      // This is maintained because we hash the filename when the headers don't match the extension.
      let metadata = local_cache.read_metadata(&key).unwrap().unwrap();
      assert_eq!(
        metadata.headers,
        HashMap::from([(
          "content-type".to_string(),
          "application/javascript".to_string(),
        )])
      );
    }

    // reset the local cache
    local_cache_path.remove_dir_all();
    let local_cache =
      LocalHttpCache::new(local_cache_path.to_path_buf(), global_cache.clone());

    // now try caching a file with many headers
    {
      let url = Url::parse("https://deno.land/x/my_file.ts").unwrap();
      let content = "export const test = 5;";
      global_cache
        .set(
          &url,
          HashMap::from([
            (
              "content-type".to_string(),
              "application/typescript".to_string(),
            ),
            ("x-typescript-types".to_string(), "./types.d.ts".to_string()),
            ("x-deno-warning".to_string(), "Stop right now.".to_string()),
            (
              "x-other-header".to_string(),
              "Thank you very much.".to_string(),
            ),
          ]),
          content.as_bytes(),
        )
        .unwrap();
      let check_output = |local_cache: &LocalHttpCache| {
        let key = local_cache.cache_item_key(&url).unwrap();
        assert_eq!(
          String::from_utf8(
            local_cache.read_file_bytes(&key).unwrap().unwrap()
          )
          .unwrap(),
          content
        );
        let metadata = local_cache.read_metadata(&key).unwrap().unwrap();
        assert_eq!(
          metadata.headers,
          HashMap::from([
            ("x-typescript-types".to_string(), "./types.d.ts".to_string(),),
            ("x-deno-warning".to_string(), "Stop right now.".to_string(),)
          ])
        );
        assert_eq!(metadata.url, url.to_string());
        assert_eq!(
          manifest_file.read_json_value(),
          json!({
            "modules": {
              "https://deno.land/x/my_file.ts": {
                "path": "deno.land/x/my_file.ts",
                "headers": {
                  "x-deno-warning": "Stop right now.",
                  "x-typescript-types": "./types.d.ts"
                }
              }
            }
          })
        );
      };
      check_output(&local_cache);
      // now ensure it's the same when re-creating the cache
      check_output(&LocalHttpCache::new(
        local_cache_path.to_path_buf(),
        global_cache.clone(),
      ));
    }

    // reset the local cache
    local_cache_path.remove_dir_all();
    let local_cache =
      LocalHttpCache::new(local_cache_path.to_path_buf(), global_cache.clone());

    // try a file that can't be mapped to the file system
    {
      let url = Url::parse("https://deno.land/INVALID/Module.ts?dev").unwrap();
      let content = "export const test = 5;";
      global_cache
        .set(&url, HashMap::new(), content.as_bytes())
        .unwrap();
      let key = local_cache.cache_item_key(&url).unwrap();
      assert_eq!(
        String::from_utf8(local_cache.read_file_bytes(&key).unwrap().unwrap())
          .unwrap(),
        content
      );
      let metadata = local_cache.read_metadata(&key).unwrap().unwrap();
      // won't have any headers because the content-type is derivable from the url
      assert_eq!(metadata.headers, HashMap::new());
      assert_eq!(metadata.url, url.to_string());

      assert_eq!(
        manifest_file.read_json_value(),
        json!({
          "modules": {
            "https://deno.land/INVALID/Module.ts?dev": {
              "path": "deno.land/#invalid_1ee01/#module_b8d2b.ts"
            }
          }
        })
      );
    }

    // reset the local cache
    local_cache_path.remove_dir_all();
    let local_cache =
      LocalHttpCache::new(local_cache_path.to_path_buf(), global_cache.clone());

    // now try a redirect
    {
      let url = Url::parse("https://deno.land/redirect.ts").unwrap();
      global_cache
        .set(
          &url,
          HashMap::from([("location".to_string(), "./x/mod.ts".to_string())]),
          "Redirecting to other url...".as_bytes(),
        )
        .unwrap();
      let key = local_cache.cache_item_key(&url).unwrap();
      let metadata = local_cache.read_metadata(&key).unwrap().unwrap();
      assert_eq!(
        metadata.headers,
        HashMap::from([("location".to_string(), "./x/mod.ts".to_string())])
      );
      assert_eq!(metadata.url, url.to_string());
      assert_eq!(
        manifest_file.read_json_value(),
        json!({
          "modules": {
            "https://deno.land/redirect.ts": {
              "headers": {
                "location": "./x/mod.ts"
              }
            }
          }
        })
      );
    }
  }
}
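
The tests above describe the contract the rest of this commit relies on: a LocalHttpCache layers over a GlobalHttpCache, copies entries into a human-readable folder layout, hashes path segments it cannot represent directly, and records only headers it cannot re-derive from the path in manifest.json. As a minimal sketch of that wiring outside the test harness (the paths and the helper name below are illustrative, not taken from this commit):

use std::path::PathBuf;
use std::sync::Arc;

// Hypothetical helper: build a project-local cache that falls back to a
// global cache, mirroring what the tests above construct with TempDir.
fn build_project_cache(global_deps: PathBuf, deno_modules: PathBuf) -> LocalHttpCache {
  let global = Arc::new(GlobalHttpCache::new(global_deps));
  // Reads consult the local folder first and copy from the global cache on a miss,
  // which is why the test still reads content after deleting the global cache dir.
  LocalHttpCache::new(deno_modules, global)
}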

cli/cache/http_cache/mod.rs (new file)
@ -0,0 +1,76 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.

use deno_core::error::AnyError;
use deno_core::serde::Deserialize;
use deno_core::serde::Serialize;
use deno_core::url::Url;
use std::path::PathBuf;
use std::time::SystemTime;

use crate::http_util::HeadersMap;

mod common;
mod global;
mod local;

pub use global::url_to_filename;
pub use global::GlobalHttpCache;
pub use local::LocalHttpCache;

/// Cached metadata about a url.
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct CachedUrlMetadata {
  pub headers: HeadersMap,
  pub url: String,
  #[serde(default = "SystemTime::now", rename = "now")]
  pub time: SystemTime,
}

impl CachedUrlMetadata {
  pub fn is_redirect(&self) -> bool {
    self.headers.contains_key("location")
  }
}

/// Computed cache key, which can help reduce the work of computing the cache key multiple times.
pub struct HttpCacheItemKey<'a> {
  // The key is specific to the implementation of HttpCache,
  // so keep these private to the module. For example, the
  // fact that these may be stored in a file is an implementation
  // detail.
  #[cfg(debug_assertions)]
  pub(super) is_local_key: bool,
  pub(super) url: &'a Url,
  /// This will be set all the time for the global cache, but it
  /// won't ever be set for the local cache because that also needs
  /// header information to determine the final path.
  pub(super) file_path: Option<PathBuf>,
}

pub trait HttpCache: Send + Sync + std::fmt::Debug {
  /// A pre-computed key for looking up items in the cache.
  fn cache_item_key<'a>(
    &self,
    url: &'a Url,
  ) -> Result<HttpCacheItemKey<'a>, AnyError>;

  fn contains(&self, url: &Url) -> bool;
  fn set(
    &self,
    url: &Url,
    headers: HeadersMap,
    content: &[u8],
  ) -> Result<(), AnyError>;
  fn read_modified_time(
    &self,
    key: &HttpCacheItemKey,
  ) -> Result<Option<SystemTime>, AnyError>;
  fn read_file_bytes(
    &self,
    key: &HttpCacheItemKey,
  ) -> Result<Option<Vec<u8>>, AnyError>;
  fn read_metadata(
    &self,
    key: &HttpCacheItemKey,
  ) -> Result<Option<CachedUrlMetadata>, AnyError>;
}
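
A usage note on the trait above (this sketch is not part of the commit and the function name is illustrative): callers are expected to compute an HttpCacheItemKey once per url and reuse it for every read, which is exactly how FileFetcher is reworked later in this diff.

fn read_cached(
  cache: &dyn HttpCache,
  url: &Url,
) -> Result<Option<(CachedUrlMetadata, Vec<u8>)>, AnyError> {
  // Compute the key once; the global cache resolves the file path eagerly,
  // while the local cache defers it until headers are known (see file_path above).
  let key = cache.cache_item_key(url)?;
  let Some(metadata) = cache.read_metadata(&key)? else {
    return Ok(None); // not cached
  };
  let Some(bytes) = cache.read_file_bytes(&key)? else {
    return Ok(None); // metadata exists but the body is missing
  };
  Ok(Some((metadata, bytes)))
}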

cli/cache/mod.rs
@ -12,6 +12,7 @@ use deno_graph::source::LoadResponse;
use deno_graph::source::Loader;
use deno_runtime::permissions::PermissionsContainer;
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;

mod cache_db;

@ -34,7 +35,9 @@ pub use deno_dir::DenoDirProvider;
pub use disk_cache::DiskCache;
pub use emit::EmitCache;
pub use http_cache::CachedUrlMetadata;
pub use http_cache::GlobalHttpCache;
pub use http_cache::HttpCache;
pub use http_cache::LocalHttpCache;
pub use incremental::IncrementalCache;
pub use node::NodeAnalysisCache;
pub use parsed_source::ParsedSourceCache;

@ -48,6 +51,7 @@ pub struct FetchCacher {
  emit_cache: EmitCache,
  file_fetcher: Arc<FileFetcher>,
  file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
  global_http_cache: Arc<GlobalHttpCache>,
  permissions: PermissionsContainer,
  cache_info_enabled: bool,
  maybe_local_node_modules_url: Option<ModuleSpecifier>,

@ -58,6 +62,7 @@ impl FetchCacher {
    emit_cache: EmitCache,
    file_fetcher: Arc<FileFetcher>,
    file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
    global_http_cache: Arc<GlobalHttpCache>,
    permissions: PermissionsContainer,
    maybe_local_node_modules_url: Option<ModuleSpecifier>,
  ) -> Self {

@ -65,6 +70,7 @@ impl FetchCacher {
      emit_cache,
      file_fetcher,
      file_header_overrides,
      global_http_cache,
      permissions,
      cache_info_enabled: false,
      maybe_local_node_modules_url,

@ -76,6 +82,31 @@ impl FetchCacher {
  pub fn enable_loading_cache_info(&mut self) {
    self.cache_info_enabled = true;
  }

  // DEPRECATED: Where the file is stored and how it's stored should be an implementation
  // detail of the cache.
  //
  // todo(dsheret): remove once implementing
  //  * https://github.com/denoland/deno/issues/17707
  //  * https://github.com/denoland/deno/issues/17703
  #[deprecated(
    note = "There should not be a way to do this because the file may not be cached at a local path in the future."
  )]
  fn get_local_path(&self, specifier: &ModuleSpecifier) -> Option<PathBuf> {
    // TODO(@kitsonk) fix when deno_graph does not query cache for synthetic
    // modules
    if specifier.scheme() == "flags" {
      None
    } else if specifier.scheme() == "file" {
      specifier.to_file_path().ok()
    } else {
      #[allow(deprecated)]
      self
        .global_http_cache
        .get_global_cache_filepath(specifier)
        .ok()
    }
  }
}

impl Loader for FetchCacher {

@ -85,7 +116,7 @@ impl Loader for FetchCacher {
    }

    #[allow(deprecated)]
    let local = self.file_fetcher.get_local_path(specifier)?;
    let local = self.get_local_path(specifier)?;
    if local.is_file() {
      let emit = self
        .emit_cache

@ -12,7 +12,9 @@ use crate::cache::Caches;
use crate::cache::DenoDir;
use crate::cache::DenoDirProvider;
use crate::cache::EmitCache;
use crate::cache::GlobalHttpCache;
use crate::cache::HttpCache;
use crate::cache::LocalHttpCache;
use crate::cache::NodeAnalysisCache;
use crate::cache::ParsedSourceCache;
use crate::emit::Emitter;

@ -135,6 +137,8 @@ struct CliFactoryServices {
  deno_dir_provider: Deferred<Arc<DenoDirProvider>>,
  caches: Deferred<Arc<Caches>>,
  file_fetcher: Deferred<Arc<FileFetcher>>,
  global_http_cache: Deferred<Arc<GlobalHttpCache>>,
  http_cache: Deferred<Arc<dyn HttpCache>>,
  http_client: Deferred<Arc<HttpClient>>,
  emit_cache: Deferred<EmitCache>,
  emitter: Deferred<Arc<Emitter>>,

@ -233,6 +237,28 @@ impl CliFactory {
      .get_or_init(|| ProgressBar::new(ProgressBarStyle::TextOnly))
  }

  pub fn global_http_cache(&self) -> Result<&Arc<GlobalHttpCache>, AnyError> {
    self.services.global_http_cache.get_or_try_init(|| {
      Ok(Arc::new(GlobalHttpCache::new(
        self.deno_dir()?.deps_folder_path(),
      )))
    })
  }

  pub fn http_cache(&self) -> Result<&Arc<dyn HttpCache>, AnyError> {
    self.services.http_cache.get_or_try_init(|| {
      let global_cache = self.global_http_cache()?.clone();
      match self.options.deno_modules_dir_path() {
        Some(local_path) => {
          let local_cache =
            LocalHttpCache::new(local_path.clone(), global_cache);
          Ok(Arc::new(local_cache))
        }
        None => Ok(global_cache),
      }
    })
  }

  pub fn http_client(&self) -> &Arc<HttpClient> {
    self.services.http_client.get_or_init(|| {
      Arc::new(HttpClient::new(

@ -245,7 +271,7 @@ impl CliFactory {
  pub fn file_fetcher(&self) -> Result<&Arc<FileFetcher>, AnyError> {
    self.services.file_fetcher.get_or_try_init(|| {
      Ok(Arc::new(FileFetcher::new(
        HttpCache::new(self.deno_dir()?.deps_folder_path()),
        self.http_cache()?.clone(),
        self.options.cache_setting(),
        !self.options.no_remote(),
        self.http_client().clone(),

@ -519,6 +545,7 @@ impl CliFactory {
        self.maybe_file_watcher_reporter().clone(),
        self.emit_cache()?.clone(),
        self.file_fetcher()?.clone(),
        self.global_http_cache()?.clone(),
        self.type_checker().await?.clone(),
      )))
    })

@ -170,7 +170,7 @@ pub struct FileFetcher {
  allow_remote: bool,
  cache: FileCache,
  cache_setting: CacheSetting,
  pub http_cache: HttpCache,
  http_cache: Arc<dyn HttpCache>,
  http_client: Arc<HttpClient>,
  blob_store: Arc<BlobStore>,
  download_log_level: log::Level,

@ -179,7 +179,7 @@ pub struct FileFetcher {

impl FileFetcher {
  pub fn new(
    http_cache: HttpCache,
    http_cache: Arc<dyn HttpCache>,
    cache_setting: CacheSetting,
    allow_remote: bool,
    http_client: Arc<HttpClient>,

@ -245,8 +245,8 @@ impl FileFetcher {
      return Err(custom_error("Http", "Too many redirects."));
    }

    let cache_item = self.http_cache.get(specifier)?;
    let Some(metadata) = cache_item.read_metadata()? else {
    let cache_key = self.http_cache.cache_item_key(specifier)?; // compute this once
    let Some(metadata) = self.http_cache.read_metadata(&cache_key)? else {
      return Ok(None);
    };
    let headers = metadata.headers;

@ -255,7 +255,7 @@ impl FileFetcher {
        deno_core::resolve_import(redirect_to, specifier.as_str())?;
      return self.fetch_cached(&redirect, redirect_limit - 1);
    }
    let Some(bytes) = cache_item.read_to_bytes()? else {
    let Some(bytes) = self.http_cache.read_file_bytes(&cache_key)? else {
      return Ok(None);
    };
    let file = self.build_remote_file(specifier, bytes, &headers)?;

@ -375,9 +375,9 @@ impl FileFetcher {

    let maybe_etag = self
      .http_cache
      .get(specifier)
      .cache_item_key(specifier)
      .ok()
      .and_then(|item| item.read_metadata().ok()?)
      .and_then(|key| self.http_cache.read_metadata(&key).ok().flatten())
      .and_then(|metadata| metadata.headers.get("etag").cloned());
    let maybe_auth_token = self.auth_tokens.get(specifier);
    let specifier = specifier.clone();

@ -473,10 +473,10 @@ impl FileFetcher {
      CacheSetting::ReloadAll => false,
      CacheSetting::Use | CacheSetting::Only => true,
      CacheSetting::RespectHeaders => {
        let Ok(item) = self.http_cache.get(specifier) else {
        let Ok(cache_key) = self.http_cache.cache_item_key(specifier) else {
          return false;
        };
        let Ok(Some(metadata)) = item.read_metadata() else {
        let Ok(Some(metadata)) = self.http_cache.read_metadata(&cache_key) else {
          return false;
        };
        let cache_semantics = CacheSemantics::new(

@ -556,33 +556,6 @@ impl FileFetcher {
    }
  }

  // DEPRECATED: Where the file is stored and how it's stored should be an implementation
  // detail of the cache.
  //
  // todo(dsheret): remove once implementing
  //  * https://github.com/denoland/deno/issues/17707
  //  * https://github.com/denoland/deno/issues/17703
  #[deprecated(
    note = "There should not be a way to do this because the file may not be cached at a local path in the future."
  )]
  pub fn get_local_path(&self, specifier: &ModuleSpecifier) -> Option<PathBuf> {
    // TODO(@kitsonk) fix when deno_graph does not query cache for synthetic
    // modules
    if specifier.scheme() == "flags" {
      None
    } else if specifier.scheme() == "file" {
      specifier.to_file_path().ok()
    } else {
      #[allow(deprecated)]
      self.http_cache.get_cache_filepath(specifier).ok()
    }
  }

  /// Get the location of the current HTTP cache associated with the fetcher.
  pub fn get_http_cache_location(&self) -> &PathBuf {
    &self.http_cache.location
  }

  /// A synchronous way to retrieve a source file, where if the file has already
  /// been cached in memory it will be returned, otherwise for local files will
  /// be read from disk.

@ -722,6 +695,7 @@ async fn fetch_once<'a>(

#[cfg(test)]
mod tests {
  use crate::cache::GlobalHttpCache;
  use crate::http_util::HttpClient;
  use crate::version;

@ -753,7 +727,7 @@ mod tests {
    let location = temp_dir.path().join("deps").to_path_buf();
    let blob_store: Arc<BlobStore> = Default::default();
    let file_fetcher = FileFetcher::new(
      HttpCache::new(location),
      Arc::new(GlobalHttpCache::new(location)),
      cache_setting,
      true,
      Arc::new(HttpClient::new(None, None)),

@ -790,10 +764,15 @@ mod tests {
    let result: Result<File, AnyError> = file_fetcher
      .fetch_remote(specifier, PermissionsContainer::allow_all(), 1, None)
      .await;
    let cache_item = file_fetcher.http_cache.get(specifier).unwrap();
    let cache_key = file_fetcher.http_cache.cache_item_key(specifier).unwrap();
    (
      result.unwrap(),
      cache_item.read_metadata().unwrap().unwrap().headers,
      file_fetcher
        .http_cache
        .read_metadata(&cache_key)
        .unwrap()
        .unwrap()
        .headers,
    )
  }

@ -1063,14 +1042,6 @@ mod tests {
    );
  }

  #[test]
  fn test_get_http_cache_location() {
    let (file_fetcher, temp_dir) = setup(CacheSetting::Use, None);
    let expected = temp_dir.path().join("deps").to_path_buf();
    let actual = file_fetcher.get_http_cache_location().to_path_buf();
    assert_eq!(actual, expected);
  }

  #[tokio::test]
  async fn test_fetch_data_url() {
    let (file_fetcher, _) = setup(CacheSetting::Use, None);

@ -1142,11 +1113,11 @@ mod tests {
    );
    assert_eq!(file.media_type, MediaType::TypeScript);

    let cache_item_key =
      file_fetcher.http_cache.cache_item_key(&specifier).unwrap();
    let mut metadata = file_fetcher
      .http_cache
      .get(&specifier)
      .unwrap()
      .read_metadata()
      .read_metadata(&cache_item_key)
      .unwrap()
      .unwrap();
    metadata.headers = HashMap::new();

@ -1155,7 +1126,7 @@ mod tests {
      .insert("content-type".to_string(), "text/javascript".to_string());
    file_fetcher
      .http_cache
      .write_metadata(&specifier, &metadata)
      .set(&specifier, metadata.headers.clone(), file.source.as_bytes())
      .unwrap();

    let result = file_fetcher_01

@ -1173,9 +1144,7 @@ mod tests {

    let headers = file_fetcher_02
      .http_cache
      .get(&specifier)
      .unwrap()
      .read_metadata()
      .read_metadata(&cache_item_key)
      .unwrap()
      .unwrap()
      .headers;

@ -1186,7 +1155,7 @@ mod tests {
      .insert("content-type".to_string(), "application/json".to_string());
    file_fetcher_02
      .http_cache
      .write_metadata(&specifier, &metadata)
      .set(&specifier, metadata.headers.clone(), file.source.as_bytes())
      .unwrap();

    let result = file_fetcher_02

@ -1204,7 +1173,7 @@ mod tests {
    // invocation and indicates to "cache bust".
    let location = temp_dir.path().join("deps").to_path_buf();
    let file_fetcher = FileFetcher::new(
      HttpCache::new(location),
      Arc::new(GlobalHttpCache::new(location)),
      CacheSetting::ReloadAll,
      true,
      Arc::new(HttpClient::new(None, None)),

@ -1233,7 +1202,7 @@ mod tests {

    let file_modified_01 = {
      let file_fetcher = FileFetcher::new(
        HttpCache::new(location.clone()),
        Arc::new(GlobalHttpCache::new(location.clone())),
        CacheSetting::Use,
        true,
        Arc::new(HttpClient::new(None, None)),

@ -1245,16 +1214,16 @@ mod tests {
        .fetch(&specifier, PermissionsContainer::allow_all())
        .await;
      assert!(result.is_ok());
      let cache_key =
        file_fetcher.http_cache.cache_item_key(&specifier).unwrap();
      (
        file_fetcher
          .http_cache
          .get_modified_time(&specifier)
          .read_modified_time(&cache_key)
          .unwrap(),
        file_fetcher
          .http_cache
          .get(&specifier)
          .unwrap()
          .read_metadata()
          .read_metadata(&cache_key)
          .unwrap()
          .unwrap(),
      )

@ -1262,7 +1231,7 @@ mod tests {

    let file_modified_02 = {
      let file_fetcher = FileFetcher::new(
        HttpCache::new(location),
        Arc::new(GlobalHttpCache::new(location)),
        CacheSetting::Use,
        true,
        Arc::new(HttpClient::new(None, None)),

@ -1274,16 +1243,16 @@ mod tests {
        .await;
      assert!(result.is_ok());

      let cache_key =
        file_fetcher.http_cache.cache_item_key(&specifier).unwrap();
      (
        file_fetcher
          .http_cache
          .get_modified_time(&specifier)
          .read_modified_time(&cache_key)
          .unwrap(),
        file_fetcher
          .http_cache
          .get(&specifier)
          .unwrap()
          .read_metadata()
          .read_metadata(&cache_key)
          .unwrap()
          .unwrap(),
      )

@ -1310,40 +1279,24 @@ mod tests {
    let file = result.unwrap();
    assert_eq!(file.specifier, redirected_specifier);

    {
      let cache_item = file_fetcher.http_cache.get(&specifier).unwrap();
      assert_eq!(
        cache_item.read_to_string().unwrap().unwrap(),
        "",
        "redirected files should have empty cached contents"
      );
      assert_eq!(
        cache_item
          .read_metadata()
          .unwrap()
          .unwrap()
          .headers
          .get("location")
          .unwrap(),
        "http://localhost:4545/subdir/redirects/redirect1.js"
      );
    }
    assert_eq!(
      get_text_from_cache(&file_fetcher, &specifier),
      "",
      "redirected files should have empty cached contents"
    );
    assert_eq!(
      get_location_header_from_cache(&file_fetcher, &specifier),
      Some("http://localhost:4545/subdir/redirects/redirect1.js".to_string()),
    );

    {
      let cache_item =
        file_fetcher.http_cache.get(&redirected_specifier).unwrap();
      assert_eq!(
        cache_item.read_to_string().unwrap().unwrap(),
        "export const redirect = 1;\n"
      );
      assert!(cache_item
        .read_metadata()
        .unwrap()
        .unwrap()
        .headers
        .get("location")
        .is_none());
    }
    assert_eq!(
      get_text_from_cache(&file_fetcher, &redirected_specifier),
      "export const redirect = 1;\n"
    );
    assert_eq!(
      get_location_header_from_cache(&file_fetcher, &redirected_specifier),
      None,
    );
  }

  #[tokio::test]

@ -1367,64 +1320,34 @@ mod tests {
    let file = result.unwrap();
    assert_eq!(file.specifier, redirected_02_specifier);

    {
      let cache_item = file_fetcher.http_cache.get(&specifier).unwrap();
      assert_eq!(
        cache_item.read_to_string().unwrap().unwrap(),
        "",
        "redirected files should have empty cached contents"
      );
      assert_eq!(
        cache_item
          .read_metadata()
          .unwrap()
          .unwrap()
          .headers
          .get("location")
          .unwrap(),
        "http://localhost:4546/subdir/redirects/redirect1.js"
      );
    }
    assert_eq!(
      get_text_from_cache(&file_fetcher, &specifier),
      "",
      "redirected files should have empty cached contents"
    );
    assert_eq!(
      get_location_header_from_cache(&file_fetcher, &specifier),
      Some("http://localhost:4546/subdir/redirects/redirect1.js".to_string()),
    );

    {
      let cache_item = file_fetcher
        .http_cache
        .get(&redirected_01_specifier)
        .unwrap();
      assert_eq!(
        cache_item.read_to_string().unwrap().unwrap(),
        "",
        "redirected files should have empty cached contents"
      );
      assert_eq!(
        cache_item
          .read_metadata()
          .unwrap()
          .unwrap()
          .headers
          .get("location")
          .unwrap(),
        "http://localhost:4545/subdir/redirects/redirect1.js"
      );
    }
    assert_eq!(
      get_text_from_cache(&file_fetcher, &redirected_01_specifier),
      "",
      "redirected files should have empty cached contents"
    );
    assert_eq!(
      get_location_header_from_cache(&file_fetcher, &redirected_01_specifier),
      Some("http://localhost:4545/subdir/redirects/redirect1.js".to_string()),
    );

    {
      let cache_item = file_fetcher
        .http_cache
        .get(&redirected_02_specifier)
        .unwrap();
      assert_eq!(
        cache_item.read_to_string().unwrap().unwrap(),
        "export const redirect = 1;\n"
      );
      assert!(cache_item
        .read_metadata()
        .unwrap()
        .unwrap()
        .headers
        .get("location")
        .is_none());
    }
    assert_eq!(
      get_text_from_cache(&file_fetcher, &redirected_02_specifier),
      "export const redirect = 1;\n"
    );
    assert_eq!(
      get_location_header_from_cache(&file_fetcher, &redirected_02_specifier),
      None,
    );
  }

  #[tokio::test]

@ -1439,7 +1362,7 @@ mod tests {

    let metadata_file_modified_01 = {
      let file_fetcher = FileFetcher::new(
        HttpCache::new(location.clone()),
        Arc::new(GlobalHttpCache::new(location.clone())),
        CacheSetting::Use,
        true,
        Arc::new(HttpClient::new(None, None)),

@ -1452,16 +1375,18 @@ mod tests {
        .await;
      assert!(result.is_ok());

      let cache_key = file_fetcher
        .http_cache
        .cache_item_key(&redirected_specifier)
        .unwrap();
      (
        file_fetcher
          .http_cache
          .get_modified_time(&redirected_specifier)
          .read_modified_time(&cache_key)
          .unwrap(),
        file_fetcher
          .http_cache
          .get(&redirected_specifier)
          .unwrap()
          .read_metadata()
          .read_metadata(&cache_key)
          .unwrap()
          .unwrap(),
      )

@ -1469,7 +1394,7 @@ mod tests {

    let metadata_file_modified_02 = {
      let file_fetcher = FileFetcher::new(
        HttpCache::new(location),
        Arc::new(GlobalHttpCache::new(location)),
        CacheSetting::Use,
        true,
        Arc::new(HttpClient::new(None, None)),

@ -1481,16 +1406,18 @@ mod tests {
        .await;
      assert!(result.is_ok());

      let cache_key = file_fetcher
        .http_cache
        .cache_item_key(&redirected_specifier)
        .unwrap();
      (
        file_fetcher
          .http_cache
          .get_modified_time(&redirected_specifier)
          .read_modified_time(&cache_key)
          .unwrap(),
        file_fetcher
          .http_cache
          .get(&redirected_specifier)
          .unwrap()
          .read_metadata()
          .read_metadata(&cache_key)
          .unwrap()
          .unwrap(),
      )

@ -1543,40 +1470,24 @@ mod tests {
    let file = result.unwrap();
    assert_eq!(file.specifier, redirected_specifier);

    {
      let cache_item = file_fetcher.http_cache.get(&specifier).unwrap();
      assert_eq!(
        cache_item.read_to_string().unwrap().unwrap(),
        "",
        "redirected files should have empty cached contents"
      );
      assert_eq!(
        cache_item
          .read_metadata()
          .unwrap()
          .unwrap()
          .headers
          .get("location")
          .unwrap(),
        "/subdir/redirects/redirect1.js"
      );
    }
    assert_eq!(
      get_text_from_cache(&file_fetcher, &specifier),
      "",
      "redirected files should have empty cached contents"
    );
    assert_eq!(
      get_location_header_from_cache(&file_fetcher, &specifier),
      Some("/subdir/redirects/redirect1.js".to_string()),
    );

    {
      let cache_item =
        file_fetcher.http_cache.get(&redirected_specifier).unwrap();
      assert_eq!(
        cache_item.read_to_string().unwrap().unwrap(),
        "export const redirect = 1;\n"
      );
      assert!(cache_item
        .read_metadata()
        .unwrap()
        .unwrap()
        .headers
        .get("location")
        .is_none());
    }
    assert_eq!(
      get_text_from_cache(&file_fetcher, &redirected_specifier),
      "export const redirect = 1;\n"
    );
    assert_eq!(
      get_location_header_from_cache(&file_fetcher, &redirected_specifier),
      None
    );
  }

  #[tokio::test]

@ -1585,7 +1496,7 @@ mod tests {
    let temp_dir = TempDir::new();
    let location = temp_dir.path().join("deps").to_path_buf();
    let file_fetcher = FileFetcher::new(
      HttpCache::new(location),
      Arc::new(GlobalHttpCache::new(location)),
      CacheSetting::Use,
      false,
      Arc::new(HttpClient::new(None, None)),

@ -1610,7 +1521,7 @@ mod tests {
    let temp_dir = TempDir::new();
    let location = temp_dir.path().join("deps").to_path_buf();
    let file_fetcher_01 = FileFetcher::new(
      HttpCache::new(location.clone()),
      Arc::new(GlobalHttpCache::new(location.clone())),
      CacheSetting::Only,
      true,
      Arc::new(HttpClient::new(None, None)),

@ -1618,7 +1529,7 @@ mod tests {
      None,
    );
    let file_fetcher_02 = FileFetcher::new(
      HttpCache::new(location),
      Arc::new(GlobalHttpCache::new(location)),
      CacheSetting::Use,
      true,
      Arc::new(HttpClient::new(None, None)),

@ -2331,4 +2242,33 @@ mod tests {

    assert!(matches!(result, Ok(FetchOnceResult::RequestError(_))));
  }

  #[track_caller]
  fn get_text_from_cache(
    file_fetcher: &FileFetcher,
    url: &ModuleSpecifier,
  ) -> String {
    let cache_key = file_fetcher.http_cache.cache_item_key(url).unwrap();
    let bytes = file_fetcher
      .http_cache
      .read_file_bytes(&cache_key)
      .unwrap()
      .unwrap();
    String::from_utf8(bytes).unwrap()
  }

  #[track_caller]
  fn get_location_header_from_cache(
    file_fetcher: &FileFetcher,
    url: &ModuleSpecifier,
  ) -> Option<String> {
    let cache_key = file_fetcher.http_cache.cache_item_key(url).unwrap();
    file_fetcher
      .http_cache
      .read_metadata(&cache_key)
      .unwrap()
      .unwrap()
      .headers
      .remove("location")
  }
}

@ -4,6 +4,7 @@ use crate::args::CliOptions;
use crate::args::Lockfile;
use crate::args::TsTypeLib;
use crate::cache;
use crate::cache::GlobalHttpCache;
use crate::cache::ParsedSourceCache;
use crate::colors;
use crate::errors::get_error_class_name;

@ -174,6 +175,7 @@ pub struct ModuleGraphBuilder {
  maybe_file_watcher_reporter: Option<FileWatcherReporter>,
  emit_cache: cache::EmitCache,
  file_fetcher: Arc<FileFetcher>,
  global_http_cache: Arc<GlobalHttpCache>,
  type_checker: Arc<TypeChecker>,
}

@ -188,6 +190,7 @@ impl ModuleGraphBuilder {
    maybe_file_watcher_reporter: Option<FileWatcherReporter>,
    emit_cache: cache::EmitCache,
    file_fetcher: Arc<FileFetcher>,
    global_http_cache: Arc<GlobalHttpCache>,
    type_checker: Arc<TypeChecker>,
  ) -> Self {
    Self {

@ -199,6 +202,7 @@ impl ModuleGraphBuilder {
      maybe_file_watcher_reporter,
      emit_cache,
      file_fetcher,
      global_http_cache,
      type_checker,
    }
  }

@ -345,6 +349,7 @@ impl ModuleGraphBuilder {
      self.emit_cache.clone(),
      self.file_fetcher.clone(),
      self.options.resolve_file_header_overrides(),
      self.global_http_cache.clone(),
      permissions,
      self.options.node_modules_dir_specifier(),
    )

@ -8,12 +8,11 @@ use deno_core::ModuleSpecifier;
use std::collections::HashMap;
use std::fs;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::time::SystemTime;

pub fn calculate_fs_version(
  cache: &HttpCache,
  cache: &Arc<dyn HttpCache>,
  specifier: &ModuleSpecifier,
) -> Option<String> {
  match specifier.scheme() {

@ -40,10 +39,13 @@ pub fn calculate_fs_version_at_path(path: &Path) -> Option<String> {
}

fn calculate_fs_version_in_cache(
  cache: &HttpCache,
  cache: &Arc<dyn HttpCache>,
  specifier: &ModuleSpecifier,
) -> Option<String> {
  match cache.get_modified_time(specifier) {
  let Ok(cache_key) = cache.cache_item_key(specifier) else {
    return Some("1".to_string());
  };
  match cache.read_modified_time(&cache_key) {
    Ok(Some(modified)) => {
      match modified.duration_since(SystemTime::UNIX_EPOCH) {
        Ok(n) => Some(n.as_millis().to_string()),

@ -80,12 +82,12 @@ struct Metadata {

#[derive(Debug, Clone)]
pub struct CacheMetadata {
  cache: HttpCache,
  cache: Arc<dyn HttpCache>,
  metadata: Arc<Mutex<HashMap<ModuleSpecifier, Metadata>>>,
}

impl CacheMetadata {
  pub fn new(cache: HttpCache) -> Self {
  pub fn new(cache: Arc<dyn HttpCache>) -> Self {
    Self {
      cache,
      metadata: Default::default(),

@ -120,8 +122,8 @@ impl CacheMetadata {
    ) {
      return None;
    }
    let specifier_metadata =
      self.cache.get(specifier).ok()?.read_metadata().ok()??;
    let cache_key = self.cache.cache_item_key(specifier).ok()?;
    let specifier_metadata = self.cache.read_metadata(&cache_key).ok()??;
    let values = Arc::new(parse_metadata(&specifier_metadata.headers));
    let version = calculate_fs_version_in_cache(&self.cache, specifier);
    let mut metadata_map = self.metadata.lock();

@ -130,8 +132,8 @@ impl CacheMetadata {
    Some(metadata)
  }

  pub fn set_location(&mut self, location: PathBuf) {
    self.cache = HttpCache::new(location);
  pub fn set_cache(&mut self, cache: Arc<dyn HttpCache>) {
    self.cache = cache;
    self.metadata.lock().clear();
  }
}

@ -505,6 +505,7 @@ fn get_workspace_completions(
#[cfg(test)]
mod tests {
  use super::*;
  use crate::cache::GlobalHttpCache;
  use crate::cache::HttpCache;
  use crate::lsp::documents::Documents;
  use crate::lsp::documents::LanguageId;

@ -519,14 +520,14 @@ mod tests {
    source_fixtures: &[(&str, &str)],
    location: &Path,
  ) -> Documents {
    let cache = HttpCache::new(location.to_path_buf());
    let cache = Arc::new(GlobalHttpCache::new(location.to_path_buf()));
    let mut documents = Documents::new(cache);
    for (specifier, source, version, language_id) in fixtures {
      let specifier =
        resolve_url(specifier).expect("failed to create specifier");
      documents.open(specifier, *version, *language_id, (*source).into());
    }
    let http_cache = HttpCache::new(location.to_path_buf());
    let http_cache = GlobalHttpCache::new(location.to_path_buf());
    for (specifier, source) in source_fixtures {
      let specifier =
        resolve_url(specifier).expect("failed to create specifier");

@ -475,6 +475,12 @@ impl Config {
      .and_then(|p| p.maybe_node_modules_dir.as_ref())
  }

  pub fn maybe_deno_modules_dir_path(&self) -> Option<PathBuf> {
    self
      .maybe_config_file()
      .and_then(|c| c.deno_modules_dir_path())
  }

  pub fn maybe_config_file(&self) -> Option<&ConfigFile> {
    self
      .maybe_config_file_info

@ -1302,7 +1302,7 @@ fn generate_deno_diagnostics(
#[cfg(test)]
mod tests {
  use super::*;
  use crate::cache::HttpCache;
  use crate::cache::GlobalHttpCache;
  use crate::lsp::config::ConfigSnapshot;
  use crate::lsp::config::Settings;
  use crate::lsp::config::SpecifierSettings;

@ -1321,7 +1321,7 @@ mod tests {
    location: &Path,
    maybe_import_map: Option<(&str, &str)>,
  ) -> StateSnapshot {
    let cache = HttpCache::new(location.to_path_buf());
    let cache = Arc::new(GlobalHttpCache::new(location.to_path_buf()));
    let mut documents = Documents::new(cache);
    for (specifier, source, version, language_id) in fixtures {
      let specifier =

@ -1345,8 +1345,8 @@ mod tests {
      documents,
      maybe_import_map,
      assets: Default::default(),
      cache_metadata: cache::CacheMetadata::new(HttpCache::new(
        location.to_path_buf(),
      cache_metadata: cache::CacheMetadata::new(Arc::new(
        GlobalHttpCache::new(location.to_path_buf()),
      )),
      maybe_node_resolver: None,
      maybe_npm_resolver: None,

@ -1396,7 +1396,7 @@ let c: number = "a";
      None,
    );
    let snapshot = Arc::new(snapshot);
    let cache = HttpCache::new(cache_location);
    let cache = Arc::new(GlobalHttpCache::new(cache_location));
    let ts_server = TsServer::new(Default::default(), cache);

    // test enabled

@ -1489,7 +1489,7 @@ let c: number = "a";
      None,
    );
    let snapshot = Arc::new(snapshot);
    let cache = HttpCache::new(cache_location);
    let cache = Arc::new(GlobalHttpCache::new(cache_location));
    let ts_server = TsServer::new(Default::default(), cache);

    let config = mock_config();

@ -658,12 +658,12 @@ fn recurse_dependents(

#[derive(Debug)]
struct SpecifierResolver {
  cache: HttpCache,
  cache: Arc<dyn HttpCache>,
  redirects: Mutex<HashMap<ModuleSpecifier, ModuleSpecifier>>,
}

impl SpecifierResolver {
  pub fn new(cache: HttpCache) -> Self {
  pub fn new(cache: Arc<dyn HttpCache>) -> Self {
    Self {
      cache,
      redirects: Mutex::new(HashMap::new()),

@ -699,11 +699,12 @@ impl SpecifierResolver {
    redirect_limit: usize,
  ) -> Option<ModuleSpecifier> {
    if redirect_limit > 0 {
      let cache_key = self.cache.cache_item_key(specifier).ok()?;
      let headers = self
        .cache
        .get(specifier)
        .read_metadata(&cache_key)
        .ok()
        .and_then(|i| i.read_metadata().ok()?)
        .flatten()
        .map(|m| m.headers)?;
      if let Some(location) = headers.get("location") {
        let redirect =

@ -727,7 +728,7 @@ struct FileSystemDocuments {
impl FileSystemDocuments {
  pub fn get(
    &mut self,
    cache: &HttpCache,
    cache: &Arc<dyn HttpCache>,
    resolver: &dyn deno_graph::source::Resolver,
    specifier: &ModuleSpecifier,
  ) -> Option<Document> {

@ -749,7 +750,7 @@ impl FileSystemDocuments {
  /// returning the document.
  fn refresh_document(
    &mut self,
    cache: &HttpCache,
    cache: &Arc<dyn HttpCache>,
    resolver: &dyn deno_graph::source::Resolver,
    specifier: &ModuleSpecifier,
  ) -> Option<Document> {

@ -778,9 +779,9 @@ impl FileSystemDocuments {
      )
    } else {
      let fs_version = calculate_fs_version(cache, specifier)?;
      let cache_item = cache.get(specifier).ok()?;
      let bytes = cache_item.read_to_bytes().ok()??;
      let specifier_metadata = cache_item.read_metadata().ok()??;
      let cache_key = cache.cache_item_key(specifier).ok()?;
      let bytes = cache.read_file_bytes(&cache_key).ok()??;
      let specifier_metadata = cache.read_metadata(&cache_key).ok()??;
      let maybe_content_type = specifier_metadata.headers.get("content-type");
      let (_, maybe_charset) = map_content_type(specifier, maybe_content_type);
      let maybe_headers = Some(specifier_metadata.headers);

@ -823,7 +824,7 @@ pub enum DocumentsFilter {
#[derive(Debug, Clone)]
pub struct Documents {
  /// The DENO_DIR that the documents looks for non-file based modules.
  cache: HttpCache,
  cache: Arc<dyn HttpCache>,
  /// A flag that indicates that stated data is potentially invalid and needs to
  /// be recalculated before being considered valid.
  dirty: bool,

@ -853,7 +854,7 @@ pub struct Documents {
}

impl Documents {
  pub fn new(cache: HttpCache) -> Self {
  pub fn new(cache: Arc<dyn HttpCache>) -> Self {
    Self {
      cache: cache.clone(),
      dirty: true,

@ -1139,9 +1140,8 @@ impl Documents {
  }

  /// Update the location of the on disk cache for the document store.
  pub fn set_location(&mut self, location: PathBuf) {
  pub fn set_cache(&mut self, cache: Arc<dyn HttpCache>) {
    // TODO update resolved dependencies?
    let cache = HttpCache::new(location);
    self.cache = cache.clone();
    self.specifier_resolver = Arc::new(SpecifierResolver::new(cache));
    self.dirty = true;

@ -1177,6 +1177,7 @@ impl Documents {
    document_preload_limit: usize,
    maybe_import_map: Option<&import_map::ImportMap>,
    maybe_jsx_config: Option<&JsxImportSourceConfig>,
    maybe_deno_modules_dir: Option<bool>,
    maybe_package_json_deps: Option<&PackageJsonDeps>,
  ) -> u64 {
    let mut hasher = FastInsecureHasher::default();

@ -1191,6 +1192,7 @@ impl Documents {
      hasher.write_str(&import_map.to_json());
      hasher.write_str(import_map.base_url().as_str());
    }
    hasher.write_hashable(maybe_deno_modules_dir);
    hasher.write_hashable(maybe_jsx_config);
    if let Some(package_json_deps) = &maybe_package_json_deps {
      // We need to ensure the hashing is deterministic so explicitly type

@ -1225,6 +1227,7 @@ impl Documents {
      options.document_preload_limit,
      options.maybe_import_map.as_deref(),
      maybe_jsx_config.as_ref(),
      options.maybe_config_file.and_then(|c| c.deno_modules_dir()),
      maybe_package_json_deps.as_ref(),
    );
    let deps_provider =

@ -1840,6 +1843,7 @@ fn sort_and_remove_non_leaf_dirs(mut dirs: Vec<PathBuf>) -> Vec<PathBuf> {

#[cfg(test)]
mod tests {
  use crate::cache::GlobalHttpCache;
  use crate::npm::NpmResolution;

  use super::*;

@ -1850,7 +1854,7 @@ mod tests {

  fn setup(temp_dir: &TempDir) -> (Documents, PathRef) {
    let location = temp_dir.path().join("deps");
    let cache = HttpCache::new(location.to_path_buf());
    let cache = Arc::new(GlobalHttpCache::new(location.to_path_buf()));
    let documents = Documents::new(cache);
    (documents, location)
  }

@ -86,7 +86,9 @@ use crate::args::LintOptions;
use crate::args::TsConfig;
use crate::cache::DenoDir;
use crate::cache::FastInsecureHasher;
use crate::cache::GlobalHttpCache;
use crate::cache::HttpCache;
use crate::cache::LocalHttpCache;
use crate::factory::CliFactory;
use crate::file_fetcher::FileFetcher;
use crate::graph_util;

@ -134,7 +136,7 @@ impl LspNpmConfigHash {
  pub fn from_inner(inner: &Inner) -> Self {
    let mut hasher = FastInsecureHasher::new();
    hasher.write_hashable(inner.config.maybe_node_modules_dir_path());
    hasher.write_hashable(&inner.maybe_cache_path);
    hasher.write_hashable(&inner.maybe_global_cache_path);
    if let Some(lockfile) = inner.config.maybe_lockfile() {
      hasher.write_hashable(&*lockfile.lock());
    }

@ -168,7 +170,7 @@ pub struct Inner {
  pub client: Client,
  /// Configuration information.
  pub config: Config,
  deps_http_cache: HttpCache,
  deps_http_cache: Arc<dyn HttpCache>,
  diagnostics_server: diagnostics::DiagnosticsServer,
  /// The collection of documents that the server is currently handling, either
  /// on disk or "open" within the client.

@ -180,7 +182,7 @@ pub struct Inner {
  module_registries_location: PathBuf,
  /// An optional path to the DENO_DIR which has been specified in the client
  /// options.
  maybe_cache_path: Option<PathBuf>,
  maybe_global_cache_path: Option<PathBuf>,
  /// An optional import map which is used to resolve modules.
  maybe_import_map: Option<Arc<ImportMap>>,
  /// The URL for the import map which is used to determine relative imports.

@ -202,7 +204,7 @@ pub struct Inner {
  /// An abstraction that handles interactions with TypeScript.
  pub ts_server: Arc<TsServer>,
  /// A map of specifiers and URLs used to translate over the LSP.
  pub url_map: urls::LspUrlMap,
  pub url_map: Arc<urls::LspUrlMap>,
}

impl LanguageServer {

@ -553,7 +555,7 @@ impl Inner {
      http_client.clone(),
    );
    let location = dir.deps_folder_path();
    let deps_http_cache = HttpCache::new(location);
    let deps_http_cache = Arc::new(GlobalHttpCache::new(location));
    let documents = Documents::new(deps_http_cache.clone());
    let cache_metadata = cache::CacheMetadata::new(deps_http_cache.clone());
    let performance = Arc::new(Performance::default());

@ -593,7 +595,7 @@ impl Inner {
      diagnostics_server,
      documents,
      http_client,
      maybe_cache_path: None,
      maybe_global_cache_path: None,
      maybe_import_map: None,
      maybe_import_map_uri: None,
      maybe_package_json: None,

@ -829,8 +831,8 @@ impl Inner {
    let mark = self.performance.mark("update_cache", None::<()>);
    self.performance.measure(mark);
    let maybe_cache = &self.config.workspace_settings().cache;
    let maybe_cache_path = if let Some(cache_str) = maybe_cache {
      lsp_log!("Setting cache path from: \"{}\"", cache_str);
    let maybe_global_cache_path = if let Some(cache_str) = maybe_cache {
      lsp_log!("Setting global cache path from: \"{}\"", cache_str);
      let cache_url = if let Ok(url) = Url::from_file_path(cache_str) {
        Ok(url)
      } else if let Some(root_uri) = &self.config.root_uri {

@ -847,24 +849,32 @@ impl Inner {
      }?;
      let cache_path = specifier_to_file_path(&cache_url)?;
      lsp_log!(
        " Resolved cache path: \"{}\"",
        " Resolved global cache path: \"{}\"",
        cache_path.to_string_lossy()
      );
      Some(cache_path)
    } else {
      None
    };
    if self.maybe_cache_path != maybe_cache_path {
    if self.maybe_global_cache_path != maybe_global_cache_path {
      self
        .recreate_http_client_and_dependents(maybe_cache_path)
        .set_new_global_cache_path(maybe_global_cache_path)
        .await?;
    }
    Ok(())
  }

  /// Recreates the http client and all dependent structs.
  async fn recreate_http_client_and_dependents(
    &mut self,
  ) -> Result<(), AnyError> {
    self
      .set_new_global_cache_path(self.maybe_global_cache_path.clone())
      .await
  }

  /// Recreates the http client and all dependent structs.
  async fn set_new_global_cache_path(
    &mut self,
    new_cache_path: Option<PathBuf>,
  ) -> Result<(), AnyError> {
    let dir = DenoDir::new(new_cache_path.clone())?;

@ -894,15 +904,23 @@ impl Inner {
    );
    self.module_registries_location = module_registries_location;
    // update the cache path
    let location = dir.deps_folder_path();
    self.documents.set_location(location.clone());
    self.cache_metadata.set_location(location);
    self.maybe_cache_path = new_cache_path;
    let global_cache = Arc::new(GlobalHttpCache::new(dir.deps_folder_path()));
    let cache: Arc<dyn HttpCache> =
      match self.config.maybe_deno_modules_dir_path() {
        Some(local_path) => {
          Arc::new(LocalHttpCache::new(local_path, global_cache))
        }
        None => global_cache,
      };
    self.deps_http_cache = cache.clone();
    self.documents.set_cache(cache.clone());
    self.cache_metadata.set_cache(cache);
    self.maybe_global_cache_path = new_cache_path;
    Ok(())
  }

  async fn recreate_npm_services_if_necessary(&mut self) {
    let deno_dir = match DenoDir::new(self.maybe_cache_path.clone()) {
    let deno_dir = match DenoDir::new(self.maybe_global_cache_path.clone()) {
      Ok(deno_dir) => deno_dir,
      Err(err) => {
        lsp_warn!("Error getting deno dir: {}", err);

@ -1090,9 +1108,7 @@ impl Inner {

  async fn update_registries(&mut self) -> Result<(), AnyError> {
    let mark = self.performance.mark("update_registries", None::<()>);
    self
      .recreate_http_client_and_dependents(self.maybe_cache_path.clone())
      .await?;
    self.recreate_http_client_and_dependents().await?;
    let workspace_settings = self.config.workspace_settings();
    for (registry, enabled) in workspace_settings.suggest.imports.hosts.iter() {
      if *enabled {

@ -1131,6 +1147,7 @@ impl Inner {
      self.config.set_config_file(config_file);
      self.lint_options = lint_options;
      self.fmt_options = fmt_options;
      self.recreate_http_client_and_dependents().await?;
    }

    Ok(())

@ -3372,7 +3389,7 @@ impl Inner {
    let workspace_settings = self.config.workspace_settings();
    let mut cli_options = CliOptions::new(
      Flags {
        cache_path: self.maybe_cache_path.clone(),
        cache_path: self.maybe_global_cache_path.clone(),
        ca_stores: workspace_settings.certificate_stores.clone(),
        ca_data: workspace_settings.tls_certificate.clone().map(CaData::File),
        unsafely_ignore_certificate_errors: workspace_settings

@ -13,6 +13,7 @@ use super::path_to_regex::StringOrVec;
use super::path_to_regex::Token;

use crate::args::CacheSetting;
use crate::cache::GlobalHttpCache;
use crate::cache::HttpCache;
use crate::file_fetcher::FileFetcher;
use crate::http_util::HttpClient;

@ -415,13 +416,15 @@ enum VariableItems {
pub struct ModuleRegistry {
  origins: HashMap<String, Vec<RegistryConfiguration>>,
  file_fetcher: FileFetcher,
  http_cache: Arc<GlobalHttpCache>,
}

impl ModuleRegistry {
  pub fn new(location: PathBuf, http_client: Arc<HttpClient>) -> Self {
    let http_cache = HttpCache::new(location);
    // the http cache should always be the global one for registry completions
    let http_cache = Arc::new(GlobalHttpCache::new(location));
    let mut file_fetcher = FileFetcher::new(
      http_cache,
      http_cache.clone(),
      CacheSetting::RespectHeaders,
      true,
      http_client,

@ -433,6 +436,7 @@ impl ModuleRegistry {
    Self {
      origins: HashMap::new(),
      file_fetcher,
      http_cache,
    }
  }

@ -517,10 +521,7 @@ impl ModuleRegistry {
        "cache-control".to_string(),
        "max-age=604800, immutable".to_string(),
      );
      self
        .file_fetcher
        .http_cache
        .set(specifier, headers_map, &[])?;
      self.http_cache.set(specifier, headers_map, &[])?;
    }
    let file = fetch_result?;
    let config: RegistryConfigurationJson = serde_json::from_str(&file.source)?;
@@ -99,7 +99,7 @@ type Request = (
pub struct TsServer(mpsc::UnboundedSender<Request>);

impl TsServer {
pub fn new(performance: Arc<Performance>, cache: HttpCache) -> Self {
pub fn new(performance: Arc<Performance>, cache: Arc<dyn HttpCache>) -> Self {
let (tx, mut rx) = mpsc::unbounded_channel::<Request>();
let _join_handle = thread::spawn(move || {
let mut ts_runtime = js_runtime(performance, cache);

@@ -3245,7 +3245,10 @@ fn op_script_version(
/// Create and setup a JsRuntime based on a snapshot. It is expected that the
/// supplied snapshot is an isolate that contains the TypeScript language
/// server.
fn js_runtime(performance: Arc<Performance>, cache: HttpCache) -> JsRuntime {
fn js_runtime(
performance: Arc<Performance>,
cache: Arc<dyn HttpCache>,
) -> JsRuntime {
JsRuntime::new(RuntimeOptions {
extensions: vec![deno_tsc::init_ops(performance, cache)],
startup_snapshot: Some(tsc::compiler_snapshot()),

@@ -3265,7 +3268,7 @@ deno_core::extension!(deno_tsc,
],
options = {
performance: Arc<Performance>,
cache: HttpCache,
cache: Arc<dyn HttpCache>,
},
state = |state, options| {
state.put(State::new(

@@ -3906,6 +3909,7 @@ fn request(
#[cfg(test)]
mod tests {
use super::*;
use crate::cache::GlobalHttpCache;
use crate::cache::HttpCache;
use crate::http_util::HeadersMap;
use crate::lsp::cache::CacheMetadata;

@@ -3923,7 +3927,7 @@ mod tests {
fixtures: &[(&str, &str, i32, LanguageId)],
location: &Path,
) -> StateSnapshot {
let cache = HttpCache::new(location.to_path_buf());
let cache = Arc::new(GlobalHttpCache::new(location.to_path_buf()));
let mut documents = Documents::new(cache.clone());
for (specifier, source, version, language_id) in fixtures {
let specifier =

@@ -3952,7 +3956,7 @@ mod tests {
sources: &[(&str, &str, i32, LanguageId)],
) -> (JsRuntime, Arc<StateSnapshot>, PathBuf) {
let location = temp_dir.path().join("deps").to_path_buf();
let cache = HttpCache::new(location.clone());
let cache = Arc::new(GlobalHttpCache::new(location.clone()));
let state_snapshot = Arc::new(mock_state_snapshot(sources, &location));
let mut runtime = js_runtime(Default::default(), cache);
start(&mut runtime, debug).unwrap();

@@ -4432,7 +4436,7 @@ mod tests {
LanguageId::TypeScript,
)],
);
let cache = HttpCache::new(location);
let cache = Arc::new(GlobalHttpCache::new(location));
let specifier_dep =
resolve_url("https://deno.land/x/example/a.ts").unwrap();
cache

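The hunks above, like the registry changes before them, swap the concrete HttpCache value for Arc<GlobalHttpCache> or Arc<dyn HttpCache>, so the call sites no longer depend on which cache implementation backs them. The following standalone sketch only illustrates that trait-object pattern; the trait, struct, and method signatures here are simplified assumptions, not the real cli/cache API.

use std::collections::HashMap;
use std::sync::{Arc, Mutex};

// Simplified stand-in for the cache abstraction; real signatures differ.
trait HttpCache: Send + Sync {
    fn set(&self, specifier: &str, body: &[u8]);
    fn get(&self, specifier: &str) -> Option<Vec<u8>>;
}

// One implementation (the real global cache is disk-backed under a cache
// directory; this sketch just keeps entries in memory).
#[derive(Default)]
struct GlobalHttpCache {
    entries: Mutex<HashMap<String, Vec<u8>>>,
}

impl HttpCache for GlobalHttpCache {
    fn set(&self, specifier: &str, body: &[u8]) {
        self.entries
            .lock()
            .unwrap()
            .insert(specifier.to_string(), body.to_vec());
    }
    fn get(&self, specifier: &str) -> Option<Vec<u8>> {
        self.entries.lock().unwrap().get(specifier).cloned()
    }
}

// Call sites only see `Arc<dyn HttpCache>`, so a local deno_modules-backed
// implementation can later be swapped in without touching them.
fn warm(cache: &Arc<dyn HttpCache>, specifier: &str, body: &[u8]) {
    cache.set(specifier, body);
}

fn main() {
    let cache: Arc<dyn HttpCache> = Arc::new(GlobalHttpCache::default());
    warm(&cache, "https://deno.land/x/example/a.ts", b"export {};");
    assert!(cache.get("https://deno.land/x/example/a.ts").is_some());
    println!("cached one module");
}
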
@@ -12,7 +12,6 @@ use deno_core::url::Url;
use deno_core::ModuleSpecifier;
use once_cell::sync::Lazy;
use std::collections::HashMap;
use std::sync::Arc;

/// Used in situations where a default URL needs to be used where otherwise a
/// panic is undesired.

@@ -120,8 +119,8 @@ pub enum LspUrlKind {
/// A bi-directional map of URLs sent to the LSP client and internal module
/// specifiers. We need to map internal specifiers into `deno:` schema URLs
/// to allow the Deno language server to manage these as virtual documents.
#[derive(Debug, Default, Clone)]
pub struct LspUrlMap(Arc<Mutex<LspUrlMapInner>>);
#[derive(Debug, Default)]
pub struct LspUrlMap(Mutex<LspUrlMapInner>);

impl LspUrlMap {
/// Normalize a specifier that is used internally within Deno (or tsc) to a

@@ -311,7 +311,6 @@ impl CliNpmRegistryApiInner {
) -> Result<(), AnyError> {
let file_cache_path = self.get_package_file_cache_path(name);
let file_text = serde_json::to_string(&package_info)?;
std::fs::create_dir_all(file_cache_path.parent().unwrap())?;
atomic_write_file(&file_cache_path, file_text, CACHE_PERM)?;
Ok(())
}

@@ -432,6 +432,10 @@
"description": "Enables or disables the use of a local node_modules folder for npm packages. Alternatively, use the `--node-modules-dir` or `--node-modules-dir=false` flag. Requires Deno 1.34 or later.",
"type": "boolean"
},
"denoModulesDir": {
"description": "UNSTABLE: Enables or disables the use of a local deno_modules folder as a local cache for remote modules. Alternatively, use the `--deno-modules-dir` or `--deno-modules-dir=false` flag. Requires Deno 1.36 or later.",
"type": "boolean"
},
"tasks": {
"description": "Configuration for deno task",
"type": "object",

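The denoModulesDir entry above is a boolean switch with a command-line equivalent. A small illustrative sketch of the two opt-in paths the description names; the scratch path and the example command string are assumptions for illustration only, not part of the schema.

use std::io::Write;

fn main() -> std::io::Result<()> {
    // Arbitrary scratch location for the demo.
    let dir = std::env::temp_dir().join("deno_config_demo");
    std::fs::create_dir_all(&dir)?;
    // Persist the opt-in in the project's config file...
    std::fs::File::create(dir.join("deno.json"))?
        .write_all(b"{ \"denoModulesDir\": true }\n")?;
    // ...or request it per invocation with the flag the description mentions.
    println!("deno run --deno-modules-dir main.ts");
    Ok(())
}
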
@@ -6643,7 +6643,7 @@ fn lsp_cache_location() {
);
let cache_path = temp_dir.path().join(".cache");
assert!(cache_path.is_dir());
assert!(cache_path.join("gen").is_dir());
assert!(!cache_path.join("gen").is_dir()); // not created because no emitting has occurred
client.shutdown();
}

@@ -8766,3 +8766,95 @@ fn lsp_node_modules_dir() {

client.shutdown();
}

#[test]
fn lsp_deno_modules_dir() {
let context = TestContextBuilder::new()
.use_http_server()
.use_temp_cwd()
.build();
let temp_dir = context.temp_dir();

let mut client = context.new_lsp_command().build();
client.initialize_default();
let file_uri = temp_dir.uri().join("file.ts").unwrap();
client.did_open(json!({
"textDocument": {
"uri": file_uri,
"languageId": "typescript",
"version": 1,
"text": "import { returnsHi } from 'http://localhost:4545/subdir/mod1.ts'; console.log(returnsHi());",
}
}));
let cache = |client: &mut LspClient| {
client.write_request(
"deno/cache",
json!({
"referrer": {
"uri": file_uri,
},
"uris": [
{
"uri": "http://localhost:4545/subdir/mod1.ts",
}
]
}),
);
};

cache(&mut client);

assert!(!temp_dir.path().join("deno_modules").exists());

temp_dir.write(
temp_dir.path().join("deno.json"),
"{ \"denoModulesDir\": true, \"lock\": false }\n",
);
let refresh_config = |client: &mut LspClient| {
client.write_notification(
"workspace/didChangeConfiguration",
json!({
"settings": {
"enable": true,
"config": "./deno.json",
}
}),
);

let request = json!([{
"enable": true,
"config": "./deno.json",
"codeLens": {
"implementations": true,
"references": true
},
"importMap": null,
"lint": false,
"suggest": {
"autoImports": true,
"completeFunctionCalls": false,
"names": true,
"paths": true,
"imports": {}
},
"unstable": false
}]);
// one for the workspace
client.handle_configuration_request(request.clone());
// one for the specifier
client.handle_configuration_request(request);
};
refresh_config(&mut client);

let diagnostics = client.read_diagnostics();
assert_eq!(diagnostics.all().len(), 0, "{:#?}", diagnostics); // cached

// no caching necessary because it was already cached. It should exist now

assert!(temp_dir
.path()
.join("deno_modules/http_localhost_4545/subdir/mod1.ts")
.exists());

client.shutdown();
}

@@ -96,11 +96,17 @@ itest!(_017_import_redirect {
output: "run/017_import_redirect.ts.out",
});

itest!(_017_import_redirect_nocheck {
args: "run --quiet --reload --no-check run/017_import_redirect.ts",
itest!(_017_import_redirect_check {
args: "run --quiet --reload --check run/017_import_redirect.ts",
output: "run/017_import_redirect.ts.out",
});

itest!(_017_import_redirect_deno_modules_dir {
args: "run --quiet --reload --deno-modules-dir --check $TESTDATA/run/017_import_redirect.ts",
output: "run/017_import_redirect.ts.out",
temp_cwd: true,
});

itest!(_017_import_redirect_info {
args: "info --quiet --reload run/017_import_redirect.ts",
output: "run/017_import_redirect_info.out",

@@ -156,6 +162,14 @@ itest!(_027_redirect_typescript {
http_server: true,
});

itest!(_027_redirect_typescript_deno_modules_dir {
args:
"run --quiet --reload --deno-modules-dir $TESTDATA/run/027_redirect_typescript.ts",
output: "run/027_redirect_typescript.ts.out",
http_server: true,
temp_cwd: true,
});

itest!(_028_args {
args:
"run --quiet --reload run/028_args.ts --arg1 val1 --arg2=val2 -- arg3 arg4",

@@ -186,6 +200,14 @@ itest!(_033_import_map_remote {
http_server: true,
});

itest!(_033_import_map_deno_modules_dir_remote {
args:
"run --quiet --reload --import-map=http://127.0.0.1:4545/import_maps/import_map_remote.json --deno-modules-dir --unstable $TESTDATA/import_maps/test_remote.ts",
output: "run/033_import_map_remote.out",
http_server: true,
temp_cwd: true,
});

itest!(_033_import_map_data_uri {
args:
"run --quiet --reload --import-map=data:application/json;charset=utf-8;base64,ewogICJpbXBvcnRzIjogewogICAgInRlc3Rfc2VydmVyLyI6ICJodHRwOi8vbG9jYWxob3N0OjQ1NDUvIgogIH0KfQ== run/import_maps/test_data.ts",

@@ -1653,6 +1675,14 @@ itest!(jsx_import_source_pragma_with_config_no_check {
http_server: true,
});

itest!(jsx_import_source_pragma_with_config_deno_modules_dir {
args: "run --reload --config jsx/deno-jsx.jsonc --no-lock --deno-modules-dir $TESTDATA/run/jsx_import_source_pragma.tsx",
output: "run/jsx_import_source.out",
http_server: true,
temp_cwd: true,
copy_temp_dir: Some("jsx/"),
});

itest!(jsx_import_source_no_pragma_no_check {
args:
"run --reload --config jsx/deno-jsx.jsonc --no-lock --no-check run/jsx_import_source_no_pragma.tsx",

@@ -1706,6 +1736,13 @@ itest!(reference_types_error {
exit_code: 1,
});

itest!(reference_types_error_deno_modules_dir {
args:
"run --config run/checkjs.tsconfig.json --check --deno-modules-dir $TESTDATA/run/reference_types_error.js",
output: "run/reference_types_error.js.out",
exit_code: 1,
});

itest!(reference_types_error_no_check {
args: "run --no-check run/reference_types_error.js",
output_str: Some(""),

@@ -3000,7 +3037,7 @@ itest!(
args: "run -A main.js",
output: "run/with_package_json/no_deno_json/sub_dir/main.out",
cwd: Some("run/with_package_json/no_deno_json/sub_dir"),
copy_temp_dir: Some("run/with_package_json/"),
copy_temp_dir: Some("run/with_package_json/no_deno_json/"),
envs: env_vars_for_npm_tests_no_sync_download(),
http_server: true,
}

@@ -4424,3 +4461,66 @@ itest!(extension_dynamic_import {
output: "run/extension_dynamic_import.ts.out",
exit_code: 1,
});

#[test]
pub fn deno_modules_dir_config_file() {
let test_context = TestContextBuilder::new()
.use_http_server()
.use_temp_cwd()
.build();
let temp_dir = test_context.temp_dir();
let deno_modules_dir = temp_dir.path().join("deno_modules");
let rm_deno_modules = || std::fs::remove_dir_all(&deno_modules_dir).unwrap();

temp_dir.write("deno.json", r#"{ "denoModulesDir": true }"#);
temp_dir.write(
"main.ts",
r#"import { returnsHi } from 'http://localhost:4545/subdir/mod1.ts';
console.log(returnsHi());"#,
);

let deno_run_cmd = test_context.new_command().args("run --quiet main.ts");
deno_run_cmd.run().assert_matches_text("Hi\n");

assert!(deno_modules_dir.exists());
rm_deno_modules();
temp_dir.write("deno.json", r#"{ "denoModulesDir": false }"#);

deno_run_cmd.run().assert_matches_text("Hi\n");
assert!(!deno_modules_dir.exists());
test_context
.new_command()
.args("cache --quiet --deno-modules-dir main.ts")
.run();
assert!(deno_modules_dir.exists());
rm_deno_modules();

temp_dir.write("deno.json", r#"{ "denoModulesDir": true }"#);
let cache_command = test_context.new_command().args("cache --quiet main.ts");
cache_command.run();

assert!(deno_modules_dir.exists());
let mod1_file = deno_modules_dir
.join("http_localhost_4545")
.join("subdir")
.join("mod1.ts");
mod1_file.write("export function returnsHi() { return 'bye bye bye'; }");

// won't match the lockfile now
deno_run_cmd
.run()
.assert_matches_text(r#"error: The source code is invalid, as it does not match the expected hash in the lock file.
Specifier: http://localhost:4545/subdir/mod1.ts
Lock file: [WILDCARD]deno.lock
"#)
.assert_exit_code(10);

// try updating by deleting the lockfile
let lockfile = temp_dir.path().join("deno.lock");
lockfile.remove_file();
cache_command.run();

// now it should run
deno_run_cmd.run().assert_matches_text("bye bye bye\n");
assert!(lockfile.exists());
}

@@ -1 +0,0 @@
node_modules/

@@ -571,6 +571,7 @@ fn collect_coverages(
})
.ignore_git_folder()
.ignore_node_modules()
.ignore_deno_modules()
.add_ignore_paths(&files.ignore)
.collect_files(&files.include)?;

@@ -151,6 +151,7 @@ fn collect_fmt_files(files: &FilesConfig) -> Result<Vec<PathBuf>, AnyError> {
FileCollector::new(is_supported_ext_fmt)
.ignore_git_folder()
.ignore_node_modules()
.ignore_deno_modules()
.add_ignore_paths(&files.exclude)
.collect_files(&files.include)
}

@@ -95,7 +95,8 @@ fn print_cache_info(
location: Option<&deno_core::url::Url>,
) -> Result<(), AnyError> {
let dir = factory.deno_dir()?;
let modules_cache = factory.file_fetcher()?.get_http_cache_location();
#[allow(deprecated)]
let modules_cache = factory.global_http_cache()?.get_global_cache_location();
let npm_cache = factory.npm_cache()?.as_readonly().get_cache_location();
let typescript_cache = &dir.gen_cache.location;
let registry_cache = dir.registries_folder_path();

@@ -198,6 +198,7 @@ fn collect_lint_files(files: &FilesConfig) -> Result<Vec<PathBuf>, AnyError> {
FileCollector::new(is_supported_ext)
.ignore_git_folder()
.ignore_node_modules()
.ignore_deno_modules()
.add_ignore_paths(&files.exclude)
.collect_files(&files.include)
}

cli/util/fs.rs

@@ -26,19 +26,79 @@ use crate::util::progress_bar::ProgressMessagePrompt;

use super::path::specifier_to_file_path;

/// Writes the file to the file system at a temporary path, then
/// renames it to the destination in a single sys call in order
/// to never leave the file system in a corrupted state.
///
/// This also handles creating the directory if a NotFound error
/// occurs.
pub fn atomic_write_file<T: AsRef<[u8]>>(
filename: &Path,
file_path: &Path,
data: T,
mode: u32,
) -> std::io::Result<()> {
let rand: String = (0..4)
.map(|_| format!("{:02x}", rand::random::<u8>()))
.collect();
let extension = format!("{rand}.tmp");
let tmp_file = filename.with_extension(extension);
write_file(&tmp_file, data, mode)?;
std::fs::rename(tmp_file, filename)?;
Ok(())
fn atomic_write_file_raw(
temp_file_path: &Path,
file_path: &Path,
data: &[u8],
mode: u32,
) -> std::io::Result<()> {
write_file(temp_file_path, data, mode)?;
std::fs::rename(temp_file_path, file_path)?;
Ok(())
}

fn add_file_context(file_path: &Path, err: Error) -> Error {
Error::new(
err.kind(),
format!("{:#} (for '{}')", err, file_path.display()),
)
}

fn inner(file_path: &Path, data: &[u8], mode: u32) -> std::io::Result<()> {
let temp_file_path = {
let rand: String = (0..4)
.map(|_| format!("{:02x}", rand::random::<u8>()))
.collect();
let extension = format!("{rand}.tmp");
file_path.with_extension(extension)
};

if let Err(write_err) =
atomic_write_file_raw(&temp_file_path, file_path, data, mode)
{
if write_err.kind() == ErrorKind::NotFound {
let parent_dir_path = file_path.parent().unwrap();
match std::fs::create_dir_all(parent_dir_path) {
Ok(()) => {
return atomic_write_file_raw(
&temp_file_path,
file_path,
data,
mode,
)
.map_err(|err| add_file_context(file_path, err));
}
Err(create_err) => {
if !parent_dir_path.exists() {
return Err(Error::new(
create_err.kind(),
format!(
"{:#} (for '{}')\nCheck the permission of the directory.",
create_err,
parent_dir_path.display()
),
));
}
}
}
}
return Err(add_file_context(file_path, write_err));
}
Ok(())
}

inner(file_path, data.as_ref(), mode)
}

pub fn write_file<T: AsRef<[u8]>>(

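The reworked atomic_write_file above keeps the earlier idea (write to a randomly named sibling .tmp file, then rename it over the target) and additionally retries after creating the missing parent directory on NotFound, attaching the file path to any error. A minimal self-contained sketch of that temp-write-then-rename pattern follows; the function and path names here are illustrative assumptions, not the helper above.

use std::io::Write;
use std::path::Path;

// Write to a sibling temp file first, then rename it over the target so a
// crash mid-write never leaves a half-written destination file.
fn write_atomically(target: &Path, data: &[u8]) -> std::io::Result<()> {
    let tmp = target.with_extension("tmp");
    match std::fs::File::create(&tmp).and_then(|mut f| f.write_all(data)) {
        Ok(()) => {}
        Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
            // Parent directory missing: create it and retry once.
            std::fs::create_dir_all(target.parent().unwrap())?;
            std::fs::File::create(&tmp)?.write_all(data)?;
        }
        Err(err) => return Err(err),
    }
    // rename is atomic as long as tmp and target sit on the same filesystem.
    std::fs::rename(&tmp, target)
}

fn main() -> std::io::Result<()> {
    // The parent directory may not exist yet, which exercises the retry path.
    let target = std::env::temp_dir().join("atomic_write_demo/registry.json");
    write_atomically(&target, br#"{"ok":true}"#)?;
    println!("wrote {}", target.display());
    Ok(())
}
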
@@ -140,6 +200,7 @@ pub struct FileCollector<TFilter: Fn(&Path) -> bool> {
file_filter: TFilter,
ignore_git_folder: bool,
ignore_node_modules: bool,
ignore_deno_modules: bool,
}

impl<TFilter: Fn(&Path) -> bool> FileCollector<TFilter> {

@@ -149,6 +210,7 @@ impl<TFilter: Fn(&Path) -> bool> FileCollector<TFilter> {
file_filter,
ignore_git_folder: false,
ignore_node_modules: false,
ignore_deno_modules: false,
}
}

@@ -165,6 +227,11 @@ impl<TFilter: Fn(&Path) -> bool> FileCollector<TFilter> {
self
}

pub fn ignore_deno_modules(mut self) -> Self {
self.ignore_deno_modules = true;
self
}

pub fn ignore_git_folder(mut self) -> Self {
self.ignore_git_folder = true;
self

@@ -203,9 +270,12 @@ impl<TFilter: Fn(&Path) -> bool> FileCollector<TFilter> {
.file_name()
.map(|dir_name| {
let dir_name = dir_name.to_string_lossy().to_lowercase();
let is_ignored_file = self.ignore_node_modules
&& dir_name == "node_modules"
|| self.ignore_git_folder && dir_name == ".git";
let is_ignored_file = match dir_name.as_str() {
"node_modules" => self.ignore_node_modules,
"deno_modules" => self.ignore_deno_modules,
".git" => self.ignore_git_folder,
_ => false,
};
// allow the user to opt out of ignoring by explicitly specifying the dir
file != c && is_ignored_file
})

@@ -238,7 +308,8 @@ pub fn collect_specifiers(
let file_collector = FileCollector::new(predicate)
.add_ignore_paths(&files.exclude)
.ignore_git_folder()
.ignore_node_modules();
.ignore_node_modules()
.ignore_deno_modules();

let root_path = current_dir()?;
let include_files = if files.include.is_empty() {

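The is_ignored_file rewrite above turns the chained flag checks into a per-directory-name match and, per the comment, still keeps a directory that the user specified explicitly. A rough standalone model of that decision follows; the names and types are simplified assumptions, not the FileCollector internals.

// Which opt-in flag, if any, governs a directory name.
struct IgnoreFlags {
    node_modules: bool,
    deno_modules: bool,
    git: bool,
}

// An explicitly specified directory is never skipped, even when its flag is on.
fn should_skip(dir_name: &str, explicitly_specified: bool, flags: &IgnoreFlags) -> bool {
    let governed = match dir_name.to_lowercase().as_str() {
        "node_modules" => flags.node_modules,
        "deno_modules" => flags.deno_modules,
        ".git" => flags.git,
        _ => false,
    };
    governed && !explicitly_specified
}

fn main() {
    let flags = IgnoreFlags { node_modules: true, deno_modules: true, git: true };
    assert!(should_skip("deno_modules", false, &flags)); // walked into: skip
    assert!(!should_skip("deno_modules", true, &flags)); // user asked for it: keep
    assert!(!should_skip("src", false, &flags));
    println!("ok");
}
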
@@ -657,10 +728,12 @@ mod tests {
// ├── a.ts
// ├── b.js
// ├── child
// | ├── node_modules
// | | └── node_modules.js
// | ├── deno_modules
// | | └── deno_modules.js
// | ├── git
// | | └── git.js
// | ├── node_modules
// | | └── node_modules.js
// │ ├── e.mjs
// │ ├── f.mjsx
// │ ├── .foo.TS

@@ -685,6 +758,8 @@ mod tests {
t.write("dir.ts/child/node_modules/node_modules.js", "");
t.create_dir_all("dir.ts/child/.git");
t.write("dir.ts/child/.git/git.js", "");
t.create_dir_all("dir.ts/child/deno_modules");
t.write("dir.ts/child/deno_modules/deno_modules.js", "");

let ignore_dir_path = root_dir_path.join("ignore");
let ignore_dir_files = ["g.d.ts", ".gitignore"];

@@ -709,6 +784,7 @@ mod tests {
"b.js",
"c.tsx",
"d.jsx",
"deno_modules.js",
"e.mjs",
"f.mjsx",
"git.js",

@@ -722,8 +798,10 @@ mod tests {
assert_eq!(file_names, expected);

// test ignoring the .git and node_modules folder
let file_collector =
file_collector.ignore_git_folder().ignore_node_modules();
let file_collector = file_collector
.ignore_git_folder()
.ignore_node_modules()
.ignore_deno_modules();
let result = file_collector
.collect_files(&[root_dir_path.to_path_buf()])
.unwrap();

@@ -123,15 +123,13 @@ impl TestContextBuilder {
} else {
temp_dir
};
let testdata_dir = if let Some(temp_copy_dir) = &self.copy_temp_dir {
let test_data_path = PathRef::new(testdata_path()).join(temp_copy_dir);
let testdata_dir = testdata_path();
if let Some(temp_copy_dir) = &self.copy_temp_dir {
let test_data_path = testdata_dir.join(temp_copy_dir);
let temp_copy_dir = temp_dir.path().join(temp_copy_dir);
temp_copy_dir.create_dir_all();
test_data_path.copy_to_recursive(&temp_copy_dir);
temp_dir.path().clone()
} else {
PathRef::new(testdata_path())
};
}

let deno_exe = self.deno_exe.clone().unwrap_or_else(deno_exe_path);
println!("deno_exe path {}", deno_exe);

@@ -146,7 +144,7 @@ impl TestContextBuilder {
cwd: self.cwd.clone(),
deno_exe,
envs: self.envs.clone(),
use_temp_cwd: self.use_temp_cwd,
use_temp_cwd: self.use_temp_cwd || self.copy_temp_dir.is_some(),
_http_server_guard: http_server_guard,
deno_dir,
temp_dir,

@@ -279,14 +277,15 @@ impl TestCommandBuilder {
}

fn build_cwd(&self) -> PathRef {
let cwd = self.cwd.as_ref().or(self.context.cwd.as_ref());
if self.context.use_temp_cwd {
assert!(cwd.is_none());
let root_dir = if self.context.use_temp_cwd {
self.context.temp_dir.path().to_owned()
} else if let Some(cwd_) = cwd {
self.context.testdata_dir.join(cwd_)
} else {
self.context.testdata_dir.clone()
};
let specified_cwd = self.cwd.as_ref().or(self.context.cwd.as_ref());
match specified_cwd {
Some(cwd) => root_dir.join(cwd),
None => root_dir,
}
}

@@ -116,6 +116,10 @@ impl PathRef {
serde_json::from_str(&self.read_to_string()).unwrap()
}

pub fn read_json_value(&self) -> serde_json::Value {
serde_json::from_str(&self.read_to_string()).unwrap()
}

pub fn rename(&self, to: impl AsRef<Path>) {
fs::rename(self, self.join(to)).unwrap();
}