mirror of
https://github.com/denoland/deno.git
synced 2024-11-21 15:04:11 -05:00
chore: rename some helpers on the Fs trait (#20097)
Rename some of the helper methods on the Fs trait to be suffixed with `_sync` / `_async`, in preparation for the introduction of more async methods for some helpers. Also adds a `read_text_file_async` helper to complement the renamed `read_text_file_sync` helper.
This commit is contained in:
parent
f2e30a6f79
commit
03e963f578
9 changed files with 40 additions and 32 deletions
|
@ -745,7 +745,7 @@ impl NpmModuleLoader {
|
|||
let file_path = specifier.to_file_path().unwrap();
|
||||
let code = self
|
||||
.fs
|
||||
.read_to_string(&file_path)
|
||||
.read_text_file_sync(&file_path)
|
||||
.map_err(AnyError::from)
|
||||
.with_context(|| {
|
||||
if file_path.is_dir() {
|
||||
|
|
|
@ -91,9 +91,11 @@ impl CjsCodeAnalyzer for CliCjsCodeAnalyzer {
|
|||
) -> Result<ExtNodeCjsAnalysis, AnyError> {
|
||||
let source = match source {
|
||||
Some(source) => Cow::Borrowed(source),
|
||||
None => {
|
||||
Cow::Owned(self.fs.read_to_string(&specifier.to_file_path().unwrap())?)
|
||||
}
|
||||
None => Cow::Owned(
|
||||
self
|
||||
.fs
|
||||
.read_text_file_sync(&specifier.to_file_path().unwrap())?,
|
||||
),
|
||||
};
|
||||
let analysis = self.inner_cjs_analysis(specifier, &source)?;
|
||||
Ok(ExtNodeCjsAnalysis {
|
||||
|
|
|
@ -344,10 +344,10 @@ impl NpmCache {
|
|||
.cache_dir
|
||||
.package_folder_for_name_and_version(package, registry_url);
|
||||
if self.should_use_global_cache_for_package(package)
|
||||
&& self.fs.exists(&package_folder)
|
||||
&& self.fs.exists_sync(&package_folder)
|
||||
// if this file exists, then the package didn't successfully extract
|
||||
// the first time, or another process is currently extracting the zip file
|
||||
&& !self.fs.exists(&package_folder.join(NPM_PACKAGE_SYNC_LOCK_FILENAME))
|
||||
&& !self.fs.exists_sync(&package_folder.join(NPM_PACKAGE_SYNC_LOCK_FILENAME))
|
||||
{
|
||||
return Ok(());
|
||||
} else if self.cache_setting == CacheSetting::Only {
|
||||
|
|
|
@ -181,7 +181,7 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
|
|||
Cow::Owned(current_folder.join("node_modules"))
|
||||
};
|
||||
let sub_dir = join_package_name(&node_modules_folder, name);
|
||||
if self.fs.is_dir(&sub_dir) {
|
||||
if self.fs.is_dir_sync(&sub_dir) {
|
||||
// if doing types resolution, only resolve the package if it specifies a types property
|
||||
if mode.is_types() && !name.starts_with("@types/") {
|
||||
let package_json = PackageJson::load_skip_read_permission(
|
||||
|
@ -200,7 +200,7 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
|
|||
if mode.is_types() && !name.starts_with("@types/") {
|
||||
let sub_dir =
|
||||
join_package_name(&node_modules_folder, &types_package_name(name));
|
||||
if self.fs.is_dir(&sub_dir) {
|
||||
if self.fs.is_dir_sync(&sub_dir) {
|
||||
return Ok(sub_dir);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -233,25 +233,31 @@ pub trait FileSystem: std::fmt::Debug + MaybeSend + MaybeSync {
|
|||
Ok(buf)
|
||||
}
|
||||
|
||||
fn is_file(&self, path: &Path) -> bool {
|
||||
fn is_file_sync(&self, path: &Path) -> bool {
|
||||
self.stat_sync(path).map(|m| m.is_file).unwrap_or(false)
|
||||
}
|
||||
|
||||
fn is_dir(&self, path: &Path) -> bool {
|
||||
fn is_dir_sync(&self, path: &Path) -> bool {
|
||||
self
|
||||
.stat_sync(path)
|
||||
.map(|m| m.is_directory)
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
fn exists(&self, path: &Path) -> bool {
|
||||
fn exists_sync(&self, path: &Path) -> bool {
|
||||
self.stat_sync(path).is_ok()
|
||||
}
|
||||
|
||||
fn read_to_string(&self, path: &Path) -> FsResult<String> {
|
||||
fn read_text_file_sync(&self, path: &Path) -> FsResult<String> {
|
||||
let buf = self.read_file_sync(path)?;
|
||||
String::from_utf8(buf).map_err(|err| {
|
||||
std::io::Error::new(std::io::ErrorKind::InvalidData, err).into()
|
||||
})
|
||||
}
|
||||
async fn read_text_file_async(&self, path: PathBuf) -> FsResult<String> {
|
||||
let buf = self.read_file_async(path).await?;
|
||||
String::from_utf8(buf).map_err(|err| {
|
||||
std::io::Error::new(std::io::ErrorKind::InvalidData, err).into()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -205,7 +205,7 @@ impl<TCjsCodeAnalyzer: CjsCodeAnalyzer> NodeCodeTranslator<TCjsCodeAnalyzer> {
|
|||
)?;
|
||||
|
||||
let package_json_path = module_dir.join("package.json");
|
||||
if self.fs.exists(&package_json_path) {
|
||||
if self.fs.exists_sync(&package_json_path) {
|
||||
let package_json = PackageJson::load(
|
||||
&*self.fs,
|
||||
&*self.npm_resolver,
|
||||
|
@ -229,10 +229,10 @@ impl<TCjsCodeAnalyzer: CjsCodeAnalyzer> NodeCodeTranslator<TCjsCodeAnalyzer> {
|
|||
// old school
|
||||
if package_subpath != "." {
|
||||
let d = module_dir.join(package_subpath);
|
||||
if self.fs.is_dir(&d) {
|
||||
if self.fs.is_dir_sync(&d) {
|
||||
// subdir might have a package.json that specifies the entrypoint
|
||||
let package_json_path = d.join("package.json");
|
||||
if self.fs.exists(&package_json_path) {
|
||||
if self.fs.exists_sync(&package_json_path) {
|
||||
let package_json = PackageJson::load(
|
||||
&*self.fs,
|
||||
&*self.npm_resolver,
|
||||
|
@ -262,13 +262,13 @@ impl<TCjsCodeAnalyzer: CjsCodeAnalyzer> NodeCodeTranslator<TCjsCodeAnalyzer> {
|
|||
referrer: &Path,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
let p = p.clean();
|
||||
if self.fs.exists(&p) {
|
||||
if self.fs.exists_sync(&p) {
|
||||
let file_name = p.file_name().unwrap();
|
||||
let p_js =
|
||||
p.with_file_name(format!("{}.js", file_name.to_str().unwrap()));
|
||||
if self.fs.is_file(&p_js) {
|
||||
if self.fs.is_file_sync(&p_js) {
|
||||
return Ok(p_js);
|
||||
} else if self.fs.is_dir(&p) {
|
||||
} else if self.fs.is_dir_sync(&p) {
|
||||
return Ok(p.join("index.js"));
|
||||
} else {
|
||||
return Ok(p);
|
||||
|
@ -276,7 +276,7 @@ impl<TCjsCodeAnalyzer: CjsCodeAnalyzer> NodeCodeTranslator<TCjsCodeAnalyzer> {
|
|||
} else if let Some(file_name) = p.file_name() {
|
||||
let p_js =
|
||||
p.with_file_name(format!("{}.js", file_name.to_str().unwrap()));
|
||||
if self.fs.is_file(&p_js) {
|
||||
if self.fs.is_file_sync(&p_js) {
|
||||
return Ok(p_js);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -426,7 +426,7 @@ where
|
|||
let file_path = PathBuf::from(file_path);
|
||||
ensure_read_permission::<P>(state, &file_path)?;
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
Ok(fs.read_to_string(&file_path)?)
|
||||
Ok(fs.read_text_file_sync(&file_path)?)
|
||||
}
|
||||
|
||||
#[op]
|
||||
|
@ -466,7 +466,7 @@ where
|
|||
} else {
|
||||
let original = modules_path.clone();
|
||||
let mod_dir = path_resolve(vec![modules_path, name]);
|
||||
if fs.is_dir(Path::new(&mod_dir)) {
|
||||
if fs.is_dir_sync(Path::new(&mod_dir)) {
|
||||
mod_dir
|
||||
} else {
|
||||
original
|
||||
|
|
|
@ -81,7 +81,7 @@ impl PackageJson {
|
|||
return Ok(CACHE.with(|cache| cache.borrow()[&path].clone()));
|
||||
}
|
||||
|
||||
let source = match fs.read_to_string(&path) {
|
||||
let source = match fs.read_text_file_sync(&path) {
|
||||
Ok(source) => source,
|
||||
Err(err) if err.kind() == ErrorKind::NotFound => {
|
||||
return Ok(PackageJson::empty(path));
|
||||
|
|
|
@ -534,20 +534,20 @@ impl NodeResolver {
|
|||
let mut searched_for_d_cts = false;
|
||||
if lowercase_path.ends_with(".mjs") {
|
||||
let d_mts_path = with_known_extension(path, "d.mts");
|
||||
if fs.exists(&d_mts_path) {
|
||||
if fs.exists_sync(&d_mts_path) {
|
||||
return Some(d_mts_path);
|
||||
}
|
||||
searched_for_d_mts = true;
|
||||
} else if lowercase_path.ends_with(".cjs") {
|
||||
let d_cts_path = with_known_extension(path, "d.cts");
|
||||
if fs.exists(&d_cts_path) {
|
||||
if fs.exists_sync(&d_cts_path) {
|
||||
return Some(d_cts_path);
|
||||
}
|
||||
searched_for_d_cts = true;
|
||||
}
|
||||
|
||||
let dts_path = with_known_extension(path, "d.ts");
|
||||
if fs.exists(&dts_path) {
|
||||
if fs.exists_sync(&dts_path) {
|
||||
return Some(dts_path);
|
||||
}
|
||||
|
||||
|
@ -561,7 +561,7 @@ impl NodeResolver {
|
|||
_ => None, // already searched above
|
||||
};
|
||||
if let Some(specific_dts_path) = specific_dts_path {
|
||||
if fs.exists(&specific_dts_path) {
|
||||
if fs.exists_sync(&specific_dts_path) {
|
||||
return Some(specific_dts_path);
|
||||
}
|
||||
}
|
||||
|
@ -580,7 +580,7 @@ impl NodeResolver {
|
|||
{
|
||||
return Some(path);
|
||||
}
|
||||
if self.fs.is_dir(&path) {
|
||||
if self.fs.is_dir_sync(&path) {
|
||||
let index_path = path.join("index.js");
|
||||
if let Some(path) = probe_extensions(
|
||||
&*self.fs,
|
||||
|
@ -1163,7 +1163,7 @@ impl NodeResolver {
|
|||
);
|
||||
let mut current_dir = current_dir.as_path();
|
||||
let package_json_path = current_dir.join("package.json");
|
||||
if self.fs.exists(&package_json_path) {
|
||||
if self.fs.exists_sync(&package_json_path) {
|
||||
return Ok(Some(package_json_path));
|
||||
}
|
||||
let Some(root_pkg_folder) = self
|
||||
|
@ -1174,7 +1174,7 @@ impl NodeResolver {
|
|||
while current_dir.starts_with(&root_pkg_folder) {
|
||||
current_dir = current_dir.parent().unwrap();
|
||||
let package_json_path = current_dir.join("package.json");
|
||||
if self.fs.exists(&package_json_path) {
|
||||
if self.fs.exists_sync(&package_json_path) {
|
||||
return Ok(Some(package_json_path));
|
||||
}
|
||||
}
|
||||
|
@ -1224,7 +1224,7 @@ impl NodeResolver {
|
|||
|
||||
if let Some(main) = maybe_main {
|
||||
let guess = package_json.path.parent().unwrap().join(main).clean();
|
||||
if self.fs.is_file(&guess) {
|
||||
if self.fs.is_file_sync(&guess) {
|
||||
return Ok(Some(guess));
|
||||
}
|
||||
|
||||
|
@ -1253,7 +1253,7 @@ impl NodeResolver {
|
|||
.unwrap()
|
||||
.join(format!("{main}{ending}"))
|
||||
.clean();
|
||||
if self.fs.is_file(&guess) {
|
||||
if self.fs.is_file_sync(&guess) {
|
||||
// TODO(bartlomieju): emitLegacyIndexDeprecation()
|
||||
return Ok(Some(guess));
|
||||
}
|
||||
|
@ -1276,7 +1276,7 @@ impl NodeResolver {
|
|||
.unwrap()
|
||||
.join(index_file_name)
|
||||
.clean();
|
||||
if self.fs.is_file(&guess) {
|
||||
if self.fs.is_file_sync(&guess) {
|
||||
// TODO(bartlomieju): emitLegacyIndexDeprecation()
|
||||
return Ok(Some(guess));
|
||||
}
|
||||
|
|
Loading…
Reference in a new issue