
fix: Omit buildinfo when --reload passed (#6489)

This commit is contained in:
Kitson Kelly 2020-06-26 22:23:25 +10:00 committed by GitHub
parent 4817c153e4
commit 70463bac7d
2 changed files with 93 additions and 47 deletions
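Before the diff itself, a minimal sketch of the idea behind the change (the `CompileRequest` struct and `make_request` function below are hypothetical illustrations, not deno internals): when `--reload` disables the disk cache, a previously written `.buildinfo` must not be forwarded to the TypeScript compiler, otherwise the incremental build considers everything up to date and skips re-emitting.

/// Hypothetical stand-in for the compiler request assembled further down.
struct CompileRequest {
    /// Serialized contents of the cached `.buildinfo`, if any.
    build_info: Option<String>,
}

fn make_request(use_disk_cache: bool, cached_build_info: Option<String>) -> CompileRequest {
    CompileRequest {
        // Omit the buildinfo entirely whenever the disk cache is disabled,
        // e.g. because `--reload` was passed on the command line.
        build_info: if use_disk_cache { cached_build_info } else { None },
    }
}

fn main() {
    let stale = Some(String::from("{\"program\":{\"fileInfos\":{}}}"));
    assert!(make_request(false, stale.clone()).build_info.is_none());
    assert!(make_request(true, stale).build_info.is_some());
}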

@@ -610,6 +610,37 @@ fn ts_dependency_recompilation() {
assert!(stdout_output.is_empty());
}
#[test]
fn ts_reload() {
let hello_ts = util::root_path().join("cli/tests/002_hello.ts");
assert!(hello_ts.is_file());
let mut initial = util::deno_cmd()
.current_dir(util::root_path())
.arg("cache")
.arg("--reload")
.arg(hello_ts.clone())
.spawn()
.expect("failed to spawn script");
let status_initial =
initial.wait().expect("failed to wait for child process");
assert!(status_initial.success());
let output = util::deno_cmd()
.current_dir(util::root_path())
.arg("cache")
.arg("--reload")
.arg("-L")
.arg("debug")
.arg(hello_ts)
.output()
.expect("failed to spawn script");
// Check the output of the cache command.
assert!(std::str::from_utf8(&output.stdout)
.unwrap()
.trim()
.contains("compiler::host.writeFile deno://002_hello.js"));
}
#[test]
fn bundle_exports() {
// First we have to generate a bundle of some module that has exports.
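For readers who want to reproduce what the new `ts_reload` test automates, here is a rough standalone sketch using `std::process::Command`; it assumes a `deno` binary on PATH and a `cli/tests/002_hello.ts` file relative to the working directory, mirroring the flags and the assertion used in the test above.

use std::process::Command;

fn main() -> std::io::Result<()> {
    // Prime the cache once with --reload, as the test does.
    let first = Command::new("deno")
        .args(&["cache", "--reload", "cli/tests/002_hello.ts"])
        .status()?;
    assert!(first.success());

    // Run again with --reload and debug logging. With the fix in place the
    // compiler re-emits the output instead of trusting the stale buildinfo,
    // so the debug output contains the writeFile line the test looks for.
    let second = Command::new("deno")
        .args(&["cache", "--reload", "-L", "debug", "cli/tests/002_hello.ts"])
        .output()?;
    let log = String::from_utf8_lossy(&second.stdout);
    assert!(log.contains("compiler::host.writeFile deno://002_hello.js"));
    Ok(())
}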

@@ -422,8 +422,8 @@ impl TsCompiler {
c.contains(url)
}
/// Check if there is compiled source in cache that is valid
/// and can be used again.
/// Check if there is compiled source in cache that is valid and can be used
/// again.
fn has_compiled_source(
&self,
file_fetcher: &SourceFileFetcher,
@@ -450,6 +450,55 @@ impl TsCompiler {
false
}
fn has_valid_cache(
&self,
file_fetcher: &SourceFileFetcher,
url: &Url,
build_info: &Option<String>,
) -> Result<bool, ErrBox> {
if let Some(build_info_str) = build_info.as_ref() {
let build_inf_json: Value = serde_json::from_str(build_info_str)?;
let program_val = build_inf_json["program"].as_object().unwrap();
let file_infos = program_val["fileInfos"].as_object().unwrap();
if !self.has_compiled_source(file_fetcher, url) {
return Ok(false);
}
for (filename, file_info) in file_infos.iter() {
if filename.starts_with("asset://") {
continue;
}
let url = Url::parse(&filename).expect("Filename is not a valid url");
let specifier = ModuleSpecifier::from(url);
if let Some(source_file) = file_fetcher
.fetch_cached_source_file(&specifier, Permissions::allow_all())
{
let existing_hash = crate::checksum::gen(vec![
&source_file.source_code,
version::DENO.as_bytes(),
]);
let expected_hash =
file_info["version"].as_str().unwrap().to_string();
if existing_hash != expected_hash {
// hashes don't match, something's changed
return Ok(false);
}
} else {
// no cached source file
return Ok(false);
}
}
} else {
// no build info
return Ok(false);
}
Ok(true)
}
/// Asynchronously compile module and all its dependencies.
///
/// This method compiles every module at most once.
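To make the shape of the data that `has_valid_cache` walks over more concrete, here is an illustrative, self-contained sketch assuming the serde_json crate; the `pseudo_hash` helper is only a stand-in for `crate::checksum::gen`, and the buildinfo layout (`program.fileInfos.<file>.version`) follows what the function above reads.

use serde_json::Value;
use std::collections::HashMap;

// Placeholder content hash; deno hashes the source together with its version.
fn pseudo_hash(source: &str) -> String {
    let h = source
        .bytes()
        .fold(0u64, |acc, b| acc.wrapping_mul(31).wrapping_add(b as u64));
    format!("{:x}", h)
}

fn cache_is_valid(build_info: &str, current_sources: &HashMap<String, String>) -> bool {
    let json: Value = match serde_json::from_str(build_info) {
        Ok(v) => v,
        Err(_) => return false,
    };
    let file_infos = match json["program"]["fileInfos"].as_object() {
        Some(map) => map,
        None => return false,
    };
    for (filename, info) in file_infos {
        // Internal compiler assets are skipped, just like in the real check.
        if filename.starts_with("asset://") {
            continue;
        }
        let expected = info["version"].as_str().unwrap_or_default();
        match current_sources.get(filename) {
            // The cached hash still matches the source on disk.
            Some(source) if pseudo_hash(source) == expected => {}
            // Hash mismatch or no cached source: the buildinfo is stale.
            _ => return false,
        }
    }
    true
}

fn main() {
    let source = String::from("console.log(\"Hello World\");\n");
    let build_info = serde_json::json!({
        "program": {
            "fileInfos": {
                "file:///002_hello.ts": { "version": pseudo_hash(&source) }
            }
        }
    })
    .to_string();
    let mut sources = HashMap::new();
    sources.insert("file:///002_hello.ts".to_string(), source);
    assert!(cache_is_valid(&build_info, &sources));
}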
@@ -468,58 +517,24 @@ impl TsCompiler {
module_graph: ModuleGraph,
allow_js: bool,
) -> Result<(), ErrBox> {
let mut has_cached_version = false;
let module_url = source_file.url.clone();
let build_info_key = self
.disk_cache
.get_cache_filename_with_extension(&module_url, "buildinfo");
let build_info = match self.disk_cache.get(&build_info_key) {
Ok(bytes) => Some(String::from_utf8(bytes)?),
Err(_) => None,
};
let file_fetcher = global_state.file_fetcher.clone();
// Only use disk cache if `--reload` flag was not used or
// this file has already been compiled during current process
// lifetime.
if self.use_disk_cache || self.has_compiled(&source_file.url) {
if let Some(build_info_str) = build_info.as_ref() {
let build_inf_json: Value = serde_json::from_str(build_info_str)?;
let program_val = build_inf_json["program"].as_object().unwrap();
let file_infos = program_val["fileInfos"].as_object().unwrap();
has_cached_version = true;
has_cached_version &= self
.has_compiled_source(&global_state.file_fetcher, &source_file.url);
for (filename, file_info) in file_infos.iter() {
if filename.starts_with("asset://") {
continue;
}
let url = Url::parse(&filename).expect("Filename is not a valid url");
let specifier = ModuleSpecifier::from(url);
if let Some(source_file) = file_fetcher
.fetch_cached_source_file(&specifier, Permissions::allow_all())
{
let existing_hash = crate::checksum::gen(vec![
&source_file.source_code,
version::DENO.as_bytes(),
]);
let expected_hash =
file_info["version"].as_str().unwrap().to_string();
has_cached_version &= existing_hash == expected_hash
} else {
has_cached_version &= false
}
}
}
}
if has_cached_version {
// Only use disk cache if `--reload` flag was not used or this file has
// already been compiled during current process lifetime.
if (self.use_disk_cache || self.has_compiled(&source_file.url))
&& self.has_valid_cache(
&global_state.file_fetcher,
&source_file.url,
&build_info,
)?
{
return Ok(());
}
@@ -550,7 +565,7 @@ impl TsCompiler {
"config": str::from_utf8(&config_data).unwrap(),
"cwd": cwd,
"sourceFileMap": module_graph_json,
"buildInfo": build_info,
"buildInfo": if self.use_disk_cache { build_info } else { None },
}),
_ => json!({
"type": msg::CompilerRequestType::Compile,
@@ -561,7 +576,7 @@ impl TsCompiler {
"performance": performance,
"cwd": cwd,
"sourceFileMap": module_graph_json,
"buildInfo": build_info,
"buildInfo": if self.use_disk_cache { build_info } else { None },
}),
};
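Finally, a small sketch (again assuming serde_json, which the surrounding code already uses) of why the conditional `if self.use_disk_cache { build_info } else { None }` is enough: a `None` value serializes to JSON `null` inside the `json!` macro, so when the disk cache is disabled the compiler worker receives no buildinfo at all and performs a full emit instead of an incremental one.

use serde_json::json;

fn main() {
    // e.g. `--reload` disabled the disk cache, so no buildinfo is forwarded.
    let build_info: Option<String> = None;
    let request = json!({ "buildInfo": build_info });
    assert_eq!(request.to_string(), r#"{"buildInfo":null}"#);
    println!("{}", request);
}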