
feat: emit files on demand and fix racy emit (#15220)

Author: David Sherret (committed by GitHub)
Date: 2022-07-19 11:58:18 -04:00
Parent: e99d64aced
Commit: 0ab262b901
17 changed files with 591 additions and 547 deletions


@@ -286,6 +286,11 @@ impl CliOptions {
     self.flags.enable_testing_features
   }
 
+  /// If the --inspect or --inspect-brk flags are used.
+  pub fn is_inspecting(&self) -> bool {
+    self.flags.inspect.is_some() || self.flags.inspect_brk.is_some()
+  }
+
   pub fn inspect_brk(&self) -> Option<SocketAddr> {
     self.flags.inspect_brk
   }

cli/cache/check.rs (6 changes)

@@ -22,7 +22,7 @@ impl TypeCheckCache {
       Err(err) => {
         log::debug!(
           concat!(
-            "Failed creating internal type checking cache. ",
+            "Failed loading internal type checking cache. ",
             "Recreating...\n\nError details:\n{:#}",
           ),
           err
@@ -35,7 +35,7 @@ impl TypeCheckCache {
       Err(err) => {
         log::debug!(
           concat!(
-            "Unable to create internal cache for type checking. ",
+            "Unable to load internal cache for type checking. ",
            "This will reduce the performance of type checking.\n\n",
            "Error details:\n{:#}",
           ),
@@ -233,7 +233,7 @@ mod test {
     cache.set_tsbuildinfo(&specifier1, "test");
     assert_eq!(cache.get_tsbuildinfo(&specifier1), Some("test".to_string()));
 
-    // recreating the cache should not remove the data because the CLI version and state hash is the same
+    // recreating the cache should not remove the data because the CLI version is the same
     let conn = cache.0.unwrap();
     let cache =
       TypeCheckCache::from_connection(conn, "2.0.0".to_string()).unwrap();

cli/cache/common.rs (35 changes)

@@ -1,16 +1,37 @@
 // Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
 
+use std::hash::Hasher;
+
 use deno_core::error::AnyError;
 use deno_runtime::deno_webstorage::rusqlite::Connection;
 
-/// Very fast non-cryptographically secure hash.
-pub fn fast_insecure_hash(bytes: &[u8]) -> u64 {
-  use std::hash::Hasher;
-  use twox_hash::XxHash64;
-
-  let mut hasher = XxHash64::default();
-  hasher.write(bytes);
-  hasher.finish()
+/// A very fast insecure hasher that uses the xxHash algorithm.
+#[derive(Default)]
+pub struct FastInsecureHasher(twox_hash::XxHash64);
+
+impl FastInsecureHasher {
+  pub fn new() -> Self {
+    Self::default()
+  }
+
+  pub fn write_str(&mut self, text: &str) -> &mut Self {
+    self.write(text.as_bytes());
+    self
+  }
+
+  pub fn write(&mut self, bytes: &[u8]) -> &mut Self {
+    self.0.write(bytes);
+    self
+  }
+
+  pub fn write_u64(&mut self, value: u64) -> &mut Self {
+    self.0.write_u64(value);
+    self
+  }
+
+  pub fn finish(&self) -> u64 {
+    self.0.finish()
+  }
 }
 
 /// Runs the common sqlite pragma.

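For orientation, a minimal usage sketch of the FastInsecureHasher introduced above (not part of the diff; the function name and inputs are illustrative): callers chain write_str/write_u64 and call finish() to obtain the 64-bit xxHash value, which is how the incremental cache and the new emit cache derive their hashes later in this commit.

// Illustrative sketch; relies on the crate::cache::FastInsecureHasher re-export shown in cli/cache/mod.rs below.
fn hash_state<T: serde::Serialize>(state: &T, file_text: &str) -> u64 {
  crate::cache::FastInsecureHasher::new()
    .write_str(&serde_json::to_string(state).unwrap())
    .write_str(file_text)
    .finish()
}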

@@ -3,13 +3,6 @@
 use crate::fs_util;
 use crate::http_cache::url_to_filename;
 
-use super::CacheType;
-use super::Cacher;
-use super::EmitMetadata;
-use deno_ast::ModuleSpecifier;
-use deno_core::error::AnyError;
-use deno_core::serde_json;
-
 use deno_core::url::Host;
 use deno_core::url::Url;
 use std::ffi::OsStr;
@ -154,77 +147,6 @@ impl DiskCache {
fs_util::atomic_write_file(&path, data, crate::http_cache::CACHE_PERM) fs_util::atomic_write_file(&path, data, crate::http_cache::CACHE_PERM)
.map_err(|e| with_io_context(&e, format!("{:#?}", &path))) .map_err(|e| with_io_context(&e, format!("{:#?}", &path)))
} }
fn get_emit_metadata(
&self,
specifier: &ModuleSpecifier,
) -> Option<EmitMetadata> {
let filename = self.get_cache_filename_with_extension(specifier, "meta")?;
let bytes = self.get(&filename).ok()?;
serde_json::from_slice(&bytes).ok()
}
fn set_emit_metadata(
&self,
specifier: &ModuleSpecifier,
data: EmitMetadata,
) -> Result<(), AnyError> {
let filename = self
.get_cache_filename_with_extension(specifier, "meta")
.unwrap();
let bytes = serde_json::to_vec(&data)?;
self.set(&filename, &bytes).map_err(|e| e.into())
}
}
// todo(13302): remove and replace with sqlite database
impl Cacher for DiskCache {
fn get(
&self,
cache_type: CacheType,
specifier: &ModuleSpecifier,
) -> Option<String> {
let extension = match cache_type {
CacheType::Emit => "js",
CacheType::SourceMap => "js.map",
CacheType::Version => {
return self.get_emit_metadata(specifier).map(|d| d.version_hash)
}
};
let filename =
self.get_cache_filename_with_extension(specifier, extension)?;
self
.get(&filename)
.ok()
.and_then(|b| String::from_utf8(b).ok())
}
fn set(
&self,
cache_type: CacheType,
specifier: &ModuleSpecifier,
value: String,
) -> Result<(), AnyError> {
let extension = match cache_type {
CacheType::Emit => "js",
CacheType::SourceMap => "js.map",
CacheType::Version => {
let data = if let Some(mut data) = self.get_emit_metadata(specifier) {
data.version_hash = value;
data
} else {
EmitMetadata {
version_hash: value,
}
};
return self.set_emit_metadata(specifier, data);
}
};
let filename = self
.get_cache_filename_with_extension(specifier, extension)
.unwrap();
self.set(&filename, value.as_bytes()).map_err(|e| e.into())
}
} }
#[cfg(test)] #[cfg(test)]

cli/cache/emit.rs (236 changes)

@ -1,71 +1,209 @@
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
use std::path::PathBuf;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde_json;
use serde::Deserialize;
use serde::Serialize;
use super::CacheType; use super::DiskCache;
use super::Cacher; use super::FastInsecureHasher;
/// Emit cache for a single file. #[derive(Debug, Deserialize, Serialize)]
#[derive(Debug, Clone, PartialEq)] struct EmitMetadata {
pub struct SpecifierEmitCacheData {
pub source_hash: String, pub source_hash: String,
pub text: String, pub emit_hash: String,
pub map: Option<String>, // purge the cache between cli versions
pub cli_version: String,
} }
pub trait EmitCache { /// The cache that stores previously emitted files.
/// Gets the emit data from the cache. #[derive(Clone)]
fn get_emit_data( pub struct EmitCache {
disk_cache: DiskCache,
cli_version: String,
}
impl EmitCache {
pub fn new(disk_cache: DiskCache) -> Self {
Self {
disk_cache,
cli_version: crate::version::deno(),
}
}
/// Gets the emitted code with embedded sourcemap from the cache.
///
/// The expected source hash is used in order to verify
/// that you're getting a value from the cache that is
/// for the provided source.
///
/// Cached emits from previous CLI releases will not be returned
/// or emits that do not match the source.
pub fn get_emit_code(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
) -> Option<SpecifierEmitCacheData>; expected_source_hash: u64,
/// Sets the emit data in the cache. ) -> Option<String> {
fn set_emit_data( let meta_filename = self.get_meta_filename(specifier)?;
&self, let emit_filename = self.get_emit_filename(specifier)?;
specifier: ModuleSpecifier,
data: SpecifierEmitCacheData, // load and verify the meta data file is for this source and CLI version
) -> Result<(), AnyError>; let bytes = self.disk_cache.get(&meta_filename).ok()?;
/// Gets the stored hash of the source of the provider specifier let meta: EmitMetadata = serde_json::from_slice(&bytes).ok()?;
/// to tell if the emit is out of sync with the source. if meta.source_hash != expected_source_hash.to_string()
/// TODO(13302): this is actually not reliable and should be removed || meta.cli_version != self.cli_version
/// once switching to an sqlite db {
fn get_source_hash(&self, specifier: &ModuleSpecifier) -> Option<String>; return None;
/// Gets the emitted JavaScript of the TypeScript source.
/// TODO(13302): remove this once switching to an sqlite db
fn get_emit_text(&self, specifier: &ModuleSpecifier) -> Option<String>;
} }
impl<T: Cacher> EmitCache for T { // load and verify the emit is for the meta data
fn get_emit_data( let emit_bytes = self.disk_cache.get(&emit_filename).ok()?;
if meta.emit_hash != compute_emit_hash(&emit_bytes) {
return None;
}
// everything looks good, return it
let emit_text = String::from_utf8(emit_bytes).ok()?;
Some(emit_text)
}
/// Gets the filepath which stores the emit.
pub fn get_emit_filepath(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
) -> Option<SpecifierEmitCacheData> { ) -> Option<PathBuf> {
Some(SpecifierEmitCacheData { Some(
source_hash: self.get_source_hash(specifier)?, self
text: self.get_emit_text(specifier)?, .disk_cache
map: self.get(CacheType::SourceMap, specifier), .location
}) .join(self.get_emit_filename(specifier)?),
)
} }
fn get_source_hash(&self, specifier: &ModuleSpecifier) -> Option<String> { /// Sets the emit code in the cache.
self.get(CacheType::Version, specifier) pub fn set_emit_code(
}
fn get_emit_text(&self, specifier: &ModuleSpecifier) -> Option<String> {
self.get(CacheType::Emit, specifier)
}
fn set_emit_data(
&self, &self,
specifier: ModuleSpecifier, specifier: &ModuleSpecifier,
data: SpecifierEmitCacheData, source_hash: u64,
code: &str,
) {
if let Err(err) = self.set_emit_code_result(specifier, source_hash, code) {
// should never error here, but if it ever does don't fail
if cfg!(debug_assertions) {
panic!("Error saving emit data ({}): {}", specifier, err);
} else {
log::debug!("Error saving emit data({}): {}", specifier, err);
}
}
}
fn set_emit_code_result(
&self,
specifier: &ModuleSpecifier,
source_hash: u64,
code: &str,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
self.set(CacheType::Version, &specifier, data.source_hash)?; let meta_filename = self
self.set(CacheType::Emit, &specifier, data.text)?; .get_meta_filename(specifier)
if let Some(map) = data.map { .ok_or_else(|| anyhow!("Could not get meta filename."))?;
self.set(CacheType::SourceMap, &specifier, map)?; let emit_filename = self
} .get_emit_filename(specifier)
.ok_or_else(|| anyhow!("Could not get emit filename."))?;
// save the metadata
let metadata = EmitMetadata {
cli_version: self.cli_version.to_string(),
source_hash: source_hash.to_string(),
emit_hash: compute_emit_hash(code.as_bytes()),
};
self
.disk_cache
.set(&meta_filename, &serde_json::to_vec(&metadata)?)?;
// save the emit source
self.disk_cache.set(&emit_filename, code.as_bytes())?;
Ok(()) Ok(())
} }
fn get_meta_filename(&self, specifier: &ModuleSpecifier) -> Option<PathBuf> {
self
.disk_cache
.get_cache_filename_with_extension(specifier, "meta")
}
fn get_emit_filename(&self, specifier: &ModuleSpecifier) -> Option<PathBuf> {
self
.disk_cache
.get_cache_filename_with_extension(specifier, "js")
}
}
fn compute_emit_hash(bytes: &[u8]) -> String {
// it's ok to use an insecure hash here because
// if someone can change the emit source then they
// can also change the version hash
FastInsecureHasher::new().write(bytes).finish().to_string()
}
#[cfg(test)]
mod test {
use test_util::TempDir;
use super::*;
#[test]
pub fn emit_cache_general_use() {
let temp_dir = TempDir::new();
let disk_cache = DiskCache::new(temp_dir.path());
let cache = EmitCache {
disk_cache: disk_cache.clone(),
cli_version: "1.0.0".to_string(),
};
let specifier1 =
ModuleSpecifier::from_file_path(temp_dir.path().join("file1.ts"))
.unwrap();
let specifier2 =
ModuleSpecifier::from_file_path(temp_dir.path().join("file2.ts"))
.unwrap();
assert_eq!(cache.get_emit_code(&specifier1, 1), None);
let emit_code1 = "text1".to_string();
let emit_code2 = "text2".to_string();
cache.set_emit_code(&specifier1, 10, &emit_code1);
cache.set_emit_code(&specifier2, 2, &emit_code2);
// providing the incorrect source hash
assert_eq!(cache.get_emit_code(&specifier1, 5), None);
// providing the correct source hash
assert_eq!(
cache.get_emit_code(&specifier1, 10),
Some(emit_code1.clone()),
);
assert_eq!(cache.get_emit_code(&specifier2, 2), Some(emit_code2),);
// try changing the cli version (should not load previous ones)
let cache = EmitCache {
disk_cache: disk_cache.clone(),
cli_version: "2.0.0".to_string(),
};
assert_eq!(cache.get_emit_code(&specifier1, 10), None);
cache.set_emit_code(&specifier1, 5, &emit_code1);
// recreating the cache should still load the data because the CLI version is the same
let cache = EmitCache {
disk_cache,
cli_version: "2.0.0".to_string(),
};
assert_eq!(cache.get_emit_code(&specifier1, 5), Some(emit_code1));
// adding when already exists should not cause issue
let emit_code3 = "asdf".to_string();
cache.set_emit_code(&specifier1, 20, &emit_code3);
assert_eq!(cache.get_emit_code(&specifier1, 5), None);
assert_eq!(cache.get_emit_code(&specifier1, 20), Some(emit_code3));
}
} }

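A brief usage sketch of the new EmitCache, modeled on the emit_cache_general_use test above (the temp-dir setup is borrowed from that test and is not additional API): an emit is only returned when the caller's expected source hash and the stored CLI version both match, which is what removes the stale or racy emit case.

// Sketch based on the test above; not part of the commit itself.
fn emit_cache_round_trip_sketch() {
  let temp_dir = TempDir::new();
  let cache = EmitCache::new(DiskCache::new(temp_dir.path()));
  let specifier =
    ModuleSpecifier::from_file_path(temp_dir.path().join("file1.ts")).unwrap();

  cache.set_emit_code(&specifier, 10, "emitted js");
  // only the matching source hash returns the stored emit
  assert_eq!(
    cache.get_emit_code(&specifier, 10),
    Some("emitted js".to_string())
  );
  assert_eq!(cache.get_emit_code(&specifier, 11), None);
}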

@@ -12,8 +12,8 @@ use deno_runtime::deno_webstorage::rusqlite::Connection;
 use serde::Serialize;
 use tokio::task::JoinHandle;
 
-use super::common::fast_insecure_hash;
 use super::common::run_sqlite_pragma;
+use super::common::FastInsecureHasher;
 
 /// Cache used to skip formatting/linting a file again when we
 /// know it is already formatted or has no lint diagnostics.
@@ -79,8 +79,9 @@ impl IncrementalCacheInner {
     state: &TState,
     initial_file_paths: &[PathBuf],
   ) -> Result<Self, AnyError> {
-    let state_hash =
-      fast_insecure_hash(serde_json::to_string(state).unwrap().as_bytes());
+    let state_hash = FastInsecureHasher::new()
+      .write_str(&serde_json::to_string(state).unwrap())
+      .finish();
     let sql_cache = SqlIncrementalCache::new(db_file_path, state_hash)?;
     Ok(Self::from_sql_incremental_cache(
       sql_cache,
@@ -123,13 +124,15 @@ impl IncrementalCacheInner {
 
   pub fn is_file_same(&self, file_path: &Path, file_text: &str) -> bool {
     match self.previous_hashes.get(file_path) {
-      Some(hash) => *hash == fast_insecure_hash(file_text.as_bytes()),
+      Some(hash) => {
+        *hash == FastInsecureHasher::new().write_str(file_text).finish()
+      }
       None => false,
     }
   }
 
   pub fn update_file(&self, file_path: &Path, file_text: &str) {
-    let hash = fast_insecure_hash(file_text.as_bytes());
+    let hash = FastInsecureHasher::new().write_str(file_text).finish();
     if let Some(previous_hash) = self.previous_hashes.get(file_path) {
       if *previous_hash == hash {
         return; // do not bother updating the db file because nothing has changed
@@ -334,7 +337,7 @@ mod test {
       .unwrap();
     let file_path = PathBuf::from("/mod.ts");
     let file_text = "test";
-    let file_hash = fast_insecure_hash(file_text.as_bytes());
+    let file_hash = FastInsecureHasher::new().write_str(file_text).finish();
     sql_cache.set_source_hash(&file_path, file_hash).unwrap();
     let cache = IncrementalCacheInner::from_sql_incremental_cache(
       sql_cache,

cli/cache/mod.rs (53 changes)

@@ -3,10 +3,7 @@
 use crate::errors::get_error_class_name;
 use crate::file_fetcher::FileFetcher;
 
-use deno_core::error::AnyError;
 use deno_core::futures::FutureExt;
-use deno_core::serde::Deserialize;
-use deno_core::serde::Serialize;
 use deno_core::ModuleSpecifier;
 use deno_graph::source::CacheInfo;
 use deno_graph::source::LoadFuture;
@@ -22,44 +19,15 @@ mod emit;
 mod incremental;
 
 pub use check::TypeCheckCache;
+pub use common::FastInsecureHasher;
 pub use disk_cache::DiskCache;
 pub use emit::EmitCache;
-pub use emit::SpecifierEmitCacheData;
 pub use incremental::IncrementalCache;
 
-#[derive(Debug, Deserialize, Serialize)]
-pub struct EmitMetadata {
-  pub version_hash: String,
-}
-
-pub enum CacheType {
-  Emit,
-  SourceMap,
-  Version,
-}
-
-/// A trait which provides a concise implementation to getting and setting
-/// values in a cache.
-pub trait Cacher {
-  /// Get a value from the cache.
-  fn get(
-    &self,
-    cache_type: CacheType,
-    specifier: &ModuleSpecifier,
-  ) -> Option<String>;
-  /// Set a value in the cache.
-  fn set(
-    &self,
-    cache_type: CacheType,
-    specifier: &ModuleSpecifier,
-    value: String,
-  ) -> Result<(), AnyError>;
-}
-
 /// A "wrapper" for the FileFetcher and DiskCache for the Deno CLI that provides
 /// a concise interface to the DENO_DIR when building module graphs.
 pub struct FetchCacher {
-  disk_cache: DiskCache,
+  emit_cache: EmitCache,
   dynamic_permissions: Permissions,
   file_fetcher: Arc<FileFetcher>,
   root_permissions: Permissions,
@@ -67,7 +35,7 @@ impl FetchCacher {
 
 impl FetchCacher {
   pub fn new(
-    disk_cache: DiskCache,
+    emit_cache: EmitCache,
     file_fetcher: FileFetcher,
     root_permissions: Permissions,
     dynamic_permissions: Permissions,
@@ -75,7 +43,7 @@ impl FetchCacher {
     let file_fetcher = Arc::new(file_fetcher);
 
     Self {
-      disk_cache,
+      emit_cache,
       dynamic_permissions,
      file_fetcher,
      root_permissions,
@@ -87,21 +55,14 @@ impl Loader for FetchCacher {
   fn get_cache_info(&self, specifier: &ModuleSpecifier) -> Option<CacheInfo> {
     let local = self.file_fetcher.get_local_path(specifier)?;
     if local.is_file() {
-      let location = &self.disk_cache.location;
       let emit = self
-        .disk_cache
-        .get_cache_filename_with_extension(specifier, "js")
-        .map(|p| location.join(p))
-        .filter(|p| p.is_file());
-      let map = self
-        .disk_cache
-        .get_cache_filename_with_extension(specifier, "js.map")
-        .map(|p| location.join(p))
+        .emit_cache
+        .get_emit_filepath(specifier)
         .filter(|p| p.is_file());
       Some(CacheInfo {
         local: Some(local),
         emit,
-        map,
+        map: None,
       })
     } else {
       None


@@ -58,7 +58,7 @@ impl DenoDir {
     self.root.join("lint_incremental_cache_v1")
   }
 
-  /// Path for the incremental cache used for linting.
+  /// Path for the cache used for type checking.
   pub fn type_checking_cache_db_file_path(&self) -> PathBuf {
     // bump this version name to invalidate the entire cache
     self.root.join("check_cache_v1")


@ -10,7 +10,7 @@ use crate::args::EmitConfigOptions;
use crate::args::TsConfig; use crate::args::TsConfig;
use crate::args::TypeCheckMode; use crate::args::TypeCheckMode;
use crate::cache::EmitCache; use crate::cache::EmitCache;
use crate::cache::SpecifierEmitCacheData; use crate::cache::FastInsecureHasher;
use crate::cache::TypeCheckCache; use crate::cache::TypeCheckCache;
use crate::colors; use crate::colors;
use crate::diagnostics::Diagnostics; use crate::diagnostics::Diagnostics;
@ -22,6 +22,7 @@ use crate::version;
use deno_ast::swc::bundler::Hook; use deno_ast::swc::bundler::Hook;
use deno_ast::swc::bundler::ModuleRecord; use deno_ast::swc::bundler::ModuleRecord;
use deno_ast::swc::common::Span; use deno_ast::swc::common::Span;
use deno_ast::ParsedSource;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::parking_lot::RwLock; use deno_core::parking_lot::RwLock;
use deno_core::serde::Deserialize; use deno_core::serde::Deserialize;
@ -32,14 +33,11 @@ use deno_core::serde_json;
use deno_core::serde_json::json; use deno_core::serde_json::json;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_graph::MediaType; use deno_graph::MediaType;
use deno_graph::ModuleGraph;
use deno_graph::ModuleGraphError; use deno_graph::ModuleGraphError;
use deno_graph::ModuleKind; use deno_graph::ModuleKind;
use deno_graph::ResolutionError; use deno_graph::ResolutionError;
use std::collections::HashSet;
use std::fmt; use std::fmt;
use std::sync::Arc; use std::sync::Arc;
use std::time::Instant;
/// A structure representing stats from an emit operation for a graph. /// A structure representing stats from an emit operation for a graph.
#[derive(Clone, Debug, Default, Eq, PartialEq)] #[derive(Clone, Debug, Default, Eq, PartialEq)]
@@ -116,8 +114,8 @@ pub enum TsConfigType {
   /// Return a configuration for bundling, using swc to emit the bundle. This is
   /// independent of type checking.
   Bundle,
-  /// Return a configuration to use tsc to type check and optionally emit. This
-  /// is independent of either bundling or just emitting via swc
+  /// Return a configuration to use tsc to type check. This
+  /// is independent of either bundling or emitting via swc.
   Check { lib: TsTypeLib },
   /// Return a configuration to use swc to emit single module files.
   Emit,
@@ -234,31 +232,30 @@ fn get_tsc_roots(
 /// A hashing function that takes the source code, version and optionally a
 /// user provided config and generates a string hash which can be stored to
 /// determine if the cached emit is valid or not.
-fn get_version(source_bytes: &[u8], config_bytes: &[u8]) -> String {
-  crate::checksum::gen(&[
-    source_bytes,
-    version::deno().as_bytes(),
-    config_bytes,
-  ])
+pub fn get_source_hash(source_text: &str, emit_options_hash: u64) -> u64 {
+  FastInsecureHasher::new()
+    .write_str(source_text)
+    .write_u64(emit_options_hash)
+    .finish()
 }
 
-/// Determine if a given module kind and media type is emittable or not.
-pub fn is_emittable(
-  kind: &ModuleKind,
-  media_type: &MediaType,
-  include_js: bool,
-) -> bool {
-  if matches!(kind, ModuleKind::Synthetic) {
-    return false;
-  }
-  match &media_type {
-    MediaType::TypeScript
-    | MediaType::Mts
-    | MediaType::Cts
-    | MediaType::Tsx
-    | MediaType::Jsx => true,
-    MediaType::JavaScript | MediaType::Mjs | MediaType::Cjs => include_js,
-    _ => false,
+pub fn emit_parsed_source(
+  cache: &EmitCache,
+  specifier: &ModuleSpecifier,
+  parsed_source: &ParsedSource,
+  emit_options: &deno_ast::EmitOptions,
+  emit_config_hash: u64,
+) -> Result<String, AnyError> {
+  let source_hash =
+    get_source_hash(parsed_source.text_info().text_str(), emit_config_hash);
+
+  if let Some(emit_code) = cache.get_emit_code(specifier, source_hash) {
+    Ok(emit_code)
+  } else {
+    let transpiled_source = parsed_source.transpile(emit_options)?;
+    debug_assert!(transpiled_source.source_map.is_none());
+    cache.set_emit_code(specifier, source_hash, &transpiled_source.text);
+    Ok(transpiled_source.text)
   }
 }
@ -376,72 +373,6 @@ pub fn check(
}) })
} }
pub struct EmitOptions {
pub ts_config: TsConfig,
pub reload: bool,
pub reload_exclusions: HashSet<ModuleSpecifier>,
}
/// Given a module graph, emit any appropriate modules and cache them.
// TODO(nayeemrmn): This would ideally take `GraphData` like
// `check()`, but the AST isn't stored in that. Cleanup.
pub fn emit(
graph: &ModuleGraph,
cache: &dyn EmitCache,
options: EmitOptions,
) -> Result<CheckResult, AnyError> {
let start = Instant::now();
let config_bytes = options.ts_config.as_bytes();
let include_js = options.ts_config.get_check_js();
let emit_options = options.ts_config.into();
let mut emit_count = 0_u32;
let mut file_count = 0_u32;
for module in graph.modules() {
file_count += 1;
if !is_emittable(&module.kind, &module.media_type, include_js) {
continue;
}
let needs_reload =
options.reload && !options.reload_exclusions.contains(&module.specifier);
let version = get_version(
module.maybe_source.as_ref().map(|s| s.as_bytes()).unwrap(),
&config_bytes,
);
let is_valid = cache
.get_source_hash(&module.specifier)
.map_or(false, |v| v == version);
if is_valid && !needs_reload {
continue;
}
let transpiled_source = module
.maybe_parsed_source
.as_ref()
.map(|source| source.transpile(&emit_options))
.unwrap()?;
emit_count += 1;
cache.set_emit_data(
module.specifier.clone(),
SpecifierEmitCacheData {
source_hash: version,
text: transpiled_source.text,
map: transpiled_source.source_map,
},
)?;
}
let stats = Stats(vec![
("Files".to_string(), file_count),
("Emitted".to_string(), emit_count),
("Total time".to_string(), start.elapsed().as_millis() as u32),
]);
Ok(CheckResult {
diagnostics: Diagnostics::default(),
stats,
})
}
enum CheckHashResult { enum CheckHashResult {
Hash(u64), Hash(u64),
NoFiles, NoFiles,
@ -624,36 +555,3 @@ impl From<TsConfig> for deno_ast::EmitOptions {
} }
} }
} }
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_is_emittable() {
assert!(is_emittable(
&ModuleKind::Esm,
&MediaType::TypeScript,
false
));
assert!(!is_emittable(
&ModuleKind::Synthetic,
&MediaType::TypeScript,
false
));
assert!(!is_emittable(&ModuleKind::Esm, &MediaType::Dts, false));
assert!(!is_emittable(&ModuleKind::Esm, &MediaType::Dcts, false));
assert!(!is_emittable(&ModuleKind::Esm, &MediaType::Dmts, false));
assert!(is_emittable(&ModuleKind::Esm, &MediaType::Tsx, false));
assert!(!is_emittable(
&ModuleKind::Esm,
&MediaType::JavaScript,
false
));
assert!(!is_emittable(&ModuleKind::Esm, &MediaType::Cjs, false));
assert!(!is_emittable(&ModuleKind::Esm, &MediaType::Mjs, false));
assert!(is_emittable(&ModuleKind::Esm, &MediaType::JavaScript, true));
assert!(is_emittable(&ModuleKind::Esm, &MediaType::Jsx, false));
assert!(!is_emittable(&ModuleKind::Esm, &MediaType::Json, false));
}
}

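One property of get_source_hash above is worth spelling out (sketch only; the assertion is illustrative): the cache key folds in a hash of the emit options as well as the source text, so changing either the module or the compiler options produces a different key and an emit produced under old options can never be served.

// Sketch; relies only on get_source_hash as defined above.
fn distinct_keys_for_different_options(source_text: &str) {
  let key_a = get_source_hash(source_text, 1);
  let key_b = get_source_hash(source_text, 2);
  // different emit-options hashes yield different cache keys
  assert_ne!(key_a, key_b);
}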

@@ -4,6 +4,7 @@ use crate::colors;
 use crate::emit::TsTypeLib;
 use crate::errors::get_error_class_name;
 
+use deno_ast::ParsedSource;
 use deno_core::error::custom_error;
 use deno_core::error::AnyError;
 use deno_core::ModuleSpecifier;
@@ -38,6 +39,7 @@ pub fn contains_specifier(
 pub enum ModuleEntry {
   Module {
     code: Arc<str>,
+    maybe_parsed_source: Option<ParsedSource>,
     dependencies: BTreeMap<String, Dependency>,
     media_type: MediaType,
     /// Whether or not this is a JS/JSX module with a `@ts-check` directive.
@@ -146,6 +148,7 @@ impl GraphData {
         };
         let module_entry = ModuleEntry::Module {
           code,
+          maybe_parsed_source: module.maybe_parsed_source.clone(),
           dependencies: module.dependencies.clone(),
           ts_check,
           media_type,


@@ -168,11 +168,11 @@ fn create_web_worker_callback(
         .map(ToOwned::to_owned),
       root_cert_store: Some(ps.root_cert_store.clone()),
       seed: ps.options.seed(),
-      module_loader,
       create_web_worker_cb,
       preload_module_cb,
       format_js_error_fn: Some(Arc::new(format_js_error)),
-      source_map_getter: Some(Box::new(ps.clone())),
+      source_map_getter: Some(Box::new(module_loader.clone())),
+      module_loader,
       worker_type: args.worker_type,
       maybe_inspector_server,
       get_error_class_fn: Some(&errors::get_error_class_name),
@@ -248,7 +248,7 @@ pub fn create_main_worker(
       .map(ToOwned::to_owned),
     root_cert_store: Some(ps.root_cert_store.clone()),
     seed: ps.options.seed(),
-    source_map_getter: Some(Box::new(ps.clone())),
+    source_map_getter: Some(Box::new(module_loader.clone())),
     format_js_error_fn: Some(Arc::new(format_js_error)),
     create_web_worker_cb,
     web_worker_preload_module_cb,
@@ -518,10 +518,28 @@
   cache_flags: CacheFlags,
 ) -> Result<i32, AnyError> {
   let ps = ProcState::build(flags).await?;
+  load_and_type_check(&ps, &cache_flags.files).await?;
+  ps.cache_module_emits()?;
+  Ok(0)
+}
+
+async fn check_command(
+  flags: Flags,
+  check_flags: CheckFlags,
+) -> Result<i32, AnyError> {
+  let ps = ProcState::build(flags).await?;
+  load_and_type_check(&ps, &check_flags.files).await?;
+  Ok(0)
+}
+
+async fn load_and_type_check(
+  ps: &ProcState,
+  files: &Vec<String>,
+) -> Result<(), AnyError> {
   let lib = ps.options.ts_type_lib_window();
 
-  for file in cache_flags.files {
-    let specifier = resolve_url_or_path(&file)?;
+  for file in files {
+    let specifier = resolve_url_or_path(file)?;
     ps.prepare_module_load(
       vec![specifier],
       false,
@@ -533,20 +551,7 @@
     .await?;
   }
 
-  Ok(0)
-}
-
-async fn check_command(
-  flags: Flags,
-  check_flags: CheckFlags,
-) -> Result<i32, AnyError> {
-  cache_command(
-    flags,
-    CacheFlags {
-      files: check_flags.files,
-    },
-  )
-  .await
+  Ok(())
 }
 
 async fn eval_command(
@@ -609,7 +614,7 @@ async fn create_graph_and_maybe_check(
   debug: bool,
 ) -> Result<Arc<deno_graph::ModuleGraph>, AnyError> {
   let mut cache = cache::FetchCacher::new(
-    ps.dir.gen_cache.clone(),
+    ps.emit_cache.clone(),
     ps.file_fetcher.clone(),
     Permissions::allow_all(),
     Permissions::allow_all(),


@ -1,20 +1,36 @@
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
use crate::emit::emit_parsed_source;
use crate::emit::TsTypeLib; use crate::emit::TsTypeLib;
use crate::graph_util::ModuleEntry;
use crate::proc_state::ProcState; use crate::proc_state::ProcState;
use crate::text_encoding::code_without_source_map;
use crate::text_encoding::source_map_from_code;
use deno_ast::MediaType;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::futures::future::FutureExt; use deno_core::futures::future::FutureExt;
use deno_core::futures::Future; use deno_core::futures::Future;
use deno_core::resolve_url;
use deno_core::ModuleLoader; use deno_core::ModuleLoader;
use deno_core::ModuleSource;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_core::ModuleType;
use deno_core::OpState; use deno_core::OpState;
use deno_core::SourceMapGetter;
use deno_runtime::permissions::Permissions; use deno_runtime::permissions::Permissions;
use std::cell::RefCell; use std::cell::RefCell;
use std::pin::Pin; use std::pin::Pin;
use std::rc::Rc; use std::rc::Rc;
use std::str; use std::str;
struct ModuleCodeSource {
pub code: String,
pub found_url: ModuleSpecifier,
pub media_type: MediaType,
}
pub struct CliModuleLoader { pub struct CliModuleLoader {
pub lib: TsTypeLib, pub lib: TsTypeLib,
/// The initial set of permissions used to resolve the static imports in the /// The initial set of permissions used to resolve the static imports in the
@ -40,6 +56,65 @@ impl CliModuleLoader {
ps, ps,
}) })
} }
fn load_prepared_module(
&self,
specifier: &ModuleSpecifier,
) -> Result<ModuleCodeSource, AnyError> {
let graph_data = self.ps.graph_data.read();
let found_url = graph_data.follow_redirect(specifier);
match graph_data.get(&found_url) {
Some(ModuleEntry::Module {
code,
media_type,
maybe_parsed_source,
..
}) => {
let code = match media_type {
MediaType::JavaScript
| MediaType::Unknown
| MediaType::Cjs
| MediaType::Mjs
| MediaType::Json => {
if let Some(source) = graph_data.get_cjs_esm_translation(specifier)
{
source.to_owned()
} else {
code.to_string()
}
}
MediaType::Dts | MediaType::Dcts | MediaType::Dmts => "".to_string(),
MediaType::TypeScript
| MediaType::Mts
| MediaType::Cts
| MediaType::Jsx
| MediaType::Tsx => {
// get emit text
let parsed_source = maybe_parsed_source.as_ref().unwrap(); // should always be set
emit_parsed_source(
&self.ps.emit_cache,
&found_url,
parsed_source,
&self.ps.emit_options,
self.ps.emit_options_hash,
)?
}
MediaType::TsBuildInfo | MediaType::Wasm | MediaType::SourceMap => {
panic!("Unexpected media type {} for {}", media_type, found_url)
}
};
Ok(ModuleCodeSource {
code,
found_url,
media_type: *media_type,
})
}
_ => Err(anyhow!(
"Loading unprepared module: {}",
specifier.to_string()
)),
}
}
} }
impl ModuleLoader for CliModuleLoader { impl ModuleLoader for CliModuleLoader {
@@ -54,18 +129,35 @@ impl ModuleLoader for CliModuleLoader {
 
   fn load(
     &self,
-    module_specifier: &ModuleSpecifier,
-    maybe_referrer: Option<ModuleSpecifier>,
-    is_dynamic: bool,
+    specifier: &ModuleSpecifier,
+    _maybe_referrer: Option<ModuleSpecifier>,
+    _is_dynamic: bool,
   ) -> Pin<Box<deno_core::ModuleSourceFuture>> {
-    let module_specifier = module_specifier.clone();
-    let ps = self.ps.clone();
-
     // NOTE: this block is async only because of `deno_core` interface
     // requirements; module was already loaded when constructing module graph
-    // during call to `prepare_load`.
-    async move { ps.load(module_specifier, maybe_referrer, is_dynamic) }
-      .boxed_local()
+    // during call to `prepare_load` so we can load it synchronously.
+    let result = self.load_prepared_module(specifier).map(|code_source| {
+      let code = if self.ps.options.is_inspecting() {
+        // we need the code with the source map in order for
+        // it to work with --inspect or --inspect-brk
+        code_source.code
+      } else {
+        // reduce memory and throw away the source map
+        // because we don't need it
+        code_without_source_map(code_source.code)
+      };
+      ModuleSource {
+        code: code.into_bytes().into_boxed_slice(),
+        module_url_specified: specifier.to_string(),
+        module_url_found: code_source.found_url.to_string(),
+        module_type: match code_source.media_type {
+          MediaType::Json => ModuleType::Json,
+          _ => ModuleType::JavaScript,
+        },
+      }
+    });
+
+    Box::pin(deno_core::futures::future::ready(result))
   }
 
   fn prepare_load(
@ -103,3 +195,47 @@ impl ModuleLoader for CliModuleLoader {
.boxed_local() .boxed_local()
} }
} }
impl SourceMapGetter for CliModuleLoader {
fn get_source_map(&self, file_name: &str) -> Option<Vec<u8>> {
if let Ok(specifier) = resolve_url(file_name) {
match specifier.scheme() {
// we should only be looking for emits for schemes that denote external
// modules, which the disk_cache supports
"wasm" | "file" | "http" | "https" | "data" | "blob" => (),
_ => return None,
}
if let Ok(source) = self.load_prepared_module(&specifier) {
source_map_from_code(&source.code)
} else {
None
}
} else {
None
}
}
fn get_source_line(
&self,
file_name: &str,
line_number: usize,
) -> Option<String> {
let graph_data = self.ps.graph_data.read();
let specifier = graph_data.follow_redirect(&resolve_url(file_name).ok()?);
let code = match graph_data.get(&specifier) {
Some(ModuleEntry::Module { code, .. }) => code,
_ => return None,
};
// Do NOT use .lines(): it skips the terminating empty line.
// (due to internally using_terminator() instead of .split())
let lines: Vec<&str> = code.split('\n').collect();
if line_number >= lines.len() {
Some(format!(
"{} Couldn't format source line: Line {} is out of bounds (source may have changed at runtime)",
crate::colors::yellow("Warning"), line_number + 1,
))
} else {
Some(lines[line_number].to_string())
}
}
}

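The "Do NOT use .lines()" comment in get_source_line above is doing real work; a tiny illustration (not part of the diff) of the difference it guards against:

// .split('\n') keeps the trailing empty segment while .lines() drops it,
// which would shift the out-of-bounds check for the last line number.
fn line_counting_sketch() {
  let code = "a\nb\n";
  assert_eq!(code.split('\n').count(), 3); // ["a", "b", ""]
  assert_eq!(code.lines().count(), 2); // ["a", "b"]
}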

@ -6,11 +6,13 @@ use crate::args::Flags;
use crate::args::TypeCheckMode; use crate::args::TypeCheckMode;
use crate::cache; use crate::cache;
use crate::cache::EmitCache; use crate::cache::EmitCache;
use crate::cache::FastInsecureHasher;
use crate::cache::TypeCheckCache; use crate::cache::TypeCheckCache;
use crate::compat; use crate::compat;
use crate::compat::NodeEsmResolver; use crate::compat::NodeEsmResolver;
use crate::deno_dir; use crate::deno_dir;
use crate::emit; use crate::emit;
use crate::emit::emit_parsed_source;
use crate::emit::TsConfigType; use crate::emit::TsConfigType;
use crate::emit::TsTypeLib; use crate::emit::TsTypeLib;
use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileFetcher;
@ -23,7 +25,6 @@ use crate::lockfile::Lockfile;
use crate::resolver::ImportMapResolver; use crate::resolver::ImportMapResolver;
use crate::resolver::JsxResolver; use crate::resolver::JsxResolver;
use deno_ast::MediaType;
use deno_core::anyhow::anyhow; use deno_core::anyhow::anyhow;
use deno_core::anyhow::Context; use deno_core::anyhow::Context;
use deno_core::error::custom_error; use deno_core::error::custom_error;
@ -31,14 +32,10 @@ use deno_core::error::AnyError;
use deno_core::futures; use deno_core::futures;
use deno_core::parking_lot::Mutex; use deno_core::parking_lot::Mutex;
use deno_core::parking_lot::RwLock; use deno_core::parking_lot::RwLock;
use deno_core::resolve_url;
use deno_core::url::Url; use deno_core::url::Url;
use deno_core::CompiledWasmModuleStore; use deno_core::CompiledWasmModuleStore;
use deno_core::ModuleSource;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_core::ModuleType;
use deno_core::SharedArrayBufferStore; use deno_core::SharedArrayBufferStore;
use deno_core::SourceMapGetter;
use deno_graph::create_graph; use deno_graph::create_graph;
use deno_graph::source::CacheInfo; use deno_graph::source::CacheInfo;
use deno_graph::source::LoadFuture; use deno_graph::source::LoadFuture;
@ -70,7 +67,10 @@ pub struct Inner {
pub coverage_dir: Option<String>, pub coverage_dir: Option<String>,
pub file_fetcher: FileFetcher, pub file_fetcher: FileFetcher,
pub options: Arc<CliOptions>, pub options: Arc<CliOptions>,
graph_data: Arc<RwLock<GraphData>>, pub emit_cache: EmitCache,
pub emit_options: deno_ast::EmitOptions,
pub emit_options_hash: u64,
pub graph_data: Arc<RwLock<GraphData>>,
pub lockfile: Option<Arc<Mutex<Lockfile>>>, pub lockfile: Option<Arc<Mutex<Lockfile>>>,
pub maybe_import_map: Option<Arc<ImportMap>>, pub maybe_import_map: Option<Arc<ImportMap>>,
pub maybe_inspector_server: Option<Arc<InspectorServer>>, pub maybe_inspector_server: Option<Arc<InspectorServer>>,
@ -211,10 +211,23 @@ impl ProcState {
file_paths: Arc::new(Mutex::new(vec![])), file_paths: Arc::new(Mutex::new(vec![])),
}); });
let ts_config_result =
cli_options.resolve_ts_config_for_emit(TsConfigType::Emit)?;
if let Some(ignored_options) = ts_config_result.maybe_ignored_options {
warn!("{}", ignored_options);
}
let emit_cache = EmitCache::new(dir.gen_cache.clone());
Ok(ProcState(Arc::new(Inner { Ok(ProcState(Arc::new(Inner {
dir, dir,
coverage_dir, coverage_dir,
options: cli_options, options: cli_options,
emit_cache,
emit_options_hash: FastInsecureHasher::new()
// todo(dsherret): use hash of emit options instead as it's more specific
.write(&ts_config_result.ts_config.as_bytes())
.finish(),
emit_options: ts_config_result.ts_config.into(),
file_fetcher, file_fetcher,
graph_data: Default::default(), graph_data: Default::default(),
lockfile, lockfile,
@ -300,7 +313,7 @@ impl ProcState {
} }
} }
let mut cache = cache::FetchCacher::new( let mut cache = cache::FetchCacher::new(
self.dir.gen_cache.clone(), self.emit_cache.clone(),
self.file_fetcher.clone(), self.file_fetcher.clone(),
root_permissions.clone(), root_permissions.clone(),
dynamic_permissions.clone(), dynamic_permissions.clone(),
@ -411,21 +424,7 @@ impl ProcState {
.unwrap()?; .unwrap()?;
} }
let config_type = if self.options.type_check_mode() == TypeCheckMode::None { // type check if necessary
TsConfigType::Emit
} else {
TsConfigType::Check { lib }
};
let ts_config_result =
self.options.resolve_ts_config_for_emit(config_type)?;
if let Some(ignored_options) = ts_config_result.maybe_ignored_options {
warn!("{}", ignored_options);
}
// start type checking if necessary
let type_checking_task =
if self.options.type_check_mode() != TypeCheckMode::None { if self.options.type_check_mode() != TypeCheckMode::None {
let maybe_config_specifier = self.options.maybe_config_file_specifier(); let maybe_config_specifier = self.options.maybe_config_file_specifier();
let roots = roots.clone(); let roots = roots.clone();
@ -433,36 +432,23 @@ impl ProcState {
type_check_mode: self.options.type_check_mode(), type_check_mode: self.options.type_check_mode(),
debug: self.options.log_level() == Some(log::Level::Debug), debug: self.options.log_level() == Some(log::Level::Debug),
maybe_config_specifier, maybe_config_specifier,
ts_config: ts_config_result.ts_config.clone(), ts_config: self
.options
.resolve_ts_config_for_emit(TsConfigType::Check { lib })?
.ts_config,
log_checks: true, log_checks: true,
reload: self.options.reload_flag() reload: self.options.reload_flag()
&& !roots.iter().all(|r| reload_exclusions.contains(&r.0)), && !roots.iter().all(|r| reload_exclusions.contains(&r.0)),
}; };
// todo(THIS PR): don't use a cache on failure
let check_cache = let check_cache =
TypeCheckCache::new(&self.dir.type_checking_cache_db_file_path()); TypeCheckCache::new(&self.dir.type_checking_cache_db_file_path());
let graph_data = self.graph_data.clone(); let graph_data = self.graph_data.clone();
Some(tokio::task::spawn_blocking(move || { let check_result =
emit::check(&roots, graph_data, &check_cache, options) emit::check(&roots, graph_data, &check_cache, options)?;
})) if !check_result.diagnostics.is_empty() {
} else { return Err(anyhow!(check_result.diagnostics));
None
};
let options = emit::EmitOptions {
ts_config: ts_config_result.ts_config,
reload: self.options.reload_flag(),
reload_exclusions,
};
let emit_result = emit::emit(&graph, &self.dir.gen_cache, options)?;
log::debug!("{}", emit_result.stats);
if let Some(type_checking_task) = type_checking_task {
let type_check_result = type_checking_task.await??;
if !type_check_result.diagnostics.is_empty() {
return Err(anyhow!(type_check_result.diagnostics));
} }
log::debug!("{}", type_check_result.stats); log::debug!("{}", check_result.stats);
} }
if self.options.type_check_mode() != TypeCheckMode::None { if self.options.type_check_mode() != TypeCheckMode::None {
@ -531,72 +517,24 @@ impl ProcState {
} }
} }
pub fn load( pub fn cache_module_emits(&self) -> Result<(), AnyError> {
&self,
specifier: ModuleSpecifier,
maybe_referrer: Option<ModuleSpecifier>,
is_dynamic: bool,
) -> Result<ModuleSource, AnyError> {
log::debug!(
"specifier: {} maybe_referrer: {} is_dynamic: {}",
specifier,
maybe_referrer
.as_ref()
.map(|s| s.to_string())
.unwrap_or_else(|| "<none>".to_string()),
is_dynamic
);
let graph_data = self.graph_data.read(); let graph_data = self.graph_data.read();
let found_url = graph_data.follow_redirect(&specifier); for (specifier, entry) in graph_data.entries() {
match graph_data.get(&found_url) { if let ModuleEntry::Module {
Some(ModuleEntry::Module { maybe_parsed_source: Some(parsed_source),
code, media_type, .. ..
}) => { } = entry
let code = match media_type {
MediaType::JavaScript
| MediaType::Unknown
| MediaType::Cjs
| MediaType::Mjs
| MediaType::Json => {
if let Some(source) = graph_data.get_cjs_esm_translation(&specifier)
{ {
source.to_owned() emit_parsed_source(
} else { &self.emit_cache,
code.to_string() specifier,
parsed_source,
&self.emit_options,
self.emit_options_hash,
)?;
} }
} }
MediaType::Dts | MediaType::Dcts | MediaType::Dmts => "".to_string(), Ok(())
MediaType::TypeScript
| MediaType::Mts
| MediaType::Cts
| MediaType::Jsx
| MediaType::Tsx => {
let cached_text = self.dir.gen_cache.get_emit_text(&found_url);
match cached_text {
Some(text) => text,
None => unreachable!("Unexpected missing emit: {}\n\nTry reloading with the --reload CLI flag or deleting your DENO_DIR.", found_url),
}
}
MediaType::TsBuildInfo | MediaType::Wasm | MediaType::SourceMap => {
panic!("Unexpected media type {} for {}", media_type, found_url)
}
};
Ok(ModuleSource {
code: code.into_bytes().into_boxed_slice(),
module_url_specified: specifier.to_string(),
module_url_found: found_url.to_string(),
module_type: match media_type {
MediaType::Json => ModuleType::Json,
_ => ModuleType::JavaScript,
},
})
}
_ => Err(anyhow!(
"Loading unprepared module: {}",
specifier.to_string()
)),
}
} }
pub async fn create_graph( pub async fn create_graph(
@ -604,7 +542,7 @@ impl ProcState {
roots: Vec<(ModuleSpecifier, ModuleKind)>, roots: Vec<(ModuleSpecifier, ModuleKind)>,
) -> Result<deno_graph::ModuleGraph, AnyError> { ) -> Result<deno_graph::ModuleGraph, AnyError> {
let mut cache = cache::FetchCacher::new( let mut cache = cache::FetchCacher::new(
self.dir.gen_cache.clone(), self.emit_cache.clone(),
self.file_fetcher.clone(), self.file_fetcher.clone(),
Permissions::allow_all(), Permissions::allow_all(),
Permissions::allow_all(), Permissions::allow_all(),
@ -641,55 +579,6 @@ impl ProcState {
} }
} }
// TODO(@kitsonk) this is only temporary, but should be refactored to somewhere
// else, like a refactored file_fetcher.
impl SourceMapGetter for ProcState {
fn get_source_map(&self, file_name: &str) -> Option<Vec<u8>> {
if let Ok(specifier) = resolve_url(file_name) {
match specifier.scheme() {
// we should only be looking for emits for schemes that denote external
// modules, which the disk_cache supports
"wasm" | "file" | "http" | "https" | "data" | "blob" => (),
_ => return None,
}
if let Some(cache_data) = self.dir.gen_cache.get_emit_data(&specifier) {
source_map_from_code(cache_data.text.as_bytes())
.or_else(|| cache_data.map.map(|t| t.into_bytes()))
} else if let Ok(source) = self.load(specifier, None, false) {
source_map_from_code(&source.code)
} else {
None
}
} else {
None
}
}
fn get_source_line(
&self,
file_name: &str,
line_number: usize,
) -> Option<String> {
let graph_data = self.graph_data.read();
let specifier = graph_data.follow_redirect(&resolve_url(file_name).ok()?);
let code = match graph_data.get(&specifier) {
Some(ModuleEntry::Module { code, .. }) => code,
_ => return None,
};
// Do NOT use .lines(): it skips the terminating empty line.
// (due to internally using_terminator() instead of .split())
let lines: Vec<&str> = code.split('\n').collect();
if line_number >= lines.len() {
Some(format!(
"{} Couldn't format source line: Line {} is out of bounds (source may have changed at runtime)",
crate::colors::yellow("Warning"), line_number + 1,
))
} else {
Some(lines[line_number].to_string())
}
}
}
pub fn import_map_from_text( pub fn import_map_from_text(
specifier: &Url, specifier: &Url,
json_text: &str, json_text: &str,
@ -714,19 +603,6 @@ pub fn import_map_from_text(
Ok(result.import_map) Ok(result.import_map)
} }
fn source_map_from_code(code: &[u8]) -> Option<Vec<u8>> {
static PREFIX: &[u8] = b"//# sourceMappingURL=data:application/json;base64,";
let last_line = code.rsplitn(2, |u| u == &b'\n').next().unwrap();
if last_line.starts_with(PREFIX) {
let input = last_line.split_at(PREFIX.len()).1;
let decoded_map = base64::decode(input)
.expect("Unable to decode source map from emitted file.");
Some(decoded_map)
} else {
None
}
}
#[derive(Debug)] #[derive(Debug)]
struct FileWatcherReporter { struct FileWatcherReporter {
sender: tokio::sync::mpsc::UnboundedSender<Vec<PathBuf>>, sender: tokio::sync::mpsc::UnboundedSender<Vec<PathBuf>>,


@@ -1,6 +1,7 @@
 // Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
 
+use crate::itest;
 use test_util as util;
 use test_util::TempDir;


@@ -54,6 +54,34 @@ pub fn strip_bom(text: &str) -> &str {
  }
}
static SOURCE_MAP_PREFIX: &str =
"//# sourceMappingURL=data:application/json;base64,";
pub fn source_map_from_code(code: &str) -> Option<Vec<u8>> {
let last_line = code.rsplit(|u| u == '\n').next()?;
if last_line.starts_with(SOURCE_MAP_PREFIX) {
let input = last_line.split_at(SOURCE_MAP_PREFIX.len()).1;
let decoded_map = base64::decode(input)
.expect("Unable to decode source map from emitted file.");
Some(decoded_map)
} else {
None
}
}
pub fn code_without_source_map(mut code: String) -> String {
if let Some(last_line_index) = code.rfind('\n') {
if code[last_line_index + 1..].starts_with(SOURCE_MAP_PREFIX) {
code.truncate(last_line_index + 1);
code
} else {
code
}
} else {
code
}
}
#[cfg(test)]
mod tests {
  use super::*;
@@ -103,4 +131,33 @@ mod tests {
    let err = result.expect_err("Err expected");
    assert!(err.kind() == ErrorKind::InvalidData);
  }
#[test]
fn test_source_without_source_map() {
run_test("", "");
run_test("\n", "\n");
run_test("\r\n", "\r\n");
run_test("a", "a");
run_test("a\n", "a\n");
run_test("a\r\n", "a\r\n");
run_test("a\r\nb", "a\r\nb");
run_test("a\nb\n", "a\nb\n");
run_test("a\r\nb\r\n", "a\r\nb\r\n");
run_test(
"test\n//# sourceMappingURL=data:application/json;base64,test",
"test\n",
);
run_test(
"test\r\n//# sourceMappingURL=data:application/json;base64,test",
"test\r\n",
);
run_test(
"\n//# sourceMappingURL=data:application/json;base64,test",
"\n",
);
fn run_test(input: &str, output: &str) {
assert_eq!(code_without_source_map(input.to_string()), output);
}
}
} }

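A short sketch of the two helpers added above working together (the base64 payload is a hand-made {"version":3} source map, purely illustrative): source_map_from_code pulls the inline map out of an emitted file, while code_without_source_map strips it when the runtime does not need it.

// Sketch; assumes the helpers above are in scope.
fn strip_inline_map_sketch() {
  let emitted = "console.log(1);\n\
    //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozfQ=="
    .to_string();
  assert!(source_map_from_code(&emitted).is_some());
  assert_eq!(code_without_source_map(emitted), "console.log(1);\n");
}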

@@ -2,10 +2,11 @@
 
 use crate::args::CoverageFlags;
 use crate::args::Flags;
+use crate::cache::EmitCache;
 use crate::colors;
+use crate::emit::get_source_hash;
 use crate::fs_util::collect_files;
 use crate::proc_state::ProcState;
+use crate::text_encoding::source_map_from_code;
 use crate::tools::fmt::format_json;
 
 use deno_ast::MediaType;
@@ -17,7 +18,6 @@ use deno_core::serde_json;
 use deno_core::sourcemap::SourceMap;
 use deno_core::url::Url;
 use deno_core::LocalInspectorSession;
-use deno_core::SourceMapGetter;
 use regex::Regex;
 use std::fs;
 use std::fs::File;
@@ -665,7 +665,8 @@ pub async fn cover_files(
       })?;
 
     // Check if file was transpiled
-    let transpiled_source = match file.media_type {
+    let original_source = &file.source;
+    let transpiled_code = match file.media_type {
       MediaType::JavaScript
       | MediaType::Unknown
       | MediaType::Cjs
@@ -677,8 +678,10 @@ pub async fn cover_files(
       | MediaType::Mts
       | MediaType::Cts
      | MediaType::Tsx => {
-        match ps.dir.gen_cache.get_emit_text(&file.specifier) {
-          Some(source) => source,
+        let source_hash =
+          get_source_hash(original_source, ps.emit_options_hash);
+        match ps.emit_cache.get_emit_code(&file.specifier, source_hash) {
+          Some(code) => code,
           None => {
             return Err(anyhow!(
               "Missing transpiled source code for: \"{}\".
@@ -693,13 +696,10 @@ pub async fn cover_files(
        }
      };
 
-    let original_source = &file.source;
-
-    let maybe_source_map = ps.get_source_map(&script_coverage.url);
-
     let coverage_report = generate_coverage_report(
       &script_coverage,
-      &transpiled_source,
-      &maybe_source_map,
+      &transpiled_code,
+      &source_map_from_code(&transpiled_code),
       &out_mode,
     );


@@ -5,6 +5,7 @@
 use crate::resolve_url;
 pub use sourcemap::SourceMap;
 use std::collections::HashMap;
+use std::rc::Rc;
 use std::str;
 
 pub trait SourceMapGetter {
@@ -17,6 +18,23 @@ pub trait SourceMapGetter {
   ) -> Option<String>;
 }
 
+impl<T> SourceMapGetter for Rc<T>
+where
+  T: SourceMapGetter,
+{
+  fn get_source_map(&self, file_name: &str) -> Option<Vec<u8>> {
+    (**self).get_source_map(file_name)
+  }
+
+  fn get_source_line(
+    &self,
+    file_name: &str,
+    line_number: usize,
+  ) -> Option<String> {
+    (**self).get_source_line(file_name, line_number)
+  }
+}
+
 #[derive(Debug, Default)]
 pub struct SourceMapCache {
   maps: HashMap<String, Option<SourceMap>>,
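To show why the blanket Rc<T> impl above matters (the NullGetter type here is hypothetical; only the trait and the blanket impl come from the diff): an Rc-wrapped getter, such as the Rc<CliModuleLoader> wired into the workers earlier in this commit, can now be boxed directly as a SourceMapGetter.

// Hypothetical example built on the trait as declared above.
use std::rc::Rc;

struct NullGetter;

impl SourceMapGetter for NullGetter {
  fn get_source_map(&self, _file_name: &str) -> Option<Vec<u8>> {
    None
  }
  fn get_source_line(&self, _file_name: &str, _line_number: usize) -> Option<String> {
    None
  }
}

fn boxed_getter_sketch() {
  let getter = Rc::new(NullGetter);
  let _boxed: Box<dyn SourceMapGetter> = Box::new(getter); // allowed via impl<T> SourceMapGetter for Rc<T>
}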