mirror of https://github.com/denoland/deno.git
synced 2024-12-22 07:14:47 -05:00

refactor(cli): migrate run and cache to new infrastructure (#7996)

Co-authored-by: Ryan Dahl <ry@tinyclouds.org>

This commit is contained in:
parent 9fa59f0ca8
commit 7e2c7fb6c5

48 changed files with 1181 additions and 1299 deletions
12  cli/ast.rs

@@ -72,6 +72,18 @@ impl Into<Location> for swc_common::Loc {
  }
}

impl Into<ModuleSpecifier> for Location {
  fn into(self) -> ModuleSpecifier {
    ModuleSpecifier::resolve_url_or_path(&self.filename).unwrap()
  }
}

impl std::fmt::Display for Location {
  fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
    write!(f, "{}:{}:{}", self.filename, self.line, self.col)
  }
}

/// A buffer for collecting diagnostic messages from the AST parser.
#[derive(Debug)]
pub struct DiagnosticBuffer(Vec<String>);
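A minimal sketch of the two new impls in use; the `Location` value here is invented for illustration, only the field names and behavior come from the hunk above.

  // Sketch only: a hypothetical Location built elsewhere in the CLI.
  let loc = Location {
    filename: "file:///project/mod.ts".to_string(),
    line: 10,
    col: 2,
  };
  println!("{}", loc); // Display: file:///project/mod.ts:10:2
  let specifier: ModuleSpecifier = loc.into(); // resolves the filename into a specifier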

@@ -107,8 +107,9 @@ impl DiskCache {
}
scheme => {
unimplemented!(
"Don't know how to create cache name for scheme: {}",
scheme
"Don't know how to create cache name for scheme: {}\n Url: {}",
scheme,
url
);
}
};
@@ -579,6 +579,29 @@ fn map_js_like_extension(path: &Path, default: MediaType) -> MediaType {
None => default,
Some("jsx") => MediaType::JSX,
Some("tsx") => MediaType::TSX,
// Because DTS files do not have a separate media type, or a unique
// extension, we have to "guess" at those things that we consider that
// look like TypeScript, and end with `.d.ts` are DTS files.
Some("ts") => {
if default == MediaType::TypeScript {
match path.file_stem() {
None => default,
Some(os_str) => {
if let Some(file_stem) = os_str.to_str() {
if file_stem.ends_with(".d") {
MediaType::Dts
} else {
default
}
} else {
default
}
}
}
} else {
default
}
}
Some(_) => default,
},
}

@@ -1564,7 +1587,7 @@ mod tests {
);
assert_eq!(
map_content_type(Path::new("foo/bar.d.ts"), None).0,
MediaType::TypeScript
MediaType::Dts
);
assert_eq!(
map_content_type(Path::new("foo/bar.js"), None).0,

@@ -1741,6 +1764,26 @@ mod tests {
.0,
MediaType::JSX
);
assert_eq!(
map_content_type(
Path::new("foo/bar.d.ts"),
Some("application/x-javascript")
)
.0,
MediaType::JavaScript
);
assert_eq!(
map_content_type(Path::new("foo/bar.d.ts"), Some("text/plain")).0,
MediaType::Dts
);
assert_eq!(
map_content_type(
Path::new("foo/bar.d.ts"),
Some("video/vnd.dlna.mpeg-tts"),
)
.0,
MediaType::Dts
);
}

#[test]
@@ -7,6 +7,7 @@ use deno_core::error::{AnyError, JsError as CoreJsError, JsStackFrame};
use std::error::Error;
use std::fmt;
use std::ops::Deref;
use std::sync::Arc;

const SOURCE_ABBREV_THRESHOLD: usize = 150;

@@ -237,7 +238,7 @@ pub struct JsError(CoreJsError);
impl JsError {
  pub fn create(
    core_js_error: CoreJsError,
    source_map_getter: &impl SourceMapGetter,
    source_map_getter: Arc<impl SourceMapGetter>,
  ) -> AnyError {
    let core_js_error = apply_source_map(&core_js_error, source_map_getter);
    let js_error = Self(core_js_error);
14  cli/main.rs

@@ -51,7 +51,7 @@ mod test_runner;
mod text_encoding;
mod tokio_util;
mod tsc;
pub mod tsc2;
mod tsc2;
mod tsc_config;
mod upgrade;
mod version;

@@ -174,14 +174,16 @@ async fn info_command(
let specifier = ModuleSpecifier::resolve_url_or_path(&specifier)?;
let handler = Rc::new(RefCell::new(specifier_handler::FetchHandler::new(
&program_state,
// info accesses dynamically imported modules just for their information
// so we allow access to all of them.
Permissions::allow_all(),
)?));
let mut builder = module_graph2::GraphBuilder2::new(
handler,
program_state.maybe_import_map.clone(),
);
builder.insert(&specifier).await?;
let graph = builder.get_graph(&program_state.lockfile)?;
builder.add(&specifier, false).await?;
let graph = builder.get_graph(&program_state.lockfile);
let info = graph.info()?;

if json {

@@ -312,14 +314,16 @@ async fn bundle_command(
let output = if flags.no_check {
let handler = Rc::new(RefCell::new(FetchHandler::new(
&program_state,
// when bundling, dynamic imports are only access for their type safety,
// therefore we will allow the graph to access any module.
Permissions::allow_all(),
)?));
let mut builder = module_graph2::GraphBuilder2::new(
handler,
program_state.maybe_import_map.clone(),
);
builder.insert(&module_specifier).await?;
let graph = builder.get_graph(&program_state.lockfile)?;
builder.add(&module_specifier, false).await?;
let graph = builder.get_graph(&program_state.lockfile);

let (s, stats, maybe_ignored_options) =
graph.bundle(module_graph2::BundleOptions {
@@ -77,7 +77,19 @@ impl MediaType {
},
},
Some(os_str) => match os_str.to_str() {
Some("ts") => MediaType::TypeScript,
Some("ts") => match path.file_stem() {
Some(os_str) => match os_str.to_str() {
Some(file_name) => {
if file_name.ends_with(".d") {
MediaType::Dts
} else {
MediaType::TypeScript
}
}
None => MediaType::TypeScript,
},
None => MediaType::TypeScript,
},
Some("tsx") => MediaType::TSX,
Some("js") => MediaType::JavaScript,
Some("jsx") => MediaType::JSX,

@@ -121,6 +133,19 @@ impl MediaType {

ext.into()
}

/// Map the media type to a `ts.ScriptKind`
pub fn as_ts_script_kind(&self) -> i32 {
match self {
MediaType::JavaScript => 1,
MediaType::JSX => 2,
MediaType::TypeScript => 3,
MediaType::Dts => 3,
MediaType::TSX => 4,
MediaType::Json => 5,
_ => 0,
}
}
}

impl Serialize for MediaType {

@@ -167,10 +192,7 @@ mod tests {
MediaType::TypeScript
);
assert_eq!(MediaType::from(Path::new("foo/bar.tsx")), MediaType::TSX);
assert_eq!(
MediaType::from(Path::new("foo/bar.d.ts")),
MediaType::TypeScript
);
assert_eq!(MediaType::from(Path::new("foo/bar.d.ts")), MediaType::Dts);
assert_eq!(
MediaType::from(Path::new("foo/bar.js")),
MediaType::JavaScript
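A small sketch of the new helper in use; per its doc comment the numbers map to TypeScript's `ts.ScriptKind`, and the values shown come straight from the hunk above (the call site itself is invented).

  // Sketch only: assumes `use std::path::Path;` in scope.
  let media_type = MediaType::from(Path::new("foo/bar.d.ts"));
  assert_eq!(media_type, MediaType::Dts);
  assert_eq!(media_type.as_ts_script_kind(), 3); // .d.ts is still ScriptKind "TS"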
(File diff suppressed because it is too large.)
@@ -83,7 +83,7 @@ impl ModuleLoader for CliModuleLoader {
op_state: Rc<RefCell<OpState>>,
module_specifier: &ModuleSpecifier,
maybe_referrer: Option<ModuleSpecifier>,
_is_dyn_import: bool,
_is_dynamic: bool,
) -> Pin<Box<deno_core::ModuleSourceFuture>> {
let module_specifier = module_specifier.to_owned();
let module_url_specified = module_specifier.to_string();

@@ -92,11 +92,10 @@ impl ModuleLoader for CliModuleLoader {
state.borrow::<Arc<ProgramState>>().clone()
};

// TODO(bartlomieju): `fetch_compiled_module` should take `load_id` param
// TODO(@kitsonk) this shouldn't be async
let fut = async move {
let compiled_module = program_state
.fetch_compiled_module(module_specifier, maybe_referrer)
.await?;
.fetch_compiled_module(module_specifier, maybe_referrer)?;
Ok(deno_core::ModuleSource {
// Real module name, might be different from initial specifier
// due to redirections.

@@ -113,44 +112,28 @@ impl ModuleLoader for CliModuleLoader {
&self,
op_state: Rc<RefCell<OpState>>,
_load_id: ModuleLoadId,
module_specifier: &ModuleSpecifier,
maybe_referrer: Option<String>,
is_dyn_import: bool,
specifier: &ModuleSpecifier,
_maybe_referrer: Option<String>,
is_dynamic: bool,
) -> Pin<Box<dyn Future<Output = Result<(), AnyError>>>> {
let module_specifier = module_specifier.clone();
let specifier = specifier.clone();
let target_lib = self.target_lib.clone();
let maybe_import_map = self.import_map.clone();
let state = op_state.borrow();

// Only "main" module is loaded without permission check,
// ie. module that is associated with "is_main" state
// and is not a dynamic import.
let permissions = if self.is_main && !is_dyn_import {
Permissions::allow_all()
} else {
state.borrow::<Permissions>().clone()
};
// The permissions that should be applied to any dynamically imported module
let dynamic_permissions = state.borrow::<Permissions>().clone();
let program_state = state.borrow::<Arc<ProgramState>>().clone();
drop(state);

// TODO(bartlomieju): I'm not sure if it's correct to ignore
// bad referrer - this is the case for `Deno.core.evalContext()` where
// `ref_str` is `<unknown>`.
let maybe_referrer = if let Some(ref_str) = maybe_referrer {
ModuleSpecifier::resolve_url(&ref_str).ok()
} else {
None
};

// TODO(bartlomieju): `prepare_module_load` should take `load_id` param
async move {
program_state
.prepare_module_load(
module_specifier,
maybe_referrer,
specifier,
target_lib,
permissions,
is_dyn_import,
dynamic_permissions,
is_dynamic,
maybe_import_map,
)
.await
@@ -39,7 +39,7 @@ fn op_apply_source_map(
args.line_number.into(),
args.column_number.into(),
&mut mappings_map,
&super::program_state(state).ts_compiler,
super::program_state(state),
);

Ok(json!({
@@ -8,16 +8,20 @@ use crate::import_map::ImportMap;
use crate::inspector::InspectorServer;
use crate::lockfile::Lockfile;
use crate::media_type::MediaType;
use crate::module_graph::ModuleGraphFile;
use crate::module_graph::ModuleGraphLoader;
use crate::module_graph2::CheckOptions;
use crate::module_graph2::GraphBuilder2;
use crate::module_graph2::TranspileOptions;
use crate::module_graph2::TypeLib;
use crate::permissions::Permissions;
use crate::source_maps::SourceMapGetter;
use crate::specifier_handler::FetchHandler;
use crate::tsc::CompiledModule;
use crate::tsc::TargetLib;
use crate::tsc::TsCompiler;

use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_core::ModuleSpecifier;
use std::cell::RefCell;
use std::env;
@@ -115,89 +119,66 @@ impl ProgramState {
/// and traspilation.
pub async fn prepare_module_load(
self: &Arc<Self>,
module_specifier: ModuleSpecifier,
maybe_referrer: Option<ModuleSpecifier>,
specifier: ModuleSpecifier,
target_lib: TargetLib,
permissions: Permissions,
is_dyn_import: bool,
dynamic_permissions: Permissions,
is_dynamic: bool,
maybe_import_map: Option<ImportMap>,
) -> Result<(), AnyError> {
let module_specifier = module_specifier.clone();
let specifier = specifier.clone();
let handler =
Rc::new(RefCell::new(FetchHandler::new(self, dynamic_permissions)?));
let mut builder = GraphBuilder2::new(handler, maybe_import_map);
builder.add(&specifier, is_dynamic).await?;
let mut graph = builder.get_graph(&self.lockfile);
let debug = self.flags.log_level == Some(log::Level::Debug);
let maybe_config_path = self.flags.config_path.clone();

if self.flags.no_check {
debug!("Transpiling root: {}", module_specifier);
// TODO(kitsonk) note that self.permissions != permissions, which is
// something that should be handled better in the future.
let handler =
Rc::new(RefCell::new(FetchHandler::new(self, permissions.clone())?));
let mut builder = GraphBuilder2::new(handler, maybe_import_map);
builder.insert(&module_specifier).await?;
let mut graph = builder.get_graph(&self.lockfile)?;

let (stats, maybe_ignored_options) =
graph.transpile(TranspileOptions {
debug: self.flags.log_level == Some(log::Level::Debug),
maybe_config_path: self.flags.config_path.clone(),
debug,
maybe_config_path,
reload: self.flags.reload,
})?;

debug!("{}", stats);
if let Some(ignored_options) = maybe_ignored_options {
eprintln!("{}", ignored_options);
}

debug!("{}", stats);
} else {
let mut module_graph_loader = ModuleGraphLoader::new(
self.file_fetcher.clone(),
maybe_import_map,
permissions.clone(),
is_dyn_import,
false,
);
module_graph_loader
.add_to_graph(&module_specifier, maybe_referrer)
.await?;
let module_graph = module_graph_loader.get_graph();

let out = self
.file_fetcher
.fetch_cached_source_file(&module_specifier, permissions.clone())
.expect("Source file not found");

let module_graph_files = module_graph.values().collect::<Vec<_>>();
// Check integrity of every file in module graph
if let Some(ref lockfile) = self.lockfile {
let mut g = lockfile.lock().unwrap();

for graph_file in &module_graph_files {
let check_passed =
g.check_or_insert(&graph_file.url, &graph_file.source_code);

if !check_passed {
eprintln!(
"Subresource integrity check failed --lock={}\n{}",
g.filename.display(),
graph_file.url
);
std::process::exit(10);
let lib = match target_lib {
TargetLib::Main => {
if self.flags.unstable {
TypeLib::UnstableDenoWindow
} else {
TypeLib::DenoWindow
}
}
}
TargetLib::Worker => {
if self.flags.unstable {
TypeLib::UnstableDenoWorker
} else {
TypeLib::DenoWorker
}
}
};
let (stats, diagnostics, maybe_ignored_options) =
graph.check(CheckOptions {
debug,
emit: true,
lib,
maybe_config_path,
reload: self.flags.reload,
})?;

// Check if we need to compile files.
let should_compile = needs_compilation(
self.ts_compiler.compile_js,
out.media_type,
&module_graph_files,
);
let allow_js = should_allow_js(&module_graph_files);

if should_compile {
self
.ts_compiler
.compile(self, &out, target_lib, &module_graph, allow_js)
.await?;
debug!("{}", stats);
if let Some(ignored_options) = maybe_ignored_options {
eprintln!("{}", ignored_options);
}
}
if !diagnostics.0.is_empty() {
return Err(generic_error(diagnostics.to_string()));
}
};

if let Some(ref lockfile) = self.lockfile {
let g = lockfile.lock().unwrap();
@@ -207,44 +188,39 @@ impl ProgramState {
Ok(())
}

// TODO(bartlomieju): this method doesn't need to be async anymore
/// This method is used after `prepare_module_load` finishes and JsRuntime
/// starts loading source and executing source code. This method shouldn't
/// perform any IO (besides $DENO_DIR) and only operate on sources collected
/// during `prepare_module_load`.
pub async fn fetch_compiled_module(
pub fn fetch_compiled_module(
&self,
module_specifier: ModuleSpecifier,
_maybe_referrer: Option<ModuleSpecifier>,
maybe_referrer: Option<ModuleSpecifier>,
) -> Result<CompiledModule, AnyError> {
let out = self
.file_fetcher
.fetch_cached_source_file(&module_specifier, Permissions::allow_all())
.expect("Cached source file doesn't exist");

// Check if we need to compile files
let was_compiled = match out.media_type {
MediaType::TypeScript | MediaType::TSX | MediaType::JSX => true,
MediaType::JavaScript => self.ts_compiler.compile_js,
_ => false,
};

let compiled_module = if was_compiled {
match self.ts_compiler.get_compiled_module(&out.url) {
Ok(module) => module,
Err(e) => {
let msg = format!(
"Failed to get compiled source code of \"{}\".\nReason: {}\n\
If the source file provides only type exports, prefer to use \"import type\" or \"export type\" syntax instead.",
out.url, e.to_string()
);
info!("{} {}", crate::colors::yellow("Warning"), msg);

CompiledModule {
code: "".to_string(),
name: out.url.to_string(),
}
}
let url = out.url.clone();
let compiled_module = if let Some((code, _)) = self.get_emit(&url) {
CompiledModule {
code: String::from_utf8(code).unwrap(),
name: out.url.to_string(),
}
// We expect a compiled source for any non-JavaScript files, except for
// local files that have an unknown media type and no referrer (root modules
// that do not have an extension.)
} else if out.media_type != MediaType::JavaScript
&& !(out.media_type == MediaType::Unknown
&& maybe_referrer.is_none()
&& url.scheme() == "file")
{
let message = if let Some(referrer) = maybe_referrer {
format!("Compiled module not found \"{}\"\n  From: {}\n  If the source module contains only types, use `import type` and `export type` to import it instead.", module_specifier, referrer)
} else {
format!("Compiled module not found \"{}\"\n  If the source module contains only types, use `import type` and `export type` to import it instead.", module_specifier)
};
info!("{}: {}", crate::colors::yellow("warning"), message);
CompiledModule {
code: "".to_string(),
name: out.url.to_string(),
}
} else {
CompiledModule {
@@ -256,6 +232,37 @@ impl ProgramState {
Ok(compiled_module)
}

// TODO(@kitsonk) this should be a straight forward API on file_fetcher or
// whatever future refactors do...
fn get_emit(&self, url: &Url) -> Option<(Vec<u8>, Option<Vec<u8>>)> {
match url.scheme() {
// we should only be looking for emits for schemes that denote external
// modules, which the disk_cache supports
"wasm" | "file" | "http" | "https" => (),
_ => {
return None;
}
}
let emit_path = self
.dir
.gen_cache
.get_cache_filename_with_extension(&url, "js");
let emit_map_path = self
.dir
.gen_cache
.get_cache_filename_with_extension(&url, "js.map");
if let Ok(code) = self.dir.gen_cache.get(&emit_path) {
let maybe_map = if let Ok(map) = self.dir.gen_cache.get(&emit_map_path) {
Some(map)
} else {
None
};
Some((code, maybe_map))
} else {
None
}
}

/// Quits the process if the --unstable flag was not provided.
///
/// This is intentionally a non-recoverable check so that people cannot probe
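A rough sketch of what a lookup through `get_emit` yields; the URL and call site are invented, and since the method is private it would have to live inside `program_state.rs` itself.

  // Sketch only: inside ProgramState, looking up a previously emitted module.
  let url = Url::parse("https://deno.land/std/http/server.ts").unwrap();
  if let Some((code, maybe_map)) = self.get_emit(&url) {
    let js = String::from_utf8(code).unwrap(); // the cached ".js" emit
    let has_map = maybe_map.is_some();         // the optional ".js.map" stored next to it
  }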
@@ -279,57 +286,62 @@ impl ProgramState {
}
}

/// Determine if TS compiler should be run with `allowJs` setting on. This
/// is the case when there's either:
/// - a JavaScript file with non-JavaScript import
/// - JSX import
fn should_allow_js(module_graph_files: &[&ModuleGraphFile]) -> bool {
module_graph_files.iter().any(|module_file| {
if module_file.media_type == MediaType::JSX {
true
} else if module_file.media_type == MediaType::JavaScript {
module_file.imports.iter().any(|import_desc| {
let import_file = module_graph_files
.iter()
.find(|f| {
f.specifier == import_desc.resolved_specifier.to_string().as_str()
})
.expect("Failed to find imported file");
let media_type = import_file.media_type;
media_type == MediaType::TypeScript
|| media_type == MediaType::TSX
|| media_type == MediaType::JSX
})
// TODO(@kitsonk) this is only temporary, but should be refactored to somewhere
// else, like a refactored file_fetcher.
impl SourceMapGetter for ProgramState {
fn get_source_map(&self, file_name: &str) -> Option<Vec<u8>> {
if let Ok(specifier) = ModuleSpecifier::resolve_url(file_name) {
if let Some((code, maybe_map)) = self.get_emit(&specifier.as_url()) {
if maybe_map.is_some() {
maybe_map
} else {
let code = String::from_utf8(code).unwrap();
let lines: Vec<&str> = code.split('\n').collect();
if let Some(last_line) = lines.last() {
if last_line
.starts_with("//# sourceMappingURL=data:application/json;base64,")
{
let input = last_line.trim_start_matches(
"//# sourceMappingURL=data:application/json;base64,",
);
let decoded_map = base64::decode(input)
.expect("Unable to decode source map from emitted file.");
Some(decoded_map)
} else {
None
}
} else {
None
}
}
} else {
None
}
} else {
false
None
}
})
}
}

// Compilation happens if either:
// - `checkJs` is set to true in TS config
// - entry point is a TS file
// - any dependency in module graph is a TS file
fn needs_compilation(
compile_js: bool,
media_type: MediaType,
module_graph_files: &[&ModuleGraphFile],
) -> bool {
let mut needs_compilation = match media_type {
MediaType::TypeScript | MediaType::TSX | MediaType::JSX => true,
MediaType::JavaScript => compile_js,
_ => false,
};

needs_compilation |= module_graph_files.iter().any(|module_file| {
let media_type = module_file.media_type;

media_type == (MediaType::TypeScript)
|| media_type == (MediaType::TSX)
|| media_type == (MediaType::JSX)
});

needs_compilation
fn get_source_line(
&self,
file_name: &str,
line_number: usize,
) -> Option<String> {
if let Ok(specifier) = ModuleSpecifier::resolve_url(file_name) {
self
.file_fetcher
.fetch_cached_source_file(&specifier, Permissions::allow_all())
.map(|out| {
// Do NOT use .lines(): it skips the terminating empty line.
// (due to internally using .split_terminator() instead of .split())
let lines: Vec<&str> = out.source_code.split('\n').collect();
assert!(lines.len() > line_number);
lines[line_number].to_string()
})
} else {
None
}
}
}

#[test]
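Because the emitted JS may carry its source map inline, the getter above decodes a trailing data URL. A minimal sketch of that decode step under made-up input (the base64 here encodes the JSON {"version":3}):

  // Sketch only: decoding an inline source map from an emitted file's last line.
  let last_line = "//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozfQ==";
  let prefix = "//# sourceMappingURL=data:application/json;base64,";
  if last_line.starts_with(prefix) {
    let encoded = last_line.trim_start_matches(prefix);
    let map_bytes = base64::decode(encoded).expect("invalid base64 in source map");
    // map_bytes now holds the raw JSON source map bytes.
  }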
@@ -337,203 +349,3 @@ fn thread_safe() {
fn f<S: Send + Sync>(_: S) {}
f(ProgramState::mock(vec![], None));
}

#[test]
fn test_should_allow_js() {
use crate::ast::Location;
use crate::module_graph::ImportDescriptor;

assert!(should_allow_js(&[
&ModuleGraphFile {
specifier: "file:///some/file.ts".to_string(),
url: "file:///some/file.ts".to_string(),
redirect: None,
filename: "some/file.ts".to_string(),
imports: vec![],
version_hash: "1".to_string(),
referenced_files: vec![],
lib_directives: vec![],
types_directives: vec![],
type_headers: vec![],
media_type: MediaType::TypeScript,
source_code: "function foo() {}".to_string(),
},
&ModuleGraphFile {
specifier: "file:///some/file1.js".to_string(),
url: "file:///some/file1.js".to_string(),
redirect: None,
filename: "some/file1.js".to_string(),
version_hash: "1".to_string(),
imports: vec![ImportDescriptor {
specifier: "./file.ts".to_string(),
resolved_specifier: ModuleSpecifier::resolve_url(
"file:///some/file.ts",
)
.unwrap(),
type_directive: None,
resolved_type_directive: None,
location: Location {
filename: "file:///some/file1.js".to_string(),
line: 0,
col: 0,
},
}],
referenced_files: vec![],
lib_directives: vec![],
types_directives: vec![],
type_headers: vec![],
media_type: MediaType::JavaScript,
source_code: "function foo() {}".to_string(),
},
],));

assert!(should_allow_js(&[
&ModuleGraphFile {
specifier: "file:///some/file.jsx".to_string(),
url: "file:///some/file.jsx".to_string(),
redirect: None,
filename: "some/file.jsx".to_string(),
imports: vec![],
version_hash: "1".to_string(),
referenced_files: vec![],
lib_directives: vec![],
types_directives: vec![],
type_headers: vec![],
media_type: MediaType::JSX,
source_code: "function foo() {}".to_string(),
},
&ModuleGraphFile {
specifier: "file:///some/file.ts".to_string(),
url: "file:///some/file.ts".to_string(),
redirect: None,
filename: "some/file.ts".to_string(),
version_hash: "1".to_string(),
imports: vec![ImportDescriptor {
specifier: "./file.jsx".to_string(),
resolved_specifier: ModuleSpecifier::resolve_url(
"file:///some/file.jsx",
)
.unwrap(),
type_directive: None,
resolved_type_directive: None,
location: Location {
filename: "file:///some/file1.ts".to_string(),
line: 0,
col: 0,
},
}],
referenced_files: vec![],
lib_directives: vec![],
types_directives: vec![],
type_headers: vec![],
media_type: MediaType::TypeScript,
source_code: "function foo() {}".to_string(),
},
]));

assert!(!should_allow_js(&[
&ModuleGraphFile {
specifier: "file:///some/file.js".to_string(),
url: "file:///some/file.js".to_string(),
redirect: None,
filename: "some/file.js".to_string(),
imports: vec![],
referenced_files: vec![],
lib_directives: vec![],
types_directives: vec![],
version_hash: "1".to_string(),
type_headers: vec![],
media_type: MediaType::JavaScript,
source_code: "function foo() {}".to_string(),
},
&ModuleGraphFile {
specifier: "file:///some/file1.js".to_string(),
url: "file:///some/file1.js".to_string(),
redirect: None,
filename: "some/file1.js".to_string(),
imports: vec![ImportDescriptor {
specifier: "./file.js".to_string(),
resolved_specifier: ModuleSpecifier::resolve_url(
"file:///some/file.js",
)
.unwrap(),
type_directive: None,
resolved_type_directive: None,
location: Location {
filename: "file:///some/file.js".to_string(),
line: 0,
col: 0,
},
}],
referenced_files: vec![],
lib_directives: vec![],
types_directives: vec![],
version_hash: "1".to_string(),
type_headers: vec![],
media_type: MediaType::JavaScript,
source_code: "function foo() {}".to_string(),
},
],));
}

#[test]
fn test_needs_compilation() {
assert!(!needs_compilation(
false,
MediaType::JavaScript,
&[&ModuleGraphFile {
specifier: "some/file.js".to_string(),
url: "file:///some/file.js".to_string(),
redirect: None,
filename: "some/file.js".to_string(),
imports: vec![],
referenced_files: vec![],
lib_directives: vec![],
types_directives: vec![],
type_headers: vec![],
version_hash: "1".to_string(),
media_type: MediaType::JavaScript,
source_code: "function foo() {}".to_string(),
}],
));

assert!(!needs_compilation(false, MediaType::JavaScript, &[]));
assert!(needs_compilation(true, MediaType::JavaScript, &[]));
assert!(needs_compilation(false, MediaType::TypeScript, &[]));
assert!(needs_compilation(false, MediaType::JSX, &[]));
assert!(needs_compilation(false, MediaType::TSX, &[]));
assert!(needs_compilation(
false,
MediaType::JavaScript,
&[
&ModuleGraphFile {
specifier: "file:///some/file.ts".to_string(),
url: "file:///some/file.ts".to_string(),
redirect: None,
filename: "some/file.ts".to_string(),
imports: vec![],
referenced_files: vec![],
lib_directives: vec![],
types_directives: vec![],
type_headers: vec![],
media_type: MediaType::TypeScript,
version_hash: "1".to_string(),
source_code: "function foo() {}".to_string(),
},
&ModuleGraphFile {
specifier: "file:///some/file1.js".to_string(),
url: "file:///some/file1.js".to_string(),
redirect: None,
filename: "some/file1.js".to_string(),
imports: vec![],
referenced_files: vec![],
lib_directives: vec![],
types_directives: vec![],
type_headers: vec![],
version_hash: "1".to_string(),
media_type: MediaType::JavaScript,
source_code: "function foo() {}".to_string(),
},
],
));
}
@@ -6,8 +6,9 @@ use deno_core::error::JsError as CoreJsError;
use sourcemap::SourceMap;
use std::collections::HashMap;
use std::str;
use std::sync::Arc;

pub trait SourceMapGetter {
pub trait SourceMapGetter: Sync + Send {
/// Returns the raw source map file.
fn get_source_map(&self, file_name: &str) -> Option<Vec<u8>>;
fn get_source_line(

@@ -26,7 +27,7 @@ pub type CachedMaps = HashMap<String, Option<SourceMap>>;
/// source, rather than the transpiled source code.
pub fn apply_source_map<G: SourceMapGetter>(
js_error: &CoreJsError,
getter: &G,
getter: Arc<G>,
) -> CoreJsError {
// Note that js_error.frames has already been source mapped in
// prepareStackTrace().

@@ -39,7 +40,7 @@ pub fn apply_source_map<G: SourceMapGetter>(
// start_column is 0-based, we need 1-based here.
js_error.start_column.map(|n| n + 1),
&mut mappings_map,
getter,
getter.clone(),
);
let start_column = start_column.map(|n| n - 1);
// It is better to just move end_column to be the same distance away from

@@ -87,7 +88,7 @@ fn get_maybe_orig_position<G: SourceMapGetter>(
line_number: Option<i64>,
column_number: Option<i64>,
mappings_map: &mut CachedMaps,
getter: &G,
getter: Arc<G>,
) -> (Option<String>, Option<i64>, Option<i64>) {
match (file_name, line_number, column_number) {
(Some(file_name_v), Some(line_v), Some(column_v)) => {

@@ -104,7 +105,7 @@ pub fn get_orig_position<G: SourceMapGetter>(
line_number: i64,
column_number: i64,
mappings_map: &mut CachedMaps,
getter: &G,
getter: Arc<G>,
) -> (String, i64, i64) {
let maybe_source_map = get_mappings(&file_name, mappings_map, getter);
let default_pos = (file_name, line_number, column_number);

@@ -134,7 +135,7 @@ pub fn get_orig_position<G: SourceMapGetter>(
fn get_mappings<'a, G: SourceMapGetter>(
file_name: &str,
mappings_map: &'a mut CachedMaps,
getter: &G,
getter: Arc<G>,
) -> &'a Option<SourceMap> {
mappings_map
.entry(file_name.to_string())

@@ -145,7 +146,7 @@ fn get_mappings<'a, G: SourceMapGetter>(
// the module meta data.
fn parse_map_string<G: SourceMapGetter>(
file_name: &str,
getter: &G,
getter: Arc<G>,
) -> Option<SourceMap> {
getter
.get_source_map(file_name)

@@ -207,8 +208,8 @@ mod tests {
frames: vec![],
stack: None,
};
let getter = MockSourceMapGetter {};
let actual = apply_source_map(&e, &getter);
let getter = Arc::new(MockSourceMapGetter {});
let actual = apply_source_map(&e, getter);
assert_eq!(actual.source_line, Some("console.log('foo');".to_string()));
}
}
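Since the getter is now passed around behind an `Arc` and must be `Sync + Send`, a custom implementation would look roughly like the sketch below. The struct is invented, and the `get_source_line` signature is taken from the `ProgramState` impl earlier in this diff rather than from the trait declaration itself, which is cut off here.

  // Sketch only: a trivial getter that never resolves anything.
  struct NoopGetter;

  impl SourceMapGetter for NoopGetter {
    fn get_source_map(&self, _file_name: &str) -> Option<Vec<u8>> {
      None
    }
    fn get_source_line(&self, _file_name: &str, _line_number: usize) -> Option<String> {
      None
    }
  }

  let getter = Arc::new(NoopGetter);
  let mapped = apply_source_map(&js_error, getter); // js_error: a CoreJsError produced elsewhere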
@@ -1,5 +1,6 @@
// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.

use crate::ast::Location;
use crate::deno_dir::DenoDir;
use crate::disk_cache::DiskCache;
use crate::file_fetcher::SourceFileFetcher;

@@ -25,8 +26,29 @@ pub type DependencyMap = HashMap<String, Dependency>;
pub type FetchFuture =
Pin<Box<(dyn Future<Output = Result<CachedModule, AnyError>> + 'static)>>;

/// A group of errors that represent errors that can occur with an
/// an implementation of `SpecifierHandler`.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum HandlerError {
/// A fetch error, where we have a location associated with it.
FetchErrorWithLocation(String, Location),
}

impl fmt::Display for HandlerError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
HandlerError::FetchErrorWithLocation(ref err, ref location) => {
write!(f, "{}\n at {}", err, location)
}
}
}
}

impl std::error::Error for HandlerError {}

#[derive(Debug, Clone)]
pub struct CachedModule {
pub is_remote: bool,
pub maybe_dependencies: Option<DependencyMap>,
pub maybe_emit: Option<Emit>,
pub maybe_emit_path: Option<(PathBuf, Option<PathBuf>)>,
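A small sketch of the new error variant in use; the message and location values are invented, and the rendered form relies on the `Display` impl for `Location` added in cli/ast.rs above.

  // Sketch only: constructing and printing the new handler error.
  let err = HandlerError::FetchErrorWithLocation(
    "Cannot resolve module \"./missing.ts\"".to_string(),
    Location {
      filename: "file:///project/main.ts".to_string(),
      line: 2,
      col: 0,
    },
  );
  println!("{}", err);
  // prints the message, then "at file:///project/main.ts:2:0" on the next line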
@@ -44,6 +66,7 @@ impl Default for CachedModule {
fn default() -> Self {
let specifier = ModuleSpecifier::resolve_url("file:///example.js").unwrap();
CachedModule {
is_remote: false,
maybe_dependencies: None,
maybe_emit: None,
maybe_emit_path: None,

@@ -76,8 +99,12 @@ impl Default for Emit {
}
}

#[derive(Debug, Clone, Default)]
#[derive(Debug, Clone)]
pub struct Dependency {
/// Flags if the dependency is a dynamic import or not.
pub is_dynamic: bool,
/// The location in the source code where the dependency statement occurred.
pub location: Location,
/// The module specifier that resolves to the runtime code dependency for the
/// module.
pub maybe_code: Option<ModuleSpecifier>,

@@ -86,17 +113,33 @@ pub struct Dependency {
pub maybe_type: Option<ModuleSpecifier>,
}

impl Dependency {
pub fn new(location: Location) -> Self {
Dependency {
is_dynamic: false,
location,
maybe_code: None,
maybe_type: None,
}
}
}

pub trait SpecifierHandler {
/// Instructs the handler to fetch a specifier or retrieve its value from the
/// cache.
fn fetch(&mut self, specifier: ModuleSpecifier) -> FetchFuture;
fn fetch(
&mut self,
specifier: ModuleSpecifier,
maybe_location: Option<Location>,
is_dynamic: bool,
) -> FetchFuture;

/// Get the optional build info from the cache for a given module specifier.
/// Because build infos are only associated with the "root" modules, they are
/// not expected to be cached for each module, but are "lazily" checked when
/// a root module is identified. The `emit_type` also indicates what form
/// of the module the build info is valid for.
fn get_ts_build_info(
fn get_tsbuildinfo(
&self,
specifier: &ModuleSpecifier,
) -> Result<Option<String>, AnyError>;

@@ -117,10 +160,10 @@ pub trait SpecifierHandler {
) -> Result<(), AnyError>;

/// Set the build info for a module specifier, also providing the cache type.
fn set_ts_build_info(
fn set_tsbuildinfo(
&mut self,
specifier: &ModuleSpecifier,
ts_build_info: String,
tsbuildinfo: String,
) -> Result<(), AnyError>;

/// Set the graph dependencies for a given module specifier.

@@ -170,15 +213,18 @@ impl CompiledFileMetadata {
/// existing `file_fetcher` interface, which will eventually be refactored to
/// align it more to the `SpecifierHandler` trait.
pub struct FetchHandler {
/// An instance of disk where generated (emitted) files are stored.
disk_cache: DiskCache,
/// A set of permissions to apply to dynamic imports.
dynamic_permissions: Permissions,
/// A clone of the `program_state` file fetcher.
file_fetcher: SourceFileFetcher,
permissions: Permissions,
}

impl FetchHandler {
pub fn new(
program_state: &Arc<ProgramState>,
permissions: Permissions,
dynamic_permissions: Permissions,
) -> Result<Self, AnyError> {
let custom_root = env::var("DENO_DIR").map(String::into).ok();
let deno_dir = DenoDir::new(custom_root)?;

@@ -187,23 +233,54 @@ impl FetchHandler {

Ok(FetchHandler {
disk_cache,
dynamic_permissions,
file_fetcher,
permissions,
})
}
}

impl SpecifierHandler for FetchHandler {
fn fetch(&mut self, requested_specifier: ModuleSpecifier) -> FetchFuture {
let permissions = self.permissions.clone();
fn fetch(
&mut self,
requested_specifier: ModuleSpecifier,
maybe_location: Option<Location>,
is_dynamic: bool,
) -> FetchFuture {
// When the module graph fetches dynamic modules, the set of dynamic
// permissions need to be applied. Other static imports have all
// permissions.
let permissions = if is_dynamic {
self.dynamic_permissions.clone()
} else {
Permissions::allow_all()
};
let file_fetcher = self.file_fetcher.clone();
let disk_cache = self.disk_cache.clone();
let maybe_referrer: Option<ModuleSpecifier> =
if let Some(location) = &maybe_location {
Some(location.clone().into())
} else {
None
};

async move {
let source_file = file_fetcher
.fetch_source_file(&requested_specifier, None, permissions)
.await?;
.fetch_source_file(&requested_specifier, maybe_referrer, permissions)
.await
.map_err(|err| {
if let Some(location) = maybe_location {
if !is_dynamic {
HandlerError::FetchErrorWithLocation(err.to_string(), location)
.into()
} else {
err
}
} else {
err
}
})?;
let url = source_file.url.clone();
let is_remote = url.scheme() != "file";
let filename = disk_cache.get_cache_filename_with_extension(&url, "meta");
let maybe_version = if let Ok(bytes) = disk_cache.get(&filename) {
if let Ok(compiled_file_metadata) =

@@ -237,6 +314,7 @@ impl SpecifierHandler for FetchHandler {
let specifier = ModuleSpecifier::from(url);

Ok(CachedModule {
is_remote,
maybe_dependencies: None,
maybe_emit,
maybe_emit_path,

@@ -252,31 +330,32 @@ impl SpecifierHandler for FetchHandler {
.boxed_local()
}

fn get_ts_build_info(
fn get_tsbuildinfo(
&self,
specifier: &ModuleSpecifier,
) -> Result<Option<String>, AnyError> {
let filename = self
.disk_cache
.get_cache_filename_with_extension(specifier.as_url(), "buildinfo");
if let Ok(ts_build_info) = self.disk_cache.get(&filename) {
return Ok(Some(String::from_utf8(ts_build_info)?));
if let Ok(tsbuildinfo) = self.disk_cache.get(&filename) {
Ok(Some(String::from_utf8(tsbuildinfo)?))
} else {
Ok(None)
}

Ok(None)
}

fn set_ts_build_info(
fn set_tsbuildinfo(
&mut self,
specifier: &ModuleSpecifier,
ts_build_info: String,
tsbuildinfo: String,
) -> Result<(), AnyError> {
let filename = self
.disk_cache
.get_cache_filename_with_extension(specifier.as_url(), "buildinfo");
debug!("set_tsbuildinfo - filename {:?}", filename);
self
.disk_cache
.set(&filename, ts_build_info.as_bytes())
.set(&filename, tsbuildinfo.as_bytes())
.map_err(|e| e.into())
}

@@ -366,8 +445,8 @@ pub mod tests {

let fetch_handler = FetchHandler {
disk_cache,
dynamic_permissions: Permissions::default(),
file_fetcher,
permissions: Permissions::allow_all(),
};

(temp_dir, fetch_handler)

@@ -381,8 +460,10 @@ pub mod tests {
"http://localhost:4545/cli/tests/subdir/mod2.ts",
)
.unwrap();
let cached_module: CachedModule =
file_fetcher.fetch(specifier.clone()).await.unwrap();
let cached_module: CachedModule = file_fetcher
.fetch(specifier.clone(), None, false)
.await
.unwrap();
assert!(cached_module.maybe_emit.is_none());
assert!(cached_module.maybe_dependencies.is_none());
assert_eq!(cached_module.media_type, MediaType::TypeScript);

@@ -401,18 +482,43 @@ pub mod tests {
"http://localhost:4545/cli/tests/subdir/mod2.ts",
)
.unwrap();
let cached_module: CachedModule =
file_fetcher.fetch(specifier.clone()).await.unwrap();
let cached_module: CachedModule = file_fetcher
.fetch(specifier.clone(), None, false)
.await
.unwrap();
assert!(cached_module.maybe_emit.is_none());
let code = String::from("some code");
file_fetcher
.set_cache(&specifier, &Emit::Cli((code, None)))
.expect("could not set cache");
let cached_module: CachedModule =
file_fetcher.fetch(specifier.clone()).await.unwrap();
let cached_module: CachedModule = file_fetcher
.fetch(specifier.clone(), None, false)
.await
.unwrap();
assert_eq!(
cached_module.maybe_emit,
Some(Emit::Cli(("some code".to_string(), None)))
);
}

#[tokio::test]
async fn test_fetch_handler_is_remote() {
let _http_server_guard = test_util::http_server();
let (_, mut file_fetcher) = setup();
let specifier = ModuleSpecifier::resolve_url_or_path(
"http://localhost:4545/cli/tests/subdir/mod2.ts",
)
.unwrap();
let cached_module: CachedModule =
file_fetcher.fetch(specifier, None, false).await.unwrap();
assert_eq!(cached_module.is_remote, true);
let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap());
let specifier = ModuleSpecifier::resolve_url_or_path(
c.join("tests/subdir/mod1.ts").as_os_str().to_str().unwrap(),
)
.unwrap();
let cached_module: CachedModule =
file_fetcher.fetch(specifier, None, false).await.unwrap();
assert_eq!(cached_module.is_remote, false);
}
}
@@ -1,3 +1,5 @@
[WILDCARD]
error: TypeError: Cannot resolve extension for "[WILDCARD]config.json" with mediaType "Json".
error: An unsupported media type was attempted to be imported as a module.
  Specifier: [WILDCARD]cli/tests/subdir/config.json
  MediaType: Json
[WILDCARD]

2  cli/tests/023_no_ext  Normal file

@@ -0,0 +1,2 @@
import * as mod4 from "./subdir/mod4.js";
console.log(mod4.isMod4);

1  cli/tests/023_no_ext.out  Normal file

@@ -0,0 +1 @@
true

@@ -1 +0,0 @@
console.log("HELLO");

@@ -1 +0,0 @@
HELLO
@@ -12,12 +12,12 @@ const lib = function() {
};
}();
const c = function() {
const c1;
const c1 = [];
return {
c: c1
};
}();
const mod;
const mod = [];
return {
mod
};
@@ -1,5 +1,17 @@
const map = new Map<string, { foo: string }>();

if (map.get("bar").foo) {
console.log("here");
/* eslint-disable */
function b() {
return function (
_target: any,
_propertyKey: string,
_descriptor: PropertyDescriptor,
) {
console.log("b");
};
}

class A {
@b()
a() {
console.log("a");
}
}

@@ -1,7 +1,7 @@
[WILDCARD]Unsupported compiler options in "[WILDCARD]config.tsconfig.json".
The following options were ignored:
  module, target
error: TS2532 [ERROR]: Object is possibly 'undefined'.
if (map.get("bar").foo) {
~~~~~~~~~~~~~~
at [WILDCARD]tests/config.ts:3:5
error: TS1219 [ERROR]: Experimental support for decorators is a feature that is subject to change in a future release. Set the 'experimentalDecorators' option in your 'tsconfig' or 'jsconfig' to remove this warning.
a() {
^
at file:///[WILDCARD]cli/tests/config.ts:[WILDCARD]
@@ -1,7 +1,7 @@
{
"compilerOptions": {
"experimentalDecorators": false,
"module": "amd",
"strict": true,
"target": "es5"
}
}

@@ -1,2 +1,3 @@
error: Modules loaded over https:// are not allowed to import modules over http://
Imported from "https://localhost:5545/cli/tests/disallow_http_from_https.js:2"
error: Modules imported via https are not allowed to import http modules.
  Importing: http://localhost:4545/cli/tests/001_hello.js
    at https://localhost:5545/cli/tests/disallow_http_from_https.js:2:0

@@ -1,2 +1,3 @@
error: Modules loaded over https:// are not allowed to import modules over http://
Imported from "https://localhost:5545/cli/tests/disallow_http_from_https.ts:2"
error: Modules imported via https are not allowed to import http modules.
  Importing: http://localhost:4545/cli/tests/001_hello.js
    at https://localhost:5545/cli/tests/disallow_http_from_https.ts:2:0

@@ -1,2 +1,2 @@
[WILDCARD]error: Cannot resolve module "[WILDCARD]/bad-module.ts" from "[WILDCARD]/error_004_missing_module.ts"
Imported from "[WILDCARD]/error_004_missing_module.ts:2"
[WILDCARD]error: Cannot resolve module "file:///[WILDCARD]cli/tests/bad-module.ts" from "file:///[WILDCARD]cli/tests/error_004_missing_module.ts"
    at file:///[WILDCARD]cli/tests/error_004_missing_module.ts:2:0

@@ -1 +1 @@
error: Uncaught TypeError: Cannot resolve module "[WILDCARD]/bad-module.ts" from "[WILDCARD]/error_005_missing_dynamic_import.ts"
error: Cannot resolve module "[WILDCARD]/bad-module.ts" from "[WILDCARD]/error_005_missing_dynamic_import.ts"

@@ -1,2 +1,2 @@
[WILDCARD]error: Cannot resolve module "[WILDCARD]/non-existent" from "[WILDCARD]/error_006_import_ext_failure.ts"
Imported from "[WILDCARD]/error_006_import_ext_failure.ts:1"
    at file:///[WILDCARD]cli/tests/error_006_import_ext_failure.ts:1:0

@@ -1,2 +1,4 @@
// eslint-disable-next-line
import * as badModule from "bad-module.ts";

console.log(badModule);

@@ -1 +1 @@
error: Uncaught TypeError: network access to "http://localhost:4545/cli/tests/subdir/mod4.js", run again with the --allow-net flag
error: network access to "http://localhost:4545/cli/tests/subdir/mod4.js", run again with the --allow-net flag

@@ -1,3 +1,4 @@
[WILDCARD]
error: Uncaught TypeError: read access to "[WILDCARD]passwd", run again with the --allow-read flag
Imported from "[WILDCARD]evil_remote_import.js:3"
error: Remote modules are not allowed to import local modules. Consider using a dynamic import instead.
  Importing: file:///c:/etc/passwd
    at http://localhost:4545/cli/tests/subdir/evil_remote_import.js:3:0

@@ -1,3 +1,4 @@
[WILDCARD]
error: Remote modules are not allowed to statically import local modules. Use dynamic import instead.
Imported from "[WILDCARD]error_local_static_import_from_remote.js:1"
error: Remote modules are not allowed to import local modules. Consider using a dynamic import instead.
  Importing: file:///some/dir/file.js
    at http://localhost:4545/cli/tests/error_local_static_import_from_remote.js:1:0

@@ -1,3 +1,4 @@
[WILDCARD]
error: Remote modules are not allowed to statically import local modules. Use dynamic import instead.
Imported from "[WILDCARD]error_local_static_import_from_remote.ts:1"
error: Remote modules are not allowed to import local modules. Consider using a dynamic import instead.
  Importing: file:///some/dir/file.ts
    at http://localhost:4545/cli/tests/error_local_static_import_from_remote.ts:1:0
3  cli/tests/fix_exotic_specifiers.ts  Normal file

@@ -0,0 +1,3 @@
import clone from "https://jspm.dev/lodash@4/clone";

console.log(clone);

1  cli/tests/fix_exotic_specifiers.ts.out  Normal file

@@ -0,0 +1 @@
[Function: clone]
@@ -1807,9 +1807,9 @@ itest!(_022_info_flag_script {
http_server: true,
});

itest!(_023_no_ext_with_headers {
args: "run --reload 023_no_ext_with_headers",
output: "023_no_ext_with_headers.out",
itest!(_023_no_ext {
args: "run --reload 023_no_ext",
output: "023_no_ext.out",
});

// TODO(lucacasonato): remove --unstable when permissions goes stable

@@ -2018,7 +2018,7 @@ itest!(_044_bad_resource {
});

itest!(_045_proxy {
args: "run --allow-net --allow-env --allow-run --allow-read --reload --quiet 045_proxy_test.ts",
args: "run -L debug --allow-net --allow-env --allow-run --allow-read --reload --quiet 045_proxy_test.ts",
output: "045_proxy_test.ts.out",
http_server: true,
});

@@ -2764,6 +2764,11 @@ itest!(tsx_imports {
output: "tsx_imports.ts.out",
});

itest!(fix_exotic_specifiers {
args: "run --quiet --reload fix_exotic_specifiers.ts",
output: "fix_exotic_specifiers.ts.out",
});

itest!(fix_js_import_js {
args: "run --quiet --reload fix_js_import_js.ts",
output: "fix_js_import_js.ts.out",

@@ -1,2 +1,3 @@
[WILDCARD]Subresource integrity check failed --lock=lock_check_err.json
http://127.0.0.1:4545/cli/tests/003_relative_import.ts
[WILDCARD]The source code is invalid, as it does not match the expected hash in the lock file.
  Specifier: http://127.0.0.1:4545/cli/tests/003_relative_import.ts
  Lock file: lock_check_err.json

@@ -1,2 +1,3 @@
[WILDCARD]Subresource integrity check failed --lock=lock_check_err2.json
http://localhost:4545/cli/tests/subdir/mt_text_ecmascript.j3.js
[WILDCARD]The source code is invalid, as it does not match the expected hash in the lock file.
  Specifier: http://localhost:4545/cli/tests/subdir/mt_text_ecmascript.j3.js
  Lock file: lock_check_err2.json

@@ -1,3 +1,4 @@
[WILDCARD]
Subresource integrity check failed --lock=lock_dynamic_imports.json
http://127.0.0.1:4545/cli/tests/subdir/subdir2/mod2.ts
The source code is invalid, as it does not match the expected hash in the lock file.
  Specifier: http://127.0.0.1:4545/cli/tests/subdir/subdir2/mod2.ts
  Lock file: lock_dynamic_imports.json
3  cli/tests/module_graph/file_tests-importjson.ts  Normal file

@@ -0,0 +1,3 @@
import * as config from "./some.json";

console.log(config);

5  cli/tests/module_graph/file_tests-some.json  Normal file

@@ -0,0 +1,5 @@
{
  "config": {
    "debug": true
  }
}
@@ -1,14 +1,16 @@
[WILDCARD]
DEBUG RS - [WILDCARD] - Files: [WILDCARD]
DEBUG RS - [WILDCARD] - Nodes: [WILDCARD]
DEBUG RS - [WILDCARD] - Identifiers: [WILDCARD]
DEBUG RS - [WILDCARD] - Symbols: [WILDCARD]
DEBUG RS - [WILDCARD] - Types: [WILDCARD]
DEBUG RS - [WILDCARD] - Instantiations: [WILDCARD]
DEBUG RS - [WILDCARD] - Parse time: [WILDCARD]
DEBUG RS - [WILDCARD] - Bind time: [WILDCARD]
DEBUG RS - [WILDCARD] - Check time: [WILDCARD]
DEBUG RS - [WILDCARD] - Emit time: [WILDCARD]
DEBUG RS - [WILDCARD] - Total TS time: [WILDCARD]
DEBUG RS - [WILDCARD] - Compile time: [WILDCARD]
DEBUG RS - [WILDCARD] - Compilation statistics:
  Files: [WILDCARD]
  Nodes: [WILDCARD]
  Identifiers: [WILDCARD]
  Symbols: [WILDCARD]
  Types: [WILDCARD]
  Instantiations: [WILDCARD]
  Parse time: [WILDCARD]
  Bind time: [WILDCARD]
  Check time: [WILDCARD]
  Emit time: [WILDCARD]
  Total TS time: [WILDCARD]
  Compile time: [WILDCARD]

[WILDCARD]

@@ -1,5 +1,4 @@
Check [WILDCARD]single_compile_with_reload.ts
Check [WILDCARD]single_compile_with_reload_dyn.ts
Hello
1
2

@@ -1,4 +1,4 @@
Check [WILDCARD]ts_type_only_import.ts
Warning Failed to get compiled source code of "[WILDCARD]ts_type_only_import.d.ts".
  Reason: [WILDCARD] (os error 2)
  If the source file provides only type exports, prefer to use "import type" or "export type" syntax instead.
warning: Compiled module not found "[WILDCARD]ts_type_only_import.d.ts"
  From: [WILDCARD]ts_type_only_import.ts
  If the source module contains only types, use `import type` and `export type` to import it instead.

@@ -1,4 +1,4 @@
error: Uncaught TypeError: Unsupported scheme "xxx" for module "xxx:". Supported schemes: [
error: Unsupported scheme "xxx" for module "xxx:". Supported schemes: [
 "http",
 "https",
 "file",
466
cli/tsc.rs
466
cli/tsc.rs
|
@ -2,7 +2,6 @@
|
|||
|
||||
use crate::ast::parse;
|
||||
use crate::ast::Location;
|
||||
use crate::colors;
|
||||
use crate::diagnostics::Diagnostics;
|
||||
use crate::disk_cache::DiskCache;
|
||||
use crate::file_fetcher::SourceFile;
|
||||
|
@ -14,7 +13,6 @@ use crate::module_graph::ModuleGraph;
|
|||
use crate::module_graph::ModuleGraphLoader;
|
||||
use crate::permissions::Permissions;
|
||||
use crate::program_state::ProgramState;
|
||||
use crate::source_maps::SourceMapGetter;
|
||||
use crate::tsc_config;
|
||||
use crate::version;
|
||||
use deno_core::error::generic_error;
|
||||
|
@ -29,7 +27,6 @@ use deno_core::JsRuntime;
|
|||
use deno_core::ModuleSpecifier;
|
||||
use deno_core::RuntimeOptions;
|
||||
use log::debug;
|
||||
use log::info;
|
||||
use log::Level;
|
||||
use regex::Regex;
|
||||
use serde::Deserialize;
|
||||
|
@ -231,12 +228,6 @@ pub struct CompiledFileMetadata {
|
|||
}
|
||||
|
||||
impl CompiledFileMetadata {
|
||||
pub fn from_json_string(
|
||||
metadata_string: String,
|
||||
) -> Result<Self, serde_json::Error> {
|
||||
serde_json::from_str::<Self>(&metadata_string)
|
||||
}
|
||||
|
||||
pub fn to_json_string(&self) -> Result<String, serde_json::Error> {
|
||||
serde_json::to_string(self)
|
||||
}
|
||||
|
@ -308,15 +299,6 @@ struct BundleResponse {
  stats: Option<Vec<Stat>>,
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct CompileResponse {
  diagnostics: Diagnostics,
  emit_map: HashMap<String, EmittedSource>,
  build_info: Option<String>,
  stats: Option<Vec<Stat>>,
}

// TODO(bartlomieju): possible deduplicate once TS refactor is stabilized
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
@ -360,197 +342,6 @@ impl TsCompiler {
    c.insert(url.clone());
  }

  fn has_compiled(&self, url: &Url) -> bool {
    let c = self.compiled.lock().unwrap();
    c.contains(url)
  }

  /// Check if there is compiled source in cache that is valid and can be used
  /// again.
  fn has_compiled_source(&self, url: &Url) -> bool {
    let specifier = ModuleSpecifier::from(url.clone());
    if let Some(source_file) = self
      .file_fetcher
      .fetch_cached_source_file(&specifier, Permissions::allow_all())
    {
      if let Some(metadata) = self.get_metadata(&url) {
        // Compare version hashes
        let version_hash_to_validate = source_code_version_hash(
          &source_file.source_code.as_bytes(),
          version::DENO,
          &self.config.hash.as_bytes(),
        );

        if metadata.version_hash == version_hash_to_validate {
          return true;
        }
      }
    }

    false
  }

  fn has_valid_cache(
    &self,
    url: &Url,
    build_info: &Option<String>,
  ) -> Result<bool, AnyError> {
    if let Some(build_info_str) = build_info.as_ref() {
      let build_inf_json: Value = serde_json::from_str(build_info_str)?;
      let program_val = build_inf_json["program"].as_object().unwrap();
      let file_infos = program_val["fileInfos"].as_object().unwrap();

      if !self.has_compiled_source(url) {
        return Ok(false);
      }

      for (filename, file_info) in file_infos.iter() {
        if filename.starts_with("asset://") {
          continue;
        }

        let url = Url::parse(&filename).expect("Filename is not a valid url");
        let specifier = ModuleSpecifier::from(url);

        if let Some(source_file) = self
          .file_fetcher
          .fetch_cached_source_file(&specifier, Permissions::allow_all())
        {
          let existing_hash = crate::checksum::gen(&[
            &source_file.source_code.as_bytes(),
            &version::DENO.as_bytes(),
          ]);
          let expected_hash =
            file_info["version"].as_str().unwrap().to_string();
          if existing_hash != expected_hash {
            // hashes don't match, something's changed
            return Ok(false);
          }
        } else {
          // no cached source file
          return Ok(false);
        }
      }
    } else {
      // no build info
      return Ok(false);
    }

    Ok(true)
  }

  /// Asynchronously compile a module and all of its dependencies.
  ///
  /// This method compiles every module at most once.
  ///
  /// If the `--reload` flag was provided then the compiler will not use the
  /// on-disk cache and will force recompilation.
  ///
  /// If compilation is required then a new V8 worker is spawned with a fresh
  /// TS compiler.
  pub async fn compile(
    &self,
    program_state: &Arc<ProgramState>,
    source_file: &SourceFile,
    target: TargetLib,
    module_graph: &ModuleGraph,
    allow_js: bool,
  ) -> Result<(), AnyError> {
    let module_url = source_file.url.clone();
    let build_info_key = self
      .disk_cache
      .get_cache_filename_with_extension(&module_url, "buildinfo");
    let build_info = match self.disk_cache.get(&build_info_key) {
      Ok(bytes) => Some(String::from_utf8(bytes)?),
      Err(_) => None,
    };

    // Only use disk cache if `--reload` flag was not used or this file has
    // already been compiled during current process lifetime.
    if (self.use_disk_cache || self.has_compiled(&source_file.url))
      && self.has_valid_cache(&source_file.url, &build_info)?
    {
      return Ok(());
    }

    let module_graph_json =
      serde_json::to_value(module_graph).expect("Failed to serialize data");
    let target = match target {
      TargetLib::Main => "main",
      TargetLib::Worker => "worker",
    };
    let root_names = vec![module_url.to_string()];
    let unstable = self.flags.unstable;
    let performance = matches!(self.flags.log_level, Some(Level::Debug));
    let compiler_config = self.config.clone();

    // TODO(bartlomieju): lift this call up - TSC shouldn't print anything
    info!("{} {}", colors::green("Check"), module_url.to_string());

    let mut lib = if target == "main" {
      vec!["deno.window"]
    } else {
      vec!["deno.worker"]
    };

    if unstable {
      lib.push("deno.unstable");
    }

    let mut compiler_options = json!({
      "allowJs": allow_js,
      "allowNonTsExtensions": true,
      "checkJs": false,
      "esModuleInterop": true,
      "incremental": true,
      "inlineSourceMap": true,
      // TODO(lucacasonato): enable this by default in 1.5.0
      "isolatedModules": unstable,
      "jsx": "react",
      "lib": lib,
      "module": "esnext",
      "outDir": "deno://",
      "resolveJsonModule": true,
      "sourceMap": false,
      "strict": true,
      "removeComments": true,
      "target": "esnext",
      "tsBuildInfoFile": "cache:///tsbuildinfo.json",
    });

    tsc_config::json_merge(&mut compiler_options, &compiler_config.options);

    warn_ignored_options(compiler_config.maybe_ignored_options);

    let j = json!({
      "type": CompilerRequestType::Compile,
      "target": target,
      "rootNames": root_names,
      "performance": performance,
      "compilerOptions": compiler_options,
      "sourceFileMap": module_graph_json,
      "buildInfo": if self.use_disk_cache { build_info } else { None },
    });

    let req_msg = j.to_string();

    let json_str = execute_in_tsc(program_state.clone(), req_msg)?;

    let compile_response: CompileResponse = serde_json::from_str(&json_str)?;

    if !compile_response.diagnostics.0.is_empty() {
      return Err(generic_error(compile_response.diagnostics.to_string()));
    }

    maybe_log_stats(compile_response.stats);

    if let Some(build_info) = compile_response.build_info {
      self.cache_build_info(&module_url, build_info)?;
    }
    self.cache_emitted_files(compile_response.emit_map)?;
    Ok(())
  }

  /// For a given module, generate a single file JavaScript output that includes
  /// all the dependencies for that module.
  pub async fn bundle(
@ -666,39 +457,6 @@ impl TsCompiler {
    Ok(output)
  }

  /// Get associated `CompiledFileMetadata` for given module if it exists.
  fn get_metadata(&self, url: &Url) -> Option<CompiledFileMetadata> {
    // Try to load cached version:
    // 1. check if there's 'meta' file
    let cache_key = self
      .disk_cache
      .get_cache_filename_with_extension(url, "meta");
    if let Ok(metadata_bytes) = self.disk_cache.get(&cache_key) {
      if let Ok(metadata) = std::str::from_utf8(&metadata_bytes) {
        if let Ok(read_metadata) =
          CompiledFileMetadata::from_json_string(metadata.to_string())
        {
          return Some(read_metadata);
        }
      }
    }

    None
  }

  fn cache_build_info(
    &self,
    url: &Url,
    build_info: String,
  ) -> std::io::Result<()> {
    let js_key = self
      .disk_cache
      .get_cache_filename_with_extension(url, "buildinfo");
    self.disk_cache.set(&js_key, build_info.as_bytes())?;

    Ok(())
  }

  fn cache_emitted_files(
    &self,
    emit_map: HashMap<String, EmittedSource>,
@ -730,45 +488,6 @@ impl TsCompiler {
    Ok(())
  }

  pub fn get_compiled_module(
    &self,
    module_url: &Url,
  ) -> Result<CompiledModule, AnyError> {
    let compiled_source_file = self.get_compiled_source_file(module_url)?;

    let compiled_module = CompiledModule {
      code: compiled_source_file.source_code,
      name: module_url.to_string(),
    };

    Ok(compiled_module)
  }

  /// Return compiled JS file for given TS module.
  // TODO: ideally we shouldn't construct SourceFile by hand, but it should be
  // delegated to SourceFileFetcher.
  pub fn get_compiled_source_file(
    &self,
    module_url: &Url,
  ) -> Result<SourceFile, AnyError> {
    let cache_key = self
      .disk_cache
      .get_cache_filename_with_extension(&module_url, "js");
    let compiled_code = self.disk_cache.get(&cache_key)?;
    let compiled_code_filename = self.disk_cache.location.join(cache_key);
    debug!("compiled filename: {:?}", compiled_code_filename);

    let compiled_module = SourceFile {
      url: module_url.clone(),
      filename: compiled_code_filename,
      media_type: MediaType::JavaScript,
      source_code: String::from_utf8(compiled_code)?,
      types_header: None,
    };

    Ok(compiled_module)
  }

  /// Save compiled JS file for given TS module to on-disk cache.
  ///
  /// Along compiled file a special metadata file is saved as well containing
@ -801,31 +520,6 @@ impl TsCompiler {
    )
  }

  /// Return associated source map file for given TS module.
  // TODO: ideally we shouldn't construct SourceFile by hand, but it should be delegated to
  // SourceFileFetcher
  pub fn get_source_map_file(
    &self,
    module_specifier: &ModuleSpecifier,
  ) -> Result<SourceFile, AnyError> {
    let cache_key = self
      .disk_cache
      .get_cache_filename_with_extension(module_specifier.as_url(), "js.map");
    let source_code = self.disk_cache.get(&cache_key)?;
    let source_map_filename = self.disk_cache.location.join(cache_key);
    debug!("source map filename: {:?}", source_map_filename);

    let source_map_file = SourceFile {
      url: module_specifier.as_url().to_owned(),
      filename: source_map_filename,
      media_type: MediaType::JavaScript,
      source_code: String::from_utf8(source_code)?,
      types_header: None,
    };

    Ok(source_map_file)
  }

  /// Save source map file for given TS module to on-disk cache.
  fn cache_source_map(
    &self,
@ -856,91 +550,6 @@ impl TsCompiler {
  }
}

impl SourceMapGetter for TsCompiler {
  fn get_source_map(&self, script_name: &str) -> Option<Vec<u8>> {
    self.try_to_resolve_and_get_source_map(script_name)
  }

  fn get_source_line(&self, script_name: &str, line: usize) -> Option<String> {
    self
      .try_resolve_and_get_source_file(script_name)
      .map(|out| {
        // Do NOT use .lines(): it skips the terminating empty line.
        // (due to internally using .split_terminator() instead of .split())
        let lines: Vec<&str> = out.source_code.split('\n').collect();
        assert!(lines.len() > line);
        lines[line].to_string()
      })
  }
}

// `SourceMapGetter` related methods
impl TsCompiler {
  fn try_to_resolve(&self, script_name: &str) -> Option<ModuleSpecifier> {
    // if `script_name` can't be resolved to ModuleSpecifier it's probably internal
    // script (like `gen/cli/bundle/compiler.js`) so we won't be
    // able to get source for it anyway
    ModuleSpecifier::resolve_url(script_name).ok()
  }

  fn try_resolve_and_get_source_file(
    &self,
    script_name: &str,
  ) -> Option<SourceFile> {
    if let Some(module_specifier) = self.try_to_resolve(script_name) {
      return self
        .file_fetcher
        .fetch_cached_source_file(&module_specifier, Permissions::allow_all());
    }

    None
  }

  fn try_to_resolve_and_get_source_map(
    &self,
    script_name: &str,
  ) -> Option<Vec<u8>> {
    if let Some(module_specifier) = self.try_to_resolve(script_name) {
      if module_specifier.as_url().scheme() == "deno" {
        return None;
      }
      return match self.get_source_map_file(&module_specifier) {
        Ok(out) => Some(out.source_code.into_bytes()),
        Err(_) => {
          // Check if map is inlined
          if let Ok(compiled_source) =
            self.get_compiled_module(module_specifier.as_url())
          {
            let mut content_lines = compiled_source
              .code
              .split('\n')
              .map(|s| s.to_string())
              .collect::<Vec<String>>();

            if !content_lines.is_empty() {
              let last_line = content_lines.pop().unwrap();
              if last_line.starts_with(
                "//# sourceMappingURL=data:application/json;base64,",
              ) {
                let encoded = last_line.trim_start_matches(
                  "//# sourceMappingURL=data:application/json;base64,",
                );
                let decoded_map =
                  base64::decode(encoded).expect("failed to parse source map");
                return Some(decoded_map);
              }
            }
          }

          None
        }
      };
    }

    None
  }
}

#[derive(Debug, Deserialize)]
struct CreateHashArgs {
  data: String,
@ -1425,7 +1034,6 @@ fn parse_deno_types(comment: &str) -> Option<String> {
#[repr(i32)]
#[derive(Clone, Copy, PartialEq, Debug)]
pub enum CompilerRequestType {
  Compile = 0,
  Bundle = 1,
  RuntimeCompile = 2,
  RuntimeBundle = 3,
@ -1438,7 +1046,6 @@ impl Serialize for CompilerRequestType {
    S: Serializer,
  {
    let value: i32 = match self {
      CompilerRequestType::Compile => 0 as i32,
      CompilerRequestType::Bundle => 1 as i32,
      CompilerRequestType::RuntimeCompile => 2 as i32,
      CompilerRequestType::RuntimeBundle => 3 as i32,
@ -1451,12 +1058,8 @@ impl Serialize for CompilerRequestType {
#[cfg(test)]
mod tests {
  use super::*;
  use crate::deno_dir;
  use crate::fs as deno_fs;
  use crate::http_cache;
  use crate::program_state::ProgramState;
  use deno_core::ModuleSpecifier;
  use std::path::PathBuf;
  use tempfile::TempDir;

  #[test]
@ -1516,75 +1119,6 @@ mod tests {
    assert!(parse_ts_reference(r#"/ <asset path="./styles.css" />"#).is_none());
  }

  #[tokio::test]
  async fn test_compile() {
    let p = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
      .parent()
      .unwrap()
      .join("cli/tests/002_hello.ts");
    let specifier =
      ModuleSpecifier::resolve_url_or_path(p.to_str().unwrap()).unwrap();
    let out = SourceFile {
      url: specifier.as_url().clone(),
      filename: PathBuf::from(p.to_str().unwrap().to_string()),
      media_type: MediaType::TypeScript,
      source_code: include_str!("./tests/002_hello.ts").to_string(),
      types_header: None,
    };
    let dir =
      deno_dir::DenoDir::new(Some(test_util::new_deno_dir().path().to_owned()))
        .unwrap();
    let http_cache = http_cache::HttpCache::new(&dir.root.join("deps"));
    let mock_state = ProgramState::mock(
      vec![String::from("deno"), String::from("hello.ts")],
      None,
    );
    let file_fetcher = SourceFileFetcher::new(
      http_cache,
      true,
      mock_state.flags.cache_blocklist.clone(),
      false,
      false,
      None,
    )
    .unwrap();

    let mut module_graph_loader = ModuleGraphLoader::new(
      file_fetcher.clone(),
      None,
      Permissions::allow_all(),
      false,
      false,
    );
    module_graph_loader
      .add_to_graph(&specifier, None)
      .await
      .expect("Failed to create graph");
    let module_graph = module_graph_loader.get_graph();

    let ts_compiler = TsCompiler::new(
      file_fetcher,
      mock_state.flags.clone(),
      dir.gen_cache.clone(),
    )
    .unwrap();

    let result = ts_compiler
      .compile(&mock_state, &out, TargetLib::Main, &module_graph, false)
      .await;
    assert!(result.is_ok());
    let compiled_file = ts_compiler.get_compiled_module(&out.url).unwrap();
    let source_code = compiled_file.code;
    assert!(source_code
      .as_bytes()
      .starts_with(b"\"use strict\";\nconsole.log(\"Hello World\");"));
    let mut lines: Vec<String> =
      source_code.split('\n').map(|s| s.to_string()).collect();
    let last_line = lines.pop().unwrap();
    assert!(last_line
      .starts_with("//# sourceMappingURL=data:application/json;base64"));
  }

  #[tokio::test]
  async fn test_bundle() {
    let p = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
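The `has_valid_cache`/`has_compiled_source` logic removed from cli/tsc.rs above decides whether a cached emit is reusable by recomputing a version hash over the cached source plus the Deno version and comparing it with the hash recorded in the TypeScript build info. Below is a simplified sketch of that comparison only; Deno's real code hashes with `crate::checksum::gen` (SHA-256), so `DefaultHasher` and `is_cache_entry_valid` here are stand-ins for illustration, not the actual implementation.

// Simplified stand-in for the removed version-hash check.
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn version_hash(source: &[u8], deno_version: &str) -> String {
  // Hash the source bytes together with the CLI version, as the removed
  // code did (it also mixed in the compiler config hash).
  let mut hasher = DefaultHasher::new();
  source.hash(&mut hasher);
  deno_version.as_bytes().hash(&mut hasher);
  format!("{:x}", hasher.finish())
}

fn is_cache_entry_valid(
  cached_source: &[u8],
  deno_version: &str,
  expected_hash: &str,
) -> bool {
  // The cached emit is reusable only when the recorded hash still matches.
  version_hash(cached_source, deno_version) == expected_hash
}

fn main() {
  let hash = version_hash(b"console.log(\"Hello World\");", "1.4.0");
  assert!(is_cache_entry_valid(
    b"console.log(\"Hello World\");",
    "1.4.0",
    &hash
  ));
}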
@ -163,8 +163,9 @@ delete Object.prototype.__proto__;
    4: "TSX",
    5: "Json",
    6: "Wasm",
    7: "BuildInfo",
    8: "Unknown",
    7: "TsBuildInfo",
    8: "SourceMap",
    9: "Unknown",
    JavaScript: 0,
    JSX: 1,
    TypeScript: 2,
@ -172,8 +173,9 @@ delete Object.prototype.__proto__;
    TSX: 4,
    Json: 5,
    Wasm: 6,
    BuildInfo: 7,
    Unknown: 6,
    TsBuildInfo: 7,
    SourceMap: 8,
    Unknown: 9,
  };

  function getExtension(fileName, mediaType) {
@ -183,7 +185,9 @@ delete Object.prototype.__proto__;
      case MediaType.JSX:
        return ts.Extension.Jsx;
      case MediaType.TypeScript:
        return fileName.endsWith(".d.ts") ? ts.Extension.Dts : ts.Extension.Ts;
        return ts.Extension.Ts;
      case MediaType.Dts:
        return ts.Extension.Dts;
      case MediaType.TSX:
        return ts.Extension.Tsx;
      case MediaType.Wasm:
@ -366,7 +370,7 @@ delete Object.prototype.__proto__;
      }

      /** @type {{ data: string; hash: string; }} */
      const { data, hash } = core.jsonOpSync(
      const { data, hash, scriptKind } = core.jsonOpSync(
        "op_load",
        { specifier },
      );
@ -375,6 +379,8 @@ delete Object.prototype.__proto__;
        specifier,
        data,
        languageVersion,
        false,
        scriptKind,
      );
      sourceFile.moduleName = specifier;
      sourceFile.version = hash;
@ -406,7 +412,6 @@ delete Object.prototype.__proto__;
      let maybeSpecifiers;
      if (sourceFiles) {
        maybeSpecifiers = sourceFiles.map((sf) => sf.moduleName);
        debug(`  specifiers: ${maybeSpecifiers.join(", ")}`);
      }
      return core.jsonOpSync(
        "op_emit",
@ -465,11 +470,12 @@ delete Object.prototype.__proto__;
          specifiers,
          base,
        });
        return resolved.map(([resolvedFileName, extension]) => ({
        let r = resolved.map(([resolvedFileName, extension]) => ({
          resolvedFileName,
          extension,
          isExternalLibraryImport: false,
        }));
        return r;
      }
    },
    createHash(data) {
@ -649,7 +655,6 @@ delete Object.prototype.__proto__;
  // Warning! The values in this enum are duplicated in `cli/msg.rs`
  // Update carefully!
  const CompilerRequestType = {
    Compile: 0,
    Bundle: 1,
    RuntimeCompile: 2,
    RuntimeBundle: 3,
@ -671,25 +676,6 @@ delete Object.prototype.__proto__;
    };
  }

  function createCompileWriteFile(state) {
    return function writeFile(fileName, data, sourceFiles) {
      const isBuildInfo = fileName === TS_BUILD_INFO;

      if (isBuildInfo) {
        assert(isBuildInfo);
        state.buildInfo = data;
        return;
      }

      assert(sourceFiles);
      assert(sourceFiles.length === 1);
      state.emitMap[fileName] = {
        filename: sourceFiles[0].fileName,
        contents: data,
      };
    };
  }

  function createRuntimeCompileWriteFile(state) {
    return function writeFile(fileName, data, sourceFiles) {
      assert(sourceFiles);
@ -959,101 +945,6 @@ delete Object.prototype.__proto__;
      .map((sym) => sym.getName());
  }

  function compile({
    buildInfo,
    compilerOptions,
    rootNames,
    target,
    sourceFileMap,
    type,
    performance,
  }) {
    if (performance) {
      performanceStart();
    }
    debug(">>> compile start", { rootNames, type: CompilerRequestType[type] });

    // When a program is emitted, TypeScript will call `writeFile` with
    // each file that needs to be emitted. The Deno compiler host delegates
    // this, to make it easier to perform the right actions, which vary
    // based a lot on the request.
    const state = {
      rootNames,
      emitMap: {},
    };

    let diagnostics = [];

    const { options, diagnostics: diags } = parseCompilerOptions(
      compilerOptions,
    );

    diagnostics = diags.filter(
      ({ code }) => code != 5023 && !IGNORED_DIAGNOSTICS.includes(code),
    );

    // TODO(bartlomieju): this option is excluded by `ts.convertCompilerOptionsFromJson`
    // however stuff breaks if it's not passed (type_directives_js_main.js, compiler_js_error.ts)
    options.allowNonTsExtensions = true;

    legacyHostState.target = target;
    legacyHostState.writeFile = createCompileWriteFile(state);
    legacyHostState.buildInfo = buildInfo;

    buildSourceFileCache(sourceFileMap);
    // if there was a configuration and no diagnostics with it, we will continue
    // to generate the program and possibly emit it.
    if (diagnostics.length === 0) {
      const program = ts.createIncrementalProgram({
        rootNames,
        options,
        host,
      });

      // TODO(bartlomieju): check if this is ok
      diagnostics = [
        ...program.getConfigFileParsingDiagnostics(),
        ...program.getSyntacticDiagnostics(),
        ...program.getOptionsDiagnostics(),
        ...program.getGlobalDiagnostics(),
        ...program.getSemanticDiagnostics(),
      ];
      diagnostics = diagnostics.filter(
        ({ code }) =>
          !IGNORED_DIAGNOSTICS.includes(code) &&
          !IGNORED_COMPILE_DIAGNOSTICS.includes(code),
      );

      // We will only proceed with the emit if there are no diagnostics.
      if (diagnostics.length === 0) {
        const emitResult = program.emit();
        // If `checkJs` is off we still might be compiling entry point JavaScript file
        // (if it has `.ts` imports), but it won't be emitted. In that case we skip
        // assertion.
        if (options.checkJs) {
          assert(
            emitResult.emitSkipped === false,
            "Unexpected skip of the emit.",
          );
        }
        // emitResult.diagnostics is `readonly` in TS3.5+ and can't be assigned
        // without casting.
        diagnostics = emitResult.diagnostics;
      }
      performanceProgram({ program });
    }

    debug("<<< compile end", { rootNames, type: CompilerRequestType[type] });
    const stats = performance ? performanceEnd() : undefined;

    return {
      emitMap: state.emitMap,
      buildInfo: state.buildInfo,
      diagnostics: fromTypeScriptDiagnostic(diagnostics),
      stats,
    };
  }

  function bundle({
    compilerOptions,
    rootNames,
@ -1296,11 +1187,6 @@ delete Object.prototype.__proto__;
  function tsCompilerOnMessage(msg) {
    const request = msg.data;
    switch (request.type) {
      case CompilerRequestType.Compile: {
        const result = compile(request);
        opCompilerRespond(result);
        break;
      }
      case CompilerRequestType.Bundle: {
        const result = bundle(request);
        opCompilerRespond(result);
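The JavaScript changes above start threading a `scriptKind` value from `op_load` into `ts.createSourceFile`; the Rust side (`cli/tsc2.rs`, next) derives it from the module's media type via `as_ts_script_kind`. The sketch below shows roughly what such a mapping looks like with a simplified media-type enum; only the values 3 (TypeScript) and 0 (Unknown) are confirmed by the `"scriptKind"` assertions in the tests further down, the remaining numbers follow TypeScript's published `ScriptKind` values and are assumptions here.

// Rough sketch of a media-type -> ts.ScriptKind mapping (illustrative enum,
// not Deno's full MediaType, which also covers Wasm, TsBuildInfo, SourceMap).
enum MediaType {
  JavaScript,
  Jsx,
  TypeScript,
  Dts,
  Tsx,
  Json,
  Unknown,
}

impl MediaType {
  fn as_ts_script_kind(&self) -> i32 {
    match self {
      MediaType::JavaScript => 1, // ts.ScriptKind.JS
      MediaType::Jsx => 2,        // ts.ScriptKind.JSX
      MediaType::TypeScript | MediaType::Dts => 3, // ts.ScriptKind.TS
      MediaType::Tsx => 4,        // ts.ScriptKind.TSX
      MediaType::Json => 6,       // ts.ScriptKind.JSON
      MediaType::Unknown => 0,    // ts.ScriptKind.Unknown
    }
  }
}

fn main() {
  // These two values match the test expectations in cli/tsc2.rs below.
  assert_eq!(MediaType::TypeScript.as_ts_script_kind(), 3);
  assert_eq!(MediaType::Unknown.as_ts_script_kind(), 0);
}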
93 cli/tsc2.rs
@ -21,6 +21,7 @@ use deno_core::RuntimeOptions;
use deno_core::Snapshot;
use serde::Deserialize;
use serde::Serialize;
use std::cell::RefCell;
use std::rc::Rc;

#[derive(Debug, Clone, Default, Eq, PartialEq)]
@ -40,7 +41,7 @@ pub struct Request {
  /// Indicates to the tsc runtime if debug logging should occur.
  pub debug: bool,
  #[serde(skip_serializing)]
  pub graph: Rc<Graph2>,
  pub graph: Rc<RefCell<Graph2>>,
  #[serde(skip_serializing)]
  pub hash_data: Vec<Vec<u8>>,
  #[serde(skip_serializing)]
@ -65,14 +66,14 @@ pub struct Response {
struct State {
  hash_data: Vec<Vec<u8>>,
  emitted_files: Vec<EmittedFile>,
  graph: Rc<Graph2>,
  graph: Rc<RefCell<Graph2>>,
  maybe_tsbuildinfo: Option<String>,
  maybe_response: Option<RespondArgs>,
}

impl State {
  pub fn new(
    graph: Rc<Graph2>,
    graph: Rc<RefCell<Graph2>>,
    hash_data: Vec<Vec<u8>>,
    maybe_tsbuildinfo: Option<String>,
  ) -> Self {
@ -162,10 +163,23 @@ fn load(state: &mut State, args: Value) -> Result<Value, AnyError> {
  let specifier = ModuleSpecifier::resolve_url_or_path(&v.specifier)
    .context("Error converting a string module specifier for \"op_load\".")?;
  let mut hash: Option<String> = None;
  let mut media_type = MediaType::Unknown;
  let data = if &v.specifier == "deno:///.tsbuildinfo" {
    state.maybe_tsbuildinfo.clone()
  // in certain situations we return a "blank" module to tsc and we need to
  // handle the request for that module here.
  } else if &v.specifier == "deno:///none.d.ts" {
    hash = Some("1".to_string());
    media_type = MediaType::TypeScript;
    Some("declare var a: any;\nexport = a;\n".to_string())
  } else {
    let maybe_source = state.graph.get_source(&specifier);
    let graph = state.graph.borrow();
    let maybe_source = graph.get_source(&specifier);
    media_type = if let Some(media_type) = graph.get_media_type(&specifier) {
      media_type
    } else {
      MediaType::Unknown
    };
    if let Some(source) = &maybe_source {
      let mut data = vec![source.as_bytes().to_owned()];
      data.extend_from_slice(&state.hash_data);
@ -174,7 +188,9 @@ fn load(state: &mut State, args: Value) -> Result<Value, AnyError> {
    maybe_source
  };

  Ok(json!({ "data": data, "hash": hash }))
  Ok(
    json!({ "data": data, "hash": hash, "scriptKind": media_type.as_ts_script_kind() }),
  )
}

#[derive(Debug, Deserialize)]
@ -201,19 +217,31 @@ fn resolve(state: &mut State, args: Value) -> Result<Value, AnyError> {
        MediaType::from(specifier).as_ts_extension().to_string(),
      ));
    } else {
      let resolved_specifier = state.graph.resolve(specifier, &referrer)?;
      let media_type = if let Some(media_type) =
        state.graph.get_media_type(&resolved_specifier)
      {
        media_type
      } else {
        bail!(
          "Unable to resolve media type for specifier: \"{}\"",
          resolved_specifier
        )
      };
      resolved
        .push((resolved_specifier.to_string(), media_type.as_ts_extension()));
      let graph = state.graph.borrow();
      match graph.resolve(specifier, &referrer, true) {
        Ok(resolved_specifier) => {
          let media_type = if let Some(media_type) =
            graph.get_media_type(&resolved_specifier)
          {
            media_type
          } else {
            bail!(
              "Unable to resolve media type for specifier: \"{}\"",
              resolved_specifier
            )
          };
          resolved.push((
            resolved_specifier.to_string(),
            media_type.as_ts_extension(),
          ));
        }
        // in certain situations, like certain dynamic imports, we won't have
        // the source file in the graph, so we will return a fake module to
        // make tsc happy.
        Err(_) => {
          resolved.push(("deno:///none.d.ts".to_string(), ".d.ts".to_string()));
        }
      }
    }
  }

@ -221,7 +249,7 @@ fn resolve(state: &mut State, args: Value) -> Result<Value, AnyError> {
}

#[derive(Debug, Deserialize, Eq, PartialEq)]
pub struct RespondArgs {
struct RespondArgs {
  pub diagnostics: Diagnostics,
  pub stats: Stats,
}
@ -269,9 +297,7 @@ pub fn exec(
  runtime
    .execute("[native code]", startup_source)
    .context("Could not properly start the compiler runtime.")?;
  runtime
    .execute("[native_code]", &exec_source)
    .context("Execute request failed.")?;
  runtime.execute("[native_code]", &exec_source)?;

  let op_state = runtime.op_state();
  let mut op_state = op_state.borrow_mut();
@ -324,10 +350,10 @@ mod tests {
    }));
    let mut builder = GraphBuilder2::new(handler.clone(), None);
    builder
      .insert(&specifier)
      .add(&specifier, false)
      .await
      .expect("module not inserted");
    let graph = Rc::new(builder.get_graph(&None).expect("could not get graph"));
    let graph = Rc::new(RefCell::new(builder.get_graph(&None)));
    State::new(graph, hash_data, maybe_tsbuildinfo)
  }

@ -410,7 +436,8 @@ mod tests {
      actual,
      json!({
        "data": "console.log(\"hello deno\");\n",
        "hash": "149c777056afcc973d5fcbe11421b6d5ddc57b81786765302030d7fc893bf729"
        "hash": "149c777056afcc973d5fcbe11421b6d5ddc57b81786765302030d7fc893bf729",
        "scriptKind": 3,
      })
    );
  }
@ -433,7 +460,8 @@ mod tests {
      actual,
      json!({
        "data": "some content",
        "hash": null
        "hash": null,
        "scriptKind": 0,
      })
    );
  }
@ -451,6 +479,7 @@ mod tests {
      json!({
        "data": null,
        "hash": null,
        "scriptKind": 0,
      })
    )
  }
@ -475,7 +504,7 @@ mod tests {
  }

  #[tokio::test]
  async fn test_resolve_error() {
  async fn test_resolve_empty() {
    let mut state = setup(
      Some(
        ModuleSpecifier::resolve_url_or_path("https://deno.land/x/a.ts")
@ -485,10 +514,11 @@ mod tests {
      None,
    )
    .await;
    resolve(
    let actual = resolve(
      &mut state,
      json!({ "base": "https://deno.land/x/a.ts", "specifiers": [ "./bad.ts" ]}),
    ).expect_err("should have errored");
    ).expect("should have not errored");
    assert_eq!(actual, json!([["deno:///none.d.ts", ".d.ts"]]));
  }

  #[tokio::test]
@ -544,17 +574,16 @@ mod tests {
    }));
    let mut builder = GraphBuilder2::new(handler.clone(), None);
    builder
      .insert(&specifier)
      .add(&specifier, false)
      .await
      .expect("module not inserted");
    let graph = Rc::new(builder.get_graph(&None).expect("could not get graph"));
    let graph = Rc::new(RefCell::new(builder.get_graph(&None)));
    let config = TsConfig::new(json!({
      "allowJs": true,
      "checkJs": false,
      "esModuleInterop": true,
      "emitDecoratorMetadata": false,
      "incremental": true,
      "isolatedModules": true,
      "jsx": "react",
      "jsxFactory": "React.createElement",
      "jsxFragmentFactory": "React.Fragment",
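A recurring change in cli/tsc2.rs above is wrapping the module graph in `Rc<RefCell<Graph2>>` so that ops such as `op_load` and `op_resolve` borrow it only for the duration of a request. The following is a minimal sketch of that sharing pattern; `FakeGraph`, `State`, and `op_load` here are stand-ins for illustration, only the `Rc<RefCell<...>>` borrow pattern comes from the actual change.

// Minimal sketch of sharing a graph between op handlers via Rc<RefCell<...>>.
use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;

struct FakeGraph {
  sources: HashMap<String, String>,
}

impl FakeGraph {
  fn get_source(&self, specifier: &str) -> Option<String> {
    self.sources.get(specifier).cloned()
  }
}

struct State {
  graph: Rc<RefCell<FakeGraph>>,
}

fn op_load(state: &State, specifier: &str) -> Option<String> {
  // The borrow lasts only for this call, mirroring `state.graph.borrow()`
  // in the real op_load/op_resolve.
  let graph = state.graph.borrow();
  graph.get_source(specifier)
}

fn main() {
  let mut sources = HashMap::new();
  sources.insert(
    "https://deno.land/x/a.ts".to_string(),
    "console.log(\"hello deno\");\n".to_string(),
  );
  let graph = Rc::new(RefCell::new(FakeGraph { sources }));
  let state = State { graph: graph.clone() };
  assert!(op_load(&state, "https://deno.land/x/a.ts").is_some());
}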
@ -214,6 +214,21 @@ impl TsConfig {
    self.0.to_string().as_bytes().to_owned()
  }

  /// Return the value of the `checkJs` compiler option, defaulting to `false`
  /// if not present.
  pub fn get_check_js(&self) -> bool {
    if let Some(check_js) = self.0.get("checkJs") {
      check_js.as_bool().unwrap_or(false)
    } else {
      false
    }
  }

  /// Merge a serde_json value into the configuration.
  pub fn merge(&mut self, value: &Value) {
    json_merge(&mut self.0, value);
  }

  /// Take an optional string representing a user provided TypeScript config file
  /// which was passed in via the `--config` compiler option and merge it with
  /// the configuration. Returning the result which optionally contains any
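The new `TsConfig::get_check_js` above reads the merged `checkJs` option and falls back to `false` when it is absent or not a boolean. A short usage sketch, written as if it were a test inside the cli crate and assuming the `TsConfig::new(json!(...))` constructor already used in the cli/tsc2.rs test earlier in this diff:

// Hedged usage sketch for the new get_check_js helper.
use crate::tsc_config::TsConfig;
use serde_json::json;

#[test]
fn check_js_defaults_to_false() {
  // Explicitly enabled: the helper reports true.
  let config = TsConfig::new(json!({ "checkJs": true, "allowJs": true }));
  assert!(config.get_check_js());

  // When `checkJs` is absent the helper falls back to false.
  let default_config = TsConfig::new(json!({ "allowJs": true }));
  assert!(!default_config.get_check_js());
}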
@ -121,7 +121,7 @@ impl Worker {
      module_loader: Some(module_loader),
      startup_snapshot: Some(startup_snapshot),
      js_error_create_fn: Some(Box::new(move |core_js_error| {
        JsError::create(core_js_error, &global_state_.ts_compiler)
        JsError::create(core_js_error, global_state_.clone())
      })),
      ..Default::default()
    });
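The cli/worker.rs change above makes the `js_error_create_fn` closure hand a cloned handle to the whole program state to `JsError::create` rather than borrowing the TS compiler. As a minimal illustration only (with stand-in types, not Deno's), the closure owns its own `Arc` clone so each invocation can use the shared source-map provider without borrowing from the enclosing scope:

// Sketch of a boxed error-mapping closure that owns an Arc clone per setup.
use std::sync::Arc;

struct SourceMaps;

impl SourceMaps {
  fn apply(&self, message: &str) -> String {
    // Real code would remap file/line/column positions via source maps.
    format!("mapped: {}", message)
  }
}

fn main() {
  let provider = Arc::new(SourceMaps);
  let provider_ = provider.clone();
  // Analogue of js_error_create_fn: the closure moves its Arc clone inside.
  let create_error: Box<dyn Fn(&str) -> String> =
    Box::new(move |raw| provider_.apply(raw));
  assert_eq!(create_error("boom"), "mapped: boom");
}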