Mirror of https://github.com/denoland/deno.git (synced 2024-12-22 15:24:46 -05:00)
refactor: TS compiler and module graph (#5817)
This PR addresses many problems with module graph loading introduced in #5029, as well as many long-standing issues. "ModuleGraphLoader" has been wired to the "ModuleLoader" implemented on "State", which means that dependency analysis and fetching are done before spinning up the TS compiler worker. Basic dependency tracking for TS compilation has been implemented. Errors caused by import statements are now annotated with the import location. Co-authored-by: Ryan Dahl <ry@tinyclouds.org>
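To make the new flow concrete, here is a minimal, self-contained Rust sketch of the two-phase load described above: a prepare step that walks the whole import graph and compiles it up front, and a fetch step that afterwards only hands out already-compiled modules. The names used here (Loader, fetch, parse_imports, transpile) are hypothetical stand-ins, not the actual types and functions from cli/global_state.rs or cli/module_graph.rs.

use std::collections::{HashMap, HashSet, VecDeque};

// Hypothetical stand-ins for ModuleSpecifier / CompiledModule.
type Specifier = String;

#[derive(Clone)]
struct CompiledModule {
    name: Specifier,
    code: String,
}

struct Loader {
    // Source text per specifier, discovered during the prepare phase.
    sources: HashMap<Specifier, String>,
    // Modules already type-checked/transpiled.
    compiled: HashMap<Specifier, CompiledModule>,
}

impl Loader {
    // Phase 1: collect every dependency (breadth-first over imports),
    // then run the compiler once over the whole graph.
    fn prepare_module_load(&mut self, root: &Specifier) {
        let mut queue = VecDeque::from([root.clone()]);
        let mut seen = HashSet::new();
        while let Some(spec) = queue.pop_front() {
            if !seen.insert(spec.clone()) {
                continue;
            }
            let source = fetch(&spec); // network / disk IO happens only here
            for dep in parse_imports(&source) {
                queue.push_back(dep);
            }
            self.sources.insert(spec, source);
        }
        for (spec, source) in &self.sources {
            self.compiled.insert(
                spec.clone(),
                CompiledModule { name: spec.clone(), code: transpile(source) },
            );
        }
    }

    // Phase 2: no IO besides the cache; everything was gathered in phase 1.
    fn fetch_compiled_module(&self, spec: &Specifier) -> Option<CompiledModule> {
        self.compiled.get(spec).cloned()
    }
}

// Toy helpers standing in for the file fetcher, the swc parser and tsc.
fn fetch(spec: &Specifier) -> String {
    match spec.as_str() {
        "main.ts" => "import './dep.ts'; console.log('hi');".to_string(),
        _ => "export {};".to_string(),
    }
}

fn parse_imports(source: &str) -> Vec<Specifier> {
    source
        .split_whitespace()
        .filter(|w| w.starts_with("'./") && w.ends_with("';"))
        .map(|w| w.trim_start_matches("'./").trim_end_matches("';").to_string())
        .collect()
}

fn transpile(source: &str) -> String {
    format!("\"use strict\";\n{}", source)
}

fn main() {
    let mut loader = Loader { sources: HashMap::new(), compiled: HashMap::new() };
    loader.prepare_module_load(&"main.ts".to_string());
    let module = loader.fetch_compiled_module(&"main.ts".to_string()).unwrap();
    println!("{}: {}", module.name, module.code);
}

The point of the split is that once prepare_module_load has returned, loading individual modules no longer needs the network, which is why fetch_compiled_module in this PR switches from fetch_source_file to fetch_cached_source_file.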
This commit is contained in:
parent b97459b5ae
commit ad6d2a7734
17 changed files with 612 additions and 392 deletions
@@ -32,7 +32,7 @@ pub struct ParamDef {
pub ts_type: Option<super::ts_type::TsTypeDef>,
}

#[derive(Debug, Serialize, Clone)]
#[derive(Debug, Serialize, Clone, PartialEq)]
pub struct Location {
pub filename: String,
pub line: usize,
@@ -3,9 +3,12 @@ use crate::deno_dir;
use crate::file_fetcher::SourceFileFetcher;
use crate::flags;
use crate::http_cache;
use crate::import_map::ImportMap;
use crate::lockfile::Lockfile;
use crate::module_graph::ModuleGraphLoader;
use crate::msg;
use crate::permissions::Permissions;
use crate::state::exit_unstable;
use crate::tsc::CompiledModule;
use crate::tsc::TargetLib;
use crate::tsc::TsCompiler;

@@ -35,6 +38,7 @@ pub struct GlobalStateInner {
pub ts_compiler: TsCompiler,
pub lockfile: Option<Mutex<Lockfile>>,
pub compiler_starts: AtomicUsize,
pub maybe_import_map: Option<ImportMap>,
compile_lock: AsyncMutex<()>,
}

@@ -75,6 +79,17 @@ impl GlobalState {
None
};

let maybe_import_map: Option<ImportMap> =
match flags.import_map_path.as_ref() {
None => None,
Some(file_path) => {
if !flags.unstable {
exit_unstable("--importmap")
}
Some(ImportMap::load(file_path)?)
}
};

let inner = GlobalStateInner {
dir,
permissions: Permissions::from_flags(&flags),

@@ -82,19 +97,85 @@ impl GlobalState {
file_fetcher,
ts_compiler,
lockfile,
maybe_import_map,
compiler_starts: AtomicUsize::new(0),
compile_lock: AsyncMutex::new(()),
};
Ok(GlobalState(Arc::new(inner)))
}

pub async fn fetch_compiled_module(
/// This function is called when new module load is
/// initialized by the EsIsolate. Its resposibility is to collect
/// all dependencies and if it is required then also perform TS typecheck
/// and traspilation.
pub async fn prepare_module_load(
&self,
module_specifier: ModuleSpecifier,
maybe_referrer: Option<ModuleSpecifier>,
target_lib: TargetLib,
permissions: Permissions,
is_dyn_import: bool,
maybe_import_map: Option<ImportMap>,
) -> Result<(), ErrBox> {
let module_specifier = module_specifier.clone();

// TODO(ry) Try to lift compile_lock as high up in the call stack for
// sanity.
let compile_lock = self.compile_lock.lock().await;

let mut module_graph_loader = ModuleGraphLoader::new(
self.file_fetcher.clone(),
maybe_import_map,
permissions.clone(),
is_dyn_import,
false,
);
module_graph_loader
.add_to_graph(&module_specifier, maybe_referrer)
.await?;
let module_graph = module_graph_loader.get_graph();

let out = self
.file_fetcher
.fetch_cached_source_file(&module_specifier, permissions.clone())
.expect("Source file not found");

// Check if we need to compile files
let needs_compilation = match out.media_type {
msg::MediaType::TypeScript
| msg::MediaType::TSX
| msg::MediaType::JSX => true,
msg::MediaType::JavaScript => self.ts_compiler.compile_js,
_ => false,
};

if needs_compilation {
self
.ts_compiler
.compile_module_graph(
self.clone(),
&out,
target_lib,
permissions,
module_graph,
)
.await?;
}

drop(compile_lock);

Ok(())
}

// TODO(bartlomieju): this method doesn't need to be async anymore
/// This method is used after `prepare_module_load` finishes and EsIsolate
/// starts loading source and executing source code. This method shouldn't
/// perform any IO (besides $DENO_DIR) and only operate on sources collected
/// during `prepare_module_load`.
pub async fn fetch_compiled_module(
&self,
module_specifier: ModuleSpecifier,
_maybe_referrer: Option<ModuleSpecifier>,
) -> Result<CompiledModule, ErrBox> {
let state1 = self.clone();
let state2 = self.clone();

@@ -102,59 +183,31 @@ impl GlobalState {

let out = self
.file_fetcher
.fetch_source_file(&module_specifier, maybe_referrer, permissions.clone())
.await?;
.fetch_cached_source_file(&module_specifier, Permissions::allow_all())
.expect("Cached source file doesn't exist");

// TODO(ry) Try to lift compile_lock as high up in the call stack for
// sanity.
let compile_lock = self.compile_lock.lock().await;

let compiled_module = match out.media_type {
// Check if we need to compile files
let was_compiled = match out.media_type {
msg::MediaType::TypeScript
| msg::MediaType::TSX
| msg::MediaType::JSX => {
state1
.ts_compiler
.compile(state1.clone(), &out, target_lib, permissions, is_dyn_import)
.await
}
msg::MediaType::JavaScript => {
if state1.ts_compiler.compile_js {
state2
.ts_compiler
.compile(
state1.clone(),
&out,
target_lib,
permissions,
is_dyn_import,
)
.await
} else {
if let Some(types_url) = out.types_url.clone() {
let types_specifier = ModuleSpecifier::from(types_url);
state1
.file_fetcher
.fetch_source_file(
&types_specifier,
Some(module_specifier.clone()),
permissions.clone(),
)
.await
.ok();
| msg::MediaType::JSX => true,
msg::MediaType::JavaScript => self.ts_compiler.compile_js,
_ => false,
};

Ok(CompiledModule {
let compiled_module = if was_compiled {
state1.ts_compiler.get_compiled_module(&out.url)?
} else {
CompiledModule {
code: String::from_utf8(out.source_code.clone())?,
name: out.url.to_string(),
})
}
}
_ => Ok(CompiledModule {
code: String::from_utf8(out.source_code.clone())?,
name: out.url.to_string(),
}),
}?;
};

drop(compile_lock);

if let Some(ref lockfile) = state2.lockfile {

@@ -193,11 +246,3 @@ fn thread_safe() {
fn f<S: Send + Sync>(_: S) {}
f(GlobalState::mock(vec![]));
}

#[test]
fn import_map_given_for_repl() {
let _result = GlobalState::new(flags::Flags {
import_map_path: Some("import_map.json".to_string()),
..flags::Flags::default()
});
}
cli/main.rs (65 changed lines)
@@ -72,12 +72,10 @@ use crate::file_fetcher::SourceFile;
use crate::file_fetcher::SourceFileFetcher;
use crate::fs as deno_fs;
use crate::global_state::GlobalState;
use crate::import_map::ImportMap;
use crate::msg::MediaType;
use crate::op_error::OpError;
use crate::ops::io::get_stdio;
use crate::permissions::Permissions;
use crate::state::exit_unstable;
use crate::state::State;
use crate::tsc::TargetLib;
use crate::worker::MainWorker;

@@ -156,7 +154,13 @@ fn create_main_worker(
global_state: GlobalState,
main_module: ModuleSpecifier,
) -> Result<MainWorker, ErrBox> {
let state = State::new(global_state, None, main_module, false)?;
let state = State::new(
global_state.clone(),
None,
main_module,
global_state.maybe_import_map.clone(),
false,
)?;

let mut worker = MainWorker::new(
"main".to_string(),

@@ -220,16 +224,21 @@ async fn print_file_info(
);

let module_specifier_ = module_specifier.clone();

global_state
.clone()
.fetch_compiled_module(
module_specifier_,
.prepare_module_load(
module_specifier_.clone(),
None,
TargetLib::Main,
Permissions::allow_all(),
false,
global_state.maybe_import_map.clone(),
)
.await?;
global_state
.clone()
.fetch_compiled_module(module_specifier_, None)
.await?;

if out.media_type == msg::MediaType::TypeScript
|| (out.media_type == msg::MediaType::JavaScript

@@ -393,43 +402,49 @@ async fn bundle_command(
source_file: String,
out_file: Option<PathBuf>,
) -> Result<(), ErrBox> {
let mut module_name = ModuleSpecifier::resolve_url_or_path(&source_file)?;
let url = module_name.as_url();
let mut module_specifier =
ModuleSpecifier::resolve_url_or_path(&source_file)?;
let url = module_specifier.as_url();

// TODO(bartlomieju): fix this hack in ModuleSpecifier
if url.scheme() == "file" {
let a = deno_fs::normalize_path(&url.to_file_path().unwrap());
let u = Url::from_file_path(a).unwrap();
module_name = ModuleSpecifier::from(u)
module_specifier = ModuleSpecifier::from(u)
}

debug!(">>>>> bundle START");
let compiler_config = tsc::CompilerConfig::load(flags.config_path.clone())?;

let maybe_import_map = match flags.import_map_path.as_ref() {
None => None,
Some(file_path) => {
if !flags.unstable {
exit_unstable("--importmap")
}
Some(ImportMap::load(file_path)?)
}
};

let global_state = GlobalState::new(flags)?;

let bundle_result = tsc::bundle(
info!("Bundling {}", module_specifier.to_string());

let output = tsc::bundle(
&global_state,
compiler_config,
module_name,
maybe_import_map,
out_file,
module_specifier,
global_state.maybe_import_map.clone(),
global_state.flags.unstable,
)
.await;
.await?;

debug!(">>>>> bundle END");
bundle_result

let output_string = fmt::format_text(&output)?;

if let Some(out_file_) = out_file.as_ref() {
info!("Emitting bundle to {:?}", out_file_);
let output_bytes = output_string.as_bytes();
let output_len = output_bytes.len();
deno_fs::write_file(out_file_, output_bytes, 0o666)?;
// TODO(bartlomieju): add "humanFileSize" method
info!("{} bytes emitted.", output_len);
} else {
println!("{}", output_string);
}

Ok(())
}

async fn doc_command(
@@ -1,5 +1,6 @@
// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.

use crate::doc::Location;
use crate::file_fetcher::map_file_extension;
use crate::file_fetcher::SourceFile;
use crate::file_fetcher::SourceFileFetcher;

@@ -9,7 +10,7 @@ use crate::op_error::OpError;
use crate::permissions::Permissions;
use crate::swc_util::analyze_dependencies_and_references;
use crate::swc_util::TsReferenceKind;
use crate::tsc::get_available_libs;
use crate::tsc::AVAILABLE_LIBS;
use deno_core::ErrBox;
use deno_core::ModuleSpecifier;
use futures::stream::FuturesUnordered;

@@ -24,6 +25,18 @@ use std::hash::BuildHasher;
use std::path::PathBuf;
use std::pin::Pin;

// TODO(bartlomieju): it'd be great if this function returned
// more structured data and possibly format the same as TS diagnostics.
/// Decorate error with location of import that caused the error.
fn err_with_location(e: ErrBox, location: &Location) -> ErrBox {
let location_str = format!(
"\nImported from \"{}:{}\"",
location.filename, location.line
);
let err_str = e.to_string();
OpError::other(format!("{}{}", err_str, location_str)).into()
}

fn serialize_module_specifier<S>(
spec: &ModuleSpecifier,
s: S,

@@ -138,8 +151,9 @@ impl ModuleGraphLoader {
pub async fn add_to_graph(
&mut self,
specifier: &ModuleSpecifier,
maybe_referrer: Option<ModuleSpecifier>,
) -> Result<(), ErrBox> {
self.download_module(specifier.clone(), None)?;
self.download_module(specifier.clone(), maybe_referrer)?;

loop {
let (specifier, source_file) =

@@ -239,10 +253,8 @@ impl ModuleGraphLoader {
imports.push(import_descriptor);
}

let available_libs = get_available_libs();

for ref_desc in ref_descs {
if available_libs.contains(&ref_desc.specifier) {
if AVAILABLE_LIBS.contains(&ref_desc.specifier.as_str()) {
continue;
}

@@ -446,31 +458,33 @@ impl ModuleGraphLoader {
let import_descriptor = ImportDescriptor {
specifier: import_desc.specifier.to_string(),
resolved_specifier,
type_directive: import_desc.deno_types,
type_directive: import_desc.deno_types.clone(),
resolved_type_directive,
};

self.download_module(
self
.download_module(
import_descriptor.resolved_specifier.clone(),
Some(module_specifier.clone()),
)?;
)
.map_err(|e| err_with_location(e, &import_desc.location))?;

if let Some(type_dir_url) =
import_descriptor.resolved_type_directive.as_ref()
{
self.download_module(
self
.download_module(
type_dir_url.clone(),
Some(module_specifier.clone()),
)?;
)
.map_err(|e| err_with_location(e, &import_desc.location))?;
}

imports.push(import_descriptor);
}

let available_libs = get_available_libs();

for ref_desc in ref_descs {
if available_libs.contains(&ref_desc.specifier) {
if AVAILABLE_LIBS.contains(&ref_desc.specifier.as_str()) {
continue;
}

@@ -484,10 +498,12 @@ impl ModuleGraphLoader {
resolved_specifier,
};

self.download_module(
self
.download_module(
reference_descriptor.resolved_specifier.clone(),
Some(module_specifier.clone()),
)?;
)
.map_err(|e| err_with_location(e, &ref_desc.location))?;

match ref_desc.kind {
TsReferenceKind::Lib => {

@@ -539,7 +555,7 @@ mod tests {
false,
false,
);
graph_loader.add_to_graph(&module_specifier).await?;
graph_loader.add_to_graph(&module_specifier, None).await?;
Ok(graph_loader.get_graph())
}
cli/state.rs (129 changed lines)
@@ -268,76 +268,23 @@ impl ModuleLoader for State {
Ok(module_specifier)
}

/// Given an absolute url, load its source code.
fn load(
&self,
module_specifier: &ModuleSpecifier,
maybe_referrer: Option<ModuleSpecifier>,
is_dyn_import: bool,
_is_dyn_import: bool,
) -> Pin<Box<deno_core::ModuleSourceFuture>> {
let module_specifier = module_specifier.clone();

// TODO(bartlomieju): this code is duplicated from module_graph.
// It should be removed when `prepare_load` will be used to load modules.
// Disallow http:// imports from modules loaded over https://
if let Some(referrer) = maybe_referrer.as_ref() {
if let "https" = referrer.as_url().scheme() {
if let "http" = module_specifier.as_url().scheme() {
let e = OpError::permission_denied(
"Modules loaded over https:// are not allowed to import modules over http://".to_string()
);
return async move { Err(e.into()) }.boxed_local();
}
}
}

if is_dyn_import {
if let Err(e) = self.check_dyn_import(&module_specifier) {
return async move { Err(e.into()) }.boxed_local();
}
} else {
// Verify that remote file doesn't try to statically import local file.
if let Some(referrer) = maybe_referrer.as_ref() {
let referrer_url = referrer.as_url();
match referrer_url.scheme() {
"http" | "https" => {
let specifier_url = module_specifier.as_url();
match specifier_url.scheme() {
"http" | "https" => {}
_ => {
let e = OpError::permission_denied(
"Remote modules are not allowed to statically import local modules. Use dynamic import instead.".to_string()
);
return async move { Err(e.into()) }.boxed_local();
}
}
}
_ => {}
}
}
}

let module_specifier = module_specifier.to_owned();
let mut state = self.borrow_mut();
// TODO(bartlomieju): incrementing resolve_count here has no sense...
state.metrics.resolve_count += 1;
let module_url_specified = module_specifier.to_string();
let global_state = state.global_state.clone();
let target_lib = state.target_lib.clone();
let permissions = if state.is_main {
Permissions::allow_all()
} else {
state.permissions.clone()
};

// TODO(bartlomieju): `fetch_compiled_module` should take `load_id` param
let fut = async move {
let compiled_module = global_state
.fetch_compiled_module(
module_specifier,
maybe_referrer,
target_lib,
permissions,
is_dyn_import,
)
.fetch_compiled_module(module_specifier, maybe_referrer)
.await?;
Ok(deno_core::ModuleSource {
// Real module name, might be different from initial specifier

@@ -354,22 +301,47 @@ impl ModuleLoader for State {
fn prepare_load(
&self,
_load_id: ModuleLoadId,
_module_specifier: &ModuleSpecifier,
_maybe_referrer: Option<String>,
_is_dyn_import: bool,
module_specifier: &ModuleSpecifier,
maybe_referrer: Option<String>,
is_dyn_import: bool,
) -> Pin<Box<dyn Future<Output = Result<(), ErrBox>>>> {
// TODO(bartlomieju):
// 1. recursively:
// a) resolve specifier
// b) check permission if dynamic import
// c) fetch/download source code
// d) parse the source code and extract all import/exports (dependencies)
// e) add discovered deps and loop algorithm until no new dependencies
// are discovered
// 2. run through appropriate compiler giving it access only to
// discovered files
let module_specifier = module_specifier.clone();
let state = self.borrow();
let target_lib = state.target_lib.clone();
let maybe_import_map = state.import_map.clone();
// Only "main" module is loaded without permission check,
// ie. module that is associated with "is_main" state
// and is not a dynamic import.
let permissions = if state.is_main && !is_dyn_import {
Permissions::allow_all()
} else {
state.permissions.clone()
};
let global_state = state.global_state.clone();
// TODO(bartlomieju): I'm not sure if it's correct to ignore
// bad referrer - this is the case for `Deno.core.evalContext()` where
// `ref_str` is `<unknown>`.
let maybe_referrer = if let Some(ref_str) = maybe_referrer {
ModuleSpecifier::resolve_url(&ref_str).ok()
} else {
None
};
drop(state);

async { Ok(()) }.boxed_local()
// TODO(bartlomieju): `prepare_module_load` should take `load_id` param
async move {
global_state
.prepare_module_load(
module_specifier,
maybe_referrer,
target_lib,
permissions,
is_dyn_import,
maybe_import_map,
)
.await
}
.boxed_local()
}
}

@@ -379,19 +351,9 @@ impl State {
global_state: GlobalState,
shared_permissions: Option<Permissions>,
main_module: ModuleSpecifier,
maybe_import_map: Option<ImportMap>,
is_internal: bool,
) -> Result<Self, ErrBox> {
let import_map: Option<ImportMap> =
match global_state.flags.import_map_path.as_ref() {
None => None,
Some(file_path) => {
if !global_state.flags.unstable {
exit_unstable("--importmap")
}
Some(ImportMap::load(file_path)?)
}
};

let seeded_rng = match global_state.flags.seed {
Some(seed) => Some(StdRng::seed_from_u64(seed)),
None => None,

@@ -407,7 +369,7 @@ impl State {
global_state,
main_module,
permissions,
import_map,
import_map: maybe_import_map,
metrics: Metrics::default(),
global_timer: GlobalTimer::new(),
workers: HashMap::new(),

@@ -529,6 +491,7 @@ impl State {
GlobalState::mock(vec!["deno".to_string()]),
None,
module_specifier,
None,
false,
)
.unwrap()
@@ -1,4 +1,5 @@
// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
use crate::doc::Location;
use crate::msg::MediaType;
use crate::swc_common;
use crate::swc_common::comments::CommentKind;

@@ -450,6 +451,7 @@ fn get_deno_types(parser: &AstParser, span: Span) -> Option<String> {
pub struct ImportDescriptor {
pub specifier: String,
pub deno_types: Option<String>,
pub location: Location,
}

#[derive(Clone, Debug, PartialEq)]

@@ -463,6 +465,7 @@ pub enum TsReferenceKind {
pub struct TsReferenceDescriptor {
pub kind: TsReferenceKind,
pub specifier: String,
pub location: Location,
}

pub fn analyze_dependencies_and_references(

@@ -496,16 +499,19 @@ pub fn analyze_dependencies_and_references(
desc.kind != DependencyKind::DynamicImport
})
.map(|desc| {
let location = parser.get_span_location(desc.span);
if desc.kind == DependencyKind::Import {
let deno_types = get_deno_types(&parser, desc.span);
ImportDescriptor {
specifier: desc.specifier.to_string(),
deno_types,
location: location.into(),
}
} else {
ImportDescriptor {
specifier: desc.specifier.to_string(),
deno_types: None,
location: location.into(),
}
}
})

@@ -553,7 +559,12 @@ pub fn analyze_dependencies_and_references(
.trim_end_matches('\'')
.to_string();

references.push(TsReferenceDescriptor { kind, specifier });
let location = parser.get_span_location(comment.span);
references.push(TsReferenceDescriptor {
kind,
specifier,
location: location.into(),
});
}
Ok((imports, references))
})

@@ -595,15 +606,30 @@ console.log(qat.qat);
vec![
ImportDescriptor {
specifier: "./type_definitions/foo.js".to_string(),
deno_types: Some("./type_definitions/foo.d.ts".to_string())
deno_types: Some("./type_definitions/foo.d.ts".to_string()),
location: Location {
filename: "some/file.ts".to_string(),
line: 9,
col: 0,
},
},
ImportDescriptor {
specifier: "./type_definitions/fizz.js".to_string(),
deno_types: Some("./type_definitions/fizz.d.ts".to_string())
deno_types: Some("./type_definitions/fizz.d.ts".to_string()),
location: Location {
filename: "some/file.ts".to_string(),
line: 11,
col: 0,
},
},
ImportDescriptor {
specifier: "./type_definitions/qat.ts".to_string(),
deno_types: None
deno_types: None,
location: Location {
filename: "some/file.ts".to_string(),
line: 15,
col: 0,
},
},
]
);

@@ -617,14 +643,29 @@ console.log(qat.qat);
TsReferenceDescriptor {
specifier: "dom".to_string(),
kind: TsReferenceKind::Lib,
location: Location {
filename: "some/file.ts".to_string(),
line: 5,
col: 0,
},
},
TsReferenceDescriptor {
specifier: "./type_reference.d.ts".to_string(),
kind: TsReferenceKind::Types,
location: Location {
filename: "some/file.ts".to_string(),
line: 6,
col: 0,
},
},
TsReferenceDescriptor {
specifier: "./type_reference/dep.ts".to_string(),
kind: TsReferenceKind::Path,
location: Location {
filename: "some/file.ts".to_string(),
line: 7,
col: 0,
},
},
]
);
@@ -1 +1,2 @@
error: Modules loaded over https:// are not allowed to import modules over http://
Imported from "https://localhost:5545/cli/tests/disallow_http_from_https.js:2"

@@ -1 +1,2 @@
error: Modules loaded over https:// are not allowed to import modules over http://
Imported from "https://localhost:5545/cli/tests/disallow_http_from_https.ts:2"

@@ -1 +1 @@
error: Cannot resolve module "[WILDCARD]/bad-module.ts" from "[WILDCARD]/error_005_missing_dynamic_import.ts"
error: Uncaught TypeError: Cannot resolve module "[WILDCARD]/bad-module.ts" from "[WILDCARD]/error_005_missing_dynamic_import.ts"

@@ -1 +1,2 @@
error: relative import path "bad-module.ts" not prefixed with / or ./ or ../ Imported from "[WILDCARD]/error_012_bad_dynamic_import_specifier.ts"
Compile [WILDCARD]error_012_bad_dynamic_import_specifier.ts
error: Uncaught TypeError: relative import path "bad-module.ts" not prefixed with / or ./ or ../ Imported from "[WILDCARD]/error_012_bad_dynamic_import_specifier.ts"

@@ -1,2 +1,3 @@
[WILDCARD]
error: Remote modules are not allowed to statically import local modules. Use dynamic import instead.
Imported from "[WILDCARD]error_local_static_import_from_remote.js:1"

@@ -1,2 +1,3 @@
[WILDCARD]
error: Remote modules are not allowed to statically import local modules. Use dynamic import instead.
Imported from "[WILDCARD]error_local_static_import_from_remote.ts:1"

@@ -1,4 +1 @@
error: Uncaught SyntaxError: Unexpected identifier
(the following is a syntax error ^^ ! )
~~~~~~~~~
at [WILDCARD]tests/error_syntax.js:3:6
error: Expected Comma, got Some(Word(following)) at [WILDCARD]tests/error_syntax.js:3:5

@@ -1,2 +1 @@
error: Uncaught SyntaxError: Unexpected end of input
at [WILDCARD]tests/error_syntax_empty_trailing_line.mjs:[WILDCARD]
error: Unexpected eof at [WILDCARD]tests/error_syntax_empty_trailing_line.mjs:2:21
@@ -412,6 +412,71 @@ fn js_unit_tests() {
assert!(status.success());
}

#[test]
fn ts_dependency_recompilation() {
let t = TempDir::new().expect("tempdir fail");
let ats = t.path().join("a.ts");

std::fs::write(
&ats,
"
import { foo } from \"./b.ts\";

function print(str: string): void {
console.log(str);
}

print(foo);",
)
.unwrap();

let bts = t.path().join("b.ts");
std::fs::write(
&bts,
"
export const foo = \"foo\";",
)
.unwrap();

let output = util::deno_cmd()
.current_dir(util::root_path())
.env("NO_COLOR", "1")
.arg("run")
.arg(&ats)
.output()
.expect("failed to spawn script");

let stdout_output = std::str::from_utf8(&output.stdout).unwrap().trim();
let stderr_output = std::str::from_utf8(&output.stderr).unwrap().trim();

assert!(stdout_output.ends_with("foo"));
assert!(stderr_output.starts_with("Compile"));

// Overwrite contents of b.ts and run again
std::fs::write(
&bts,
"
export const foo = 5;",
)
.expect("error writing file");

let output = util::deno_cmd()
.current_dir(util::root_path())
.env("NO_COLOR", "1")
.arg("run")
.arg(&ats)
.output()
.expect("failed to spawn script");

let stdout_output = std::str::from_utf8(&output.stdout).unwrap().trim();
let stderr_output = std::str::from_utf8(&output.stderr).unwrap().trim();

// error: TS2345 [ERROR]: Argument of type '5' is not assignable to parameter of type 'string'.
assert!(stderr_output.contains("TS2345"));
assert!(!output.status.success());
assert!(stdout_output.is_empty());
}

#[test]
fn bundle_exports() {
// First we have to generate a bundle of some module that has exports.

@@ -1377,7 +1442,7 @@ itest!(error_004_missing_module {
});

itest!(error_005_missing_dynamic_import {
args: "run --reload --allow-read error_005_missing_dynamic_import.ts",
args: "run --reload --allow-read --quiet error_005_missing_dynamic_import.ts",
exit_code: 1,
output: "error_005_missing_dynamic_import.ts.out",
});

@@ -1424,7 +1489,7 @@ itest!(error_014_catch_dynamic_import_error {
});

itest!(error_015_dynamic_import_permissions {
args: "run --reload error_015_dynamic_import_permissions.js",
args: "run --reload --quiet error_015_dynamic_import_permissions.js",
output: "error_015_dynamic_import_permissions.out",
exit_code: 1,
http_server: true,
cli/tsc.rs (438 changed lines)
@@ -5,10 +5,9 @@ use crate::diagnostics::DiagnosticItem;
use crate::disk_cache::DiskCache;
use crate::file_fetcher::SourceFile;
use crate::file_fetcher::SourceFileFetcher;
use crate::fmt;
use crate::fs as deno_fs;
use crate::global_state::GlobalState;
use crate::import_map::ImportMap;
use crate::module_graph::ModuleGraphFile;
use crate::module_graph::ModuleGraphLoader;
use crate::msg;
use crate::op_error::OpError;

@@ -16,7 +15,6 @@ use crate::ops;
use crate::permissions::Permissions;
use crate::source_maps::SourceMapGetter;
use crate::startup_data;
use crate::state::exit_unstable;
use crate::state::State;
use crate::version;
use crate::web_worker::WebWorker;

@@ -50,73 +48,69 @@ use std::sync::atomic::Ordering;
use std::sync::Arc;
use std::sync::Mutex;
use std::task::Poll;
use std::time::Instant;
use url::Url;

// TODO(bartlomieju): make static
pub fn get_available_libs() -> Vec<String> {
vec![
"deno.ns".to_string(),
"deno.window".to_string(),
"deno.worker".to_string(),
"deno.shared_globals".to_string(),
"deno.unstable".to_string(),
"dom".to_string(),
"dom.iterable".to_string(),
"es5".to_string(),
"es6".to_string(),
"esnext".to_string(),
"es2020".to_string(),
"es2020.full".to_string(),
"es2019".to_string(),
"es2019.full".to_string(),
"es2018".to_string(),
"es2018.full".to_string(),
"es2017".to_string(),
"es2017.full".to_string(),
"es2016".to_string(),
"es2016.full".to_string(),
"es2015".to_string(),
"es2015.collection".to_string(),
"es2015.core".to_string(),
"es2015.generator".to_string(),
"es2015.iterable".to_string(),
"es2015.promise".to_string(),
"es2015.proxy".to_string(),
"es2015.reflect".to_string(),
"es2015.symbol".to_string(),
"es2015.symbol.wellknown".to_string(),
"es2016.array.include".to_string(),
"es2017.intl".to_string(),
"es2017.object".to_string(),
"es2017.sharedmemory".to_string(),
"es2017.string".to_string(),
"es2017.typedarrays".to_string(),
"es2018.asyncgenerator".to_string(),
"es2018.asynciterable".to_string(),
"es2018.intl".to_string(),
"es2018.promise".to_string(),
"es2018.regexp".to_string(),
"es2019.array".to_string(),
"es2019.object".to_string(),
"es2019.string".to_string(),
"es2019.symbol".to_string(),
"es2020.bigint".to_string(),
"es2020.promise".to_string(),
"es2020.string".to_string(),
"es2020.symbol.wellknown".to_string(),
"esnext.array".to_string(),
"esnext.asynciterable".to_string(),
"esnext.bigint".to_string(),
"esnext.intl".to_string(),
"esnext.promise".to_string(),
"esnext.string".to_string(),
"esnext.symbol".to_string(),
"scripthost".to_string(),
"webworker".to_string(),
"webworker.importscripts".to_string(),
]
}
pub const AVAILABLE_LIBS: &[&str] = &[
"deno.ns",
"deno.window",
"deno.worker",
"deno.shared_globals",
"deno.unstable",
"dom",
"dom.iterable",
"es5",
"es6",
"esnext",
"es2020",
"es2020.full",
"es2019",
"es2019.full",
"es2018",
"es2018.full",
"es2017",
"es2017.full",
"es2016",
"es2016.full",
"es2015",
"es2015.collection",
"es2015.core",
"es2015.generator",
"es2015.iterable",
"es2015.promise",
"es2015.proxy",
"es2015.reflect",
"es2015.symbol",
"es2015.symbol.wellknown",
"es2016.array.include",
"es2017.intl",
"es2017.object",
"es2017.sharedmemory",
"es2017.string",
"es2017.typedarrays",
"es2018.asyncgenerator",
"es2018.asynciterable",
"es2018.intl",
"es2018.promise",
"es2018.regexp",
"es2019.array",
"es2019.object",
"es2019.string",
"es2019.symbol",
"es2020.bigint",
"es2020.promise",
"es2020.string",
"es2020.symbol.wellknown",
"esnext.array",
"esnext.asynciterable",
"esnext.bigint",
"esnext.intl",
"esnext.promise",
"esnext.string",
"esnext.symbol",
"scripthost",
"webworker",
"webworker.importscripts",
];

#[derive(Debug, Clone)]
pub struct CompiledModule {

@@ -160,6 +154,7 @@ impl Future for CompilerWorker {
}
}

// TODO(bartlomieju): use JSONC parser from dprint instead of Regex
lazy_static! {
static ref CHECK_JS_RE: Regex =
Regex::new(r#""checkJs"\s*?:\s*?true"#).unwrap();

@@ -175,8 +170,13 @@ fn create_compiler_worker(
// like 'eval', 'repl'
let entry_point =
ModuleSpecifier::resolve_url_or_path("./__$deno$ts_compiler.ts").unwrap();
let worker_state =
State::new(global_state.clone(), Some(permissions), entry_point, true)
let worker_state = State::new(
global_state.clone(),
Some(permissions),
entry_point,
None,
true,
)
.expect("Unable to create worker state");

// TODO(bartlomieju): this metric is never used anywhere

@@ -294,6 +294,28 @@ impl CompiledFileMetadata {
}
}

/// Information associated with compilation of a "module graph",
/// ie. entry point and all its dependencies.
/// It's used to perform cache invalidation if content of any
/// dependency changes.
#[derive(Deserialize, Serialize)]
pub struct GraphFileMetadata {
pub deps: Vec<String>,
pub version_hash: String,
}

impl GraphFileMetadata {
pub fn from_json_string(
metadata_string: String,
) -> Result<Self, serde_json::Error> {
serde_json::from_str::<Self>(&metadata_string)
}

pub fn to_json_string(&self) -> Result<String, serde_json::Error> {
serde_json::to_string(self)
}
}

/// Emit a SHA256 hash based on source code, deno version and TS config.
/// Used to check if a recompilation for source code is needed.
pub fn source_code_version_hash(

@@ -383,6 +405,7 @@ impl TsCompiler {
})))
}

// TODO(bartlomieju): this method is no longer needed
/// Mark given module URL as compiled to avoid multiple compilations of same
/// module in single run.
fn mark_compiled(&self, url: &Url) {

@@ -390,11 +413,34 @@ impl TsCompiler {
c.insert(url.clone());
}

/// Check if given module URL has already been compiled and can be fetched
/// directly from disk.
fn has_compiled(&self, url: &Url) -> bool {
let c = self.compiled.lock().unwrap();
c.contains(url)
/// Check if there is compiled source in cache that is valid
/// and can be used again.
// TODO(bartlomieju): there should be check that cached file actually exists
fn has_compiled_source(
&self,
file_fetcher: &SourceFileFetcher,
url: &Url,
) -> bool {
let specifier = ModuleSpecifier::from(url.clone());
if let Some(source_file) = file_fetcher
.fetch_cached_source_file(&specifier, Permissions::allow_all())
{
if let Some(metadata) = self.get_metadata(&url) {
// 2. compare version hashes
// TODO: it would probably be good idea to make it method implemented on SourceFile
let version_hash_to_validate = source_code_version_hash(
&source_file.source_code,
version::DENO,
&self.config.hash,
);

if metadata.version_hash == version_hash_to_validate {
return true;
}
}
}

false
}

/// Asynchronously compile module and all it's dependencies.

@@ -406,64 +452,43 @@ impl TsCompiler {
///
/// If compilation is required then new V8 worker is spawned with fresh TS
/// compiler.
pub async fn compile(
pub async fn compile_module_graph(
&self,
global_state: GlobalState,
source_file: &SourceFile,
target: TargetLib,
permissions: Permissions,
is_dyn_import: bool,
) -> Result<CompiledModule, ErrBox> {
if self.has_compiled(&source_file.url) {
return self.get_compiled_module(&source_file.url);
}
module_graph: HashMap<String, ModuleGraphFile>,
) -> Result<(), ErrBox> {
let mut has_cached_version = false;

if self.use_disk_cache {
// Try to load cached version:
// 1. check if there's 'meta' file
if let Some(metadata) = self.get_metadata(&source_file.url) {
// 2. compare version hashes
// TODO: it would probably be good idea to make it method implemented on SourceFile
let version_hash_to_validate = source_code_version_hash(
&source_file.source_code,
version::DENO,
if let Some(metadata) = self.get_graph_metadata(&source_file.url) {
has_cached_version = true;

let version_hash = crate::checksum::gen(vec![
version::DENO.as_bytes(),
&self.config.hash,
);
]);

if metadata.version_hash == version_hash_to_validate {
debug!("load_cache metadata version hash match");
if let Ok(compiled_module) =
self.get_compiled_module(&source_file.url)
{
self.mark_compiled(&source_file.url);
return Ok(compiled_module);
has_cached_version &= metadata.version_hash == version_hash;
has_cached_version &= self
.has_compiled_source(&global_state.file_fetcher, &source_file.url);

for dep in metadata.deps {
let url = Url::parse(&dep).expect("Dep is not a valid url");
has_cached_version &=
self.has_compiled_source(&global_state.file_fetcher, &url);
}
}
}

if has_cached_version {
return Ok(());
}
let source_file_ = source_file.clone();

let module_url = source_file.url.clone();
let module_specifier = ModuleSpecifier::from(source_file.url.clone());
let import_map: Option<ImportMap> =
match global_state.flags.import_map_path.as_ref() {
None => None,
Some(file_path) => {
if !global_state.flags.unstable {
exit_unstable("--importmap")
}
Some(ImportMap::load(file_path)?)
}
};
let mut module_graph_loader = ModuleGraphLoader::new(
global_state.file_fetcher.clone(),
import_map,
permissions.clone(),
is_dyn_import,
true,
);

module_graph_loader.add_to_graph(&module_specifier).await?;
let module_graph = module_graph_loader.get_graph();
let module_graph_json =
serde_json::to_value(module_graph).expect("Failed to serialize data");
let target = match target {

@@ -500,23 +525,17 @@ impl TsCompiler {

let req_msg = j.to_string().into_boxed_str().into_boxed_bytes();

let ts_compiler = self.clone();

// TODO(bartlomieju): lift this call up - TSC shouldn't print anything
info!(
"{} {}",
colors::green("Compile".to_string()),
module_url.to_string()
);

let start = Instant::now();

let msg =
execute_in_same_thread(global_state.clone(), permissions, req_msg)
.await?;

let end = Instant::now();
debug!("time spent in compiler thread {:#?}", end - start);

let json_str = std::str::from_utf8(&msg).unwrap();

let compile_response: CompileResponse = serde_json::from_str(json_str)?;

@@ -525,8 +544,69 @@ impl TsCompiler {
return Err(ErrBox::from(compile_response.diagnostics));
}

self.set_graph_metadata(
source_file.url.clone(),
&compile_response.emit_map,
)?;
self.cache_emitted_files(compile_response.emit_map)?;
ts_compiler.get_compiled_module(&source_file_.url)
Ok(())
}

fn get_graph_metadata(&self, url: &Url) -> Option<GraphFileMetadata> {
// Try to load cached version:
// 1. check if there's 'meta' file
let cache_key = self
.disk_cache
.get_cache_filename_with_extension(url, "graph");
if let Ok(metadata_bytes) = self.disk_cache.get(&cache_key) {
if let Ok(metadata) = std::str::from_utf8(&metadata_bytes) {
if let Ok(read_metadata) =
GraphFileMetadata::from_json_string(metadata.to_string())
{
return Some(read_metadata);
}
}
}

None
}

fn set_graph_metadata(
&self,
url: Url,
emit_map: &HashMap<String, EmittedSource>,
) -> std::io::Result<()> {
let version_hash =
crate::checksum::gen(vec![version::DENO.as_bytes(), &self.config.hash]);
let mut deps = vec![];

for (_emitted_name, source) in emit_map.iter() {
let specifier = ModuleSpecifier::resolve_url(&source.filename)
.expect("Should be a valid module specifier");

let source_file = self
.file_fetcher
.fetch_cached_source_file(&specifier, Permissions::allow_all())
.expect("Source file not found");

// NOTE: JavaScript files are only cached to disk if `checkJs`
// option in on
if source_file.media_type == msg::MediaType::JavaScript
&& !self.compile_js
{
continue;
}

deps.push(specifier.to_string());
}

let graph_metadata = GraphFileMetadata { deps, version_hash };
let meta_key = self
.disk_cache
.get_cache_filename_with_extension(&url, "graph");
self
.disk_cache
.set(&meta_key, graph_metadata.to_json_string()?.as_bytes())
}

/// Get associated `CompiledFileMetadata` for given module if it exists.

@@ -557,10 +637,23 @@ impl TsCompiler {
let specifier = ModuleSpecifier::resolve_url(&source.filename)
.expect("Should be a valid module specifier");

let source_file = self
.file_fetcher
.fetch_cached_source_file(&specifier, Permissions::allow_all())
.expect("Source file not found");

// NOTE: JavaScript files are only cached to disk if `checkJs`
// option in on
if source_file.media_type == msg::MediaType::JavaScript
&& !self.compile_js
{
continue;
}

if emitted_name.ends_with(".map") {
self.cache_source_map(&specifier, &source.contents)?;
} else if emitted_name.ends_with(".js") {
self.cache_compiled_file(&specifier, &source.contents)?;
self.cache_compiled_file(&specifier, source_file, &source.contents)?;
} else {
panic!("Trying to cache unknown file type {}", emitted_name);
}

@@ -618,20 +711,9 @@ impl TsCompiler {
fn cache_compiled_file(
&self,
module_specifier: &ModuleSpecifier,
source_file: SourceFile,
contents: &str,
) -> std::io::Result<()> {
let source_file = self
.file_fetcher
.fetch_cached_source_file(&module_specifier, Permissions::allow_all())
.expect("Source file not found");

// NOTE: JavaScript files are only cached to disk if `checkJs`
// option in on
if source_file.media_type == msg::MediaType::JavaScript && !self.compile_js
{
return Ok(());
}

// By default TSC output source map url that is relative; we need
// to substitute it manually to correct file URL in DENO_DIR.
let mut content_lines = contents

@@ -664,10 +746,6 @@ impl TsCompiler {
.get_cache_filename_with_extension(module_specifier.as_url(), "js");
self.disk_cache.set(&js_key, contents.as_bytes())?;
self.mark_compiled(module_specifier.as_url());
let source_file = self
.file_fetcher
.fetch_cached_source_file(&module_specifier, Permissions::allow_all())
.expect("Source file not found");

let version_hash = source_code_version_hash(
&source_file.source_code,

@@ -720,18 +798,6 @@ impl TsCompiler {
module_specifier: &ModuleSpecifier,
contents: &str,
) -> std::io::Result<()> {
let source_file = self
.file_fetcher
.fetch_cached_source_file(&module_specifier, Permissions::allow_all())
.expect("Source file not found");

// NOTE: JavaScript files are only cached to disk if `checkJs`
// option in on
if source_file.media_type == msg::MediaType::JavaScript && !self.compile_js
{
return Ok(());
}

let js_key = self
.disk_cache
.get_cache_filename_with_extension(module_specifier.as_url(), "js");

@@ -854,14 +920,12 @@ pub async fn bundle(
compiler_config: CompilerConfig,
module_specifier: ModuleSpecifier,
maybe_import_map: Option<ImportMap>,
out_file: Option<PathBuf>,
unstable: bool,
) -> Result<(), ErrBox> {
) -> Result<String, ErrBox> {
debug!(
"Invoking the compiler to bundle. module_name: {}",
module_specifier.to_string()
);
eprintln!("Bundling {}", module_specifier.to_string());

let permissions = Permissions::allow_all();
let mut module_graph_loader = ModuleGraphLoader::new(

@@ -871,7 +935,9 @@ pub async fn bundle(
false,
true,
);
module_graph_loader.add_to_graph(&module_specifier).await?;
module_graph_loader
.add_to_graph(&module_specifier, None)
.await?;
let module_graph = module_graph_loader.get_graph();
let module_graph_json =
serde_json::to_value(module_graph).expect("Failed to serialize data");

@@ -921,26 +987,7 @@ pub async fn bundle(

assert!(bundle_response.bundle_output.is_some());
let output = bundle_response.bundle_output.unwrap();

// TODO(bartlomieju): the rest of this function should be handled
// in `main.rs` - it has nothing to do with TypeScript...
let output_string = fmt::format_text(&output)?;

if let Some(out_file_) = out_file.as_ref() {
eprintln!("Emitting bundle to {:?}", out_file_);

let output_bytes = output_string.as_bytes();
let output_len = output_bytes.len();

deno_fs::write_file(out_file_, output_bytes, 0o666)?;
// TODO(bartlomieju): do we really need to show this info? (it doesn't respect --quiet flag)
// TODO(bartlomieju): add "humanFileSize" method
eprintln!("{} bytes emitted.", output_len);
} else {
println!("{}", output_string);
}

Ok(())
Ok(output)
}

/// This function is used by `Deno.compile()` and `Deno.bundle()` APIs.

@@ -968,7 +1015,9 @@ pub async fn runtime_compile<S: BuildHasher>(
let module_specifier =
ModuleSpecifier::resolve_import(root_name, "<unknown>")?;
root_names.push(module_specifier.to_string());
module_graph_loader.add_to_graph(&module_specifier).await?;
module_graph_loader
.add_to_graph(&module_specifier, None)
.await?;
}

// download all additional files from TSconfig and add them to root_names

@@ -983,7 +1032,9 @@ pub async fn runtime_compile<S: BuildHasher>(
.expect("type is not a string")
.to_string();
let type_specifier = ModuleSpecifier::resolve_url_or_path(&type_str)?;
module_graph_loader.add_to_graph(&type_specifier).await?;
module_graph_loader
.add_to_graph(&type_specifier, None)
.await?;
root_names.push(type_specifier.to_string())
}
}

@@ -1078,18 +1129,36 @@ mod tests {
};
let mock_state =
GlobalState::mock(vec![String::from("deno"), String::from("hello.ts")]);

let mut module_graph_loader = ModuleGraphLoader::new(
mock_state.file_fetcher.clone(),
None,
Permissions::allow_all(),
false,
false,
);
module_graph_loader
.add_to_graph(&specifier, None)
.await
.expect("Failed to create graph");
let module_graph = module_graph_loader.get_graph();

let result = mock_state
.ts_compiler
.compile(
.compile_module_graph(
mock_state.clone(),
&out,
TargetLib::Main,
Permissions::allow_all(),
false,
module_graph,
)
.await;
assert!(result.is_ok());
let source_code = result.unwrap().code;
let compiled_file = mock_state
.ts_compiler
.get_compiled_module(&out.url)
.unwrap();
let source_code = compiled_file.code;
assert!(source_code
.as_bytes()
.starts_with(b"\"use strict\";\nconsole.log(\"Hello World\");"));

@@ -1143,7 +1212,6 @@ mod tests {
CompilerConfig::load(None).unwrap(),
module_name,
None,
None,
false,
)
.await;
@@ -306,7 +306,8 @@ mod tests {
ModuleSpecifier::resolve_url_or_path(&p.to_string_lossy()).unwrap();
let global_state = GlobalState::new(flags::Flags::default()).unwrap();
let state =
State::new(global_state, None, module_specifier.clone(), false).unwrap();
State::new(global_state, None, module_specifier.clone(), None, false)
.unwrap();
let state_ = state.clone();
tokio_util::run_basic(async move {
let mut worker =

@@ -335,7 +336,8 @@ mod tests {
ModuleSpecifier::resolve_url_or_path(&p.to_string_lossy()).unwrap();
let global_state = GlobalState::new(flags::Flags::default()).unwrap();
let state =
State::new(global_state, None, module_specifier.clone(), false).unwrap();
State::new(global_state, None, module_specifier.clone(), None, false)
.unwrap();
let state_ = state.clone();
tokio_util::run_basic(async move {
let mut worker =

@@ -350,7 +352,6 @@ mod tests {
});

let state = state_.borrow();
assert_eq!(state.metrics.resolve_count, 1);
// Check that we didn't start the compiler.
assert_eq!(state.global_state.compiler_starts.load(Ordering::SeqCst), 0);
}

@@ -372,8 +373,13 @@ mod tests {
..flags::Flags::default()
};
let global_state = GlobalState::new(flags).unwrap();
let state =
State::new(global_state.clone(), None, module_specifier.clone(), false)
let state = State::new(
global_state.clone(),
None,
module_specifier.clone(),
None,
false,
)
.unwrap();
let mut worker = MainWorker::new(
"TEST".to_string(),