2020-01-02 15:13:47 -05:00
|
|
|
// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
|
2020-02-18 10:08:18 -05:00
|
|
|
use crate::colors;
|
2019-06-04 09:03:56 -04:00
|
|
|
use crate::diagnostics::Diagnostic;
|
2020-05-05 12:23:15 -04:00
|
|
|
use crate::diagnostics::DiagnosticItem;
|
2019-07-17 18:15:30 -04:00
|
|
|
use crate::disk_cache::DiskCache;
|
2019-07-31 07:58:41 -04:00
|
|
|
use crate::file_fetcher::SourceFile;
|
|
|
|
use crate::file_fetcher::SourceFileFetcher;
|
2020-05-07 14:39:00 -04:00
|
|
|
use crate::fmt;
|
2020-05-05 12:23:15 -04:00
|
|
|
use crate::fs as deno_fs;
|
2020-02-06 23:05:02 -05:00
|
|
|
use crate::global_state::GlobalState;
|
2020-05-18 06:59:29 -04:00
|
|
|
use crate::import_map::ImportMap;
|
|
|
|
use crate::module_graph::ModuleGraphLoader;
|
2019-01-14 01:30:38 -05:00
|
|
|
use crate::msg;
|
2020-02-23 14:51:29 -05:00
|
|
|
use crate::op_error::OpError;
|
2020-05-08 10:18:00 -04:00
|
|
|
use crate::ops;
|
2020-05-11 07:13:27 -04:00
|
|
|
use crate::permissions::Permissions;
|
2019-07-17 18:15:30 -04:00
|
|
|
use crate::source_maps::SourceMapGetter;
|
2019-03-18 20:03:37 -04:00
|
|
|
use crate::startup_data;
|
2020-05-18 06:59:29 -04:00
|
|
|
use crate::state::exit_unstable;
|
2020-05-08 10:18:00 -04:00
|
|
|
use crate::state::State;
|
2019-07-17 18:15:30 -04:00
|
|
|
use crate::version;
|
2020-05-08 10:18:00 -04:00
|
|
|
use crate::web_worker::WebWorker;
|
2020-02-11 04:04:59 -05:00
|
|
|
use crate::worker::WorkerEvent;
|
2020-05-08 10:18:00 -04:00
|
|
|
use core::task::Context;
|
2020-01-05 11:56:18 -05:00
|
|
|
use deno_core::Buf;
|
|
|
|
use deno_core::ErrBox;
|
|
|
|
use deno_core::ModuleSpecifier;
|
2020-05-08 10:18:00 -04:00
|
|
|
use deno_core::StartupData;
|
2020-05-18 06:59:29 -04:00
|
|
|
use futures::future::Either;
|
2020-05-08 10:18:00 -04:00
|
|
|
use futures::future::Future;
|
|
|
|
use futures::future::FutureExt;
|
2020-03-10 08:26:17 -04:00
|
|
|
use log::info;
|
2019-08-17 12:53:34 -04:00
|
|
|
use regex::Regex;
|
2020-05-05 12:23:15 -04:00
|
|
|
use serde::Deserialize;
|
2020-05-16 15:47:26 -04:00
|
|
|
use serde::Serialize;
|
2020-02-03 18:08:44 -05:00
|
|
|
use serde_json::json;
|
2020-05-05 12:23:15 -04:00
|
|
|
use serde_json::Value;
|
2020-05-12 11:44:25 -04:00
|
|
|
use sourcemap::SourceMap;
|
2020-01-08 09:17:44 -05:00
|
|
|
use std::collections::HashMap;
|
2019-07-17 18:15:30 -04:00
|
|
|
use std::collections::HashSet;
|
|
|
|
use std::fs;
|
2020-01-08 09:17:44 -05:00
|
|
|
use std::hash::BuildHasher;
|
2019-09-20 10:19:51 -04:00
|
|
|
use std::io;
|
2020-02-06 21:24:51 -05:00
|
|
|
use std::ops::Deref;
|
2020-05-08 10:18:00 -04:00
|
|
|
use std::ops::DerefMut;
|
2019-06-24 13:10:21 -04:00
|
|
|
use std::path::PathBuf;
|
2020-05-08 10:18:00 -04:00
|
|
|
use std::pin::Pin;
|
2019-02-18 10:42:15 -05:00
|
|
|
use std::str;
|
2019-04-04 05:33:32 -04:00
|
|
|
use std::sync::atomic::Ordering;
|
2020-02-06 21:24:51 -05:00
|
|
|
use std::sync::Arc;
|
2019-07-17 18:15:30 -04:00
|
|
|
use std::sync::Mutex;
|
2020-05-08 10:18:00 -04:00
|
|
|
use std::task::Poll;
|
2020-05-18 06:59:29 -04:00
|
|
|
use std::time::Instant;
|
2019-07-17 18:15:30 -04:00
|
|
|
use url::Url;
|
2019-01-09 12:59:46 -05:00
|
|
|
|
2020-05-18 06:59:29 -04:00
|
|
|
// TODO(bartlomieju): make static
|
|
|
|
/// Returns the names of all TypeScript `lib` files that user code may
/// reference (Deno-specific libs plus the standard TS `es*`/`dom`/worker
/// libs).
pub fn get_available_libs() -> Vec<String> {
  const LIB_NAMES: &[&str] = &[
    "deno.ns",
    "deno.window",
    "deno.worker",
    "deno.shared_globals",
    "deno.unstable",
    "dom",
    "dom.iterable",
    "es5",
    "es6",
    "esnext",
    "es2020",
    "es2020.full",
    "es2019",
    "es2019.full",
    "es2018",
    "es2018.full",
    "es2017",
    "es2017.full",
    "es2016",
    "es2016.full",
    "es2015",
    "es2015.collection",
    "es2015.core",
    "es2015.generator",
    "es2015.iterable",
    "es2015.promise",
    "es2015.proxy",
    "es2015.reflect",
    "es2015.symbol",
    "es2015.symbol.wellknown",
    "es2016.array.include",
    "es2017.intl",
    "es2017.object",
    "es2017.sharedmemory",
    "es2017.string",
    "es2017.typedarrays",
    "es2018.asyncgenerator",
    "es2018.asynciterable",
    "es2018.intl",
    "es2018.promise",
    "es2018.regexp",
    "es2019.array",
    "es2019.object",
    "es2019.string",
    "es2019.symbol",
    "es2020.bigint",
    "es2020.promise",
    "es2020.string",
    "es2020.symbol.wellknown",
    "esnext.array",
    "esnext.asynciterable",
    "esnext.bigint",
    "esnext.intl",
    "esnext.promise",
    "esnext.string",
    "esnext.symbol",
    "scripthost",
    "webworker",
    "webworker.importscripts",
  ];
  LIB_NAMES.iter().map(|name| name.to_string()).collect()
}
|
|
|
|
|
2020-05-08 10:18:00 -04:00
|
|
|
/// Result of compiling a single module: the emitted JavaScript plus the
/// module's name (its URL rendered as a string — see `get_compiled_module`).
#[derive(Debug, Clone)]
pub struct CompiledModule {
  /// Emitted JavaScript source code.
  pub code: String,
  /// Module name; populated from the module URL.
  pub name: String,
}
|
|
|
|
|
|
|
|
/// Newtype wrapper over `WebWorker` whose isolate additionally has the
/// compiler ops registered (see `CompilerWorker::new`).
pub struct CompilerWorker(WebWorker);
|
|
|
|
|
|
|
|
impl CompilerWorker {
|
|
|
|
pub fn new(name: String, startup_data: StartupData, state: State) -> Self {
|
|
|
|
let state_ = state.clone();
|
|
|
|
let mut worker = WebWorker::new(name, startup_data, state_, false);
|
|
|
|
{
|
|
|
|
let isolate = &mut worker.isolate;
|
|
|
|
ops::compiler::init(isolate, &state);
|
|
|
|
}
|
|
|
|
Self(worker)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl Deref for CompilerWorker {
|
|
|
|
type Target = WebWorker;
|
|
|
|
fn deref(&self) -> &Self::Target {
|
|
|
|
&self.0
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl DerefMut for CompilerWorker {
|
|
|
|
fn deref_mut(&mut self) -> &mut Self::Target {
|
|
|
|
&mut self.0
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl Future for CompilerWorker {
|
|
|
|
type Output = Result<(), ErrBox>;
|
|
|
|
|
|
|
|
fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
|
|
|
|
let inner = self.get_mut();
|
|
|
|
inner.0.poll_unpin(cx)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-08-17 12:53:34 -04:00
|
|
|
lazy_static! {
  /// Matches a `"checkJs": true` entry in a tsconfig's JSON text; used by
  /// `CompilerConfig::load` to decide whether JS files are compiled too.
  static ref CHECK_JS_RE: Regex =
    Regex::new(r#""checkJs"\s*?:\s*?true"#).unwrap();
}
|
|
|
|
|
2020-05-20 10:25:40 -04:00
|
|
|
/// Create a new worker with snapshot of TS compiler and setup compiler's
/// runtime.
///
/// Builds a fresh `State` (with the given permissions) rooted at a synthetic
/// entry point, bumps the compiler-start counter, then spawns a
/// `CompilerWorker` from the compiler snapshot and runs its bootstrap script.
///
/// Panics if state creation or the bootstrap script fails — a broken compiler
/// runtime is not recoverable.
fn create_compiler_worker(
  global_state: GlobalState,
  permissions: Permissions,
) -> CompilerWorker {
  // TODO(bartlomieju): these $deno$ specifiers should be unified for all subcommands
  // like 'eval', 'repl'
  let entry_point =
    ModuleSpecifier::resolve_url_or_path("./__$deno$ts_compiler.ts").unwrap();
  let worker_state =
    State::new(global_state.clone(), Some(permissions), entry_point, true)
      .expect("Unable to create worker state");

  // TODO(bartlomieju): this metric is never used anywhere
  // Count how many times we start the compiler worker.
  global_state.compiler_starts.fetch_add(1, Ordering::SeqCst);

  let mut worker = CompilerWorker::new(
    "TS".to_string(),
    startup_data::compiler_isolate_init(),
    worker_state,
  );
  // Initialize the compiler's JS-side runtime before any request is sent.
  worker.execute("bootstrap.tsCompilerRuntime()").unwrap();
  worker
}
|
|
|
|
|
2020-02-03 18:08:44 -05:00
|
|
|
/// Which runtime the compiled code targets; mapped to the `"main"` /
/// `"worker"` target strings in the compiler request (see `compile`).
#[derive(Clone)]
pub enum TargetLib {
  /// Main-thread runtime.
  Main,
  /// Web-worker runtime.
  Worker,
}
|
|
|
|
|
2019-07-31 13:16:03 -04:00
|
|
|
/// Struct which represents the state of the compiler
/// configuration where the first is canonical name for the configuration file,
/// second is a vector of the bytes of the contents of the configuration file,
/// third is bytes of the hash of contents.
#[derive(Clone)]
pub struct CompilerConfig {
  /// Canonicalized path of the config file, when one was provided.
  pub path: Option<PathBuf>,
  /// Raw bytes of the config file contents, when one was loaded.
  pub content: Option<Vec<u8>>,
  /// Bytes fed into the version hash; derived from `content` (empty when
  /// there is no config file).
  pub hash: Vec<u8>,
  /// True when the config enables `checkJs`, meaning JS files are compiled
  /// and cached as well.
  pub compile_js: bool,
}
|
|
|
|
|
|
|
|
impl CompilerConfig {
|
|
|
|
/// Take the passed flag and resolve the file name relative to the cwd.
|
|
|
|
pub fn load(config_path: Option<String>) -> Result<Self, ErrBox> {
|
|
|
|
let config_file = match &config_path {
|
|
|
|
Some(config_file_name) => {
|
|
|
|
debug!("Compiler config file: {}", config_file_name);
|
|
|
|
let cwd = std::env::current_dir().unwrap();
|
|
|
|
Some(cwd.join(config_file_name))
|
|
|
|
}
|
|
|
|
_ => None,
|
|
|
|
};
|
|
|
|
|
|
|
|
// Convert the PathBuf to a canonicalized string. This is needed by the
|
|
|
|
// compiler to properly deal with the configuration.
|
|
|
|
let config_path = match &config_file {
|
2019-12-23 09:59:44 -05:00
|
|
|
Some(config_file) => Some(config_file.canonicalize().map_err(|_| {
|
|
|
|
io::Error::new(
|
|
|
|
io::ErrorKind::InvalidInput,
|
|
|
|
format!(
|
|
|
|
"Could not find the config file: {}",
|
|
|
|
config_file.to_string_lossy()
|
|
|
|
),
|
|
|
|
)
|
|
|
|
})),
|
2019-07-31 13:16:03 -04:00
|
|
|
_ => None,
|
|
|
|
};
|
|
|
|
|
|
|
|
// Load the contents of the configuration file
|
|
|
|
let config = match &config_file {
|
|
|
|
Some(config_file) => {
|
|
|
|
debug!("Attempt to load config: {}", config_file.to_str().unwrap());
|
|
|
|
let config = fs::read(&config_file)?;
|
|
|
|
Some(config)
|
|
|
|
}
|
|
|
|
_ => None,
|
|
|
|
};
|
|
|
|
|
|
|
|
let config_hash = match &config {
|
|
|
|
Some(bytes) => bytes.clone(),
|
|
|
|
_ => b"".to_vec(),
|
|
|
|
};
|
|
|
|
|
2019-08-17 12:53:34 -04:00
|
|
|
// If `checkJs` is set to true in `compilerOptions` then we're gonna be compiling
|
|
|
|
// JavaScript files as well
|
|
|
|
let compile_js = if let Some(config_content) = config.clone() {
|
|
|
|
let config_str = std::str::from_utf8(&config_content)?;
|
|
|
|
CHECK_JS_RE.is_match(config_str)
|
|
|
|
} else {
|
|
|
|
false
|
|
|
|
};
|
|
|
|
|
2019-07-31 13:16:03 -04:00
|
|
|
let ts_config = Self {
|
2019-12-23 09:59:44 -05:00
|
|
|
path: config_path.unwrap_or_else(|| Ok(PathBuf::new())).ok(),
|
2019-07-31 13:16:03 -04:00
|
|
|
content: config,
|
|
|
|
hash: config_hash,
|
2019-08-17 12:53:34 -04:00
|
|
|
compile_js,
|
2019-07-31 13:16:03 -04:00
|
|
|
};
|
|
|
|
|
|
|
|
Ok(ts_config)
|
|
|
|
}
|
|
|
|
}
|
2019-07-17 18:15:30 -04:00
|
|
|
|
|
|
|
/// Information associated with compiled file in cache.
/// Includes source code path and state hash.
/// version_hash is used to validate versions of the file
/// and could be used to remove stale file in cache.
#[derive(Deserialize, Serialize)]
pub struct CompiledFileMetadata {
  /// Path of the original (uncompiled) source file.
  pub source_path: PathBuf,
  /// Hash of source code + deno version + config hash
  /// (produced by `source_code_version_hash`).
  pub version_hash: String,
}
|
|
|
|
|
2019-07-17 18:15:30 -04:00
|
|
|
impl CompiledFileMetadata {
|
2020-05-16 15:47:26 -04:00
|
|
|
pub fn from_json_string(
|
|
|
|
metadata_string: String,
|
|
|
|
) -> Result<Self, serde_json::Error> {
|
|
|
|
serde_json::from_str::<Self>(&metadata_string)
|
2019-01-09 12:59:46 -05:00
|
|
|
}
|
|
|
|
|
2020-01-04 05:20:52 -05:00
|
|
|
pub fn to_json_string(&self) -> Result<String, serde_json::Error> {
|
2020-05-16 15:47:26 -04:00
|
|
|
serde_json::to_string(self)
|
2019-07-17 18:15:30 -04:00
|
|
|
}
|
|
|
|
}
|
2020-05-16 15:47:26 -04:00
|
|
|
|
2019-08-28 18:58:42 -04:00
|
|
|
/// Emit a SHA256 hash based on source code, deno version and TS config.
|
2019-07-17 18:15:30 -04:00
|
|
|
/// Used to check if a recompilation for source code is needed.
|
|
|
|
pub fn source_code_version_hash(
|
|
|
|
source_code: &[u8],
|
|
|
|
version: &str,
|
|
|
|
config_hash: &[u8],
|
|
|
|
) -> String {
|
2019-11-03 10:39:27 -05:00
|
|
|
crate::checksum::gen(vec![source_code, version.as_bytes(), config_hash])
|
2019-07-17 18:15:30 -04:00
|
|
|
}
|
|
|
|
|
2020-02-06 21:24:51 -05:00
|
|
|
/// Shared state behind `TsCompiler` (held in an `Arc`, cloned cheaply).
pub struct TsCompilerInner {
  /// Fetcher used to look up cached source files.
  pub file_fetcher: SourceFileFetcher,
  /// Loaded compiler configuration (tsconfig path/contents/hash/checkJs).
  pub config: CompilerConfig,
  /// On-disk cache holding emitted JS, source maps and metadata files.
  pub disk_cache: DiskCache,
  /// Set of all URLs that have been compiled. This prevents double
  /// compilation of module.
  pub compiled: Mutex<HashSet<Url>>,
  /// This setting is controlled by `--reload` flag. Unless the flag
  /// is provided disk cache is used.
  pub use_disk_cache: bool,
  /// This setting is controlled by `compilerOptions.checkJs`
  pub compile_js: bool,
}
|
|
|
|
|
2020-02-06 21:24:51 -05:00
|
|
|
/// Cheaply-cloneable handle to the TypeScript compiler state; derefs to
/// `TsCompilerInner`.
#[derive(Clone)]
pub struct TsCompiler(Arc<TsCompilerInner>);
|
|
|
|
|
|
|
|
impl Deref for TsCompiler {
|
|
|
|
type Target = TsCompilerInner;
|
|
|
|
fn deref(&self) -> &Self::Target {
|
|
|
|
&self.0
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-05-05 12:23:15 -04:00
|
|
|
/// One file emitted by the compiler worker (a `.js` emit or a `.map`).
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct EmittedSource {
  // Resolved as a module-specifier URL in `cache_emitted_files`.
  filename: String,
  // The emitted text itself.
  contents: String,
}
|
|
|
|
|
|
|
|
/// Response payload from the compiler worker for a bundle request.
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct BundleResponse {
  // TypeScript diagnostics reported while bundling.
  diagnostics: Diagnostic,
  // The bundled output text, when bundling produced one.
  bundle_output: Option<String>,
}
|
|
|
|
|
|
|
|
/// Response payload from the compiler worker for a compile request.
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct CompileResponse {
  // Non-empty diagnostics cause `compile` to return an error.
  diagnostics: Diagnostic,
  // Emitted file name -> emitted source (both `.js` and `.js.map` entries).
  emit_map: HashMap<String, EmittedSource>,
}
|
|
|
|
|
|
|
|
// TODO(bartlomieju): possible deduplicate once TS refactor is stabilized
/// Response payload for a runtime (`Deno.bundle`-style) bundle request.
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(unused)]
struct RuntimeBundleResponse {
  // Diagnostics surfaced to the runtime caller instead of failing the build.
  diagnostics: Vec<DiagnosticItem>,
  // The bundled output text.
  output: String,
}
|
|
|
|
|
|
|
|
/// Response payload for a runtime (`Deno.compile`-style) compile request.
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct RuntimeCompileResponse {
  // Diagnostics surfaced to the runtime caller.
  diagnostics: Vec<DiagnosticItem>,
  // Emitted file name -> emitted source.
  emit_map: HashMap<String, EmittedSource>,
}
|
|
|
|
|
2019-07-17 18:15:30 -04:00
|
|
|
impl TsCompiler {
|
|
|
|
pub fn new(
|
2019-07-31 07:58:41 -04:00
|
|
|
file_fetcher: SourceFileFetcher,
|
|
|
|
disk_cache: DiskCache,
|
2019-07-17 18:15:30 -04:00
|
|
|
use_disk_cache: bool,
|
|
|
|
config_path: Option<String>,
|
2019-07-31 13:16:03 -04:00
|
|
|
) -> Result<Self, ErrBox> {
|
|
|
|
let config = CompilerConfig::load(config_path)?;
|
2020-02-06 21:24:51 -05:00
|
|
|
Ok(TsCompiler(Arc::new(TsCompilerInner {
|
2019-07-31 07:58:41 -04:00
|
|
|
file_fetcher,
|
|
|
|
disk_cache,
|
2019-08-17 12:53:34 -04:00
|
|
|
compile_js: config.compile_js,
|
2019-07-31 13:16:03 -04:00
|
|
|
config,
|
2019-07-17 18:15:30 -04:00
|
|
|
compiled: Mutex::new(HashSet::new()),
|
|
|
|
use_disk_cache,
|
2020-02-06 21:24:51 -05:00
|
|
|
})))
|
2019-07-17 18:15:30 -04:00
|
|
|
}
|
|
|
|
|
2020-02-03 18:08:44 -05:00
|
|
|
/// Mark given module URL as compiled to avoid multiple compilations of same
|
|
|
|
/// module in single run.
|
2019-07-17 18:15:30 -04:00
|
|
|
fn mark_compiled(&self, url: &Url) {
|
|
|
|
let mut c = self.compiled.lock().unwrap();
|
|
|
|
c.insert(url.clone());
|
|
|
|
}
|
|
|
|
|
2020-02-03 18:08:44 -05:00
|
|
|
/// Check if given module URL has already been compiled and can be fetched
|
|
|
|
/// directly from disk.
|
2019-07-17 18:15:30 -04:00
|
|
|
fn has_compiled(&self, url: &Url) -> bool {
|
|
|
|
let c = self.compiled.lock().unwrap();
|
|
|
|
c.contains(url)
|
|
|
|
}
|
|
|
|
|
|
|
|
  /// Asynchronously compile module and all it's dependencies.
  ///
  /// This method compiled every module at most once.
  ///
  /// If `--reload` flag was provided then compiler will not on-disk cache and
  /// force recompilation.
  ///
  /// If compilation is required then new V8 worker is spawned with fresh TS
  /// compiler.
  pub async fn compile(
    &self,
    global_state: GlobalState,
    source_file: &SourceFile,
    target: TargetLib,
    permissions: Permissions,
    is_dyn_import: bool,
  ) -> Result<CompiledModule, ErrBox> {
    // Fast path: already compiled during this run.
    if self.has_compiled(&source_file.url) {
      return self.get_compiled_module(&source_file.url);
    }

    if self.use_disk_cache {
      // Try to load cached version:
      // 1. check if there's 'meta' file
      if let Some(metadata) = self.get_metadata(&source_file.url) {
        // 2. compare version hashes
        // TODO: it would probably be good idea to make it method implemented on SourceFile
        let version_hash_to_validate = source_code_version_hash(
          &source_file.source_code,
          version::DENO,
          &self.config.hash,
        );

        if metadata.version_hash == version_hash_to_validate {
          debug!("load_cache metadata version hash match");
          if let Ok(compiled_module) =
            self.get_compiled_module(&source_file.url)
          {
            self.mark_compiled(&source_file.url);
            return Ok(compiled_module);
          }
        }
      }
    }

    // Cache miss: build the full module graph and send a compile request to
    // the compiler worker.
    let source_file_ = source_file.clone();
    let module_url = source_file.url.clone();
    let module_specifier = ModuleSpecifier::from(source_file.url.clone());
    // `--importmap` is gated behind `--unstable`; exits the process otherwise.
    let import_map: Option<ImportMap> =
      match global_state.flags.import_map_path.as_ref() {
        None => None,
        Some(file_path) => {
          if !global_state.flags.unstable {
            exit_unstable("--importmap")
          }
          Some(ImportMap::load(file_path)?)
        }
      };
    let mut module_graph_loader = ModuleGraphLoader::new(
      global_state.file_fetcher.clone(),
      import_map,
      permissions.clone(),
      is_dyn_import,
      false,
    );

    // Recursively fetch/resolve the module and its dependencies, then
    // serialize the graph for the worker.
    module_graph_loader.add_to_graph(&module_specifier).await?;
    let module_graph = module_graph_loader.get_graph();
    let module_graph_json =
      serde_json::to_value(module_graph).expect("Failed to serialize data");

    let target = match target {
      TargetLib::Main => "main",
      TargetLib::Worker => "worker",
    };
    let root_names = vec![module_url.to_string()];
    let bundle = false;
    let unstable = global_state.flags.unstable;
    let compiler_config = self.config.clone();
    let cwd = std::env::current_dir().unwrap();
    // The request JSON includes the tsconfig only when both path and
    // contents are available.
    let j = match (compiler_config.path, compiler_config.content) {
      (Some(config_path), Some(config_data)) => json!({
        "type": msg::CompilerRequestType::Compile as i32,
        "target": target,
        "rootNames": root_names,
        "bundle": bundle,
        "unstable": unstable,
        "configPath": config_path,
        "config": str::from_utf8(&config_data).unwrap(),
        "cwd": cwd,
        "sourceFileMap": module_graph_json,
      }),
      _ => json!({
        "type": msg::CompilerRequestType::Compile as i32,
        "target": target,
        "rootNames": root_names,
        "bundle": bundle,
        "unstable": unstable,
        "cwd": cwd,
        "sourceFileMap": module_graph_json,
      }),
    };

    let req_msg = j.to_string().into_boxed_str().into_boxed_bytes();

    let ts_compiler = self.clone();

    info!(
      "{} {}",
      colors::green("Compile".to_string()),
      module_url.to_string()
    );

    let start = Instant::now();

    // Run the compiler worker to completion and collect its response.
    let msg =
      execute_in_same_thread(global_state.clone(), permissions, req_msg)
        .await?;

    let end = Instant::now();
    debug!("time spent in compiler thread {:#?}", end - start);

    let json_str = std::str::from_utf8(&msg).unwrap();

    let compile_response: CompileResponse = serde_json::from_str(json_str)?;

    // Any TS diagnostics fail the compilation.
    if !compile_response.diagnostics.items.is_empty() {
      return Err(ErrBox::from(compile_response.diagnostics));
    }

    // Persist emitted JS/source maps, then read the compiled module back
    // from the disk cache.
    self.cache_emitted_files(compile_response.emit_map)?;
    ts_compiler.get_compiled_module(&source_file_.url)
  }
|
|
|
|
|
|
|
|
/// Get associated `CompiledFileMetadata` for given module if it exists.
|
2020-01-04 05:20:52 -05:00
|
|
|
pub fn get_metadata(&self, url: &Url) -> Option<CompiledFileMetadata> {
|
2019-07-17 18:15:30 -04:00
|
|
|
// Try to load cached version:
|
|
|
|
// 1. check if there's 'meta' file
|
|
|
|
let cache_key = self
|
|
|
|
.disk_cache
|
|
|
|
.get_cache_filename_with_extension(url, "meta");
|
|
|
|
if let Ok(metadata_bytes) = self.disk_cache.get(&cache_key) {
|
|
|
|
if let Ok(metadata) = std::str::from_utf8(&metadata_bytes) {
|
2020-05-16 15:47:26 -04:00
|
|
|
if let Ok(read_metadata) =
|
2019-07-17 18:15:30 -04:00
|
|
|
CompiledFileMetadata::from_json_string(metadata.to_string())
|
|
|
|
{
|
|
|
|
return Some(read_metadata);
|
2019-06-04 09:03:56 -04:00
|
|
|
}
|
|
|
|
}
|
2019-07-17 18:15:30 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
None
|
|
|
|
}
|
2019-05-20 12:06:57 -04:00
|
|
|
|
2020-05-05 12:23:15 -04:00
|
|
|
  /// Persist every file emitted by the compiler worker into the disk cache,
  /// dispatching on the emitted file's extension (`.js` vs `.map`).
  ///
  /// Panics on an emitted name with an unrecognized extension — that would
  /// indicate a compiler-worker bug, not a user error.
  fn cache_emitted_files(
    &self,
    emit_map: HashMap<String, EmittedSource>,
  ) -> std::io::Result<()> {
    for (emitted_name, source) in emit_map.iter() {
      // The worker reports which source module each emit belongs to.
      let specifier = ModuleSpecifier::resolve_url(&source.filename)
        .expect("Should be a valid module specifier");

      if emitted_name.ends_with(".map") {
        self.cache_source_map(&specifier, &source.contents)?;
      } else if emitted_name.ends_with(".js") {
        self.cache_compiled_file(&specifier, &source.contents)?;
      } else {
        panic!("Trying to cache unknown file type {}", emitted_name);
      }
    }

    Ok(())
  }
|
|
|
|
|
2019-07-31 13:16:03 -04:00
|
|
|
pub fn get_compiled_module(
|
2020-01-04 05:20:52 -05:00
|
|
|
&self,
|
2019-07-31 13:16:03 -04:00
|
|
|
module_url: &Url,
|
|
|
|
) -> Result<CompiledModule, ErrBox> {
|
|
|
|
let compiled_source_file = self.get_compiled_source_file(module_url)?;
|
|
|
|
|
|
|
|
let compiled_module = CompiledModule {
|
|
|
|
code: str::from_utf8(&compiled_source_file.source_code)
|
|
|
|
.unwrap()
|
|
|
|
.to_string(),
|
|
|
|
name: module_url.to_string(),
|
|
|
|
};
|
|
|
|
|
|
|
|
Ok(compiled_module)
|
|
|
|
}
|
|
|
|
|
2019-07-17 18:15:30 -04:00
|
|
|
  /// Return compiled JS file for given TS module.
  // TODO: ideally we shouldn't construct SourceFile by hand, but it should be delegated to
  // SourceFileFetcher
  pub fn get_compiled_source_file(
    &self,
    module_url: &Url,
  ) -> Result<SourceFile, ErrBox> {
    // Cache key = module URL with a `.js` extension appended.
    let cache_key = self
      .disk_cache
      .get_cache_filename_with_extension(&module_url, "js");
    let compiled_code = self.disk_cache.get(&cache_key)?;
    let compiled_code_filename = self.disk_cache.location.join(cache_key);
    debug!("compiled filename: {:?}", compiled_code_filename);

    // Hand-built SourceFile pointing at the cached JS emit; type info fields
    // are not applicable to an emit and are left unset.
    let compiled_module = SourceFile {
      url: module_url.clone(),
      filename: compiled_code_filename,
      media_type: msg::MediaType::JavaScript,
      source_code: compiled_code,
      types_url: None,
      types_header: None,
    };

    Ok(compiled_module)
  }
|
|
|
|
|
|
|
|
  /// Save compiled JS file for given TS module to on-disk cache.
  ///
  /// Along compiled file a special metadata file is saved as well containing
  /// hash that can be validated to avoid unnecessary recompilation.
  fn cache_compiled_file(
    &self,
    module_specifier: &ModuleSpecifier,
    contents: &str,
  ) -> std::io::Result<()> {
    let source_file = self
      .file_fetcher
      .fetch_cached_source_file(&module_specifier, Permissions::allow_all())
      .expect("Source file not found");

    // NOTE: JavaScript files are only cached to disk if `checkJs`
    // option in on
    if source_file.media_type == msg::MediaType::JavaScript && !self.compile_js
    {
      return Ok(());
    }

    // By default TSC output source map url that is relative; we need
    // to substitute it manually to correct file URL in DENO_DIR.
    let mut content_lines = contents
      .split('\n')
      .map(|s| s.to_string())
      .collect::<Vec<String>>();

    if !content_lines.is_empty() {
      let last_line = content_lines.pop().unwrap();
      if last_line.starts_with("//# sourceMappingURL=") {
        // Point the comment at the absolute file URL of the cached `.js.map`.
        let source_map_key = self.disk_cache.get_cache_filename_with_extension(
          module_specifier.as_url(),
          "js.map",
        );
        let source_map_path = self.disk_cache.location.join(source_map_key);
        let source_map_file_url = Url::from_file_path(source_map_path)
          .expect("Bad file URL for source map");
        let new_last_line =
          format!("//# sourceMappingURL={}", source_map_file_url.to_string());
        content_lines.push(new_last_line);
      } else {
        // No source map comment; restore the last line unchanged.
        content_lines.push(last_line);
      }
    }

    let contents = content_lines.join("\n");

    // Write the (possibly rewritten) JS emit under the `.js` cache key.
    let js_key = self
      .disk_cache
      .get_cache_filename_with_extension(module_specifier.as_url(), "js");
    self.disk_cache.set(&js_key, contents.as_bytes())?;
    self.mark_compiled(module_specifier.as_url());
    // Re-fetch the original source file to record its path in the metadata.
    let source_file = self
      .file_fetcher
      .fetch_cached_source_file(&module_specifier, Permissions::allow_all())
      .expect("Source file not found");

    // Version hash lets future runs skip recompilation of unchanged sources
    // (validated in `compile` via `get_metadata`).
    let version_hash = source_code_version_hash(
      &source_file.source_code,
      version::DENO,
      &self.config.hash,
    );

    let compiled_file_metadata = CompiledFileMetadata {
      source_path: source_file.filename,
      version_hash,
    };
    let meta_key = self
      .disk_cache
      .get_cache_filename_with_extension(module_specifier.as_url(), "meta");
    self.disk_cache.set(
      &meta_key,
      compiled_file_metadata.to_json_string()?.as_bytes(),
    )
  }
|
|
|
|
|
|
|
|
  /// Return associated source map file for given TS module.
  // TODO: ideally we shouldn't construct SourceFile by hand, but it should be delegated to
  // SourceFileFetcher
  pub fn get_source_map_file(
    &self,
    module_specifier: &ModuleSpecifier,
  ) -> Result<SourceFile, ErrBox> {
    // Cache key = module URL with a `.js.map` extension appended.
    let cache_key = self
      .disk_cache
      .get_cache_filename_with_extension(module_specifier.as_url(), "js.map");
    let source_code = self.disk_cache.get(&cache_key)?;
    let source_map_filename = self.disk_cache.location.join(cache_key);
    debug!("source map filename: {:?}", source_map_filename);

    // Hand-built SourceFile pointing at the cached `.js.map`.
    let source_map_file = SourceFile {
      url: module_specifier.as_url().to_owned(),
      filename: source_map_filename,
      media_type: msg::MediaType::JavaScript,
      source_code,
      types_url: None,
      types_header: None,
    };

    Ok(source_map_file)
  }
|
|
|
|
|
|
|
|
  /// Save source map file for given TS module to on-disk cache.
  fn cache_source_map(
    &self,
    module_specifier: &ModuleSpecifier,
    contents: &str,
  ) -> std::io::Result<()> {
    let source_file = self
      .file_fetcher
      .fetch_cached_source_file(&module_specifier, Permissions::allow_all())
      .expect("Source file not found");

    // NOTE: JavaScript files are only cached to disk if `checkJs`
    // option in on
    if source_file.media_type == msg::MediaType::JavaScript && !self.compile_js
    {
      return Ok(());
    }

    // File URL of the cached JS emit; recorded in the map's `file` field.
    let js_key = self
      .disk_cache
      .get_cache_filename_with_extension(module_specifier.as_url(), "js");
    let js_path = self.disk_cache.location.join(js_key);
    let js_file_url =
      Url::from_file_path(js_path).expect("Bad file URL for file");

    let source_map_key = self
      .disk_cache
      .get_cache_filename_with_extension(module_specifier.as_url(), "js.map");

    // Patch the map so `file` points at the cached JS and source 0 points
    // back at the original module URL, then serialize and store it.
    let mut sm = SourceMap::from_slice(contents.as_bytes())
      .expect("Invalid source map content");
    sm.set_file(Some(&js_file_url.to_string()));
    sm.set_source(0, &module_specifier.to_string());

    let mut output: Vec<u8> = vec![];
    sm.to_writer(&mut output)
      .expect("Failed to write source map");

    self.disk_cache.set(&source_map_key, &output)
  }
|
2019-04-05 00:04:06 -04:00
|
|
|
}
|
|
|
|
|
2019-07-17 18:15:30 -04:00
|
|
|
impl SourceMapGetter for TsCompiler {
|
|
|
|
fn get_source_map(&self, script_name: &str) -> Option<Vec<u8>> {
|
|
|
|
self
|
|
|
|
.try_to_resolve_and_get_source_map(script_name)
|
2019-11-07 14:21:45 -05:00
|
|
|
.map(|out| out.source_code)
|
2019-07-17 18:15:30 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
fn get_source_line(&self, script_name: &str, line: usize) -> Option<String> {
|
|
|
|
self
|
|
|
|
.try_resolve_and_get_source_file(script_name)
|
|
|
|
.and_then(|out| {
|
|
|
|
str::from_utf8(&out.source_code).ok().and_then(|v| {
|
2020-03-24 23:55:54 -04:00
|
|
|
// Do NOT use .lines(): it skips the terminating empty line.
|
|
|
|
// (due to internally using .split_terminator() instead of .split())
|
|
|
|
let lines: Vec<&str> = v.split('\n').collect();
|
2019-07-17 18:15:30 -04:00
|
|
|
assert!(lines.len() > line);
|
|
|
|
Some(lines[line].to_string())
|
|
|
|
})
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// `SourceMapGetter` related methods
|
|
|
|
impl TsCompiler {
|
2020-01-04 05:20:52 -05:00
|
|
|
fn try_to_resolve(&self, script_name: &str) -> Option<ModuleSpecifier> {
|
2019-07-17 18:15:30 -04:00
|
|
|
// if `script_name` can't be resolved to ModuleSpecifier it's probably internal
|
|
|
|
// script (like `gen/cli/bundle/compiler.js`) so we won't be
|
|
|
|
// able to get source for it anyway
|
|
|
|
ModuleSpecifier::resolve_url(script_name).ok()
|
|
|
|
}
|
|
|
|
|
|
|
|
fn try_resolve_and_get_source_file(
|
|
|
|
&self,
|
|
|
|
script_name: &str,
|
|
|
|
) -> Option<SourceFile> {
|
|
|
|
if let Some(module_specifier) = self.try_to_resolve(script_name) {
|
2020-04-15 23:14:28 -04:00
|
|
|
return self
|
2019-11-22 12:46:57 -05:00
|
|
|
.file_fetcher
|
2020-05-11 07:13:27 -04:00
|
|
|
.fetch_cached_source_file(&module_specifier, Permissions::allow_all());
|
2019-07-17 18:15:30 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
None
|
|
|
|
}
|
|
|
|
|
|
|
|
fn try_to_resolve_and_get_source_map(
|
|
|
|
&self,
|
|
|
|
script_name: &str,
|
|
|
|
) -> Option<SourceFile> {
|
|
|
|
if let Some(module_specifier) = self.try_to_resolve(script_name) {
|
|
|
|
return match self.get_source_map_file(&module_specifier) {
|
|
|
|
Ok(out) => Some(out),
|
|
|
|
Err(_) => None,
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
|
|
|
None
|
|
|
|
}
|
2019-01-09 12:59:46 -05:00
|
|
|
}
|
|
|
|
|
2020-05-18 06:59:29 -04:00
|
|
|
/// Sends a single request to a freshly-created compiler web worker and waits
/// for its response, driving the worker's event loop on the current thread.
///
/// Returns the worker's reply buffer, or the first error the worker reports.
async fn execute_in_same_thread(
  global_state: GlobalState,
  permissions: Permissions,
  req: Buf,
) -> Result<Buf, ErrBox> {
  let mut worker = create_compiler_worker(global_state.clone(), permissions);
  let handle = worker.thread_safe_handle();
  handle.post_message(req)?;

  let mut event_fut = handle.get_event().boxed_local();

  // The worker runs on this thread, so we must keep polling `worker` (its
  // event loop) concurrently with the pending event future — otherwise the
  // worker would never make progress and the event would never arrive.
  loop {
    let select_result = futures::future::select(event_fut, &mut worker).await;
    match select_result {
      // The worker produced an event: this is the compiler's response.
      Either::Left((event_result, _worker)) => {
        // Both layers being `None`/`Err` indicate the compiler worker died
        // without replying — that is a bug, hence `expect`.
        let event = event_result
          .expect("Compiler didn't respond")
          .expect("Empty message");

        let buf = match event {
          WorkerEvent::Message(buf) => Ok(buf),
          WorkerEvent::Error(error) => Err(error),
          WorkerEvent::TerminalError(error) => Err(error),
        }?;
        return Ok(buf);
      }
      // The worker's event loop ticked first: propagate any failure and
      // keep waiting for the (still-pending) event future.
      Either::Right((worker_result, event_fut_)) => {
        event_fut = event_fut_;
        worker_result?;
      }
    }
  }
}
|
|
|
|
|
2020-05-20 10:25:40 -04:00
|
|
|
pub async fn bundle(
|
|
|
|
global_state: &GlobalState,
|
|
|
|
compiler_config: CompilerConfig,
|
|
|
|
module_specifier: ModuleSpecifier,
|
|
|
|
maybe_import_map: Option<ImportMap>,
|
|
|
|
out_file: Option<PathBuf>,
|
|
|
|
unstable: bool,
|
|
|
|
) -> Result<(), ErrBox> {
|
|
|
|
debug!(
|
|
|
|
"Invoking the compiler to bundle. module_name: {}",
|
|
|
|
module_specifier.to_string()
|
|
|
|
);
|
|
|
|
eprintln!("Bundling {}", module_specifier.to_string());
|
|
|
|
|
|
|
|
let permissions = Permissions::allow_all();
|
|
|
|
let mut module_graph_loader = ModuleGraphLoader::new(
|
|
|
|
global_state.file_fetcher.clone(),
|
|
|
|
maybe_import_map,
|
|
|
|
permissions.clone(),
|
|
|
|
false,
|
|
|
|
true,
|
|
|
|
);
|
|
|
|
module_graph_loader.add_to_graph(&module_specifier).await?;
|
|
|
|
let module_graph = module_graph_loader.get_graph();
|
|
|
|
let module_graph_json =
|
|
|
|
serde_json::to_value(module_graph).expect("Failed to serialize data");
|
|
|
|
|
|
|
|
let root_names = vec![module_specifier.to_string()];
|
|
|
|
let bundle = true;
|
|
|
|
let target = "main";
|
|
|
|
let cwd = std::env::current_dir().unwrap();
|
|
|
|
|
|
|
|
// TODO(bartlomieju): this is non-sense; CompilerConfig's `path` and `content` should
|
|
|
|
// be optional
|
|
|
|
let j = match (compiler_config.path, compiler_config.content) {
|
|
|
|
(Some(config_path), Some(config_data)) => json!({
|
|
|
|
"type": msg::CompilerRequestType::Compile as i32,
|
|
|
|
"target": target,
|
|
|
|
"rootNames": root_names,
|
|
|
|
"bundle": bundle,
|
|
|
|
"unstable": unstable,
|
|
|
|
"configPath": config_path,
|
|
|
|
"config": str::from_utf8(&config_data).unwrap(),
|
|
|
|
"cwd": cwd,
|
|
|
|
"sourceFileMap": module_graph_json,
|
|
|
|
}),
|
|
|
|
_ => json!({
|
|
|
|
"type": msg::CompilerRequestType::Compile as i32,
|
|
|
|
"target": target,
|
|
|
|
"rootNames": root_names,
|
|
|
|
"bundle": bundle,
|
|
|
|
"unstable": unstable,
|
|
|
|
"cwd": cwd,
|
|
|
|
"sourceFileMap": module_graph_json,
|
|
|
|
}),
|
|
|
|
};
|
|
|
|
|
|
|
|
let req_msg = j.to_string().into_boxed_str().into_boxed_bytes();
|
|
|
|
|
|
|
|
let msg =
|
|
|
|
execute_in_same_thread(global_state.clone(), permissions, req_msg).await?;
|
|
|
|
let json_str = std::str::from_utf8(&msg).unwrap();
|
|
|
|
debug!("Message: {}", json_str);
|
|
|
|
|
|
|
|
let bundle_response: BundleResponse = serde_json::from_str(json_str)?;
|
|
|
|
|
|
|
|
if !bundle_response.diagnostics.items.is_empty() {
|
|
|
|
return Err(ErrBox::from(bundle_response.diagnostics));
|
|
|
|
}
|
|
|
|
|
|
|
|
assert!(bundle_response.bundle_output.is_some());
|
|
|
|
let output = bundle_response.bundle_output.unwrap();
|
|
|
|
|
|
|
|
// TODO(bartlomieju): the rest of this function should be handled
|
|
|
|
// in `main.rs` - it has nothing to do with TypeScript...
|
|
|
|
let output_string = fmt::format_text(&output)?;
|
|
|
|
|
|
|
|
if let Some(out_file_) = out_file.as_ref() {
|
|
|
|
eprintln!("Emitting bundle to {:?}", out_file_);
|
|
|
|
|
|
|
|
let output_bytes = output_string.as_bytes();
|
|
|
|
let output_len = output_bytes.len();
|
|
|
|
|
|
|
|
deno_fs::write_file(out_file_, output_bytes, 0o666)?;
|
|
|
|
// TODO(bartlomieju): do we really need to show this info? (it doesn't respect --quiet flag)
|
|
|
|
// TODO(bartlomieju): add "humanFileSize" method
|
|
|
|
eprintln!("{} bytes emitted.", output_len);
|
|
|
|
} else {
|
|
|
|
println!("{}", output_string);
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2020-05-05 12:23:15 -04:00
|
|
|
/// This function is used by `Deno.compile()` and `Deno.bundle()` APIs.
///
/// Compiles (or bundles, when `bundle` is true) `root_name`. When `sources`
/// is provided, the graph is built purely from that in-memory map and no
/// compiled output is cached to disk; otherwise modules are resolved and
/// fetched normally. Returns the compiler's raw JSON response so the caller
/// (the JS runtime) can inspect diagnostics itself.
pub async fn runtime_compile<S: BuildHasher>(
  global_state: GlobalState,
  permissions: Permissions,
  root_name: &str,
  sources: &Option<HashMap<String, String, S>>,
  bundle: bool,
  maybe_options: &Option<String>,
) -> Result<Value, OpError> {
  let mut root_names = vec![];
  let mut module_graph_loader = ModuleGraphLoader::new(
    global_state.file_fetcher.clone(),
    None,
    permissions.clone(),
    false,
    false,
  );

  if let Some(s_map) = sources {
    // Caller supplied all sources in-memory: build the graph locally
    // without touching the network or disk cache.
    root_names.push(root_name.to_string());
    module_graph_loader.build_local_graph(root_name, s_map)?;
  } else {
    // Resolve and fetch the root module and its dependencies.
    let module_specifier =
      ModuleSpecifier::resolve_import(root_name, "<unknown>")?;
    root_names.push(module_specifier.to_string());
    module_graph_loader.add_to_graph(&module_specifier).await?;
  }

  // download all additional files from TSconfig and add them to root_names
  if let Some(options) = maybe_options {
    let options_json: serde_json::Value = serde_json::from_str(options)?;
    if let Some(types_option) = options_json.get("types") {
      // NOTE(review): malformed `types` (non-array / non-string entries)
      // currently panics via `expect` rather than returning an OpError.
      let types_arr = types_option.as_array().expect("types is not an array");

      for type_value in types_arr {
        let type_str = type_value
          .as_str()
          .expect("type is not a string")
          .to_string();
        let type_specifier = ModuleSpecifier::resolve_url_or_path(&type_str)?;
        module_graph_loader.add_to_graph(&type_specifier).await?;
        root_names.push(type_specifier.to_string())
      }
    }
  }

  let module_graph = module_graph_loader.get_graph();
  let module_graph_json =
    serde_json::to_value(module_graph).expect("Failed to serialize data");

  let req_msg = json!({
    "type": msg::CompilerRequestType::RuntimeCompile as i32,
    "target": "runtime",
    "rootNames": root_names,
    "sourceFileMap": module_graph_json,
    "options": maybe_options,
    "bundle": bundle,
    "unstable": global_state.flags.unstable,
  })
  .to_string()
  .into_boxed_str()
  .into_boxed_bytes();

  // Clone the compiler handle before `global_state` is moved below.
  let compiler = global_state.ts_compiler.clone();

  let msg = execute_in_same_thread(global_state, permissions, req_msg).await?;
  let json_str = std::str::from_utf8(&msg).unwrap();

  // TODO(bartlomieju): factor `bundle` path into separate function `runtime_bundle`
  if bundle {
    // Deserialize into the typed response first purely to validate the
    // shape, then hand back the raw JSON value.
    let _response: RuntimeBundleResponse = serde_json::from_str(json_str)?;
    return Ok(serde_json::from_str::<Value>(json_str).unwrap());
  }

  let response: RuntimeCompileResponse = serde_json::from_str(json_str)?;

  // Only persist emitted files for clean compiles of real (non in-memory)
  // modules.
  if response.diagnostics.is_empty() && sources.is_none() {
    compiler.cache_emitted_files(response.emit_map)?;
  }

  // We're returning `Ok()` instead of `Err()` because it's not runtime
  // error if there were diagnostics produces; we want to let user handle
  // diagnostics in the runtime.
  Ok(serde_json::from_str::<Value>(json_str).unwrap())
}
|
|
|
|
|
2020-05-05 12:23:15 -04:00
|
|
|
/// This function is used by `Deno.transpileOnly()` API.
|
|
|
|
pub async fn runtime_transpile<S: BuildHasher>(
|
2020-02-06 23:05:02 -05:00
|
|
|
global_state: GlobalState,
|
2020-05-11 07:13:27 -04:00
|
|
|
permissions: Permissions,
|
2020-01-08 09:17:44 -05:00
|
|
|
sources: &HashMap<String, String, S>,
|
|
|
|
options: &Option<String>,
|
2020-05-05 12:23:15 -04:00
|
|
|
) -> Result<Value, OpError> {
|
2020-01-08 09:17:44 -05:00
|
|
|
let req_msg = json!({
|
|
|
|
"type": msg::CompilerRequestType::RuntimeTranspile as i32,
|
|
|
|
"sources": sources,
|
|
|
|
"options": options,
|
|
|
|
})
|
|
|
|
.to_string()
|
|
|
|
.into_boxed_str()
|
|
|
|
.into_boxed_bytes();
|
|
|
|
|
2020-05-18 06:59:29 -04:00
|
|
|
let msg = execute_in_same_thread(global_state, permissions, req_msg).await?;
|
2020-05-05 12:23:15 -04:00
|
|
|
let json_str = std::str::from_utf8(&msg).unwrap();
|
|
|
|
let v = serde_json::from_str::<serde_json::Value>(json_str)
|
|
|
|
.expect("Error decoding JSON string.");
|
|
|
|
Ok(v)
|
2020-01-08 09:17:44 -05:00
|
|
|
}
|
|
|
|
|
2019-01-09 12:59:46 -05:00
|
|
|
#[cfg(test)]
mod tests {
  use super::*;
  use crate::fs as deno_fs;
  use deno_core::ModuleSpecifier;
  use std::path::PathBuf;
  use tempfile::TempDir;

  // Compiles a real fixture (`cli/tests/002_hello.ts`) end-to-end and
  // verifies both the emitted JS and the source map's `file`/`sources` URLs.
  #[tokio::test]
  async fn test_compile() {
    let p = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
      .parent()
      .unwrap()
      .join("cli/tests/002_hello.ts");
    let specifier =
      ModuleSpecifier::resolve_url_or_path(p.to_str().unwrap()).unwrap();
    let out = SourceFile {
      url: specifier.as_url().clone(),
      filename: PathBuf::from(p.to_str().unwrap().to_string()),
      media_type: msg::MediaType::TypeScript,
      source_code: include_bytes!("./tests/002_hello.ts").to_vec(),
      types_url: None,
      types_header: None,
    };
    let mock_state =
      GlobalState::mock(vec![String::from("deno"), String::from("hello.ts")]);
    let result = mock_state
      .ts_compiler
      .compile(
        mock_state.clone(),
        &out,
        TargetLib::Main,
        Permissions::allow_all(),
        false,
      )
      .await;
    assert!(result.is_ok());
    // Emitted JS should start with the expected transpiled prefix and end
    // with a sourceMappingURL comment pointing at a file:// URL.
    let source_code = result.unwrap().code;
    assert!(source_code
      .as_bytes()
      .starts_with(b"\"use strict\";\nconsole.log(\"Hello World\");"));
    let mut lines: Vec<String> =
      source_code.split('\n').map(|s| s.to_string()).collect();
    let last_line = lines.pop().unwrap();
    assert!(last_line.starts_with("//# sourceMappingURL=file://"));

    // Get source map file and assert it has proper URLs
    let source_map = mock_state
      .ts_compiler
      .get_source_map_file(&specifier)
      .expect("Source map not found");
    let source_str = String::from_utf8(source_map.source_code).unwrap();
    let source_json: Value = serde_json::from_str(&source_str).unwrap();

    // The map's "file" field must be the disk-cache URL of the emitted JS.
    let js_key = mock_state
      .ts_compiler
      .disk_cache
      .get_cache_filename_with_extension(specifier.as_url(), "js");
    let js_path = mock_state.ts_compiler.disk_cache.location.join(js_key);
    let js_file_url = Url::from_file_path(js_path).unwrap();

    let file_str = source_json.get("file").unwrap().as_str().unwrap();
    assert_eq!(file_str, js_file_url.to_string());

    // The map's single "sources" entry must be the original module URL.
    let sources = source_json.get("sources").unwrap().as_array().unwrap();
    assert_eq!(sources.len(), 1);
    let source = sources.get(0).unwrap().as_str().unwrap();
    assert_eq!(source, specifier.to_string());
  }

  // Smoke test: bundling the same fixture (to stdout, no out_file)
  // succeeds with a default compiler config.
  #[tokio::test]
  async fn test_bundle() {
    let p = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
      .parent()
      .unwrap()
      .join("cli/tests/002_hello.ts");
    use deno_core::ModuleSpecifier;
    let module_name =
      ModuleSpecifier::resolve_url_or_path(p.to_str().unwrap()).unwrap();

    let state = GlobalState::mock(vec![
      String::from("deno"),
      p.to_string_lossy().into(),
      String::from("$deno$/bundle.js"),
    ]);

    let result = bundle(
      &state,
      CompilerConfig::load(None).unwrap(),
      module_name,
      None,
      None,
      false,
    )
    .await;
    assert!(result.is_ok());
  }

  // The cache-key hash must change whenever any of its inputs (source,
  // compiler version, config) changes. Expected values are pinned.
  #[test]
  fn test_source_code_version_hash() {
    assert_eq!(
      "0185b42de0686b4c93c314daaa8dee159f768a9e9a336c2a5e3d5b8ca6c4208c",
      source_code_version_hash(b"1+2", "0.4.0", b"{}")
    );
    // Different source_code should result in different hash.
    assert_eq!(
      "e58631f1b6b6ce2b300b133ec2ad16a8a5ba6b7ecf812a8c06e59056638571ac",
      source_code_version_hash(b"1", "0.4.0", b"{}")
    );
    // Different version should result in different hash.
    assert_eq!(
      "307e6200347a88dbbada453102deb91c12939c65494e987d2d8978f6609b5633",
      source_code_version_hash(b"1", "0.1.0", b"{}")
    );
    // Different config should result in different hash.
    assert_eq!(
      "195eaf104a591d1d7f69fc169c60a41959c2b7a21373cd23a8f675f877ec385f",
      source_code_version_hash(b"1", "0.4.0", b"{\"compilerOptions\": {}}")
    );
  }

  // `CompilerConfig::load` should derive `compile_js` from `checkJs` in a
  // tsconfig; note that even syntactically-broken JSON with `checkJs`
  // present yields `true` (the check is text-based, not a JSON parse —
  // TODO confirm against CompilerConfig::load).
  #[test]
  fn test_compile_js() {
    let temp_dir = TempDir::new().expect("tempdir fail");
    let temp_dir_path = temp_dir.path();

    let test_cases = vec![
      // valid JSON
      (r#"{ "compilerOptions": { "checkJs": true } } "#, true),
      // JSON with comment
      (
        r#"{ "compilerOptions": { // force .js file compilation by Deno "checkJs": true } } "#,
        true,
      ),
      // invalid JSON
      (r#"{ "compilerOptions": { "checkJs": true },{ } "#, true),
      // without content
      ("", false),
    ];

    let path = temp_dir_path.join("tsconfig.json");
    let path_str = path.to_str().unwrap().to_string();

    for (json_str, expected) in test_cases {
      deno_fs::write_file(&path, json_str.as_bytes(), 0o666).unwrap();
      let config = CompilerConfig::load(Some(path_str.clone())).unwrap();
      assert_eq!(config.compile_js, expected);
    }
  }

  // Loading a config path that doesn't exist must be an error, not a
  // silent default.
  #[test]
  fn test_compiler_config_load() {
    let temp_dir = TempDir::new().expect("tempdir fail");
    let temp_dir_path = temp_dir.path();
    let path = temp_dir_path.join("doesnotexist.json");
    let path_str = path.to_str().unwrap().to_string();
    let res = CompilerConfig::load(Some(path_str));
    assert!(res.is_err());
  }
}
|