Mirror of https://github.com/denoland/deno.git (synced 2024-12-22 15:24:46 -05:00)
refactor(compiler): split code paths for compile and bundle (#6304)
* refactor "compile" and "runtimeCompile" in "compiler.ts" and factor out separate methods for "compile" and "bundle" operations
* remove noisy debug output from "compiler.ts"
* provide "Serialize" implementations for enums in "msg.rs"
* rename "analyze_dependencies_and_references" to "pre_process_file" and move it to "tsc.rs"
* refactor ModuleGraph to use more concrete types and properly annotate locations where errors occur
* remove dead code from "file_fetcher.rs" - "SourceFile.types_url" is no longer needed, as type reference parsing is done in "ModuleGraph"
* remove unneeded field "source_path" from ".meta" files stored for compiled source file (towards #6080)
Parent: 345a5b3dff
Commit: 826a3135b4
13 changed files with 1063 additions and 1003 deletions
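For orientation before the diff: the single compiler request path in compiler.ts is split into one request type per operation, each dispatched to its own handler. A condensed TypeScript sketch follows; the names and comments are taken from the diff below, with fields and bodies omitted, so it is a reading aid rather than the full code.

// Condensed from the compiler.ts changes in this commit (not the complete file).
enum CompilerRequestType {
  Compile = 0,          // used when "deno run" is invoked
  Bundle = 1,           // used when "deno bundle" is invoked
  RuntimeCompile = 2,   // used when Deno.compile() is called
  RuntimeBundle = 3,    // used when Deno.bundle() is called
  RuntimeTranspile = 4, // used when Deno.transpileOnly() is called
}
// Each request type now has a dedicated function instead of flags on a shared path:
// compile(CompileRequest), bundle(BundleRequest), runtimeCompile(RuntimeCompileRequest),
// runtimeBundle(RuntimeBundleRequest), runtimeTranspile(RuntimeTranspileRequest).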
@@ -11,7 +11,6 @@ use deno_core::ErrBox;
 use deno_core::ModuleSpecifier;
 use futures::future::FutureExt;
 use log::info;
-use regex::Regex;
 use std::collections::HashMap;
 use std::fs;
 use std::future::Future;
@@ -33,7 +32,6 @@ use url::Url;
 pub struct SourceFile {
   pub url: Url,
   pub filename: PathBuf,
-  pub types_url: Option<Url>,
   pub types_header: Option<String>,
   pub media_type: msg::MediaType,
   pub source_code: Vec<u8>,
@@ -316,18 +314,11 @@ impl SourceFileFetcher {
     };

     let media_type = map_content_type(&filepath, None);
-    let types_url = match media_type {
-      msg::MediaType::JavaScript | msg::MediaType::JSX => {
-        get_types_url(&module_url, &source_code, None)
-      }
-      _ => None,
-    };
     Ok(SourceFile {
       url: module_url.clone(),
       filename: filepath,
       media_type,
       source_code,
-      types_url,
       types_header: None,
     })
   }
@@ -394,20 +385,11 @@ impl SourceFileFetcher {
       headers.get("content-type").map(|e| e.as_str()),
     );
     let types_header = headers.get("x-typescript-types").map(|e| e.to_string());
-    let types_url = match media_type {
-      msg::MediaType::JavaScript | msg::MediaType::JSX => get_types_url(
-        &module_url,
-        &source_code,
-        headers.get("x-typescript-types").map(|e| e.as_str()),
-      ),
-      _ => None,
-    };
     Ok(Some(SourceFile {
       url: module_url.clone(),
       filename: cache_filename,
       media_type,
       source_code,
-      types_url,
       types_header,
     }))
   }
@@ -519,21 +501,12 @@ impl SourceFileFetcher {

     let types_header =
       headers.get("x-typescript-types").map(String::to_string);
-    let types_url = match media_type {
-      msg::MediaType::JavaScript | msg::MediaType::JSX => get_types_url(
-        &module_url,
-        &source,
-        headers.get("x-typescript-types").map(String::as_str),
-      ),
-      _ => None,
-    };

     let source_file = SourceFile {
       url: module_url.clone(),
       filename: cache_filepath,
       media_type,
       source_code: source,
-      types_url,
       types_header,
     };

@@ -617,41 +590,6 @@ fn map_js_like_extension(
   }
 }

-/// Take a module URL and source code and determines if the source code contains
-/// a type directive, and if so, returns the parsed URL for that type directive.
-fn get_types_url(
-  module_url: &Url,
-  source_code: &[u8],
-  maybe_types_header: Option<&str>,
-) -> Option<Url> {
-  lazy_static! {
-    /// Matches reference type directives in strings, which provide
-    /// type files that should be used by the compiler instead of the
-    /// JavaScript file.
-    static ref DIRECTIVE_TYPES: Regex = Regex::new(
-      r#"(?m)^/{3}\s*<reference\s+types\s*=\s*["']([^"']+)["']\s*/>"#
-    )
-    .unwrap();
-  }
-
-  match maybe_types_header {
-    Some(types_header) => match Url::parse(&types_header) {
-      Ok(url) => Some(url),
-      _ => Some(module_url.join(&types_header).unwrap()),
-    },
-    _ => match DIRECTIVE_TYPES.captures(str::from_utf8(source_code).unwrap()) {
-      Some(cap) => {
-        let val = cap.get(1).unwrap().as_str();
-        match Url::parse(&val) {
-          Ok(url) => Some(url),
-          _ => Some(module_url.join(&val).unwrap()),
-        }
-      }
-      _ => None,
-    },
-  }
-}
-
 fn filter_shebang(bytes: Vec<u8>) -> Vec<u8> {
   let string = str::from_utf8(&bytes).unwrap();
   if let Some(i) = string.find('\n') {
@@ -1868,85 +1806,6 @@ mod tests {
     drop(http_server_guard);
   }

-  #[test]
-  fn test_get_types_url_1() {
-    let module_url = Url::parse("https://example.com/mod.js").unwrap();
-    let source_code = b"console.log(\"foo\");".to_owned();
-    let result = get_types_url(&module_url, &source_code, None);
-    assert_eq!(result, None);
-  }
-
-  #[test]
-  fn test_get_types_url_2() {
-    let module_url = Url::parse("https://example.com/mod.js").unwrap();
-    let source_code = r#"/// <reference types="./mod.d.ts" />
-    console.log("foo");"#
-      .as_bytes()
-      .to_owned();
-    let result = get_types_url(&module_url, &source_code, None);
-    assert_eq!(
-      result,
-      Some(Url::parse("https://example.com/mod.d.ts").unwrap())
-    );
-  }
-
-  #[test]
-  fn test_get_types_url_3() {
-    let module_url = Url::parse("https://example.com/mod.js").unwrap();
-    let source_code = r#"/// <reference types="https://deno.land/mod.d.ts" />
-    console.log("foo");"#
-      .as_bytes()
-      .to_owned();
-    let result = get_types_url(&module_url, &source_code, None);
-    assert_eq!(
-      result,
-      Some(Url::parse("https://deno.land/mod.d.ts").unwrap())
-    );
-  }
-
-  #[test]
-  fn test_get_types_url_4() {
-    let module_url = Url::parse("file:///foo/bar/baz.js").unwrap();
-    let source_code = r#"/// <reference types="../qat/baz.d.ts" />
-    console.log("foo");"#
-      .as_bytes()
-      .to_owned();
-    let result = get_types_url(&module_url, &source_code, None);
-    assert_eq!(
-      result,
-      Some(Url::parse("file:///foo/qat/baz.d.ts").unwrap())
-    );
-  }
-
-  #[test]
-  fn test_get_types_url_5() {
-    let module_url = Url::parse("https://example.com/mod.js").unwrap();
-    let source_code = b"console.log(\"foo\");".to_owned();
-    let result = get_types_url(&module_url, &source_code, Some("./mod.d.ts"));
-    assert_eq!(
-      result,
-      Some(Url::parse("https://example.com/mod.d.ts").unwrap())
-    );
-  }
-
-  #[test]
-  fn test_get_types_url_6() {
-    let module_url = Url::parse("https://example.com/mod.js").unwrap();
-    let source_code = r#"/// <reference types="./mod.d.ts" />
-    console.log("foo");"#
-      .as_bytes()
-      .to_owned();
-    let result = get_types_url(
-      &module_url,
-      &source_code,
-      Some("https://deno.land/mod.d.ts"),
-    );
-    assert_eq!(
-      result,
-      Some(Url::parse("https://deno.land/mod.d.ts").unwrap())
-    );
-  }
-
   #[tokio::test]
   async fn test_fetch_with_types_header() {
     let http_server_guard = test_util::http_server();
@@ -1967,33 +1826,8 @@ mod tests {
     assert_eq!(source.source_code, b"export const foo = 'foo';");
     assert_eq!(&(source.media_type), &msg::MediaType::JavaScript);
     assert_eq!(
-      source.types_url,
-      Some(Url::parse("http://127.0.0.1:4545/xTypeScriptTypes.d.ts").unwrap())
-    );
-    drop(http_server_guard);
-  }
-
-  #[tokio::test]
-  async fn test_fetch_with_types_reference() {
-    let http_server_guard = test_util::http_server();
-    let (_temp_dir, fetcher) = test_setup();
-    let module_url =
-      Url::parse("http://127.0.0.1:4545/referenceTypes.js").unwrap();
-    let source = fetcher
-      .fetch_remote_source(
-        &module_url,
-        false,
-        false,
-        1,
-        &Permissions::allow_all(),
-      )
-      .await;
-    assert!(source.is_ok());
-    let source = source.unwrap();
-    assert_eq!(&(source.media_type), &msg::MediaType::JavaScript);
-    assert_eq!(
-      source.types_url,
-      Some(Url::parse("http://127.0.0.1:4545/xTypeScriptTypes.d.ts").unwrap())
+      source.types_header,
+      Some("./xTypeScriptTypes.d.ts".to_string())
     );
     drop(http_server_guard);
   }

@@ -260,9 +260,9 @@ impl GlobalState {
 /// - JSX import
 fn should_allow_js(module_graph_files: &[&ModuleGraphFile]) -> bool {
   module_graph_files.iter().any(|module_file| {
-    if module_file.media_type == (MediaType::JSX as i32) {
+    if module_file.media_type == MediaType::JSX {
       true
-    } else if module_file.media_type == (MediaType::JavaScript as i32) {
+    } else if module_file.media_type == MediaType::JavaScript {
       module_file.imports.iter().any(|import_desc| {
         let import_file = module_graph_files
           .iter()
@@ -271,9 +271,9 @@ fn should_allow_js(module_graph_files: &[&ModuleGraphFile]) -> bool {
           })
           .expect("Failed to find imported file");
         let media_type = import_file.media_type;
-        media_type == (MediaType::TypeScript as i32)
-          || media_type == (MediaType::TSX as i32)
-          || media_type == (MediaType::JSX as i32)
+        media_type == MediaType::TypeScript
+          || media_type == MediaType::TSX
+          || media_type == MediaType::JSX
       })
     } else {
       false
@@ -301,9 +301,9 @@ fn needs_compilation(
   needs_compilation |= module_graph_files.iter().any(|module_file| {
     let media_type = module_file.media_type;

-    media_type == (MediaType::TypeScript as i32)
-      || media_type == (MediaType::TSX as i32)
-      || media_type == (MediaType::JSX as i32)
+    media_type == (MediaType::TypeScript)
+      || media_type == (MediaType::TSX)
+      || media_type == (MediaType::JSX)
   });

   needs_compilation
@@ -317,6 +317,7 @@ fn thread_safe() {

 #[test]
 fn test_should_allow_js() {
+  use crate::doc::Location;
   use crate::module_graph::ImportDescriptor;

   assert!(should_allow_js(&[
@@ -330,7 +331,7 @@ fn test_should_allow_js() {
       lib_directives: vec![],
       types_directives: vec![],
       type_headers: vec![],
-      media_type: MediaType::TypeScript as i32,
+      media_type: MediaType::TypeScript,
       source_code: "function foo() {}".to_string(),
     },
     &ModuleGraphFile {
@@ -346,12 +347,17 @@ fn test_should_allow_js() {
         .unwrap(),
         type_directive: None,
         resolved_type_directive: None,
+        location: Location {
+          filename: "file:///some/file1.js".to_string(),
+          line: 0,
+          col: 0,
+        },
       }],
       referenced_files: vec![],
       lib_directives: vec![],
       types_directives: vec![],
       type_headers: vec![],
-      media_type: MediaType::JavaScript as i32,
+      media_type: MediaType::JavaScript,
       source_code: "function foo() {}".to_string(),
     },
   ],));
@@ -367,7 +373,7 @@ fn test_should_allow_js() {
       lib_directives: vec![],
       types_directives: vec![],
       type_headers: vec![],
-      media_type: MediaType::JSX as i32,
+      media_type: MediaType::JSX,
       source_code: "function foo() {}".to_string(),
     },
     &ModuleGraphFile {
@@ -383,12 +389,17 @@ fn test_should_allow_js() {
         .unwrap(),
         type_directive: None,
         resolved_type_directive: None,
+        location: Location {
+          filename: "file:///some/file1.ts".to_string(),
+          line: 0,
+          col: 0,
+        },
       }],
       referenced_files: vec![],
       lib_directives: vec![],
       types_directives: vec![],
       type_headers: vec![],
-      media_type: MediaType::TypeScript as i32,
+      media_type: MediaType::TypeScript,
       source_code: "function foo() {}".to_string(),
     },
   ]));
@@ -404,7 +415,7 @@ fn test_should_allow_js() {
       lib_directives: vec![],
       types_directives: vec![],
       type_headers: vec![],
-      media_type: MediaType::JavaScript as i32,
+      media_type: MediaType::JavaScript,
       source_code: "function foo() {}".to_string(),
     },
     &ModuleGraphFile {
@@ -420,12 +431,17 @@ fn test_should_allow_js() {
         .unwrap(),
         type_directive: None,
         resolved_type_directive: None,
+        location: Location {
+          filename: "file:///some/file.js".to_string(),
+          line: 0,
+          col: 0,
+        },
       }],
       referenced_files: vec![],
       lib_directives: vec![],
       types_directives: vec![],
       type_headers: vec![],
-      media_type: MediaType::JavaScript as i32,
+      media_type: MediaType::JavaScript,
       source_code: "function foo() {}".to_string(),
     },
   ],));
@@ -446,7 +462,7 @@ fn test_needs_compilation() {
      lib_directives: vec![],
       types_directives: vec![],
       type_headers: vec![],
-      media_type: MediaType::JavaScript as i32,
+      media_type: MediaType::JavaScript,
       source_code: "function foo() {}".to_string(),
     }],
   ));
@@ -470,7 +486,7 @@ fn test_needs_compilation() {
       lib_directives: vec![],
       types_directives: vec![],
       type_headers: vec![],
-      media_type: MediaType::TypeScript as i32,
+      media_type: MediaType::TypeScript,
       source_code: "function foo() {}".to_string(),
     },
     &ModuleGraphFile {
@@ -483,7 +499,7 @@ fn test_needs_compilation() {
       lib_directives: vec![],
       types_directives: vec![],
       type_headers: vec![],
-      media_type: MediaType::JavaScript as i32,
+      media_type: MediaType::JavaScript,
       source_code: "function foo() {}".to_string(),
     },
   ],

@@ -441,7 +441,6 @@ class Host implements ts.CompilerHost {
       specifier,
       containingFile,
       maybeUrl,
-      sf: SourceFile.getCached(maybeUrl!),
     });

     let sourceFile: SourceFile | undefined = undefined;
@@ -657,26 +656,28 @@ type WriteFileCallback = (
   sourceFiles?: readonly ts.SourceFile[]
 ) => void;

-interface WriteFileState {
-  type: CompilerRequestType;
-  bundle?: boolean;
-  bundleOutput?: string;
-  host?: Host;
+interface CompileWriteFileState {
+  rootNames: string[];
+  emitMap: Record<string, EmittedSource>;
+}
+
+interface BundleWriteFileState {
+  host?: Host;
+  bundleOutput: undefined | string;
   rootNames: string[];
-  emitMap?: Record<string, EmittedSource>;
-  sources?: Record<string, string>;
 }

 // Warning! The values in this enum are duplicated in `cli/msg.rs`
 // Update carefully!
 enum CompilerRequestType {
   Compile = 0,
-  RuntimeCompile = 1,
-  RuntimeTranspile = 2,
+  Bundle = 1,
+  RuntimeCompile = 2,
+  RuntimeBundle = 3,
+  RuntimeTranspile = 4,
 }

-// TODO(bartlomieju): probably could be defined inline?
-function createBundleWriteFile(state: WriteFileState): WriteFileCallback {
+function createBundleWriteFile(state: BundleWriteFileState): WriteFileCallback {
   return function writeFile(
     _fileName: string,
     data: string,
@@ -684,8 +685,6 @@ function createBundleWriteFile(state: WriteFileState): WriteFileCallback {
   ): void {
     assert(sourceFiles != null);
     assert(state.host);
-    assert(state.emitMap);
-    assert(state.bundle);
     // we only support single root names for bundles
     assert(state.rootNames.length === 1);
     state.bundleOutput = buildBundle(
@@ -697,17 +696,15 @@ function createBundleWriteFile(state: WriteFileState): WriteFileCallback {
   };
 }

-// TODO(bartlomieju): probably could be defined inline?
-function createCompileWriteFile(state: WriteFileState): WriteFileCallback {
+function createCompileWriteFile(
+  state: CompileWriteFileState
+): WriteFileCallback {
   return function writeFile(
     fileName: string,
     data: string,
     sourceFiles?: readonly ts.SourceFile[]
   ): void {
     assert(sourceFiles != null);
-    assert(state.host);
-    assert(state.emitMap);
-    assert(!state.bundle);
     assert(sourceFiles.length === 1);
     state.emitMap[fileName] = {
       filename: sourceFiles[0].fileName,
@@ -1067,7 +1064,8 @@ interface SourceFileMapEntry {
   typeHeaders: ReferenceDescriptor[];
 }

-interface CompilerRequestCompile {
+/** Used when "deno run" is invoked */
+interface CompileRequest {
   type: CompilerRequestType.Compile;
   allowJs: boolean;
   target: CompilerHostTarget;
@@ -1075,53 +1073,81 @@ interface CompilerRequestCompile {
   configPath?: string;
   config?: string;
   unstable: boolean;
-  bundle: boolean;
   cwd: string;
   // key value is fully resolved URL
   sourceFileMap: Record<string, SourceFileMapEntry>;
 }

-interface CompilerRequestRuntimeCompile {
+/** Used when "deno bundle" is invoked */
+interface BundleRequest {
+  type: CompilerRequestType.Bundle;
+  target: CompilerHostTarget;
+  rootNames: string[];
+  configPath?: string;
+  config?: string;
+  unstable: boolean;
+  cwd: string;
+  // key value is fully resolved URL
+  sourceFileMap: Record<string, SourceFileMapEntry>;
+}
+
+/** Used when "Deno.compile()" API is called */
+interface RuntimeCompileRequest {
   type: CompilerRequestType.RuntimeCompile;
   target: CompilerHostTarget;
   rootNames: string[];
   sourceFileMap: Record<string, SourceFileMapEntry>;
   unstable?: boolean;
-  bundle?: boolean;
   options?: string;
 }

-interface CompilerRequestRuntimeTranspile {
+/** Used when "Deno.bundle()" API is called */
+interface RuntimeBundleRequest {
+  type: CompilerRequestType.RuntimeBundle;
+  target: CompilerHostTarget;
+  rootNames: string[];
+  sourceFileMap: Record<string, SourceFileMapEntry>;
+  unstable?: boolean;
+  options?: string;
+}
+
+/** Used when "Deno.transpileOnly()" API is called */
+interface RuntimeTranspileRequest {
   type: CompilerRequestType.RuntimeTranspile;
   sources: Record<string, string>;
   options?: string;
 }

 type CompilerRequest =
-  | CompilerRequestCompile
-  | CompilerRequestRuntimeCompile
-  | CompilerRequestRuntimeTranspile;
+  | CompileRequest
+  | BundleRequest
+  | RuntimeCompileRequest
+  | RuntimeBundleRequest
+  | RuntimeTranspileRequest;

-interface CompileResult {
-  emitMap?: Record<string, EmittedSource>;
+interface CompileResponse {
+  emitMap: Record<string, EmittedSource>;
+  diagnostics: Diagnostic;
+}
+
+interface BundleResponse {
   bundleOutput?: string;
   diagnostics: Diagnostic;
 }

-interface RuntimeCompileResult {
+interface RuntimeCompileResponse {
   emitMap: Record<string, EmittedSource>;
   diagnostics: DiagnosticItem[];
 }

-interface RuntimeBundleResult {
-  output: string;
+interface RuntimeBundleResponse {
+  output?: string;
   diagnostics: DiagnosticItem[];
 }

-function compile(request: CompilerRequestCompile): CompileResult {
+function compile(request: CompileRequest): CompileResponse {
   const {
     allowJs,
-    bundle,
     config,
     configPath,
     rootNames,
@@ -1139,30 +1165,19 @@ function compile(request: CompilerRequestCompile): CompileResult {
   // each file that needs to be emitted. The Deno compiler host delegates
   // this, to make it easier to perform the right actions, which vary
   // based a lot on the request.
-  const state: WriteFileState = {
-    type: request.type,
-    emitMap: {},
-    bundle,
-    host: undefined,
+  const state: CompileWriteFileState = {
     rootNames,
+    emitMap: {},
   };
-  let writeFile: WriteFileCallback;
-  if (bundle) {
-    writeFile = createBundleWriteFile(state);
-  } else {
-    writeFile = createCompileWriteFile(state);
-  }
-  const host = (state.host = new Host({
-    bundle,
+  const host = new Host({
+    bundle: false,
     target,
-    writeFile,
     unstable,
-  }));
+    writeFile: createCompileWriteFile(state),
+  });
   let diagnostics: readonly ts.Diagnostic[] = [];

-  if (!bundle) {
-    host.mergeOptions({ allowJs });
-  }
+  host.mergeOptions({ allowJs });

   // if there is a configuration supplied, we need to parse that
   if (config && config.length && configPath) {
@@ -1186,24 +1201,12 @@ function compile(request: CompilerRequestCompile): CompileResult {
     .filter(({ code }) => !ignoredDiagnostics.includes(code));

   // We will only proceed with the emit if there are no diagnostics.
-  if (diagnostics && diagnostics.length === 0) {
-    if (bundle) {
-      // we only support a single root module when bundling
-      assert(rootNames.length === 1);
-      setRootExports(program, rootNames[0]);
-    }
+  if (diagnostics.length === 0) {
     const emitResult = program.emit();
     // If `checkJs` is off we still might be compiling entry point JavaScript file
     // (if it has `.ts` imports), but it won't be emitted. In that case we skip
     // assertion.
-    if (!bundle) {
-      if (options.checkJs) {
-        assert(
-          emitResult.emitSkipped === false,
-          "Unexpected skip of the emit."
-        );
-      }
-    } else {
+    if (options.checkJs) {
       assert(
         emitResult.emitSkipped === false,
         "Unexpected skip of the emit."
@@ -1215,21 +1218,96 @@ function compile(request: CompilerRequestCompile): CompileResult {
     }
   }

-  let bundleOutput = undefined;
-  if (diagnostics && diagnostics.length === 0 && bundle) {
+  log("<<< compile end", {
+    rootNames,
+    type: CompilerRequestType[request.type],
+  });
+
+  return {
+    emitMap: state.emitMap,
+    diagnostics: fromTypeScriptDiagnostic(diagnostics),
+  };
+}
+
+function bundle(request: BundleRequest): BundleResponse {
+  const {
+    config,
+    configPath,
+    rootNames,
+    target,
+    unstable,
+    cwd,
+    sourceFileMap,
+  } = request;
+  log(">>> start start", {
+    rootNames,
+    type: CompilerRequestType[request.type],
+  });
+
+  // When a programme is emitted, TypeScript will call `writeFile` with
+  // each file that needs to be emitted. The Deno compiler host delegates
+  // this, to make it easier to perform the right actions, which vary
+  // based a lot on the request.
+  const state: BundleWriteFileState = {
+    rootNames,
+    bundleOutput: undefined,
+  };
+  const host = new Host({
+    bundle: true,
+    target,
+    unstable,
+    writeFile: createBundleWriteFile(state),
+  });
+  state.host = host;
+  let diagnostics: readonly ts.Diagnostic[] = [];
+
+  // if there is a configuration supplied, we need to parse that
+  if (config && config.length && configPath) {
+    const configResult = host.configure(cwd, configPath, config);
+    diagnostics = processConfigureResponse(configResult, configPath) || [];
+  }
+
+  buildSourceFileCache(sourceFileMap);
+  // if there was a configuration and no diagnostics with it, we will continue
+  // to generate the program and possibly emit it.
+  if (diagnostics.length === 0) {
+    const options = host.getCompilationSettings();
+    const program = ts.createProgram({
+      rootNames,
+      options,
+      host,
+    });
+
+    diagnostics = ts
+      .getPreEmitDiagnostics(program)
+      .filter(({ code }) => !ignoredDiagnostics.includes(code));
+
+    // We will only proceed with the emit if there are no diagnostics.
+    if (diagnostics.length === 0) {
+      // we only support a single root module when bundling
+      assert(rootNames.length === 1);
+      setRootExports(program, rootNames[0]);
+      const emitResult = program.emit();
+      assert(emitResult.emitSkipped === false, "Unexpected skip of the emit.");
+      // emitResult.diagnostics is `readonly` in TS3.5+ and can't be assigned
+      // without casting.
+      diagnostics = emitResult.diagnostics;
+    }
+  }
+
+  let bundleOutput;

+  if (diagnostics.length === 0) {
     assert(state.bundleOutput);
     bundleOutput = state.bundleOutput;
   }

-  assert(state.emitMap);
-  const result: CompileResult = {
-    emitMap: state.emitMap,
+  const result: BundleResponse = {
     bundleOutput,
     diagnostics: fromTypeScriptDiagnostic(diagnostics),
   };

-  log("<<< compile end", {
+  log("<<< bundle end", {
     rootNames,
     type: CompilerRequestType[request.type],
   });
@@ -1238,20 +1316,12 @@ function compile(request: CompilerRequestCompile): CompileResult {
 }

 function runtimeCompile(
-  request: CompilerRequestRuntimeCompile
-): RuntimeCompileResult | RuntimeBundleResult {
-  const {
-    bundle,
-    options,
-    rootNames,
-    target,
-    unstable,
-    sourceFileMap,
-  } = request;
+  request: RuntimeCompileRequest
+): RuntimeCompileResponse {
+  const { options, rootNames, target, unstable, sourceFileMap } = request;

   log(">>> runtime compile start", {
     rootNames,
-    bundle,
   });

   // if there are options, convert them into TypeScript compiler options,
@@ -1264,26 +1334,15 @@ function runtimeCompile(

   buildLocalSourceFileCache(sourceFileMap);

-  const state: WriteFileState = {
-    type: request.type,
-    bundle,
-    host: undefined,
+  const state: CompileWriteFileState = {
     rootNames,
     emitMap: {},
-    bundleOutput: undefined,
   };
-  let writeFile: WriteFileCallback;
-  if (bundle) {
-    writeFile = createBundleWriteFile(state);
-  } else {
-    writeFile = createCompileWriteFile(state);
-  }
-
-  const host = (state.host = new Host({
-    bundle,
+  const host = new Host({
+    bundle: false,
     target,
-    writeFile,
-  }));
+    writeFile: createCompileWriteFile(state),
+  });
   const compilerOptions = [DEFAULT_RUNTIME_COMPILE_OPTIONS];
   if (convertedOptions) {
     compilerOptions.push(convertedOptions);
@@ -1296,9 +1355,7 @@ function runtimeCompile(
       ],
     });
   }
-  if (bundle) {
-    compilerOptions.push(DEFAULT_BUNDLER_OPTIONS);
-  }
   host.mergeOptions(...compilerOptions);

   const program = ts.createProgram({
@@ -1307,10 +1364,6 @@ function runtimeCompile(
     host,
   });

-  if (bundle) {
-    setRootExports(program, rootNames[0]);
-  }
-
   const diagnostics = ts
     .getPreEmitDiagnostics(program)
     .filter(({ code }) => !ignoredDiagnostics.includes(code));
@@ -1319,10 +1372,8 @@ function runtimeCompile(

   assert(emitResult.emitSkipped === false, "Unexpected skip of the emit.");

-  assert(state.emitMap);
   log("<<< runtime compile finish", {
     rootNames,
-    bundle,
     emitMap: Object.keys(state.emitMap),
   });

@@ -1330,21 +1381,86 @@ function runtimeCompile(
     ? fromTypeScriptDiagnostic(diagnostics).items
     : [];

-  if (bundle) {
-    return {
-      diagnostics: maybeDiagnostics,
-      output: state.bundleOutput,
-    } as RuntimeBundleResult;
-  } else {
-    return {
-      diagnostics: maybeDiagnostics,
-      emitMap: state.emitMap,
-    } as RuntimeCompileResult;
+  return {
+    diagnostics: maybeDiagnostics,
+    emitMap: state.emitMap,
+  };
+}
+
+function runtimeBundle(request: RuntimeBundleRequest): RuntimeBundleResponse {
+  const { options, rootNames, target, unstable, sourceFileMap } = request;
+
+  log(">>> runtime bundle start", {
+    rootNames,
+  });
+
+  // if there are options, convert them into TypeScript compiler options,
+  // and resolve any external file references
+  let convertedOptions: ts.CompilerOptions | undefined;
+  if (options) {
+    const result = convertCompilerOptions(options);
+    convertedOptions = result.options;
   }
+
+  buildLocalSourceFileCache(sourceFileMap);
+
+  const state: BundleWriteFileState = {
+    rootNames,
+    bundleOutput: undefined,
+  };
+  const host = new Host({
+    bundle: true,
+    target,
+    writeFile: createBundleWriteFile(state),
+  });
+  state.host = host;
+
+  const compilerOptions = [DEFAULT_RUNTIME_COMPILE_OPTIONS];
+  if (convertedOptions) {
+    compilerOptions.push(convertedOptions);
+  }
+  if (unstable) {
+    compilerOptions.push({
+      lib: [
+        "deno.unstable",
+        ...((convertedOptions && convertedOptions.lib) || ["deno.window"]),
+      ],
+    });
+  }
+  compilerOptions.push(DEFAULT_BUNDLER_OPTIONS);
+  host.mergeOptions(...compilerOptions);
+
+  const program = ts.createProgram({
+    rootNames,
+    options: host.getCompilationSettings(),
+    host,
+  });
+
+  setRootExports(program, rootNames[0]);
+  const diagnostics = ts
+    .getPreEmitDiagnostics(program)
+    .filter(({ code }) => !ignoredDiagnostics.includes(code));
+
+  const emitResult = program.emit();
+
+  assert(emitResult.emitSkipped === false, "Unexpected skip of the emit.");
+
+  log("<<< runtime bundle finish", {
+    rootNames,
+  });
+
+  const maybeDiagnostics = diagnostics.length
+    ? fromTypeScriptDiagnostic(diagnostics).items
+    : [];
+
+  return {
+    diagnostics: maybeDiagnostics,
+    output: state.bundleOutput,
+  };
 }

 function runtimeTranspile(
-  request: CompilerRequestRuntimeTranspile
+  request: RuntimeTranspileRequest
 ): Promise<Record<string, TranspileOnlyResult>> {
   const result: Record<string, TranspileOnlyResult> = {};
   const { sources, options } = request;
@@ -1376,19 +1492,27 @@ async function tsCompilerOnMessage({
 }): Promise<void> {
   switch (request.type) {
     case CompilerRequestType.Compile: {
-      const result = compile(request as CompilerRequestCompile);
+      const result = compile(request as CompileRequest);
+      globalThis.postMessage(result);
+      break;
+    }
+    case CompilerRequestType.Bundle: {
+      const result = bundle(request as BundleRequest);
       globalThis.postMessage(result);
       break;
     }
     case CompilerRequestType.RuntimeCompile: {
-      const result = runtimeCompile(request as CompilerRequestRuntimeCompile);
+      const result = runtimeCompile(request as RuntimeCompileRequest);
+      globalThis.postMessage(result);
+      break;
+    }
+    case CompilerRequestType.RuntimeBundle: {
+      const result = runtimeBundle(request as RuntimeBundleRequest);
       globalThis.postMessage(result);
       break;
     }
     case CompilerRequestType.RuntimeTranspile: {
-      const result = await runtimeTranspile(
-        request as CompilerRequestRuntimeTranspile
-      );
+      const result = await runtimeTranspile(request as RuntimeTranspileRequest);
       globalThis.postMessage(result);
       break;
     }

@@ -383,7 +383,6 @@ async fn eval_command(
   let source_file = SourceFile {
     filename: main_module_url.to_file_path().unwrap(),
     url: main_module_url,
-    types_url: None,
     types_header: None,
     media_type: if as_typescript {
       MediaType::TypeScript
@@ -588,7 +587,6 @@ async fn run_command(flags: Flags, script: String) -> Result<(), ErrBox> {
   let source_file = SourceFile {
     filename: main_module_url.to_file_path().unwrap(),
     url: main_module_url,
-    types_url: None,
     types_header: None,
     media_type: MediaType::TypeScript,
     source_code: source,
@@ -646,7 +644,6 @@ async fn test_command(
   let source_file = SourceFile {
     filename: test_file_url.to_file_path().unwrap(),
     url: test_file_url,
-    types_url: None,
     types_header: None,
     media_type: MediaType::TypeScript,
     source_code: test_file.clone().into_bytes(),

@@ -8,8 +8,10 @@ use crate::import_map::ImportMap;
 use crate::msg::MediaType;
 use crate::op_error::OpError;
 use crate::permissions::Permissions;
-use crate::swc_util::analyze_dependencies_and_references;
-use crate::swc_util::TsReferenceKind;
+use crate::tsc::pre_process_file;
+use crate::tsc::ImportDesc;
+use crate::tsc::TsReferenceDesc;
+use crate::tsc::TsReferenceKind;
 use crate::tsc::AVAILABLE_LIBS;
 use deno_core::ErrBox;
 use deno_core::ModuleSpecifier;
@@ -21,20 +23,142 @@ use serde::Serialize;
 use serde::Serializer;
 use std::collections::HashMap;
 use std::collections::HashSet;
-use std::hash::BuildHasher;
 use std::path::PathBuf;
 use std::pin::Pin;

 // TODO(bartlomieju): it'd be great if this function returned
 // more structured data and possibly format the same as TS diagnostics.
 /// Decorate error with location of import that caused the error.
-fn err_with_location(e: ErrBox, location: &Location) -> ErrBox {
-  let location_str = format!(
-    "\nImported from \"{}:{}\"",
-    location.filename, location.line
-  );
-  let err_str = e.to_string();
-  OpError::other(format!("{}{}", err_str, location_str)).into()
+fn err_with_location(e: ErrBox, maybe_location: Option<&Location>) -> ErrBox {
+  if let Some(location) = maybe_location {
+    let location_str = format!(
+      "\nImported from \"{}:{}\"",
+      location.filename, location.line
+    );
+    let err_str = e.to_string();
+    OpError::other(format!("{}{}", err_str, location_str)).into()
+  } else {
+    e
+  }
+}
+
+/// Disallow http:// imports from modules loaded over https://
+fn validate_no_downgrade(
+  module_specifier: &ModuleSpecifier,
+  maybe_referrer: Option<&ModuleSpecifier>,
+  maybe_location: Option<&Location>,
+) -> Result<(), ErrBox> {
+  if let Some(referrer) = maybe_referrer.as_ref() {
+    if let "https" = referrer.as_url().scheme() {
+      if let "http" = module_specifier.as_url().scheme() {
+        let e = OpError::permission_denied(
+          "Modules loaded over https:// are not allowed to import modules over http://".to_string()
+        );
+        return Err(err_with_location(e.into(), maybe_location));
+      };
+    };
+  };
+
+  Ok(())
+}
+
+/// Verify that remote file doesn't try to statically import local file.
+fn validate_no_file_from_remote(
+  module_specifier: &ModuleSpecifier,
+  maybe_referrer: Option<&ModuleSpecifier>,
+  maybe_location: Option<&Location>,
+) -> Result<(), ErrBox> {
+  if let Some(referrer) = maybe_referrer.as_ref() {
+    let referrer_url = referrer.as_url();
+    match referrer_url.scheme() {
+      "http" | "https" => {
+        let specifier_url = module_specifier.as_url();
+        match specifier_url.scheme() {
+          "http" | "https" => {}
+          _ => {
+            let e = OpError::permission_denied(
+              "Remote modules are not allowed to statically import local modules. Use dynamic import instead.".to_string()
+            );
+            return Err(err_with_location(e.into(), maybe_location));
+          }
+        }
+      }
+      _ => {}
+    }
+  }
+
+  Ok(())
+}
+
+// TODO(bartlomieju): handle imports/references in ambient contexts/TS modules
+// https://github.com/denoland/deno/issues/6133
+fn resolve_imports_and_references(
+  referrer: ModuleSpecifier,
+  maybe_import_map: Option<&ImportMap>,
+  import_descs: Vec<ImportDesc>,
+  ref_descs: Vec<TsReferenceDesc>,
+) -> Result<(Vec<ImportDescriptor>, Vec<ReferenceDescriptor>), ErrBox> {
+  let mut imports = vec![];
+  let mut references = vec![];
+
+  for import_desc in import_descs {
+    let maybe_resolved = if let Some(import_map) = maybe_import_map.as_ref() {
+      import_map.resolve(&import_desc.specifier, &referrer.to_string())?
+    } else {
+      None
+    };
+
+    let resolved_specifier = if let Some(resolved) = maybe_resolved {
+      resolved
+    } else {
+      ModuleSpecifier::resolve_import(
+        &import_desc.specifier,
+        &referrer.to_string(),
+      )?
+    };
+
+    let resolved_type_directive =
+      if let Some(types_specifier) = import_desc.deno_types.as_ref() {
+        Some(ModuleSpecifier::resolve_import(
+          &types_specifier,
+          &referrer.to_string(),
+        )?)
+      } else {
+        None
+      };
+
+    let import_descriptor = ImportDescriptor {
+      specifier: import_desc.specifier.to_string(),
+      resolved_specifier,
+      type_directive: import_desc.deno_types.clone(),
+      resolved_type_directive,
+      location: import_desc.location,
+    };
+
+    imports.push(import_descriptor);
+  }
+
+  for ref_desc in ref_descs {
+    if AVAILABLE_LIBS.contains(&ref_desc.specifier.as_str()) {
+      continue;
+    }
+
+    let resolved_specifier = ModuleSpecifier::resolve_import(
+      &ref_desc.specifier,
+      &referrer.to_string(),
+    )?;
+
+    let reference_descriptor = ReferenceDescriptor {
+      specifier: ref_desc.specifier.to_string(),
+      resolved_specifier,
+      kind: ref_desc.kind,
+      location: ref_desc.location,
+    };
+
+    references.push(reference_descriptor);
+  }
+
+  Ok((imports, references))
 }

 fn serialize_module_specifier<S>(
@@ -68,8 +192,7 @@ const SUPPORTED_MEDIA_TYPES: [MediaType; 4] = [
   MediaType::TSX,
 ];

-#[derive(Debug, Serialize)]
-pub struct ModuleGraph(HashMap<String, ModuleGraphFile>);
+pub type ModuleGraph = HashMap<String, ModuleGraphFile>;

 #[derive(Debug, Serialize)]
 #[serde(rename_all = "camelCase")]
@@ -82,6 +205,8 @@ pub struct ImportDescriptor {
   pub type_directive: Option<String>,
   #[serde(serialize_with = "serialize_option_module_specifier")]
   pub resolved_type_directive: Option<ModuleSpecifier>,
+  #[serde(skip)]
+  pub location: Location,
 }

 #[derive(Debug, Serialize)]
@@ -90,6 +215,10 @@ pub struct ReferenceDescriptor {
   pub specifier: String,
   #[serde(serialize_with = "serialize_module_specifier")]
   pub resolved_specifier: ModuleSpecifier,
+  #[serde(skip)]
+  pub kind: TsReferenceKind,
+  #[serde(skip)]
+  pub location: Location,
 }

 #[derive(Debug, Serialize)]
@@ -104,7 +233,7 @@ pub struct ModuleGraphFile {
   pub lib_directives: Vec<ReferenceDescriptor>,
   pub types_directives: Vec<ReferenceDescriptor>,
   pub type_headers: Vec<ReferenceDescriptor>,
-  pub media_type: i32,
+  pub media_type: MediaType,
   pub source_code: String,
 }

@@ -117,7 +246,7 @@ pub struct ModuleGraphLoader {
   maybe_import_map: Option<ImportMap>,
   pending_downloads: FuturesUnordered<SourceFileFuture>,
   has_downloaded: HashSet<ModuleSpecifier>,
-  pub graph: ModuleGraph,
+  graph: ModuleGraph,
   is_dyn_import: bool,
   analyze_dynamic_imports: bool,
 }
@@ -136,7 +265,7 @@ impl ModuleGraphLoader {
       maybe_import_map,
       pending_downloads: FuturesUnordered::new(),
       has_downloaded: HashSet::new(),
-      graph: ModuleGraph(HashMap::new()),
+      graph: ModuleGraph::new(),
       is_dyn_import,
       analyze_dynamic_imports,
     }
@@ -153,7 +282,7 @@ impl ModuleGraphLoader {
     specifier: &ModuleSpecifier,
     maybe_referrer: Option<ModuleSpecifier>,
   ) -> Result<(), ErrBox> {
-    self.download_module(specifier.clone(), maybe_referrer)?;
+    self.download_module(specifier.clone(), maybe_referrer, None)?;

     loop {
       let (specifier, source_file) =
@@ -170,10 +299,10 @@ impl ModuleGraphLoader {
   /// This method is used to create a graph from in-memory files stored in
   /// a hash map. Useful for creating module graph for code received from
   /// the runtime.
-  pub fn build_local_graph<S: BuildHasher>(
+  pub fn build_local_graph(
     &mut self,
     _root_name: &str,
-    source_map: &HashMap<String, String, S>,
+    source_map: &HashMap<String, String>,
   ) -> Result<(), ErrBox> {
     for (spec, source_code) in source_map.iter() {
       self.visit_memory_module(spec.to_string(), source_code.to_string())?;
@@ -183,8 +312,8 @@ impl ModuleGraphLoader {
   }

   /// Consumes the loader and returns created graph.
-  pub fn get_graph(self) -> HashMap<String, ModuleGraphFile> {
-    self.graph.0
+  pub fn get_graph(self) -> ModuleGraph {
+    self.graph
   }

   fn visit_memory_module(
@@ -192,7 +321,6 @@ impl ModuleGraphLoader {
     specifier: String,
     source_code: String,
   ) -> Result<(), ErrBox> {
-    let mut imports = vec![];
     let mut referenced_files = vec![];
     let mut lib_directives = vec![];
     let mut types_directives = vec![];
@@ -208,87 +336,40 @@ impl ModuleGraphLoader {
       ModuleSpecifier::resolve_url(&format!("memory://{}", specifier))?
     };

-    let (import_descs, ref_descs) = analyze_dependencies_and_references(
-      &specifier,
+    let (raw_imports, raw_references) = pre_process_file(
+      &module_specifier.to_string(),
       map_file_extension(&PathBuf::from(&specifier)),
       &source_code,
       self.analyze_dynamic_imports,
     )?;
+    let (imports, references) = resolve_imports_and_references(
+      module_specifier.clone(),
+      self.maybe_import_map.as_ref(),
+      raw_imports,
+      raw_references,
+    )?;

-    for import_desc in import_descs {
-      let maybe_resolved =
-        if let Some(import_map) = self.maybe_import_map.as_ref() {
-          import_map
-            .resolve(&import_desc.specifier, &module_specifier.to_string())?
-        } else {
-          None
-        };
-
-      let resolved_specifier = if let Some(resolved) = maybe_resolved {
-        resolved
-      } else {
-        ModuleSpecifier::resolve_import(
-          &import_desc.specifier,
-          &module_specifier.to_string(),
-        )?
-      };
-
-      let resolved_type_directive =
-        if let Some(types_specifier) = import_desc.deno_types.as_ref() {
-          Some(ModuleSpecifier::resolve_import(
-            &types_specifier,
-            &module_specifier.to_string(),
-          )?)
-        } else {
-          None
-        };
-
-      let import_descriptor = ImportDescriptor {
-        specifier: import_desc.specifier.to_string(),
-        resolved_specifier,
-        type_directive: import_desc.deno_types,
-        resolved_type_directive,
-      };
-
-      imports.push(import_descriptor);
-    }
-
-    for ref_desc in ref_descs {
-      if AVAILABLE_LIBS.contains(&ref_desc.specifier.as_str()) {
-        continue;
-      }
-
-      let resolved_specifier = ModuleSpecifier::resolve_import(
-        &ref_desc.specifier,
-        &module_specifier.to_string(),
-      )?;
-
-      let reference_descriptor = ReferenceDescriptor {
-        specifier: ref_desc.specifier.to_string(),
-        resolved_specifier,
-      };
-
-      match ref_desc.kind {
+    for ref_descriptor in references {
+      match ref_descriptor.kind {
         TsReferenceKind::Lib => {
-          lib_directives.push(reference_descriptor);
+          lib_directives.push(ref_descriptor);
         }
         TsReferenceKind::Types => {
-          types_directives.push(reference_descriptor);
+          types_directives.push(ref_descriptor);
         }
         TsReferenceKind::Path => {
-          referenced_files.push(reference_descriptor);
+          referenced_files.push(ref_descriptor);
         }
       }
     }

-    self.graph.0.insert(
+    self.graph.insert(
       module_specifier.to_string(),
       ModuleGraphFile {
         specifier: specifier.to_string(),
         url: specifier.to_string(),
         redirect: None,
-        media_type: map_file_extension(&PathBuf::from(specifier.clone()))
-          as i32,
+        media_type: map_file_extension(&PathBuf::from(specifier.clone())),
         filename: specifier,
         source_code,
         imports,
@@ -307,43 +388,24 @@ impl ModuleGraphLoader {
     &mut self,
     module_specifier: ModuleSpecifier,
|
||||||
maybe_referrer: Option<ModuleSpecifier>,
|
maybe_referrer: Option<ModuleSpecifier>,
|
||||||
|
maybe_location: Option<Location>,
|
||||||
) -> Result<(), ErrBox> {
|
) -> Result<(), ErrBox> {
|
||||||
if self.has_downloaded.contains(&module_specifier) {
|
if self.has_downloaded.contains(&module_specifier) {
|
||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
|
||||||
// Disallow http:// imports from modules loaded over https://
|
validate_no_downgrade(
|
||||||
if let Some(referrer) = maybe_referrer.as_ref() {
|
&module_specifier,
|
||||||
if let "https" = referrer.as_url().scheme() {
|
maybe_referrer.as_ref(),
|
||||||
if let "http" = module_specifier.as_url().scheme() {
|
maybe_location.as_ref(),
|
||||||
let e = OpError::permission_denied(
|
)?;
|
||||||
"Modules loaded over https:// are not allowed to import modules over http://".to_string()
|
|
||||||
);
|
|
||||||
return Err(e.into());
|
|
||||||
};
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
if !self.is_dyn_import {
|
if !self.is_dyn_import {
|
||||||
// Verify that remote file doesn't try to statically import local file.
|
validate_no_file_from_remote(
|
||||||
if let Some(referrer) = maybe_referrer.as_ref() {
|
&module_specifier,
|
||||||
let referrer_url = referrer.as_url();
|
maybe_referrer.as_ref(),
|
||||||
match referrer_url.scheme() {
|
maybe_location.as_ref(),
|
||||||
"http" | "https" => {
|
)?;
|
||||||
let specifier_url = module_specifier.as_url();
|
|
||||||
match specifier_url.scheme() {
|
|
||||||
"http" | "https" => {}
|
|
||||||
_ => {
|
|
||||||
let e = OpError::permission_denied(
|
|
||||||
"Remote modules are not allowed to statically import local modules. Use dynamic import instead.".to_string()
|
|
||||||
);
|
|
||||||
return Err(e.into());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
_ => {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
self.has_downloaded.insert(module_specifier.clone());
|
self.has_downloaded.insert(module_specifier.clone());
|
||||||
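The two inline scheme checks above were pulled out into validate_no_downgrade and validate_no_file_from_remote, which also receive the import location so errors can point at the offending import. A hedged sketch of the downgrade check, reconstructed from the removed inline code (the exact signature and error plumbing in module_graph.rs may differ):

// Sketch only: modules loaded over https:// must not import http:// modules.
fn validate_no_downgrade(
    module_specifier: &ModuleSpecifier,
    maybe_referrer: Option<&ModuleSpecifier>,
    _maybe_location: Option<&Location>,
) -> Result<(), ErrBox> {
    if let Some(referrer) = maybe_referrer {
        if referrer.as_url().scheme() == "https"
            && module_specifier.as_url().scheme() == "http"
        {
            let e = OpError::permission_denied(
                "Modules loaded over https:// are not allowed to import modules over http://".to_string(),
            );
            return Err(e.into());
        }
    }
    Ok(())
}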
|
@ -355,7 +417,9 @@ impl ModuleGraphLoader {
|
||||||
let spec_ = spec.clone();
|
let spec_ = spec.clone();
|
||||||
let source_file = file_fetcher
|
let source_file = file_fetcher
|
||||||
.fetch_source_file(&spec_, maybe_referrer, perms)
|
.fetch_source_file(&spec_, maybe_referrer, perms)
|
||||||
.await?;
|
.await
|
||||||
|
.map_err(|e| err_with_location(e, maybe_location.as_ref()))?;
|
||||||
|
|
||||||
Ok((spec_.clone(), source_file))
|
Ok((spec_.clone(), source_file))
|
||||||
}
|
}
|
||||||
.boxed_local();
|
.boxed_local();
|
||||||
|
@ -383,14 +447,14 @@ impl ModuleGraphLoader {
|
||||||
// for proper URL point to redirect target.
|
// for proper URL point to redirect target.
|
||||||
if module_specifier.as_url() != &source_file.url {
|
if module_specifier.as_url() != &source_file.url {
|
||||||
// TODO(bartlomieju): refactor, this is a band-aid
|
// TODO(bartlomieju): refactor, this is a band-aid
|
||||||
self.graph.0.insert(
|
self.graph.insert(
|
||||||
module_specifier.to_string(),
|
module_specifier.to_string(),
|
||||||
ModuleGraphFile {
|
ModuleGraphFile {
|
||||||
specifier: module_specifier.to_string(),
|
specifier: module_specifier.to_string(),
|
||||||
url: module_specifier.to_string(),
|
url: module_specifier.to_string(),
|
||||||
redirect: Some(source_file.url.to_string()),
|
redirect: Some(source_file.url.to_string()),
|
||||||
filename: source_file.filename.to_str().unwrap().to_string(),
|
filename: source_file.filename.to_str().unwrap().to_string(),
|
||||||
media_type: source_file.media_type as i32,
|
media_type: source_file.media_type,
|
||||||
source_code: "".to_string(),
|
source_code: "".to_string(),
|
||||||
imports: vec![],
|
imports: vec![],
|
||||||
referenced_files: vec![],
|
referenced_files: vec![],
|
||||||
|
@ -412,121 +476,85 @@ impl ModuleGraphLoader {
|
||||||
&types_specifier,
|
&types_specifier,
|
||||||
&module_specifier.to_string(),
|
&module_specifier.to_string(),
|
||||||
)?,
|
)?,
|
||||||
|
kind: TsReferenceKind::Types,
|
||||||
|
// TODO(bartlomieju): location is not needed in here and constructing
|
||||||
|
// location by hand is bad
|
||||||
|
location: Location {
|
||||||
|
filename: module_specifier.to_string(),
|
||||||
|
line: 0,
|
||||||
|
col: 0,
|
||||||
|
},
|
||||||
};
|
};
|
||||||
self.download_module(
|
self.download_module(
|
||||||
type_header.resolved_specifier.clone(),
|
type_header.resolved_specifier.clone(),
|
||||||
Some(module_specifier.clone()),
|
Some(module_specifier.clone()),
|
||||||
|
None,
|
||||||
)?;
|
)?;
|
||||||
type_headers.push(type_header);
|
type_headers.push(type_header);
|
||||||
}
|
}
|
||||||
|
|
||||||
let (import_descs, ref_descs) = analyze_dependencies_and_references(
|
let (raw_imports, raw_refs) = pre_process_file(
|
||||||
&module_specifier.to_string(),
|
&module_specifier.to_string(),
|
||||||
source_file.media_type,
|
source_file.media_type,
|
||||||
&source_code,
|
&source_code,
|
||||||
self.analyze_dynamic_imports,
|
self.analyze_dynamic_imports,
|
||||||
)?;
|
)?;
|
||||||
|
let (imports_, references) = resolve_imports_and_references(
|
||||||
|
module_specifier.clone(),
|
||||||
|
self.maybe_import_map.as_ref(),
|
||||||
|
raw_imports,
|
||||||
|
raw_refs,
|
||||||
|
)?;
|
||||||
|
|
||||||
for import_desc in import_descs {
|
for import_descriptor in imports_ {
|
||||||
let maybe_resolved =
|
self.download_module(
|
||||||
if let Some(import_map) = self.maybe_import_map.as_ref() {
|
import_descriptor.resolved_specifier.clone(),
|
||||||
import_map
|
Some(module_specifier.clone()),
|
||||||
.resolve(&import_desc.specifier, &module_specifier.to_string())?
|
Some(import_descriptor.location.clone()),
|
||||||
} else {
|
)?;
|
||||||
None
|
|
||||||
};
|
|
||||||
|
|
||||||
let resolved_specifier = if let Some(resolved) = maybe_resolved {
|
|
||||||
resolved
|
|
||||||
} else {
|
|
||||||
ModuleSpecifier::resolve_import(
|
|
||||||
&import_desc.specifier,
|
|
||||||
&module_specifier.to_string(),
|
|
||||||
)?
|
|
||||||
};
|
|
||||||
|
|
||||||
let resolved_type_directive =
|
|
||||||
if let Some(types_specifier) = import_desc.deno_types.as_ref() {
|
|
||||||
Some(ModuleSpecifier::resolve_import(
|
|
||||||
&types_specifier,
|
|
||||||
&module_specifier.to_string(),
|
|
||||||
)?)
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
|
|
||||||
let import_descriptor = ImportDescriptor {
|
|
||||||
specifier: import_desc.specifier.to_string(),
|
|
||||||
resolved_specifier,
|
|
||||||
type_directive: import_desc.deno_types.clone(),
|
|
||||||
resolved_type_directive,
|
|
||||||
};
|
|
||||||
|
|
||||||
self
|
|
||||||
.download_module(
|
|
||||||
import_descriptor.resolved_specifier.clone(),
|
|
||||||
Some(module_specifier.clone()),
|
|
||||||
)
|
|
||||||
.map_err(|e| err_with_location(e, &import_desc.location))?;
|
|
||||||
|
|
||||||
if let Some(type_dir_url) =
|
if let Some(type_dir_url) =
|
||||||
import_descriptor.resolved_type_directive.as_ref()
|
import_descriptor.resolved_type_directive.as_ref()
|
||||||
{
|
{
|
||||||
self
|
self.download_module(
|
||||||
.download_module(
|
type_dir_url.clone(),
|
||||||
type_dir_url.clone(),
|
Some(module_specifier.clone()),
|
||||||
Some(module_specifier.clone()),
|
Some(import_descriptor.location.clone()),
|
||||||
)
|
)?;
|
||||||
.map_err(|e| err_with_location(e, &import_desc.location))?;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
imports.push(import_descriptor);
|
imports.push(import_descriptor);
|
||||||
}
|
}
|
||||||
|
|
||||||
for ref_desc in ref_descs {
|
for ref_descriptor in references {
|
||||||
if AVAILABLE_LIBS.contains(&ref_desc.specifier.as_str()) {
|
self.download_module(
|
||||||
continue;
|
ref_descriptor.resolved_specifier.clone(),
|
||||||
}
|
Some(module_specifier.clone()),
|
||||||
|
Some(ref_descriptor.location.clone()),
|
||||||
let resolved_specifier = ModuleSpecifier::resolve_import(
|
|
||||||
&ref_desc.specifier,
|
|
||||||
&module_specifier.to_string(),
|
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
let reference_descriptor = ReferenceDescriptor {
|
match ref_descriptor.kind {
|
||||||
specifier: ref_desc.specifier.to_string(),
|
|
||||||
resolved_specifier,
|
|
||||||
};
|
|
||||||
|
|
||||||
self
|
|
||||||
.download_module(
|
|
||||||
reference_descriptor.resolved_specifier.clone(),
|
|
||||||
Some(module_specifier.clone()),
|
|
||||||
)
|
|
||||||
.map_err(|e| err_with_location(e, &ref_desc.location))?;
|
|
||||||
|
|
||||||
match ref_desc.kind {
|
|
||||||
TsReferenceKind::Lib => {
|
TsReferenceKind::Lib => {
|
||||||
lib_directives.push(reference_descriptor);
|
lib_directives.push(ref_descriptor);
|
||||||
}
|
}
|
||||||
TsReferenceKind::Types => {
|
TsReferenceKind::Types => {
|
||||||
types_directives.push(reference_descriptor);
|
types_directives.push(ref_descriptor);
|
||||||
}
|
}
|
||||||
TsReferenceKind::Path => {
|
TsReferenceKind::Path => {
|
||||||
referenced_files.push(reference_descriptor);
|
referenced_files.push(ref_descriptor);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
self.graph.0.insert(
|
self.graph.insert(
|
||||||
module_specifier.to_string(),
|
module_specifier.to_string(),
|
||||||
ModuleGraphFile {
|
ModuleGraphFile {
|
||||||
specifier: module_specifier.to_string(),
|
specifier: module_specifier.to_string(),
|
||||||
url: module_specifier.to_string(),
|
url: module_specifier.to_string(),
|
||||||
redirect: None,
|
redirect: None,
|
||||||
filename: source_file.filename.to_str().unwrap().to_string(),
|
filename: source_file.filename.to_str().unwrap().to_string(),
|
||||||
media_type: source_file.media_type as i32,
|
media_type: source_file.media_type,
|
||||||
source_code,
|
source_code,
|
||||||
imports,
|
imports,
|
||||||
referenced_files,
|
referenced_files,
|
||||||
|
@ -546,7 +574,7 @@ mod tests {
|
||||||
|
|
||||||
async fn build_graph(
|
async fn build_graph(
|
||||||
module_specifier: &ModuleSpecifier,
|
module_specifier: &ModuleSpecifier,
|
||||||
) -> Result<HashMap<String, ModuleGraphFile>, ErrBox> {
|
) -> Result<ModuleGraph, ErrBox> {
|
||||||
let global_state = GlobalState::new(Default::default()).unwrap();
|
let global_state = GlobalState::new(Default::default()).unwrap();
|
||||||
let mut graph_loader = ModuleGraphLoader::new(
|
let mut graph_loader = ModuleGraphLoader::new(
|
||||||
global_state.file_fetcher.clone(),
|
global_state.file_fetcher.clone(),
|
||||||
|
@ -824,3 +852,102 @@ mod tests {
|
||||||
drop(http_server_guard);
|
drop(http_server_guard);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// TODO(bartlomieju): use baseline tests from TSC to ensure
|
||||||
|
// compatibility
|
||||||
|
#[test]
|
||||||
|
fn test_pre_process_file() {
|
||||||
|
let source = r#"
|
||||||
|
// This comment is placed to make sure that directives are parsed
|
||||||
|
// even when they start on non-first line
|
||||||
|
|
||||||
|
/// <reference lib="dom" />
|
||||||
|
/// <reference types="./type_reference.d.ts" />
|
||||||
|
/// <reference path="./type_reference/dep.ts" />
|
||||||
|
// @deno-types="./type_definitions/foo.d.ts"
|
||||||
|
import { foo } from "./type_definitions/foo.js";
|
||||||
|
// @deno-types="./type_definitions/fizz.d.ts"
|
||||||
|
import "./type_definitions/fizz.js";
|
||||||
|
|
||||||
|
/// <reference path="./type_reference/dep2.ts" />
|
||||||
|
|
||||||
|
import * as qat from "./type_definitions/qat.ts";
|
||||||
|
|
||||||
|
console.log(foo);
|
||||||
|
console.log(fizz);
|
||||||
|
console.log(qat.qat);
|
||||||
|
"#;
|
||||||
|
|
||||||
|
let (imports, references) =
|
||||||
|
pre_process_file("some/file.ts", MediaType::TypeScript, source, true)
|
||||||
|
.expect("Failed to parse");
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
imports,
|
||||||
|
vec![
|
||||||
|
ImportDesc {
|
||||||
|
specifier: "./type_definitions/foo.js".to_string(),
|
||||||
|
deno_types: Some("./type_definitions/foo.d.ts".to_string()),
|
||||||
|
location: Location {
|
||||||
|
filename: "some/file.ts".to_string(),
|
||||||
|
line: 9,
|
||||||
|
col: 0,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
ImportDesc {
|
||||||
|
specifier: "./type_definitions/fizz.js".to_string(),
|
||||||
|
deno_types: Some("./type_definitions/fizz.d.ts".to_string()),
|
||||||
|
location: Location {
|
||||||
|
filename: "some/file.ts".to_string(),
|
||||||
|
line: 11,
|
||||||
|
col: 0,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
ImportDesc {
|
||||||
|
specifier: "./type_definitions/qat.ts".to_string(),
|
||||||
|
deno_types: None,
|
||||||
|
location: Location {
|
||||||
|
filename: "some/file.ts".to_string(),
|
||||||
|
line: 15,
|
||||||
|
col: 0,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
]
|
||||||
|
);
|
||||||
|
|
||||||
|
// According to TS docs (https://www.typescriptlang.org/docs/handbook/triple-slash-directives.html)
|
||||||
|
// directives that are not at the top of the file are ignored, so only
|
||||||
|
// 3 references should be captured instead of 4.
|
||||||
|
assert_eq!(
|
||||||
|
references,
|
||||||
|
vec![
|
||||||
|
TsReferenceDesc {
|
||||||
|
specifier: "dom".to_string(),
|
||||||
|
kind: TsReferenceKind::Lib,
|
||||||
|
location: Location {
|
||||||
|
filename: "some/file.ts".to_string(),
|
||||||
|
line: 5,
|
||||||
|
col: 0,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
TsReferenceDesc {
|
||||||
|
specifier: "./type_reference.d.ts".to_string(),
|
||||||
|
kind: TsReferenceKind::Types,
|
||||||
|
location: Location {
|
||||||
|
filename: "some/file.ts".to_string(),
|
||||||
|
line: 6,
|
||||||
|
col: 0,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
TsReferenceDesc {
|
||||||
|
specifier: "./type_reference/dep.ts".to_string(),
|
||||||
|
kind: TsReferenceKind::Path,
|
||||||
|
location: Location {
|
||||||
|
filename: "some/file.ts".to_string(),
|
||||||
|
line: 7,
|
||||||
|
col: 0,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
]
|
||||||
|
);
|
||||||
|
}
|
||||||
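A smaller usage sketch distilled from the test above, with a made-up filename and specifiers, showing how a @deno-types directive is paired with the import that follows it:

#[test]
fn pre_process_file_minimal_example() {
    let source = r#"
// @deno-types="./foo.d.ts"
import { foo } from "./foo.js";
"#;
    // pre_process_file returns raw, unresolved descriptors; resolution against
    // the referrer and import map happens later in the module graph loader.
    let (imports, references) =
        pre_process_file("example.ts", MediaType::TypeScript, source, false)
            .expect("Failed to parse");
    assert_eq!(imports.len(), 1);
    assert_eq!(imports[0].specifier, "./foo.js");
    assert_eq!(imports[0].deno_types.as_deref(), Some("./foo.d.ts"));
    assert!(references.is_empty());
}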
|
|
cli/msg.rs (47 changed lines)
|
@ -3,10 +3,11 @@
|
||||||
// Warning! The values in this enum are duplicated in js/compiler.ts
|
// Warning! The values in this enum are duplicated in js/compiler.ts
|
||||||
// Update carefully!
|
// Update carefully!
|
||||||
use serde::Serialize;
|
use serde::Serialize;
|
||||||
|
use serde::Serializer;
|
||||||
|
|
||||||
#[allow(non_camel_case_types)]
|
#[allow(non_camel_case_types)]
|
||||||
#[repr(i8)]
|
#[repr(i32)]
|
||||||
#[derive(Clone, Copy, PartialEq, Debug, Serialize)]
|
#[derive(Clone, Copy, PartialEq, Debug)]
|
||||||
pub enum MediaType {
|
pub enum MediaType {
|
||||||
JavaScript = 0,
|
JavaScript = 0,
|
||||||
JSX = 1,
|
JSX = 1,
|
||||||
|
@ -17,6 +18,24 @@ pub enum MediaType {
|
||||||
Unknown = 6,
|
Unknown = 6,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl Serialize for MediaType {
|
||||||
|
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||||
|
where
|
||||||
|
S: Serializer,
|
||||||
|
{
|
||||||
|
let value: i32 = match self {
|
||||||
|
MediaType::JavaScript => 0 as i32,
|
||||||
|
MediaType::JSX => 1 as i32,
|
||||||
|
MediaType::TypeScript => 2 as i32,
|
||||||
|
MediaType::TSX => 3 as i32,
|
||||||
|
MediaType::Json => 4 as i32,
|
||||||
|
MediaType::Wasm => 5 as i32,
|
||||||
|
MediaType::Unknown => 6 as i32,
|
||||||
|
};
|
||||||
|
Serialize::serialize(&value, serializer)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
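The hand-written Serialize impl keeps the wire format numeric so the values stay aligned with the duplicated enum in js/compiler.ts. A minimal check of the intended behaviour (the test name is illustrative):

#[test]
fn media_type_serializes_to_number() {
    // With the impl above, serde emits the raw numeric value, not the variant name.
    let json = serde_json::to_string(&MediaType::TypeScript).unwrap();
    assert_eq!(json, "2");
}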
||||||
pub fn enum_name_media_type(mt: MediaType) -> &'static str {
|
pub fn enum_name_media_type(mt: MediaType) -> &'static str {
|
||||||
match mt {
|
match mt {
|
||||||
MediaType::JavaScript => "JavaScript",
|
MediaType::JavaScript => "JavaScript",
|
||||||
|
@ -32,10 +51,28 @@ pub fn enum_name_media_type(mt: MediaType) -> &'static str {
|
||||||
// Warning! The values in this enum are duplicated in js/compiler.ts
|
// Warning! The values in this enum are duplicated in js/compiler.ts
|
||||||
// Update carefully!
|
// Update carefully!
|
||||||
#[allow(non_camel_case_types)]
|
#[allow(non_camel_case_types)]
|
||||||
#[repr(i8)]
|
#[repr(i32)]
|
||||||
#[derive(Clone, Copy, PartialEq, Debug)]
|
#[derive(Clone, Copy, PartialEq, Debug)]
|
||||||
pub enum CompilerRequestType {
|
pub enum CompilerRequestType {
|
||||||
Compile = 0,
|
Compile = 0,
|
||||||
RuntimeCompile = 1,
|
Bundle = 1,
|
||||||
RuntimeTranspile = 2,
|
RuntimeCompile = 2,
|
||||||
|
RuntimeBundle = 3,
|
||||||
|
RuntimeTranspile = 4,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Serialize for CompilerRequestType {
|
||||||
|
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||||
|
where
|
||||||
|
S: Serializer,
|
||||||
|
{
|
||||||
|
let value: i32 = match self {
|
||||||
|
CompilerRequestType::Compile => 0 as i32,
|
||||||
|
CompilerRequestType::Bundle => 1 as i32,
|
||||||
|
CompilerRequestType::RuntimeCompile => 2 as i32,
|
||||||
|
CompilerRequestType::RuntimeBundle => 3 as i32,
|
||||||
|
CompilerRequestType::RuntimeTranspile => 4 as i32,
|
||||||
|
};
|
||||||
|
Serialize::serialize(&value, serializer)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
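Because CompilerRequestType also serializes to its numeric value, the request messages built in tsc.rs below can embed the enum directly instead of casting with `as i32`. A hedged illustration (the test name and root name are made up):

#[test]
fn compiler_request_type_serializes_to_number() {
    // `json!` picks up the custom Serialize impl, so "type" becomes the number 1.
    let req = serde_json::json!({
        "type": CompilerRequestType::Bundle,
        "rootNames": ["file:///main.ts"],
    });
    assert_eq!(req["type"], 1);
}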
|
@ -3,6 +3,7 @@ use super::dispatch_json::{Deserialize, JsonOp, Value};
|
||||||
use crate::futures::FutureExt;
|
use crate::futures::FutureExt;
|
||||||
use crate::op_error::OpError;
|
use crate::op_error::OpError;
|
||||||
use crate::state::State;
|
use crate::state::State;
|
||||||
|
use crate::tsc::runtime_bundle;
|
||||||
use crate::tsc::runtime_compile;
|
use crate::tsc::runtime_compile;
|
||||||
use crate::tsc::runtime_transpile;
|
use crate::tsc::runtime_transpile;
|
||||||
use deno_core::CoreIsolate;
|
use deno_core::CoreIsolate;
|
||||||
|
@ -34,15 +35,27 @@ fn op_compile(
|
||||||
let global_state = s.global_state.clone();
|
let global_state = s.global_state.clone();
|
||||||
let permissions = s.permissions.clone();
|
let permissions = s.permissions.clone();
|
||||||
let fut = async move {
|
let fut = async move {
|
||||||
runtime_compile(
|
let fut = if args.bundle {
|
||||||
global_state,
|
runtime_bundle(
|
||||||
permissions,
|
global_state,
|
||||||
&args.root_name,
|
permissions,
|
||||||
&args.sources,
|
&args.root_name,
|
||||||
args.bundle,
|
&args.sources,
|
||||||
&args.options,
|
&args.options,
|
||||||
)
|
)
|
||||||
.await
|
.boxed_local()
|
||||||
|
} else {
|
||||||
|
runtime_compile(
|
||||||
|
global_state,
|
||||||
|
permissions,
|
||||||
|
&args.root_name,
|
||||||
|
&args.sources,
|
||||||
|
&args.options,
|
||||||
|
)
|
||||||
|
.boxed_local()
|
||||||
|
};
|
||||||
|
|
||||||
|
fut.await
|
||||||
}
|
}
|
||||||
.boxed_local();
|
.boxed_local();
|
||||||
Ok(JsonOp::Async(fut))
|
Ok(JsonOp::Async(fut))
|
||||||
|
|
cli/swc_util.rs (439 changed lines)
|
@ -1,8 +1,6 @@
|
||||||
// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
|
// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
|
||||||
use crate::doc::Location;
|
|
||||||
use crate::msg::MediaType;
|
use crate::msg::MediaType;
|
||||||
use crate::swc_common;
|
use crate::swc_common;
|
||||||
use crate::swc_common::comments::CommentKind;
|
|
||||||
use crate::swc_common::comments::Comments;
|
use crate::swc_common::comments::Comments;
|
||||||
use crate::swc_common::errors::Diagnostic;
|
use crate::swc_common::errors::Diagnostic;
|
||||||
use crate::swc_common::errors::DiagnosticBuilder;
|
use crate::swc_common::errors::DiagnosticBuilder;
|
||||||
|
@ -26,8 +24,6 @@ use std::error::Error;
|
||||||
use std::fmt;
|
use std::fmt;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use std::sync::RwLock;
|
use std::sync::RwLock;
|
||||||
use swc_ecma_visit::Node;
|
|
||||||
use swc_ecma_visit::Visit;
|
|
||||||
|
|
||||||
fn get_default_es_config() -> EsConfig {
|
fn get_default_es_config() -> EsConfig {
|
||||||
let mut config = EsConfig::default();
|
let mut config = EsConfig::default();
|
||||||
|
@ -231,438 +227,3 @@ impl AstParser {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
struct DependencyVisitor {
|
|
||||||
dependencies: Vec<String>,
|
|
||||||
analyze_dynamic_imports: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Visit for DependencyVisitor {
|
|
||||||
fn visit_import_decl(
|
|
||||||
&mut self,
|
|
||||||
import_decl: &swc_ecma_ast::ImportDecl,
|
|
||||||
_parent: &dyn Node,
|
|
||||||
) {
|
|
||||||
let src_str = import_decl.src.value.to_string();
|
|
||||||
self.dependencies.push(src_str);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_named_export(
|
|
||||||
&mut self,
|
|
||||||
named_export: &swc_ecma_ast::NamedExport,
|
|
||||||
_parent: &dyn Node,
|
|
||||||
) {
|
|
||||||
if let Some(src) = &named_export.src {
|
|
||||||
let src_str = src.value.to_string();
|
|
||||||
self.dependencies.push(src_str);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_export_all(
|
|
||||||
&mut self,
|
|
||||||
export_all: &swc_ecma_ast::ExportAll,
|
|
||||||
_parent: &dyn Node,
|
|
||||||
) {
|
|
||||||
let src_str = export_all.src.value.to_string();
|
|
||||||
self.dependencies.push(src_str);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_call_expr(
|
|
||||||
&mut self,
|
|
||||||
call_expr: &swc_ecma_ast::CallExpr,
|
|
||||||
_parent: &dyn Node,
|
|
||||||
) {
|
|
||||||
if !self.analyze_dynamic_imports {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
use swc_ecma_ast::Expr::*;
|
|
||||||
use swc_ecma_ast::ExprOrSuper::*;
|
|
||||||
|
|
||||||
let boxed_expr = match call_expr.callee.clone() {
|
|
||||||
Super(_) => return,
|
|
||||||
Expr(boxed) => boxed,
|
|
||||||
};
|
|
||||||
|
|
||||||
match &*boxed_expr {
|
|
||||||
Ident(ident) => {
|
|
||||||
if &ident.sym.to_string() != "import" {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
_ => return,
|
|
||||||
};
|
|
||||||
|
|
||||||
if let Some(arg) = call_expr.args.get(0) {
|
|
||||||
match &*arg.expr {
|
|
||||||
Lit(lit) => {
|
|
||||||
if let swc_ecma_ast::Lit::Str(str_) = lit {
|
|
||||||
let src_str = str_.value.to_string();
|
|
||||||
self.dependencies.push(src_str);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
_ => return,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
|
||||||
enum DependencyKind {
|
|
||||||
Import,
|
|
||||||
DynamicImport,
|
|
||||||
Export,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
|
||||||
struct DependencyDescriptor {
|
|
||||||
span: Span,
|
|
||||||
specifier: String,
|
|
||||||
kind: DependencyKind,
|
|
||||||
}
|
|
||||||
|
|
||||||
struct NewDependencyVisitor {
|
|
||||||
dependencies: Vec<DependencyDescriptor>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Visit for NewDependencyVisitor {
|
|
||||||
fn visit_import_decl(
|
|
||||||
&mut self,
|
|
||||||
import_decl: &swc_ecma_ast::ImportDecl,
|
|
||||||
_parent: &dyn Node,
|
|
||||||
) {
|
|
||||||
let src_str = import_decl.src.value.to_string();
|
|
||||||
self.dependencies.push(DependencyDescriptor {
|
|
||||||
specifier: src_str,
|
|
||||||
kind: DependencyKind::Import,
|
|
||||||
span: import_decl.span,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_named_export(
|
|
||||||
&mut self,
|
|
||||||
named_export: &swc_ecma_ast::NamedExport,
|
|
||||||
_parent: &dyn Node,
|
|
||||||
) {
|
|
||||||
if let Some(src) = &named_export.src {
|
|
||||||
let src_str = src.value.to_string();
|
|
||||||
self.dependencies.push(DependencyDescriptor {
|
|
||||||
specifier: src_str,
|
|
||||||
kind: DependencyKind::Export,
|
|
||||||
span: named_export.span,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_export_all(
|
|
||||||
&mut self,
|
|
||||||
export_all: &swc_ecma_ast::ExportAll,
|
|
||||||
_parent: &dyn Node,
|
|
||||||
) {
|
|
||||||
let src_str = export_all.src.value.to_string();
|
|
||||||
self.dependencies.push(DependencyDescriptor {
|
|
||||||
specifier: src_str,
|
|
||||||
kind: DependencyKind::Export,
|
|
||||||
span: export_all.span,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_ts_import_type(
|
|
||||||
&mut self,
|
|
||||||
ts_import_type: &swc_ecma_ast::TsImportType,
|
|
||||||
_parent: &dyn Node,
|
|
||||||
) {
|
|
||||||
// TODO(bartlomieju): possibly add separate DependencyKind
|
|
||||||
let src_str = ts_import_type.arg.value.to_string();
|
|
||||||
self.dependencies.push(DependencyDescriptor {
|
|
||||||
specifier: src_str,
|
|
||||||
kind: DependencyKind::Import,
|
|
||||||
span: ts_import_type.arg.span,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
fn visit_call_expr(
|
|
||||||
&mut self,
|
|
||||||
call_expr: &swc_ecma_ast::CallExpr,
|
|
||||||
parent: &dyn Node,
|
|
||||||
) {
|
|
||||||
use swc_ecma_ast::Expr::*;
|
|
||||||
use swc_ecma_ast::ExprOrSuper::*;
|
|
||||||
|
|
||||||
swc_ecma_visit::visit_call_expr(self, call_expr, parent);
|
|
||||||
let boxed_expr = match call_expr.callee.clone() {
|
|
||||||
Super(_) => return,
|
|
||||||
Expr(boxed) => boxed,
|
|
||||||
};
|
|
||||||
|
|
||||||
match &*boxed_expr {
|
|
||||||
Ident(ident) => {
|
|
||||||
if &ident.sym.to_string() != "import" {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
_ => return,
|
|
||||||
};
|
|
||||||
|
|
||||||
if let Some(arg) = call_expr.args.get(0) {
|
|
||||||
match &*arg.expr {
|
|
||||||
Lit(lit) => {
|
|
||||||
if let swc_ecma_ast::Lit::Str(str_) = lit {
|
|
||||||
let src_str = str_.value.to_string();
|
|
||||||
self.dependencies.push(DependencyDescriptor {
|
|
||||||
specifier: src_str,
|
|
||||||
kind: DependencyKind::DynamicImport,
|
|
||||||
span: call_expr.span,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
_ => return,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_deno_types(parser: &AstParser, span: Span) -> Option<String> {
|
|
||||||
let comments = parser.get_span_comments(span);
|
|
||||||
|
|
||||||
if comments.is_empty() {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
// @deno-types must directly prepend import statement - hence
|
|
||||||
// checking last comment for span
|
|
||||||
let last = comments.last().unwrap();
|
|
||||||
let comment = last.text.trim_start();
|
|
||||||
|
|
||||||
if comment.starts_with("@deno-types") {
|
|
||||||
let split: Vec<String> =
|
|
||||||
comment.split('=').map(|s| s.to_string()).collect();
|
|
||||||
assert_eq!(split.len(), 2);
|
|
||||||
let specifier_in_quotes = split.get(1).unwrap().to_string();
|
|
||||||
let specifier = specifier_in_quotes
|
|
||||||
.trim_start_matches('\"')
|
|
||||||
.trim_start_matches('\'')
|
|
||||||
.trim_end_matches('\"')
|
|
||||||
.trim_end_matches('\'')
|
|
||||||
.to_string();
|
|
||||||
return Some(specifier);
|
|
||||||
}
|
|
||||||
|
|
||||||
None
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
|
||||||
pub struct ImportDescriptor {
|
|
||||||
pub specifier: String,
|
|
||||||
pub deno_types: Option<String>,
|
|
||||||
pub location: Location,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
|
||||||
pub enum TsReferenceKind {
|
|
||||||
Lib,
|
|
||||||
Types,
|
|
||||||
Path,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
|
||||||
pub struct TsReferenceDescriptor {
|
|
||||||
pub kind: TsReferenceKind,
|
|
||||||
pub specifier: String,
|
|
||||||
pub location: Location,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn analyze_dependencies_and_references(
|
|
||||||
file_name: &str,
|
|
||||||
media_type: MediaType,
|
|
||||||
source_code: &str,
|
|
||||||
analyze_dynamic_imports: bool,
|
|
||||||
) -> Result<
|
|
||||||
(Vec<ImportDescriptor>, Vec<TsReferenceDescriptor>),
|
|
||||||
SwcDiagnosticBuffer,
|
|
||||||
> {
|
|
||||||
let parser = AstParser::new();
|
|
||||||
parser.parse_module(file_name, media_type, source_code, |parse_result| {
|
|
||||||
let module = parse_result?;
|
|
||||||
let mut collector = NewDependencyVisitor {
|
|
||||||
dependencies: vec![],
|
|
||||||
};
|
|
||||||
let module_span = module.span;
|
|
||||||
collector.visit_module(&module, &module);
|
|
||||||
|
|
||||||
let dependency_descriptors = collector.dependencies;
|
|
||||||
|
|
||||||
// for each import check if there's relevant @deno-types directive
|
|
||||||
let imports = dependency_descriptors
|
|
||||||
.iter()
|
|
||||||
.filter(|desc| {
|
|
||||||
if analyze_dynamic_imports {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
desc.kind != DependencyKind::DynamicImport
|
|
||||||
})
|
|
||||||
.map(|desc| {
|
|
||||||
let location = parser.get_span_location(desc.span);
|
|
||||||
let deno_types = get_deno_types(&parser, desc.span);
|
|
||||||
ImportDescriptor {
|
|
||||||
specifier: desc.specifier.to_string(),
|
|
||||||
deno_types,
|
|
||||||
location: location.into(),
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
// analyze comment from beginning of the file and find TS directives
|
|
||||||
let comments = parser
|
|
||||||
.comments
|
|
||||||
.take_leading_comments(module_span.lo())
|
|
||||||
.unwrap_or_else(Vec::new);
|
|
||||||
|
|
||||||
let mut references = vec![];
|
|
||||||
for comment in comments {
|
|
||||||
if comment.kind != CommentKind::Line {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO(bartlomieju): you can do better than that...
|
|
||||||
let text = comment.text.to_string();
|
|
||||||
let (kind, specifier_in_quotes) =
|
|
||||||
if text.starts_with("/ <reference path=") {
|
|
||||||
(
|
|
||||||
TsReferenceKind::Path,
|
|
||||||
text.trim_start_matches("/ <reference path="),
|
|
||||||
)
|
|
||||||
} else if text.starts_with("/ <reference lib=") {
|
|
||||||
(
|
|
||||||
TsReferenceKind::Lib,
|
|
||||||
text.trim_start_matches("/ <reference lib="),
|
|
||||||
)
|
|
||||||
} else if text.starts_with("/ <reference types=") {
|
|
||||||
(
|
|
||||||
TsReferenceKind::Types,
|
|
||||||
text.trim_start_matches("/ <reference types="),
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
continue;
|
|
||||||
};
|
|
||||||
let specifier = specifier_in_quotes
|
|
||||||
.trim_end_matches("/>")
|
|
||||||
.trim_end()
|
|
||||||
.trim_start_matches('\"')
|
|
||||||
.trim_start_matches('\'')
|
|
||||||
.trim_end_matches('\"')
|
|
||||||
.trim_end_matches('\'')
|
|
||||||
.to_string();
|
|
||||||
|
|
||||||
let location = parser.get_span_location(comment.span);
|
|
||||||
references.push(TsReferenceDescriptor {
|
|
||||||
kind,
|
|
||||||
specifier,
|
|
||||||
location: location.into(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
Ok((imports, references))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_analyze_dependencies_and_directives() {
|
|
||||||
let source = r#"
|
|
||||||
// This comment is placed to make sure that directives are parsed
|
|
||||||
// even when they start on non-first line
|
|
||||||
|
|
||||||
/// <reference lib="dom" />
|
|
||||||
/// <reference types="./type_reference.d.ts" />
|
|
||||||
/// <reference path="./type_reference/dep.ts" />
|
|
||||||
// @deno-types="./type_definitions/foo.d.ts"
|
|
||||||
import { foo } from "./type_definitions/foo.js";
|
|
||||||
// @deno-types="./type_definitions/fizz.d.ts"
|
|
||||||
import "./type_definitions/fizz.js";
|
|
||||||
|
|
||||||
/// <reference path="./type_reference/dep2.ts" />
|
|
||||||
|
|
||||||
import * as qat from "./type_definitions/qat.ts";
|
|
||||||
|
|
||||||
console.log(foo);
|
|
||||||
console.log(fizz);
|
|
||||||
console.log(qat.qat);
|
|
||||||
"#;
|
|
||||||
|
|
||||||
let (imports, references) = analyze_dependencies_and_references(
|
|
||||||
"some/file.ts",
|
|
||||||
MediaType::TypeScript,
|
|
||||||
source,
|
|
||||||
true,
|
|
||||||
)
|
|
||||||
.expect("Failed to parse");
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
imports,
|
|
||||||
vec![
|
|
||||||
ImportDescriptor {
|
|
||||||
specifier: "./type_definitions/foo.js".to_string(),
|
|
||||||
deno_types: Some("./type_definitions/foo.d.ts".to_string()),
|
|
||||||
location: Location {
|
|
||||||
filename: "some/file.ts".to_string(),
|
|
||||||
line: 9,
|
|
||||||
col: 0,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
ImportDescriptor {
|
|
||||||
specifier: "./type_definitions/fizz.js".to_string(),
|
|
||||||
deno_types: Some("./type_definitions/fizz.d.ts".to_string()),
|
|
||||||
location: Location {
|
|
||||||
filename: "some/file.ts".to_string(),
|
|
||||||
line: 11,
|
|
||||||
col: 0,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
ImportDescriptor {
|
|
||||||
specifier: "./type_definitions/qat.ts".to_string(),
|
|
||||||
deno_types: None,
|
|
||||||
location: Location {
|
|
||||||
filename: "some/file.ts".to_string(),
|
|
||||||
line: 15,
|
|
||||||
col: 0,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
]
|
|
||||||
);
|
|
||||||
|
|
||||||
// According to TS docs (https://www.typescriptlang.org/docs/handbook/triple-slash-directives.html)
|
|
||||||
// directives that are not at the top of the file are ignored, so only
|
|
||||||
// 3 references should be captured instead of 4.
|
|
||||||
assert_eq!(
|
|
||||||
references,
|
|
||||||
vec![
|
|
||||||
TsReferenceDescriptor {
|
|
||||||
specifier: "dom".to_string(),
|
|
||||||
kind: TsReferenceKind::Lib,
|
|
||||||
location: Location {
|
|
||||||
filename: "some/file.ts".to_string(),
|
|
||||||
line: 5,
|
|
||||||
col: 0,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
TsReferenceDescriptor {
|
|
||||||
specifier: "./type_reference.d.ts".to_string(),
|
|
||||||
kind: TsReferenceKind::Types,
|
|
||||||
location: Location {
|
|
||||||
filename: "some/file.ts".to_string(),
|
|
||||||
line: 6,
|
|
||||||
col: 0,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
TsReferenceDescriptor {
|
|
||||||
specifier: "./type_reference/dep.ts".to_string(),
|
|
||||||
kind: TsReferenceKind::Path,
|
|
||||||
location: Location {
|
|
||||||
filename: "some/file.ts".to_string(),
|
|
||||||
line: 7,
|
|
||||||
col: 0,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
]
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
|
@ -1 +1,2 @@
|
||||||
[WILDCARD]error: Cannot resolve module "[WILDCARD]/bad-module.ts" from "[WILDCARD]/error_004_missing_module.ts"
|
[WILDCARD]error: Cannot resolve module "[WILDCARD]/bad-module.ts" from "[WILDCARD]/error_004_missing_module.ts"
|
||||||
|
Imported from "[WILDCARD]/error_004_missing_module.ts:2"
|
||||||
|
|
|
@ -1 +1,2 @@
|
||||||
[WILDCARD]error: Cannot resolve module "[WILDCARD]/non-existent" from "[WILDCARD]/error_006_import_ext_failure.ts"
|
[WILDCARD]error: Cannot resolve module "[WILDCARD]/non-existent" from "[WILDCARD]/error_006_import_ext_failure.ts"
|
||||||
|
Imported from "[WILDCARD]/error_006_import_ext_failure.ts:1"
|
||||||
|
|
|
@ -1,2 +1,3 @@
|
||||||
[WILDCARD]
|
[WILDCARD]
|
||||||
error: Uncaught TypeError: read access to "[WILDCARD]passwd", run again with the --allow-read flag
|
error: Uncaught TypeError: read access to "[WILDCARD]passwd", run again with the --allow-read flag
|
||||||
|
Imported from "[WILDCARD]evil_remote_import.js:3"
|
||||||
|
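These expectation changes show the location suffix now appended when a statically imported module cannot be fetched. A hedged sketch of a helper that would produce that suffix (err_with_location lives in module_graph.rs; its exact signature and the error constructor used here are assumptions):

fn err_with_location(e: ErrBox, maybe_location: Option<&Location>) -> ErrBox {
    match maybe_location {
        Some(location) => {
            // Append the import site so the message ends with
            //   Imported from "<file>:<line>"
            let msg = format!(
                "{}\nImported from \"{}:{}\"",
                e, location.filename, location.line
            );
            OpError::other(msg).into()
        }
        None => e,
    }
}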
|
cli/tsc.rs (441 changed lines)
|
@ -3,19 +3,26 @@ use crate::colors;
|
||||||
use crate::diagnostics::Diagnostic;
|
use crate::diagnostics::Diagnostic;
|
||||||
use crate::diagnostics::DiagnosticItem;
|
use crate::diagnostics::DiagnosticItem;
|
||||||
use crate::disk_cache::DiskCache;
|
use crate::disk_cache::DiskCache;
|
||||||
|
use crate::doc::Location;
|
||||||
use crate::file_fetcher::SourceFile;
|
use crate::file_fetcher::SourceFile;
|
||||||
use crate::file_fetcher::SourceFileFetcher;
|
use crate::file_fetcher::SourceFileFetcher;
|
||||||
use crate::global_state::GlobalState;
|
use crate::global_state::GlobalState;
|
||||||
use crate::import_map::ImportMap;
|
use crate::import_map::ImportMap;
|
||||||
use crate::module_graph::ModuleGraphFile;
|
use crate::module_graph::ModuleGraph;
|
||||||
use crate::module_graph::ModuleGraphLoader;
|
use crate::module_graph::ModuleGraphLoader;
|
||||||
use crate::msg;
|
use crate::msg;
|
||||||
|
use crate::msg::MediaType;
|
||||||
use crate::op_error::OpError;
|
use crate::op_error::OpError;
|
||||||
use crate::ops;
|
use crate::ops;
|
||||||
use crate::permissions::Permissions;
|
use crate::permissions::Permissions;
|
||||||
use crate::source_maps::SourceMapGetter;
|
use crate::source_maps::SourceMapGetter;
|
||||||
use crate::startup_data;
|
use crate::startup_data;
|
||||||
use crate::state::State;
|
use crate::state::State;
|
||||||
|
use crate::swc_common::comments::CommentKind;
|
||||||
|
use crate::swc_common::Span;
|
||||||
|
use crate::swc_ecma_ast;
|
||||||
|
use crate::swc_util::AstParser;
|
||||||
|
use crate::swc_util::SwcDiagnosticBuffer;
|
||||||
use crate::version;
|
use crate::version;
|
||||||
use crate::web_worker::WebWorker;
|
use crate::web_worker::WebWorker;
|
||||||
use crate::worker::WorkerEvent;
|
use crate::worker::WorkerEvent;
|
||||||
|
@ -37,7 +44,6 @@ use sourcemap::SourceMap;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::collections::HashSet;
|
use std::collections::HashSet;
|
||||||
use std::fs;
|
use std::fs;
|
||||||
use std::hash::BuildHasher;
|
|
||||||
use std::io;
|
use std::io;
|
||||||
use std::ops::Deref;
|
use std::ops::Deref;
|
||||||
use std::ops::DerefMut;
|
use std::ops::DerefMut;
|
||||||
|
@ -48,6 +54,8 @@ use std::sync::atomic::Ordering;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use std::sync::Mutex;
|
use std::sync::Mutex;
|
||||||
use std::task::Poll;
|
use std::task::Poll;
|
||||||
|
use swc_ecma_visit::Node;
|
||||||
|
use swc_ecma_visit::Visit;
|
||||||
use url::Url;
|
use url::Url;
|
||||||
|
|
||||||
pub const AVAILABLE_LIBS: &[&str] = &[
|
pub const AVAILABLE_LIBS: &[&str] = &[
|
||||||
|
@ -273,12 +281,10 @@ impl CompilerConfig {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Information associated with compiled file in cache.
|
/// Information associated with compiled file in cache.
|
||||||
/// Includes source code path and state hash.
|
|
||||||
/// version_hash is used to validate versions of the file
|
/// version_hash is used to validate versions of the file
|
||||||
/// and could be used to remove stale file in cache.
|
/// and could be used to remove stale file in cache.
|
||||||
#[derive(Deserialize, Serialize)]
|
#[derive(Deserialize, Serialize)]
|
||||||
pub struct CompiledFileMetadata {
|
pub struct CompiledFileMetadata {
|
||||||
pub source_path: PathBuf,
|
|
||||||
pub version_hash: String,
|
pub version_hash: String,
|
||||||
}
|
}
|
||||||
|
|
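With the source_path field gone, a compiled file's .meta record reduces to the version hash alone. A small illustration of the serialized form (the hash value is made up):

#[test]
fn compiled_file_metadata_serializes_to_version_hash_only() {
    let meta = CompiledFileMetadata {
        version_hash: "a1b2c3".to_string(),
    };
    // The derived Serialize impl now emits a single field.
    assert_eq!(
        serde_json::to_string(&meta).unwrap(),
        r#"{"version_hash":"a1b2c3"}"#
    );
}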
||||||
|
@ -419,7 +425,6 @@ impl TsCompiler {
|
||||||
|
|
||||||
/// Check if there is compiled source in cache that is valid
|
/// Check if there is compiled source in cache that is valid
|
||||||
/// and can be used again.
|
/// and can be used again.
|
||||||
// TODO(bartlomieju): there should be check that cached file actually exists
|
|
||||||
fn has_compiled_source(
|
fn has_compiled_source(
|
||||||
&self,
|
&self,
|
||||||
file_fetcher: &SourceFileFetcher,
|
file_fetcher: &SourceFileFetcher,
|
||||||
|
@ -430,8 +435,7 @@ impl TsCompiler {
|
||||||
.fetch_cached_source_file(&specifier, Permissions::allow_all())
|
.fetch_cached_source_file(&specifier, Permissions::allow_all())
|
||||||
{
|
{
|
||||||
if let Some(metadata) = self.get_metadata(&url) {
|
if let Some(metadata) = self.get_metadata(&url) {
|
||||||
// 2. compare version hashes
|
// Compare version hashes
|
||||||
// TODO: it would probably be good idea to make it method implemented on SourceFile
|
|
||||||
let version_hash_to_validate = source_code_version_hash(
|
let version_hash_to_validate = source_code_version_hash(
|
||||||
&source_file.source_code,
|
&source_file.source_code,
|
||||||
version::DENO,
|
version::DENO,
|
||||||
|
@ -462,7 +466,7 @@ impl TsCompiler {
|
||||||
source_file: &SourceFile,
|
source_file: &SourceFile,
|
||||||
target: TargetLib,
|
target: TargetLib,
|
||||||
permissions: Permissions,
|
permissions: Permissions,
|
||||||
module_graph: HashMap<String, ModuleGraphFile>,
|
module_graph: ModuleGraph,
|
||||||
allow_js: bool,
|
allow_js: bool,
|
||||||
) -> Result<(), ErrBox> {
|
) -> Result<(), ErrBox> {
|
||||||
let mut has_cached_version = false;
|
let mut has_cached_version = false;
|
||||||
|
@ -504,17 +508,15 @@ impl TsCompiler {
|
||||||
TargetLib::Worker => "worker",
|
TargetLib::Worker => "worker",
|
||||||
};
|
};
|
||||||
let root_names = vec![module_url.to_string()];
|
let root_names = vec![module_url.to_string()];
|
||||||
let bundle = false;
|
|
||||||
let unstable = global_state.flags.unstable;
|
let unstable = global_state.flags.unstable;
|
||||||
let compiler_config = self.config.clone();
|
let compiler_config = self.config.clone();
|
||||||
let cwd = std::env::current_dir().unwrap();
|
let cwd = std::env::current_dir().unwrap();
|
||||||
let j = match (compiler_config.path, compiler_config.content) {
|
let j = match (compiler_config.path, compiler_config.content) {
|
||||||
(Some(config_path), Some(config_data)) => json!({
|
(Some(config_path), Some(config_data)) => json!({
|
||||||
"type": msg::CompilerRequestType::Compile as i32,
|
"type": msg::CompilerRequestType::Compile,
|
||||||
"allowJs": allow_js,
|
"allowJs": allow_js,
|
||||||
"target": target,
|
"target": target,
|
||||||
"rootNames": root_names,
|
"rootNames": root_names,
|
||||||
"bundle": bundle,
|
|
||||||
"unstable": unstable,
|
"unstable": unstable,
|
||||||
"configPath": config_path,
|
"configPath": config_path,
|
||||||
"config": str::from_utf8(&config_data).unwrap(),
|
"config": str::from_utf8(&config_data).unwrap(),
|
||||||
|
@ -522,11 +524,10 @@ impl TsCompiler {
|
||||||
"sourceFileMap": module_graph_json,
|
"sourceFileMap": module_graph_json,
|
||||||
}),
|
}),
|
||||||
_ => json!({
|
_ => json!({
|
||||||
"type": msg::CompilerRequestType::Compile as i32,
|
"type": msg::CompilerRequestType::Compile,
|
||||||
"allowJs": allow_js,
|
"allowJs": allow_js,
|
||||||
"target": target,
|
"target": target,
|
||||||
"rootNames": root_names,
|
"rootNames": root_names,
|
||||||
"bundle": bundle,
|
|
||||||
"unstable": unstable,
|
"unstable": unstable,
|
||||||
"cwd": cwd,
|
"cwd": cwd,
|
||||||
"sourceFileMap": module_graph_json,
|
"sourceFileMap": module_graph_json,
|
||||||
|
@ -563,8 +564,6 @@ impl TsCompiler {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_graph_metadata(&self, url: &Url) -> Option<GraphFileMetadata> {
|
fn get_graph_metadata(&self, url: &Url) -> Option<GraphFileMetadata> {
|
||||||
// Try to load cached version:
|
|
||||||
// 1. check if there's 'meta' file
|
|
||||||
let cache_key = self
|
let cache_key = self
|
||||||
.disk_cache
|
.disk_cache
|
||||||
.get_cache_filename_with_extension(url, "graph");
|
.get_cache_filename_with_extension(url, "graph");
|
||||||
|
@ -707,7 +706,6 @@ impl TsCompiler {
|
||||||
filename: compiled_code_filename,
|
filename: compiled_code_filename,
|
||||||
media_type: msg::MediaType::JavaScript,
|
media_type: msg::MediaType::JavaScript,
|
||||||
source_code: compiled_code,
|
source_code: compiled_code,
|
||||||
types_url: None,
|
|
||||||
types_header: None,
|
types_header: None,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -763,10 +761,7 @@ impl TsCompiler {
|
||||||
&self.config.hash,
|
&self.config.hash,
|
||||||
);
|
);
|
||||||
|
|
||||||
let compiled_file_metadata = CompiledFileMetadata {
|
let compiled_file_metadata = CompiledFileMetadata { version_hash };
|
||||||
source_path: source_file.filename,
|
|
||||||
version_hash,
|
|
||||||
};
|
|
||||||
let meta_key = self
|
let meta_key = self
|
||||||
.disk_cache
|
.disk_cache
|
||||||
.get_cache_filename_with_extension(module_specifier.as_url(), "meta");
|
.get_cache_filename_with_extension(module_specifier.as_url(), "meta");
|
||||||
|
@ -795,7 +790,6 @@ impl TsCompiler {
|
||||||
filename: source_map_filename,
|
filename: source_map_filename,
|
||||||
media_type: msg::MediaType::JavaScript,
|
media_type: msg::MediaType::JavaScript,
|
||||||
source_code,
|
source_code,
|
||||||
types_url: None,
|
|
||||||
types_header: None,
|
types_header: None,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -953,7 +947,6 @@ pub async fn bundle(
|
||||||
serde_json::to_value(module_graph).expect("Failed to serialize data");
|
serde_json::to_value(module_graph).expect("Failed to serialize data");
|
||||||
|
|
||||||
let root_names = vec![module_specifier.to_string()];
|
let root_names = vec![module_specifier.to_string()];
|
||||||
let bundle = true;
|
|
||||||
let target = "main";
|
let target = "main";
|
||||||
let cwd = std::env::current_dir().unwrap();
|
let cwd = std::env::current_dir().unwrap();
|
||||||
|
|
||||||
|
@ -961,10 +954,9 @@ pub async fn bundle(
|
||||||
// be optional
|
// be optional
|
||||||
let j = match (compiler_config.path, compiler_config.content) {
|
let j = match (compiler_config.path, compiler_config.content) {
|
||||||
(Some(config_path), Some(config_data)) => json!({
|
(Some(config_path), Some(config_data)) => json!({
|
||||||
"type": msg::CompilerRequestType::Compile as i32,
|
"type": msg::CompilerRequestType::Bundle,
|
||||||
"target": target,
|
"target": target,
|
||||||
"rootNames": root_names,
|
"rootNames": root_names,
|
||||||
"bundle": bundle,
|
|
||||||
"unstable": unstable,
|
"unstable": unstable,
|
||||||
"configPath": config_path,
|
"configPath": config_path,
|
||||||
"config": str::from_utf8(&config_data).unwrap(),
|
"config": str::from_utf8(&config_data).unwrap(),
|
||||||
|
@ -972,10 +964,9 @@ pub async fn bundle(
|
||||||
"sourceFileMap": module_graph_json,
|
"sourceFileMap": module_graph_json,
|
||||||
}),
|
}),
|
||||||
_ => json!({
|
_ => json!({
|
||||||
"type": msg::CompilerRequestType::Compile as i32,
|
"type": msg::CompilerRequestType::Bundle,
|
||||||
"target": target,
|
"target": target,
|
||||||
"rootNames": root_names,
|
"rootNames": root_names,
|
||||||
"bundle": bundle,
|
|
||||||
"unstable": unstable,
|
"unstable": unstable,
|
||||||
"cwd": cwd,
|
"cwd": cwd,
|
||||||
"sourceFileMap": module_graph_json,
|
"sourceFileMap": module_graph_json,
|
||||||
|
@ -1000,20 +991,18 @@ pub async fn bundle(
|
||||||
Ok(output)
|
Ok(output)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// This function is used by `Deno.compile()` and `Deno.bundle()` APIs.
|
async fn create_runtime_module_graph(
|
||||||
pub async fn runtime_compile<S: BuildHasher>(
|
|
||||||
global_state: GlobalState,
|
global_state: GlobalState,
|
||||||
permissions: Permissions,
|
permissions: Permissions,
|
||||||
root_name: &str,
|
root_name: &str,
|
||||||
sources: &Option<HashMap<String, String, S>>,
|
sources: &Option<HashMap<String, String>>,
|
||||||
bundle: bool,
|
|
||||||
maybe_options: &Option<String>,
|
maybe_options: &Option<String>,
|
||||||
) -> Result<Value, OpError> {
|
) -> Result<(Vec<String>, ModuleGraph), OpError> {
|
||||||
let mut root_names = vec![];
|
let mut root_names = vec![];
|
||||||
let mut module_graph_loader = ModuleGraphLoader::new(
|
let mut module_graph_loader = ModuleGraphLoader::new(
|
||||||
global_state.file_fetcher.clone(),
|
global_state.file_fetcher.clone(),
|
||||||
None,
|
None,
|
||||||
permissions.clone(),
|
permissions,
|
||||||
false,
|
false,
|
||||||
false,
|
false,
|
||||||
);
|
);
|
||||||
|
@ -1050,17 +1039,34 @@ pub async fn runtime_compile<S: BuildHasher>(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let module_graph = module_graph_loader.get_graph();
|
Ok((root_names, module_graph_loader.get_graph()))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// This function is used by `Deno.compile()` API.
|
||||||
|
pub async fn runtime_compile(
|
||||||
|
global_state: GlobalState,
|
||||||
|
permissions: Permissions,
|
||||||
|
root_name: &str,
|
||||||
|
sources: &Option<HashMap<String, String>>,
|
||||||
|
maybe_options: &Option<String>,
|
||||||
|
) -> Result<Value, OpError> {
|
||||||
|
let (root_names, module_graph) = create_runtime_module_graph(
|
||||||
|
global_state.clone(),
|
||||||
|
permissions.clone(),
|
||||||
|
root_name,
|
||||||
|
sources,
|
||||||
|
maybe_options,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
let module_graph_json =
|
let module_graph_json =
|
||||||
serde_json::to_value(module_graph).expect("Failed to serialize data");
|
serde_json::to_value(module_graph).expect("Failed to serialize data");
|
||||||
|
|
||||||
let req_msg = json!({
|
let req_msg = json!({
|
||||||
"type": msg::CompilerRequestType::RuntimeCompile as i32,
|
"type": msg::CompilerRequestType::RuntimeCompile,
|
||||||
"target": "runtime",
|
"target": "runtime",
|
||||||
"rootNames": root_names,
|
"rootNames": root_names,
|
||||||
"sourceFileMap": module_graph_json,
|
"sourceFileMap": module_graph_json,
|
||||||
"options": maybe_options,
|
"options": maybe_options,
|
||||||
"bundle": bundle,
|
|
||||||
"unstable": global_state.flags.unstable,
|
"unstable": global_state.flags.unstable,
|
||||||
})
|
})
|
||||||
.to_string()
|
.to_string()
|
||||||
|
@ -1072,12 +1078,6 @@ pub async fn runtime_compile<S: BuildHasher>(
|
||||||
let msg = execute_in_same_thread(global_state, permissions, req_msg).await?;
|
let msg = execute_in_same_thread(global_state, permissions, req_msg).await?;
|
||||||
let json_str = std::str::from_utf8(&msg).unwrap();
|
let json_str = std::str::from_utf8(&msg).unwrap();
|
||||||
|
|
||||||
// TODO(bartlomieju): factor `bundle` path into separate function `runtime_bundle`
|
|
||||||
if bundle {
|
|
||||||
let _response: RuntimeBundleResponse = serde_json::from_str(json_str)?;
|
|
||||||
return Ok(serde_json::from_str::<Value>(json_str).unwrap());
|
|
||||||
}
|
|
||||||
|
|
||||||
let response: RuntimeCompileResponse = serde_json::from_str(json_str)?;
|
let response: RuntimeCompileResponse = serde_json::from_str(json_str)?;
|
||||||
|
|
||||||
if response.diagnostics.is_empty() && sources.is_none() {
|
if response.diagnostics.is_empty() && sources.is_none() {
|
||||||
|
@ -1085,20 +1085,60 @@ pub async fn runtime_compile<S: BuildHasher>(
|
||||||
}
|
}
|
||||||
|
|
||||||
// We're returning `Ok()` instead of `Err()` because it's not runtime
|
// We're returning `Ok()` instead of `Err()` because it's not runtime
|
||||||
// error if there were diagnostics produces; we want to let user handle
|
// error if there were diagnostics produced; we want to let user handle
|
||||||
|
// diagnostics in the runtime.
|
||||||
|
Ok(serde_json::from_str::<Value>(json_str).unwrap())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// This function is used by `Deno.bundle()` API.
|
||||||
|
pub async fn runtime_bundle(
|
||||||
|
global_state: GlobalState,
|
||||||
|
permissions: Permissions,
|
||||||
|
root_name: &str,
|
||||||
|
sources: &Option<HashMap<String, String>>,
|
||||||
|
maybe_options: &Option<String>,
|
||||||
|
) -> Result<Value, OpError> {
|
||||||
|
let (root_names, module_graph) = create_runtime_module_graph(
|
||||||
|
global_state.clone(),
|
||||||
|
permissions.clone(),
|
||||||
|
root_name,
|
||||||
|
sources,
|
||||||
|
maybe_options,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
let module_graph_json =
|
||||||
|
serde_json::to_value(module_graph).expect("Failed to serialize data");
|
||||||
|
|
||||||
|
let req_msg = json!({
|
||||||
|
"type": msg::CompilerRequestType::RuntimeBundle,
|
||||||
|
"target": "runtime",
|
||||||
|
"rootNames": root_names,
|
||||||
|
"sourceFileMap": module_graph_json,
|
||||||
|
"options": maybe_options,
|
||||||
|
"unstable": global_state.flags.unstable,
|
||||||
|
})
|
||||||
|
.to_string()
|
||||||
|
.into_boxed_str()
|
||||||
|
.into_boxed_bytes();
|
||||||
|
|
||||||
|
let msg = execute_in_same_thread(global_state, permissions, req_msg).await?;
|
||||||
|
let json_str = std::str::from_utf8(&msg).unwrap();
|
||||||
|
let _response: RuntimeBundleResponse = serde_json::from_str(json_str)?;
|
||||||
|
// We're returning `Ok()` instead of `Err()` because it's not runtime
|
||||||
|
// error if there were diagnostics produced; we want to let user handle
|
||||||
// diagnostics in the runtime.
|
// diagnostics in the runtime.
|
||||||
Ok(serde_json::from_str::<Value>(json_str).unwrap())
|
Ok(serde_json::from_str::<Value>(json_str).unwrap())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// This function is used by `Deno.transpileOnly()` API.
|
/// This function is used by `Deno.transpileOnly()` API.
|
||||||
pub async fn runtime_transpile<S: BuildHasher>(
|
pub async fn runtime_transpile(
|
||||||
global_state: GlobalState,
|
global_state: GlobalState,
|
||||||
permissions: Permissions,
|
permissions: Permissions,
|
||||||
sources: &HashMap<String, String, S>,
|
sources: &HashMap<String, String>,
|
||||||
options: &Option<String>,
|
options: &Option<String>,
|
||||||
) -> Result<Value, OpError> {
|
) -> Result<Value, OpError> {
|
||||||
let req_msg = json!({
|
let req_msg = json!({
|
||||||
"type": msg::CompilerRequestType::RuntimeTranspile as i32,
|
"type": msg::CompilerRequestType::RuntimeTranspile,
|
||||||
"sources": sources,
|
"sources": sources,
|
||||||
"options": options,
|
"options": options,
|
||||||
})
|
})
|
||||||
|
@ -1113,6 +1153,278 @@ pub async fn runtime_transpile<S: BuildHasher>(
|
||||||
Ok(v)
|
Ok(v)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq)]
enum DependencyKind {
  Import,
  DynamicImport,
  Export,
}

#[derive(Clone, Debug, PartialEq)]
struct DependencyDescriptor {
  span: Span,
  specifier: String,
  kind: DependencyKind,
}

struct DependencyVisitor {
  dependencies: Vec<DependencyDescriptor>,
}

impl Visit for DependencyVisitor {
  fn visit_import_decl(
    &mut self,
    import_decl: &swc_ecma_ast::ImportDecl,
    _parent: &dyn Node,
  ) {
    let src_str = import_decl.src.value.to_string();
    self.dependencies.push(DependencyDescriptor {
      specifier: src_str,
      kind: DependencyKind::Import,
      span: import_decl.span,
    });
  }

  fn visit_named_export(
    &mut self,
    named_export: &swc_ecma_ast::NamedExport,
    _parent: &dyn Node,
  ) {
    if let Some(src) = &named_export.src {
      let src_str = src.value.to_string();
      self.dependencies.push(DependencyDescriptor {
        specifier: src_str,
        kind: DependencyKind::Export,
        span: named_export.span,
      });
    }
  }

  fn visit_export_all(
    &mut self,
    export_all: &swc_ecma_ast::ExportAll,
    _parent: &dyn Node,
  ) {
    let src_str = export_all.src.value.to_string();
    self.dependencies.push(DependencyDescriptor {
      specifier: src_str,
      kind: DependencyKind::Export,
      span: export_all.span,
    });
  }

  fn visit_ts_import_type(
    &mut self,
    ts_import_type: &swc_ecma_ast::TsImportType,
    _parent: &dyn Node,
  ) {
    // TODO(bartlomieju): possibly add separate DependencyKind
    let src_str = ts_import_type.arg.value.to_string();
    self.dependencies.push(DependencyDescriptor {
      specifier: src_str,
      kind: DependencyKind::Import,
      span: ts_import_type.arg.span,
    });
  }

  fn visit_call_expr(
    &mut self,
    call_expr: &swc_ecma_ast::CallExpr,
    parent: &dyn Node,
  ) {
    use swc_ecma_ast::Expr::*;
    use swc_ecma_ast::ExprOrSuper::*;

    swc_ecma_visit::visit_call_expr(self, call_expr, parent);
    let boxed_expr = match call_expr.callee.clone() {
      Super(_) => return,
      Expr(boxed) => boxed,
    };

    match &*boxed_expr {
      Ident(ident) => {
        if &ident.sym.to_string() != "import" {
          return;
        }
      }
      _ => return,
    };

    if let Some(arg) = call_expr.args.get(0) {
      match &*arg.expr {
        Lit(lit) => {
          if let swc_ecma_ast::Lit::Str(str_) = lit {
            let src_str = str_.value.to_string();
            self.dependencies.push(DependencyDescriptor {
              specifier: src_str,
              kind: DependencyKind::DynamicImport,
              span: call_expr.span,
            });
          }
        }
        _ => return,
      }
    }
  }
}

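To make the visitor's behavior concrete, a hypothetical TypeScript module like the one below (file names are placeholders) would yield one `DependencyDescriptor` per annotated statement, while the computed dynamic import is skipped because its argument is not a string literal.

// Hypothetical module; each annotated statement maps to a DependencyKind.
declare const someComputedPath: string;

import { a } from "./a.ts"; // DependencyKind::Import
export { b } from "./b.ts"; // DependencyKind::Export (NamedExport with src)
export * from "./c.ts"; // DependencyKind::Export (ExportAll)
type D = import("./d.d.ts").D; // DependencyKind::Import (TsImportType)
const e = import("./e.ts"); // DependencyKind::DynamicImport (string literal)
const f = import(someComputedPath); // not recorded: argument is not a string literal
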
#[derive(Clone, Debug, PartialEq)]
pub struct ImportDesc {
  pub specifier: String,
  pub deno_types: Option<String>,
  pub location: Location,
}

#[derive(Clone, Debug, PartialEq)]
pub enum TsReferenceKind {
  Lib,
  Types,
  Path,
}

#[derive(Clone, Debug, PartialEq)]
pub struct TsReferenceDesc {
  pub kind: TsReferenceKind,
  pub specifier: String,
  pub location: Location,
}

// TODO(bartlomieju): handle imports in ambient contexts/TS modules
/// This function is a port of `ts.preProcessFile()`.
///
/// Additionally it captures `@deno-types` references directly
/// preceding `import .. from` and `export .. from` statements.
pub fn pre_process_file(
  file_name: &str,
  media_type: MediaType,
  source_code: &str,
  analyze_dynamic_imports: bool,
) -> Result<(Vec<ImportDesc>, Vec<TsReferenceDesc>), SwcDiagnosticBuffer> {
  let parser = AstParser::new();
  parser.parse_module(file_name, media_type, source_code, |parse_result| {
    let module = parse_result?;
    let mut collector = DependencyVisitor {
      dependencies: vec![],
    };
    let module_span = module.span;
    collector.visit_module(&module, &module);

    let dependency_descriptors = collector.dependencies;

    // for each import check if there's relevant @deno-types directive
    let imports = dependency_descriptors
      .iter()
      .filter(|desc| {
        if analyze_dynamic_imports {
          return true;
        }

        desc.kind != DependencyKind::DynamicImport
      })
      .map(|desc| {
        let location = parser.get_span_location(desc.span);
        let deno_types = get_deno_types(&parser, desc.span);
        ImportDesc {
          specifier: desc.specifier.to_string(),
          deno_types,
          location: location.into(),
        }
      })
      .collect();

    // analyze comment from beginning of the file and find TS directives
    let comments = parser
      .comments
      .take_leading_comments(module_span.lo())
      .unwrap_or_else(Vec::new);

    let mut references = vec![];
    for comment in comments {
      if comment.kind != CommentKind::Line {
        continue;
      }

      let text = comment.text.to_string();
      if let Some((kind, specifier)) = parse_ts_reference(text.trim()) {
        let location = parser.get_span_location(comment.span);
        references.push(TsReferenceDesc {
          kind,
          specifier,
          location: location.into(),
        });
      }
    }
    Ok((imports, references))
  })
}

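As a concrete picture of what `pre_process_file` extracts, consider a hypothetical module like this (all specifiers are made up):

// Hypothetical input file.
/// <reference lib="deno.ns" />
/// <reference types="./globals.d.ts" />

// @deno-types="https://example.com/foo.d.ts"
import { foo } from "https://example.com/foo.js";
import { bar } from "./bar.ts";

For such a file the function should return two `TsReferenceDesc` entries (one `Lib`, one `Types`, taken from the leading line comments) and two `ImportDesc` entries, the first carrying `deno_types` of `Some("https://example.com/foo.d.ts")` because the `@deno-types` comment directly precedes its import.
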
fn get_deno_types(parser: &AstParser, span: Span) -> Option<String> {
  let comments = parser.get_span_comments(span);

  if comments.is_empty() {
    return None;
  }

  // @deno-types must directly precede the import statement - hence
  // checking last comment for span
  let last = comments.last().unwrap();
  let comment = last.text.trim_start();
  parse_deno_types(&comment)
}

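Because only the last comment attached to the import's span is inspected, a `@deno-types` directive takes effect only when it is the comment immediately above the import; if another comment sits in between, `parse_deno_types` sees that other comment instead. A hypothetical illustration:

// Picked up: the directive is the comment directly above the import.
// @deno-types="./a.d.ts"
import * as a from "./a.js";

// Not picked up: an unrelated comment now separates the directive from the
// import, so the last leading comment is no longer the @deno-types line.
// @deno-types="./b.d.ts"
// some unrelated note
import * as b from "./b.js";
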
// TODO(bartlomieju): refactor
fn parse_ts_reference(comment: &str) -> Option<(TsReferenceKind, String)> {
  let (kind, specifier_in_quotes) = if comment.starts_with("/ <reference path=")
  {
    (
      TsReferenceKind::Path,
      comment.trim_start_matches("/ <reference path="),
    )
  } else if comment.starts_with("/ <reference lib=") {
    (
      TsReferenceKind::Lib,
      comment.trim_start_matches("/ <reference lib="),
    )
  } else if comment.starts_with("/ <reference types=") {
    (
      TsReferenceKind::Types,
      comment.trim_start_matches("/ <reference types="),
    )
  } else {
    return None;
  };

  let specifier = specifier_in_quotes
    .trim_end_matches("/>")
    .trim_end()
    .trim_start_matches('\"')
    .trim_start_matches('\'')
    .trim_end_matches('\"')
    .trim_end_matches('\'')
    .to_string();

  Some((kind, specifier))
}

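The `"/ <reference ..."` prefixes look odd but are intentional: the comment text handed back by the parser excludes the leading `//` of a line comment, so a triple-slash directive reaches this function with a single leading slash, which is exactly the form the tests below exercise. In a source file the directives are written the usual way, for example:

// As written in a TypeScript file; the leading "//" is stripped before the
// text reaches parse_ts_reference.
/// <reference path="./dep.ts" />
/// <reference lib="deno.shared_globals" />
/// <reference types="./types.d.ts" />
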
fn parse_deno_types(comment: &str) -> Option<String> {
  if comment.starts_with("@deno-types") {
    let split: Vec<String> =
      comment.split('=').map(|s| s.to_string()).collect();
    assert_eq!(split.len(), 2);
    let specifier_in_quotes = split.get(1).unwrap().to_string();
    let specifier = specifier_in_quotes
      .trim()
      .trim_start_matches('\"')
      .trim_start_matches('\'')
      .trim_end_matches('\"')
      .trim_end_matches('\'')
      .to_string();
    return Some(specifier);
  }

  None
}

#[cfg(test)]
mod tests {
  use super::*;
@@ -1121,6 +1433,44 @@ mod tests {
  use std::path::PathBuf;
  use tempfile::TempDir;

  #[test]
  fn test_parse_deno_types() {
    assert_eq!(
      parse_deno_types("@deno-types=./a/b/c.d.ts"),
      Some("./a/b/c.d.ts".to_string())
    );
    assert_eq!(
      parse_deno_types("@deno-types = https://dneo.land/x/some/package/a.d.ts"),
      Some("https://dneo.land/x/some/package/a.d.ts".to_string())
    );
    assert_eq!(
      parse_deno_types("@deno-types = ./a/b/c.d.ts"),
      Some("./a/b/c.d.ts".to_string())
    );
    assert!(parse_deno_types("asdf").is_none());
    assert!(parse_deno_types("// deno-types = fooo").is_none());
  }

  #[test]
  fn test_parse_ts_reference() {
    assert_eq!(
      parse_ts_reference(r#"/ <reference lib="deno.shared_globals" />"#),
      Some((TsReferenceKind::Lib, "deno.shared_globals".to_string()))
    );
    assert_eq!(
      parse_ts_reference(r#"/ <reference path="./type/reference/dep.ts" />"#),
      Some((TsReferenceKind::Path, "./type/reference/dep.ts".to_string()))
    );
    assert_eq!(
      parse_ts_reference(r#"/ <reference types="./type/reference.d.ts" />"#),
      Some((TsReferenceKind::Types, "./type/reference.d.ts".to_string()))
    );
    assert!(parse_ts_reference("asdf").is_none());
    assert!(
      parse_ts_reference(r#"/ <reference unknown="unknown" />"#).is_none()
    );
  }

  #[tokio::test]
  async fn test_compile() {
    let p = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
@@ -1134,7 +1484,6 @@ mod tests {
      filename: PathBuf::from(p.to_str().unwrap().to_string()),
      media_type: msg::MediaType::TypeScript,
      source_code: include_bytes!("./tests/002_hello.ts").to_vec(),
-      types_url: None,
      types_header: None,
    };
    let mock_state =

@@ -212,7 +212,6 @@ impl Future for WebWorker {
      match r {
        Some(msg) => {
          let msg = String::from_utf8(msg.to_vec()).unwrap();
-          debug!("received message from host: {}", msg);
          let script = format!("workerMessageRecvCallback({})", msg);

          if let Err(e) = worker.execute(&script) {