Mirror of https://github.com/denoland/deno.git, synced 2024-11-25 15:29:32 -05:00

Merge branch 'main' into auto-config

This commit is contained in: commit a91fbcc4e7

106 changed files with 3795 additions and 3060 deletions
.github/workflows/ci.yml (vendored, 8 changed lines)

@@ -53,6 +53,7 @@ jobs:
 CARGO_TERM_COLOR: always
 RUST_BACKTRACE: full
 RUSTC_FORCE_INCREMENTAL: 1
+RUSTFLAGS: -D warnings

 steps:
 - name: Configure git
@@ -84,7 +85,7 @@ jobs:
 - name: Install Rust
 uses: hecrj/setup-rust-action@v1
 with:
-rust-version: 1.57.0
+rust-version: 1.58.0

 - name: Install clippy and rustfmt
 if: matrix.job == 'lint'
@@ -146,11 +147,6 @@ jobs:
 service_account_key: ${{ secrets.GCP_SA_KEY }}
 export_default_credentials: true

-- name: Error on warning
-# TODO(piscisaureus): enable this on Windows again.
-if: "!matrix.use_sysroot && !startsWith(matrix.os, 'windows')"
-run: echo "RUSTFLAGS=-D warnings" >> $GITHUB_ENV
-
 - name: Configure canary build
 if: |
 matrix.job == 'test' &&
.github/workflows/wpt_epoch.yml (vendored, 2 changed lines)

@@ -20,7 +20,7 @@ jobs:
 fail-fast: false
 matrix:
 deno-version: [v1.x, canary]
-os: [ubuntu-latest-xl]
+os: [ubuntu-20.04-xl]

 steps:
 - name: Clone repository
Cargo.lock (generated, 655 changed lines)

File diff suppressed because it is too large.
Cargo.toml (12 changed lines)

@@ -88,6 +88,12 @@ opt-level = 3
 opt-level = 3
 [profile.bench.package.tokio]
 opt-level = 3
+[profile.bench.package.zstd]
+opt-level = 3
+[profile.bench.package.lzzzz]
+opt-level = 3
+[profile.bench.package.zstd-sys]
+opt-level = 3

 # NB: the `bench` and `release` profiles must remain EXACTLY the same.
 [profile.release.package.rand]
@@ -130,3 +136,9 @@ opt-level = 3
 opt-level = 3
 [profile.release.package.tokio]
 opt-level = 3
+[profile.release.package.zstd]
+opt-level = 3
+[profile.release.package.lzzzz]
+opt-level = 3
+[profile.release.package.zstd-sys]
+opt-level = 3
Releases.md (12 changed lines)

@@ -6,6 +6,18 @@ https://github.com/denoland/deno/releases
 We also have one-line install commands at:
 https://github.com/denoland/deno_install

+### 1.17.3 / 2022.01.12
+
+- fix: Get lib.deno_core.d.ts to parse correctly (#13238)
+- fix: expose "Deno.memoryUsage()" in worker context (#13293)
+- fix: install shim with `--allow-all` should not output each permission
+  individually (#13325)
+- fix(compile): fix output flag behaviour on compile command (#13299)
+- fix(coverage): don't type check (#13324)
+- fix(coverage): merge coverage ranges (#13334)
+- fix(ext/web): handle no arguments in atob (#13341)
+- fix(serde_v8): support #[serde(default)] (#13300)
+
 ### 1.17.2 / 2022.01.05

 - fix(cli): include JSON modules in bundle (#13188)
bench_util/Cargo.toml

@@ -1,7 +1,7 @@
 # Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
 [package]
 name = "deno_bench_util"
-version = "0.25.0"
+version = "0.26.0"
 authors = ["the Deno authors"]
 edition = "2021"
 license = "MIT"
@@ -14,7 +14,7 @@ description = "Bench and profiling utilities for deno crates"

 [dependencies]
 bencher = "0.1"
-deno_core = { version = "0.113.0", path = "../core" }
+deno_core = { version = "0.114.0", path = "../core" }
 tokio = { version = "1.10.1", features = ["full"] }

 [[bench]]
cli/Cargo.toml

@@ -2,7 +2,7 @@

 [package]
 name = "deno"
-version = "1.17.2"
+version = "1.17.3"
 authors = ["the Deno authors"]
 default-run = "deno"
 edition = "2021"
@@ -20,47 +20,50 @@ harness = false
 path = "./bench/main.rs"

 [build-dependencies]
-deno_broadcast_channel = { version = "0.25.0", path = "../ext/broadcast_channel" }
+deno_broadcast_channel = { version = "0.26.0", path = "../ext/broadcast_channel" }
-deno_console = { version = "0.31.0", path = "../ext/console" }
+deno_console = { version = "0.32.0", path = "../ext/console" }
-deno_core = { version = "0.113.0", path = "../core" }
+deno_core = { version = "0.114.0", path = "../core" }
-deno_crypto = { version = "0.45.0", path = "../ext/crypto" }
+deno_crypto = { version = "0.46.0", path = "../ext/crypto" }
-deno_fetch = { version = "0.54.0", path = "../ext/fetch" }
+deno_fetch = { version = "0.55.0", path = "../ext/fetch" }
-deno_net = { version = "0.23.0", path = "../ext/net" }
+deno_net = { version = "0.24.0", path = "../ext/net" }
-deno_url = { version = "0.31.0", path = "../ext/url" }
+deno_url = { version = "0.32.0", path = "../ext/url" }
-deno_web = { version = "0.62.0", path = "../ext/web" }
+deno_web = { version = "0.63.0", path = "../ext/web" }
-deno_webgpu = { version = "0.32.0", path = "../ext/webgpu" }
+deno_webgpu = { version = "0.33.0", path = "../ext/webgpu" }
-deno_websocket = { version = "0.36.0", path = "../ext/websocket" }
+deno_websocket = { version = "0.37.0", path = "../ext/websocket" }
-deno_webstorage = { version = "0.26.0", path = "../ext/webstorage" }
+deno_webstorage = { version = "0.27.0", path = "../ext/webstorage" }
 regex = "=1.5.4"
 serde = { version = "=1.0.133", features = ["derive"] }
+zstd = '=0.9.2'

 [target.'cfg(windows)'.build-dependencies]
 winapi = "=0.3.9"
 winres = "=0.1.11"

 [dependencies]
-deno_ast = { version = "0.8.0", features = ["bundler", "codegen", "dep_graph", "module_specifier", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
+deno_ast = { version = "0.9.0", features = ["bundler", "codegen", "dep_graph", "module_specifier", "proposal", "react", "sourcemap", "transforms", "transpiling", "typescript", "view", "visit"] }
-deno_core = { version = "0.113.0", path = "../core" }
+deno_core = { version = "0.114.0", path = "../core" }
-deno_doc = "0.25.0"
+deno_doc = "0.26.0"
-deno_graph = "0.17.0"
+deno_graph = "0.18.0"
-deno_lint = { version = "0.21.0", features = ["docs"] }
+deno_lint = { version = "0.22.0", features = ["docs"] }
-deno_runtime = { version = "0.39.0", path = "../runtime" }
+deno_runtime = { version = "0.40.0", path = "../runtime" }

 atty = "=0.2.14"
 base64 = "=0.13.0"
 cache_control = "=0.2.0"
 chrono = "=0.4.19"
-clap = "=2.33.3"
+clap = "=3.0.7"
+clap_complete = "=3.0.3"
+clap_complete_fig = "=3.0.2"
 data-url = "=0.1.1"
 dissimilar = "=1.0.2"
 dprint-plugin-json = "=0.14.0"
 dprint-plugin-markdown = "=0.12.0"
-dprint-plugin-typescript = "=0.61.0"
+dprint-plugin-typescript = "=0.62.0"
 encoding_rs = "=0.8.29"
 env_logger = "=0.8.4"
 fancy-regex = "=0.7.1"
 http = "=0.2.4"
-import_map = "=0.4.0"
+import_map = "=0.6.0"
 jsonc-parser = { version = "=0.17.1", features = ["serde"] }
 libc = "=0.2.106"
 log = { version = "=0.4.14", features = ["serde"] }
@@ -83,8 +86,10 @@ tempfile = "=3.2.0"
 text-size = "=1.1.0"
 text_lines = "=0.4.1"
 tokio = { version = "=1.14", features = ["full"] }
+typed-arena = "2.0.1"
 uuid = { version = "=0.8.2", features = ["v4", "serde"] }
 walkdir = "=2.3.2"
+zstd = '=0.9.2'

 [target.'cfg(windows)'.dependencies]
 fwdansi = "=1.1.0"
cli/ast/bundle_hook.rs (51 lines, deleted)

@@ -1,51 +0,0 @@
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
use deno_ast::swc::bundler::Hook;
use deno_ast::swc::bundler::ModuleRecord;
use deno_ast::swc::common::Span;
use deno_core::error::AnyError;

/// This contains the logic for Deno to rewrite the `import.meta` when bundling.
pub struct BundleHook;

impl Hook for BundleHook {
  fn get_import_meta_props(
    &self,
    span: Span,
    module_record: &ModuleRecord,
  ) -> Result<Vec<deno_ast::swc::ast::KeyValueProp>, AnyError> {
    use deno_ast::swc::ast;

    Ok(vec![
      ast::KeyValueProp {
        key: ast::PropName::Ident(ast::Ident::new("url".into(), span)),
        value: Box::new(ast::Expr::Lit(ast::Lit::Str(ast::Str {
          span,
          value: module_record.file_name.to_string().into(),
          kind: ast::StrKind::Synthesized,
          has_escape: false,
        }))),
      },
      ast::KeyValueProp {
        key: ast::PropName::Ident(ast::Ident::new("main".into(), span)),
        value: Box::new(if module_record.is_entry {
          ast::Expr::Member(ast::MemberExpr {
            span,
            obj: ast::ExprOrSuper::Expr(Box::new(ast::Expr::MetaProp(
              ast::MetaPropExpr {
                meta: ast::Ident::new("import".into(), span),
                prop: ast::Ident::new("meta".into(), span),
              },
            ))),
            prop: Box::new(ast::Expr::Ident(ast::Ident::new(
              "main".into(),
              span,
            ))),
            computed: false,
          })
        } else {
          ast::Expr::Lit(ast::Lit::Bool(ast::Bool { span, value: false }))
        }),
      },
    ])
  }
}
cli/ast/mod.rs (919 lines, deleted)

@@ -1,919 +0,0 @@

Top-level contents of the removed file (the CLI's swc-based transpiling layer, "// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license."):

- Imports from crate::config_file and crate::text_encoding::strip_bom; from deno_ast (get_syntax; swc ast, codegen, common, parser, transforms, visit; Diagnostic, LineAndColumnDisplay, MediaType, ParsedSource); from deno_core (anyhow, AnyError, resolve_url_or_path, serde_json, ModuleSpecifier); and from std (cell::RefCell, fmt, rc::Rc).
- mod bundle_hook; mod transforms; pub use bundle_hook::BundleHook;
- pub struct Location { pub specifier: String, pub line: usize, pub col: usize }, with constructors from_pos and from_line_and_column, conversions From<deno_ast::swc::common::Loc> and From<Location> for ModuleSpecifier, and a Display impl.
- pub struct Diagnostics(pub Vec<Diagnostic>), implementing std::error::Error and Display.
- pub enum ImportsNotUsedAsValues { Remove, Preserve, Error }.
- pub struct EmitOptions (emit_metadata, imports_not_used_as_values, inline_source_map, inline_sources, source_map, jsx_automatic, jsx_development, jsx_factory, jsx_fragment_factory, jsx_import_source, transform_jsx, repl_imports), with a Default impl and From<config_file::TsConfig>.
- fn strip_config_from_emit_options(options: &EmitOptions) -> typescript::strip::Config, mapping ImportsNotUsedAsValues::Error back to Remove because Error only affects the type-checking stage.
- pub(crate) struct SourceMapConfig { pub inline_sources: bool }, implementing swc's SourceMapGenConfig to decide whether sources are embedded in the source map.
- pub fn transpile(parsed_source: &ParsedSource, options: &EmitOptions) -> Result<(String, Option<String>), AnyError>, emitting JavaScript plus an optional source map (inlined as a base64 data URL by default).
- pub fn transpile_module(specifier, source, media_type, options, cm) -> Result<(Rc<SourceFile>, Module), AnyError>, a low-level transpile into an swc SourceFile that wraps JSON sources as "export default JSON.parse(...)".
- struct DiagnosticCollector implementing swc's errors::Emitter, and fn fold_program(program, options, source_map, comments, top_level_mark), which chains the REPL DownlevelImportsFolder/StripExportsFolder passes, legacy decorators, helper injection, resolver_with_mark, TypeScript stripping (with or without the JSX variant), the React JSX pass, fixer, and hygiene.
- Helpers ensure_no_fatal_swc_diagnostics, is_fatal_swc_diagnostic, format_swc_diagnostic, swc_err_to_diagnostic, ensure_no_fatal_diagnostics, and is_fatal_syntax_error (TS1003, TS1005, TS1109, UnterminatedStrLit).
- #[cfg(test)] mod tests: test_transpile, test_transpile_tsx, test_transpile_jsx_pragma, test_transpile_jsx_import_source_pragma, test_transpile_jsx_import_source_no_pragma, a commented-out test_transpile_jsx_import_source_no_pragma_dev (TODO(@kitsonk) swc issue 2656), test_transpile_decorators, transpile_handle_code_nested_in_ts_nodes_with_jsx_pass (from issue 12409), and diagnostic_jsx_spread_instead_of_panic.
cli/ast/transforms.rs (524 lines, deleted)

@@ -1,524 +0,0 @@

Top-level contents of the removed file (the REPL-only swc fold passes):

- Imports from deno_ast::swc (ast as swc_ast, common::DUMMY_SP, visit::noop_fold_type, visit::Fold).
- pub struct DownlevelImportsFolder: "Transforms import declarations to variable declarations with a dynamic import. This is used to provide import declaration support in the REPL." Its Fold impl rewrites module items so that type-only imports become an empty statement, bare side-effect imports become an `await import("...")` expression statement, and default, named, and namespace specifiers become a `const` declaration destructuring the result of `await import("...")`, with any import assertions forwarded as an `{ assert: ... }` second argument.
- pub struct StripExportsFolder: "Strips export declarations and exports on named exports for the REPL." Its Fold impl turns `export * from` and `export { ... } from` into `await import("...")`, unwraps `export default <expr>` to its expression, drops the export keyword from exported declarations, keeps named default function and class declarations as plain declarations, and replaces the rest with empty statements.
- Helpers create_empty_stmt, create_binding_ident, create_ident, create_key_value, create_await_import_expr(module_specifier, maybe_asserts), and create_assignment.
- #[cfg(test)] mod test, exercising both folders through a test_transform helper: test_downlevel_imports_type_only, test_downlevel_imports_specifier_only, test_downlevel_imports_default, test_downlevel_imports_named, test_downlevel_imports_namespace, test_downlevel_imports_mixed, test_downlevel_imports_assertions, test_strip_exports_export_all, test_strip_exports_export_named, and test_strip_exports_assertions. For example, `import mod from "./mod.ts";` becomes `const { "default": mod } = await import("./mod.ts");`, and `import data from "./mod.json" assert { type: "json" };` becomes `const { "default": data } = await import("./mod.json", { assert: { type: "json" } });`.
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_strip_exports_export_all_assertions() {
|
|
||||||
// even though this doesn't really make sense for someone to do
|
|
||||||
test_transform(
|
|
||||||
StripExportsFolder,
|
|
||||||
r#"export * from "./mod.json" assert { type: "json" };"#,
|
|
||||||
"await import(\"./mod.json\", {\n assert: {\n type: \"json\"\n }\n});",
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_strip_exports_export_default_expr() {
|
|
||||||
test_transform(StripExportsFolder, "export default 5;", "5;");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_strip_exports_export_default_decl_name() {
|
|
||||||
test_transform(
|
|
||||||
StripExportsFolder,
|
|
||||||
"export default class Test {}",
|
|
||||||
"class Test {\n}",
|
|
||||||
);
|
|
||||||
|
|
||||||
test_transform(
|
|
||||||
StripExportsFolder,
|
|
||||||
"export default function test() {}",
|
|
||||||
"function test() {}",
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_strip_exports_export_default_decl_no_name() {
|
|
||||||
test_transform(StripExportsFolder, "export default class {}", ";");
|
|
||||||
|
|
||||||
test_transform(StripExportsFolder, "export default function() {}", ";");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_strip_exports_export_named_decls() {
|
|
||||||
test_transform(
|
|
||||||
StripExportsFolder,
|
|
||||||
"export class Test {}",
|
|
||||||
"class Test {\n}",
|
|
||||||
);
|
|
||||||
|
|
||||||
test_transform(
|
|
||||||
StripExportsFolder,
|
|
||||||
"export function test() {}",
|
|
||||||
"function test() {}",
|
|
||||||
);
|
|
||||||
|
|
||||||
test_transform(StripExportsFolder, "export enum Test {}", "enum Test {\n}");
|
|
||||||
|
|
||||||
test_transform(
|
|
||||||
StripExportsFolder,
|
|
||||||
"export namespace Test {}",
|
|
||||||
"module Test {\n}",
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_strip_exports_not_in_namespace() {
|
|
||||||
test_transform(
|
|
||||||
StripExportsFolder,
|
|
||||||
"namespace Test { export class Test {} }",
|
|
||||||
"module Test {\n export class Test {\n }\n}",
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn test_transform(
|
|
||||||
mut transform: impl Fold,
|
|
||||||
src: &str,
|
|
||||||
expected_output: &str,
|
|
||||||
) {
|
|
||||||
let (source_map, module) = parse(src);
|
|
||||||
let output = print(source_map, module.fold_with(&mut transform));
|
|
||||||
assert_eq!(output, format!("{}\n", expected_output));
|
|
||||||
}
|
|
||||||
|
|
||||||
fn parse(src: &str) -> (Rc<SourceMap>, Module) {
|
|
||||||
let source_map = Rc::new(SourceMap::default());
|
|
||||||
let source_file = source_map.new_source_file(
|
|
||||||
FileName::Url(ModuleSpecifier::parse("file:///test.ts").unwrap()),
|
|
||||||
src.to_string(),
|
|
||||||
);
|
|
||||||
let input = StringInput::from(&*source_file);
|
|
||||||
let syntax = Syntax::Typescript(TsConfig {
|
|
||||||
..Default::default()
|
|
||||||
});
|
|
||||||
let mut parser = Parser::new(syntax, input, None);
|
|
||||||
(source_map, parser.parse_module().unwrap())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn print(source_map: Rc<SourceMap>, module: Module) -> String {
|
|
||||||
let mut buf = vec![];
|
|
||||||
{
|
|
||||||
let writer =
|
|
||||||
Box::new(JsWriter::new(source_map.clone(), "\n", &mut buf, None));
|
|
||||||
let config = deno_ast::swc::codegen::Config { minify: false };
|
|
||||||
let mut emitter = deno_ast::swc::codegen::Emitter {
|
|
||||||
cfg: config,
|
|
||||||
comments: None,
|
|
||||||
cm: source_map,
|
|
||||||
wr: writer,
|
|
||||||
};
|
|
||||||
module.emit_with(&mut emitter).unwrap();
|
|
||||||
}
|
|
||||||
String::from_utf8(buf).unwrap()
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -78,7 +78,7 @@ function benchRead128k() {
|
||||||
return benchAsync(
|
return benchAsync(
|
||||||
"read_128k",
|
"read_128k",
|
||||||
5e4,
|
5e4,
|
||||||
() => Deno.readFile("./cli/bench/fixtures/128k.bin"),
|
() => Deno.readFile("./cli/bench/testdata/128k.bin"),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -13,11 +13,11 @@ use std::time::Duration;
|
||||||
use test_util::lsp::LspClient;
|
use test_util::lsp::LspClient;
|
||||||
use test_util::lsp::LspResponseError;
|
use test_util::lsp::LspResponseError;
|
||||||
|
|
||||||
static FIXTURE_CODE_LENS_TS: &str = include_str!("fixtures/code_lens.ts");
|
static FIXTURE_CODE_LENS_TS: &str = include_str!("testdata/code_lens.ts");
|
||||||
static FIXTURE_DB_TS: &str = include_str!("fixtures/db.ts");
|
static FIXTURE_DB_TS: &str = include_str!("testdata/db.ts");
|
||||||
static FIXTURE_DB_MESSAGES: &[u8] = include_bytes!("fixtures/db_messages.json");
|
static FIXTURE_DB_MESSAGES: &[u8] = include_bytes!("testdata/db_messages.json");
|
||||||
static FIXTURE_INIT_JSON: &[u8] =
|
static FIXTURE_INIT_JSON: &[u8] =
|
||||||
include_bytes!("fixtures/initialize_params.json");
|
include_bytes!("testdata/initialize_params.json");
|
||||||
|
|
||||||
#[derive(Debug, Deserialize)]
|
#[derive(Debug, Deserialize)]
|
||||||
enum FixtureType {
|
enum FixtureType {
|
||||||
|
@ -57,7 +57,7 @@ fn bench_big_file_edits(deno_exe: &Path) -> Result<Duration, AnyError> {
|
||||||
"textDocument/didOpen",
|
"textDocument/didOpen",
|
||||||
json!({
|
json!({
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"languageId": "typescript",
|
"languageId": "typescript",
|
||||||
"version": 1,
|
"version": 1,
|
||||||
"text": FIXTURE_DB_TS
|
"text": FIXTURE_DB_TS
|
||||||
|
@ -137,7 +137,7 @@ fn bench_code_lens(deno_exe: &Path) -> Result<Duration, AnyError> {
|
||||||
"textDocument/didOpen",
|
"textDocument/didOpen",
|
||||||
json!({
|
json!({
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/code_lens.ts",
|
"uri": "file:///testdata/code_lens.ts",
|
||||||
"languageId": "typescript",
|
"languageId": "typescript",
|
||||||
"version": 1,
|
"version": 1,
|
||||||
"text": FIXTURE_CODE_LENS_TS
|
"text": FIXTURE_CODE_LENS_TS
|
||||||
|
@ -167,7 +167,7 @@ fn bench_code_lens(deno_exe: &Path) -> Result<Duration, AnyError> {
|
||||||
"textDocument/codeLens",
|
"textDocument/codeLens",
|
||||||
json!({
|
json!({
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/code_lens.ts"
|
"uri": "file:///testdata/code_lens.ts"
|
||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
)
|
)
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 2
|
"version": 2
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -28,7 +28,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 3
|
"version": 3
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -53,7 +53,7 @@
|
||||||
"type": "completion",
|
"type": "completion",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"position": {
|
"position": {
|
||||||
"line": 8330,
|
"line": 8330,
|
||||||
|
@ -68,7 +68,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 4
|
"version": 4
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -93,7 +93,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 5
|
"version": 5
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -118,7 +118,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 6
|
"version": 6
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -143,7 +143,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 7
|
"version": 7
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -168,7 +168,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 8
|
"version": 8
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -193,7 +193,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 9
|
"version": 9
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -218,7 +218,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 10
|
"version": 10
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -243,7 +243,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 11
|
"version": 11
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -268,7 +268,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 12
|
"version": 12
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -293,7 +293,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -314,7 +314,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 13
|
"version": 13
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -339,7 +339,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -360,7 +360,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -381,7 +381,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 14
|
"version": 14
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -406,7 +406,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 15
|
"version": 15
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -431,7 +431,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 16
|
"version": 16
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -456,7 +456,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 17
|
"version": 17
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -481,7 +481,7 @@
|
||||||
"type": "completion",
|
"type": "completion",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"position": {
|
"position": {
|
||||||
"line": 8331,
|
"line": 8331,
|
||||||
|
@ -496,7 +496,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 18
|
"version": 18
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -521,7 +521,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 19
|
"version": 19
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -546,7 +546,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 20
|
"version": 20
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -571,7 +571,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 21
|
"version": 21
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -596,7 +596,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -617,7 +617,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -638,7 +638,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -659,7 +659,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -680,7 +680,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 22
|
"version": 22
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -705,7 +705,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 23
|
"version": 23
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -730,7 +730,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 24
|
"version": 24
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -755,7 +755,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 25
|
"version": 25
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -780,7 +780,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 26
|
"version": 26
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -805,7 +805,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 27
|
"version": 27
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -830,7 +830,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 28
|
"version": 28
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -855,7 +855,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 29
|
"version": 29
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -880,7 +880,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -901,7 +901,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 30
|
"version": 30
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -926,7 +926,7 @@
|
||||||
"type": "completion",
|
"type": "completion",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"position": {
|
"position": {
|
||||||
"line": 8331,
|
"line": 8331,
|
||||||
|
@ -942,7 +942,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -963,7 +963,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -984,7 +984,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1005,7 +1005,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1026,7 +1026,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1047,7 +1047,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1068,7 +1068,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1089,7 +1089,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1110,7 +1110,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1131,7 +1131,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1152,7 +1152,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1173,7 +1173,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1194,7 +1194,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1215,7 +1215,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1236,7 +1236,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1257,7 +1257,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1278,7 +1278,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1299,7 +1299,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1320,7 +1320,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1341,7 +1341,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1362,7 +1362,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1383,7 +1383,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1404,7 +1404,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 31
|
"version": 31
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -1429,7 +1429,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1450,7 +1450,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 32
|
"version": 32
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -1475,7 +1475,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1496,7 +1496,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1517,7 +1517,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1538,7 +1538,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 33
|
"version": 33
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -1563,7 +1563,7 @@
|
||||||
"type": "completion",
|
"type": "completion",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"position": {
|
"position": {
|
||||||
"line": 8331,
|
"line": 8331,
|
||||||
|
@ -1579,7 +1579,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1600,7 +1600,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1621,7 +1621,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 34
|
"version": 34
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -1646,7 +1646,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1667,7 +1667,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1688,7 +1688,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 35
|
"version": 35
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -1713,7 +1713,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1734,7 +1734,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1755,7 +1755,7 @@
|
||||||
"type": "hover",
|
"type": "hover",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"position": {
|
"position": {
|
||||||
"line": 8331,
|
"line": 8331,
|
||||||
|
@ -1767,7 +1767,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1809,7 +1809,7 @@
|
||||||
"type": "hover",
|
"type": "hover",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"position": {
|
"position": {
|
||||||
"line": 8331,
|
"line": 8331,
|
||||||
|
@ -1821,7 +1821,7 @@
|
||||||
"type": "hover",
|
"type": "hover",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"position": {
|
"position": {
|
||||||
"line": 8331,
|
"line": 8331,
|
||||||
|
@ -1833,7 +1833,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1875,7 +1875,7 @@
|
||||||
"type": "hover",
|
"type": "hover",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"position": {
|
"position": {
|
||||||
"line": 8331,
|
"line": 8331,
|
||||||
|
@ -1887,7 +1887,7 @@
|
||||||
"type": "hover",
|
"type": "hover",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"position": {
|
"position": {
|
||||||
"line": 8331,
|
"line": 8331,
|
||||||
|
@ -1899,7 +1899,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -1941,7 +1941,7 @@
|
||||||
"type": "hover",
|
"type": "hover",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"position": {
|
"position": {
|
||||||
"line": 8331,
|
"line": 8331,
|
||||||
|
@ -1953,7 +1953,7 @@
|
||||||
"type": "hover",
|
"type": "hover",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"position": {
|
"position": {
|
||||||
"line": 8331,
|
"line": 8331,
|
||||||
|
@ -1965,7 +1965,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -2007,7 +2007,7 @@
|
||||||
"type": "hover",
|
"type": "hover",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"position": {
|
"position": {
|
||||||
"line": 8331,
|
"line": 8331,
|
||||||
|
@ -2019,7 +2019,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -2061,7 +2061,7 @@
|
||||||
"type": "highlight",
|
"type": "highlight",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"position": {
|
"position": {
|
||||||
"line": 8331,
|
"line": 8331,
|
||||||
|
@ -2073,7 +2073,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -2112,7 +2112,7 @@
|
||||||
"type": "hover",
|
"type": "hover",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"position": {
|
"position": {
|
||||||
"line": 8331,
|
"line": 8331,
|
||||||
|
@ -2124,7 +2124,7 @@
|
||||||
"type": "change",
|
"type": "change",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts",
|
"uri": "file:///testdata/db.ts",
|
||||||
"version": 36
|
"version": 36
|
||||||
},
|
},
|
||||||
"contentChanges": [
|
"contentChanges": [
|
||||||
|
@ -2149,7 +2149,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -2188,7 +2188,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
||||||
|
@ -2227,7 +2227,7 @@
|
||||||
"type": "action",
|
"type": "action",
|
||||||
"params": {
|
"params": {
|
||||||
"textDocument": {
|
"textDocument": {
|
||||||
"uri": "file:///fixtures/db.ts"
|
"uri": "file:///testdata/db.ts"
|
||||||
},
|
},
|
||||||
"range": {
|
"range": {
|
||||||
"start": {
|
"start": {
|
25
cli/build.rs
25
cli/build.rs
|
@ -38,7 +38,30 @@ fn create_snapshot(
|
||||||
let snapshot = js_runtime.snapshot();
|
let snapshot = js_runtime.snapshot();
|
||||||
let snapshot_slice: &[u8] = &*snapshot;
|
let snapshot_slice: &[u8] = &*snapshot;
|
||||||
println!("Snapshot size: {}", snapshot_slice.len());
|
println!("Snapshot size: {}", snapshot_slice.len());
|
||||||
std::fs::write(&snapshot_path, snapshot_slice).unwrap();
|
|
||||||
|
let compressed_snapshot_with_size = {
|
||||||
|
let mut vec = vec![];
|
||||||
|
|
||||||
|
vec.extend_from_slice(
|
||||||
|
&u32::try_from(snapshot.len())
|
||||||
|
.expect("snapshot larger than 4gb")
|
||||||
|
.to_le_bytes(),
|
||||||
|
);
|
||||||
|
|
||||||
|
vec.extend_from_slice(
|
||||||
|
&zstd::block::compress(snapshot_slice, 22)
|
||||||
|
.expect("snapshot compression failed"),
|
||||||
|
);
|
||||||
|
|
||||||
|
vec
|
||||||
|
};
|
||||||
|
|
||||||
|
println!(
|
||||||
|
"Snapshot compressed size: {}",
|
||||||
|
compressed_snapshot_with_size.len()
|
||||||
|
);
|
||||||
|
|
||||||
|
std::fs::write(&snapshot_path, compressed_snapshot_with_size).unwrap();
|
||||||
println!("Snapshot written to: {} ", snapshot_path.display());
|
println!("Snapshot written to: {} ", snapshot_path.display());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -149,7 +149,7 @@ impl Loader for FetchCacher {
|
||||||
let file_fetcher = self.file_fetcher.clone();
|
let file_fetcher = self.file_fetcher.clone();
|
||||||
|
|
||||||
async move {
|
async move {
|
||||||
let load_result = file_fetcher
|
file_fetcher
|
||||||
.fetch(&specifier, &mut permissions)
|
.fetch(&specifier, &mut permissions)
|
||||||
.await
|
.await
|
||||||
.map_or_else(
|
.map_or_else(
|
||||||
|
@ -170,9 +170,7 @@ impl Loader for FetchCacher {
|
||||||
content: file.source,
|
content: file.source,
|
||||||
}))
|
}))
|
||||||
},
|
},
|
||||||
);
|
)
|
||||||
|
|
||||||
(specifier, load_result)
|
|
||||||
}
|
}
|
||||||
.boxed()
|
.boxed()
|
||||||
}
|
}
|
||||||
|
@ -295,7 +293,7 @@ impl Loader for MemoryCacher {
|
||||||
maybe_headers: None,
|
maybe_headers: None,
|
||||||
content: c.to_owned(),
|
content: c.to_owned(),
|
||||||
});
|
});
|
||||||
Box::pin(future::ready((specifier.clone(), Ok(response))))
|
Box::pin(future::ready(Ok(response)))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1193,7 +1193,7 @@ mod tests {
|
||||||
let cwd = testdir("basic");
|
let cwd = testdir("basic");
|
||||||
let main = Url::from_file_path(cwd.join("main.js")).unwrap();
|
let main = Url::from_file_path(cwd.join("main.js")).unwrap();
|
||||||
let expected =
|
let expected =
|
||||||
Url::parse("https://deno.land/std@0.120.0/node/http.ts").unwrap();
|
Url::parse("https://deno.land/std@0.121.0/node/http.ts").unwrap();
|
||||||
|
|
||||||
let actual = node_resolve("http", main.as_str(), &cwd).unwrap();
|
let actual = node_resolve("http", main.as_str(), &cwd).unwrap();
|
||||||
println!("actual {}", actual);
|
println!("actual {}", actual);
|
||||||
|
|
|
@ -16,7 +16,7 @@ pub(crate) use esm_resolver::NodeEsmResolver;
|
||||||
// each release, a better mechanism is preferable, but it's a quick and dirty
|
// each release, a better mechanism is preferable, but it's a quick and dirty
|
||||||
// solution to avoid printing `X-Deno-Warning` headers when the compat layer is
|
// solution to avoid printing `X-Deno-Warning` headers when the compat layer is
|
||||||
// downloaded
|
// downloaded
|
||||||
static STD_URL_STR: &str = "https://deno.land/std@0.120.0/";
|
static STD_URL_STR: &str = "https://deno.land/std@0.121.0/";
|
||||||
|
|
||||||
static SUPPORTED_MODULES: &[&str] = &[
|
static SUPPORTED_MODULES: &[&str] = &[
|
||||||
"assert",
|
"assert",
|
||||||
|
|
|
@ -409,7 +409,7 @@ impl fmt::Display for Diagnostics {
|
||||||
if i > 0 {
|
if i > 0 {
|
||||||
write!(f, "\n\n")?;
|
write!(f, "\n\n")?;
|
||||||
}
|
}
|
||||||
write!(f, "{}", item.to_string())?;
|
write!(f, "{}", item)?;
|
||||||
i += 1;
|
i += 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
24
cli/dts/lib.deno.unstable.d.ts
vendored
24
cli/dts/lib.deno.unstable.d.ts
vendored
|
@ -126,6 +126,8 @@ declare namespace Deno {
|
||||||
Result extends NativeType = NativeType,
|
Result extends NativeType = NativeType,
|
||||||
NonBlocking extends boolean = boolean,
|
NonBlocking extends boolean = boolean,
|
||||||
> {
|
> {
|
||||||
|
/** Name of the symbol, defaults to the key name in symbols object. */
|
||||||
|
name?: string;
|
||||||
parameters: Parameters;
|
parameters: Parameters;
|
||||||
result: Result;
|
result: Result;
|
||||||
/** When true, function calls will run on a dedicated blocking thread and will return a Promise resolving to the `result`. */
|
/** When true, function calls will run on a dedicated blocking thread and will return a Promise resolving to the `result`. */
|
||||||
|
@ -149,7 +151,7 @@ declare namespace Deno {
|
||||||
|
|
||||||
type StaticForeignFunctionParameter<T> = T extends "void" ? void
|
type StaticForeignFunctionParameter<T> = T extends "void" ? void
|
||||||
: T extends StaticNativeNumberType ? number
|
: T extends StaticNativeNumberType ? number
|
||||||
: T extends "pointer" ? Deno.UnsafePointer | Deno.TypedArray
|
: T extends "pointer" ? Deno.UnsafePointer | Deno.TypedArray | null
|
||||||
: unknown;
|
: unknown;
|
||||||
|
|
||||||
/** Infers a foreign function parameter list. */
|
/** Infers a foreign function parameter list. */
|
||||||
|
@ -248,6 +250,26 @@ declare namespace Deno {
|
||||||
copyInto(destination: TypedArray, offset?: number): void;
|
copyInto(destination: TypedArray, offset?: number): void;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* **UNSTABLE**: Unsafe and new API, beware!
|
||||||
|
*
|
||||||
|
* An unsafe pointer to a function, for calling functions that are not
|
||||||
|
* present as symbols.
|
||||||
|
*/
|
||||||
|
export class UnsafeFnPointer<Fn extends ForeignFunction> {
|
||||||
|
pointer: UnsafePointer;
|
||||||
|
definition: Fn;
|
||||||
|
|
||||||
|
constructor(pointer: UnsafePointer, definition: Fn);
|
||||||
|
|
||||||
|
call(
|
||||||
|
...args: StaticForeignFunctionParameters<Fn["parameters"]>
|
||||||
|
): ConditionalAsync<
|
||||||
|
Fn["nonblocking"],
|
||||||
|
StaticForeignFunctionResult<Fn["result"]>
|
||||||
|
>;
|
||||||
|
}
|
||||||
|
|
||||||
/** A dynamic library resource */
|
/** A dynamic library resource */
|
||||||
export interface DynamicLibrary<S extends ForeignFunctionInterface> {
|
export interface DynamicLibrary<S extends ForeignFunctionInterface> {
|
||||||
/** All of the registered symbols along with functions for calling them */
|
/** All of the registered symbols along with functions for calling them */
|
||||||
|
|
199
cli/emit.rs
199
cli/emit.rs
|
@ -4,10 +4,10 @@
|
||||||
//! populate a cache, emit files, and transform a graph into the structures for
|
//! populate a cache, emit files, and transform a graph into the structures for
|
||||||
//! loading into an isolate.
|
//! loading into an isolate.
|
||||||
|
|
||||||
use crate::ast;
|
|
||||||
use crate::cache::CacheType;
|
use crate::cache::CacheType;
|
||||||
use crate::cache::Cacher;
|
use crate::cache::Cacher;
|
||||||
use crate::colors;
|
use crate::colors;
|
||||||
|
use crate::config_file;
|
||||||
use crate::config_file::ConfigFile;
|
use crate::config_file::ConfigFile;
|
||||||
use crate::config_file::IgnoredCompilerOptions;
|
use crate::config_file::IgnoredCompilerOptions;
|
||||||
use crate::config_file::TsConfig;
|
use crate::config_file::TsConfig;
|
||||||
|
@ -15,10 +15,25 @@ use crate::diagnostics::Diagnostics;
|
||||||
use crate::flags;
|
use crate::flags;
|
||||||
use crate::graph_util::GraphData;
|
use crate::graph_util::GraphData;
|
||||||
use crate::graph_util::ModuleEntry;
|
use crate::graph_util::ModuleEntry;
|
||||||
|
use crate::text_encoding::strip_bom;
|
||||||
use crate::tsc;
|
use crate::tsc;
|
||||||
use crate::version;
|
use crate::version;
|
||||||
|
|
||||||
|
use deno_ast::get_syntax;
|
||||||
use deno_ast::swc;
|
use deno_ast::swc;
|
||||||
|
use deno_ast::swc::bundler::Hook;
|
||||||
|
use deno_ast::swc::bundler::ModuleRecord;
|
||||||
|
use deno_ast::swc::common::comments::SingleThreadedComments;
|
||||||
|
use deno_ast::swc::common::FileName;
|
||||||
|
use deno_ast::swc::common::Mark;
|
||||||
|
use deno_ast::swc::common::SourceMap;
|
||||||
|
use deno_ast::swc::common::Span;
|
||||||
|
use deno_ast::swc::common::Spanned;
|
||||||
|
use deno_ast::swc::parser::error::Error as SwcError;
|
||||||
|
use deno_ast::swc::parser::lexer::Lexer;
|
||||||
|
use deno_ast::swc::parser::StringInput;
|
||||||
|
use deno_ast::Diagnostic;
|
||||||
|
use deno_ast::LineAndColumnDisplay;
|
||||||
use deno_core::anyhow::anyhow;
|
use deno_core::anyhow::anyhow;
|
||||||
use deno_core::anyhow::Context;
|
use deno_core::anyhow::Context;
|
||||||
use deno_core::error::AnyError;
|
use deno_core::error::AnyError;
|
||||||
|
@ -27,6 +42,7 @@ use deno_core::serde::Deserialize;
|
||||||
use deno_core::serde::Deserializer;
|
use deno_core::serde::Deserializer;
|
||||||
use deno_core::serde::Serialize;
|
use deno_core::serde::Serialize;
|
||||||
use deno_core::serde::Serializer;
|
use deno_core::serde::Serializer;
|
||||||
|
use deno_core::serde_json;
|
||||||
use deno_core::serde_json::json;
|
use deno_core::serde_json::json;
|
||||||
use deno_core::serde_json::Value;
|
use deno_core::serde_json::Value;
|
||||||
use deno_core::ModuleSpecifier;
|
use deno_core::ModuleSpecifier;
|
||||||
|
@ -42,6 +58,13 @@ use std::result;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use std::time::Instant;
|
use std::time::Instant;
|
||||||
|
|
||||||
|
const IGNORE_DIRECTIVES: &[&str] = &[
|
||||||
|
"// deno-fmt-ignore-file",
|
||||||
|
"// deno-lint-ignore-file",
|
||||||
|
"// This code was bundled using `deno bundle` and it's not recommended to edit it manually",
|
||||||
|
""
|
||||||
|
];
|
||||||
|
|
||||||
/// Represents the "default" type library that should be used when type
|
/// Represents the "default" type library that should be used when type
|
||||||
/// checking the code in the module graph. Note that a user provided config
|
/// checking the code in the module graph. Note that a user provided config
|
||||||
/// of `"lib"` would override this value.
|
/// of `"lib"` would override this value.
|
||||||
|
@ -503,13 +526,14 @@ impl From<BundleType> for swc::bundler::ModuleType {
|
||||||
pub(crate) struct BundleOptions {
|
pub(crate) struct BundleOptions {
|
||||||
pub bundle_type: BundleType,
|
pub bundle_type: BundleType,
|
||||||
pub ts_config: TsConfig,
|
pub ts_config: TsConfig,
|
||||||
|
pub emit_ignore_directives: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A module loader for swc which does the appropriate retrieval and transpiling
|
/// A module loader for swc which does the appropriate retrieval and transpiling
|
||||||
/// of modules from the graph.
|
/// of modules from the graph.
|
||||||
struct BundleLoader<'a> {
|
struct BundleLoader<'a> {
|
||||||
cm: Rc<swc::common::SourceMap>,
|
cm: Rc<swc::common::SourceMap>,
|
||||||
emit_options: &'a ast::EmitOptions,
|
emit_options: &'a deno_ast::EmitOptions,
|
||||||
graph: &'a ModuleGraph,
|
graph: &'a ModuleGraph,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -521,7 +545,7 @@ impl swc::bundler::Load for BundleLoader<'_> {
|
||||||
match file_name {
|
match file_name {
|
||||||
swc::common::FileName::Url(specifier) => {
|
swc::common::FileName::Url(specifier) => {
|
||||||
if let Some(m) = self.graph.get(specifier) {
|
if let Some(m) = self.graph.get(specifier) {
|
||||||
let (fm, module) = ast::transpile_module(
|
let (fm, module) = transpile_module(
|
||||||
specifier,
|
specifier,
|
||||||
m.maybe_source().unwrap_or(""),
|
m.maybe_source().unwrap_or(""),
|
||||||
*m.media_type(),
|
*m.media_type(),
|
||||||
|
@ -548,6 +572,77 @@ impl swc::bundler::Load for BundleLoader<'_> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Transpiles a source module into an swc SourceFile.
|
||||||
|
fn transpile_module(
|
||||||
|
specifier: &ModuleSpecifier,
|
||||||
|
source: &str,
|
||||||
|
media_type: MediaType,
|
||||||
|
options: &deno_ast::EmitOptions,
|
||||||
|
cm: Rc<swc::common::SourceMap>,
|
||||||
|
) -> Result<(Rc<swc::common::SourceFile>, swc::ast::Module), AnyError> {
|
||||||
|
let source = strip_bom(source);
|
||||||
|
let source = if media_type == MediaType::Json {
|
||||||
|
format!(
|
||||||
|
"export default JSON.parse(`{}`);",
|
||||||
|
source.replace("${", "\\${").replace('`', "\\`")
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
source.to_string()
|
||||||
|
};
|
||||||
|
let source_file =
|
||||||
|
cm.new_source_file(FileName::Url(specifier.clone()), source);
|
||||||
|
let input = StringInput::from(&*source_file);
|
||||||
|
let comments = SingleThreadedComments::default();
|
||||||
|
let syntax = if media_type == MediaType::Json {
|
||||||
|
get_syntax(MediaType::JavaScript)
|
||||||
|
} else {
|
||||||
|
get_syntax(media_type)
|
||||||
|
};
|
||||||
|
let lexer = Lexer::new(syntax, deno_ast::ES_VERSION, input, Some(&comments));
|
||||||
|
let mut parser = swc::parser::Parser::new_from(lexer);
|
||||||
|
let module = parser
|
||||||
|
.parse_module()
|
||||||
|
.map_err(|e| swc_err_to_diagnostic(&cm, specifier, e))?;
|
||||||
|
let diagnostics = parser
|
||||||
|
.take_errors()
|
||||||
|
.into_iter()
|
||||||
|
.map(|e| swc_err_to_diagnostic(&cm, specifier, e))
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
|
let top_level_mark = Mark::fresh(Mark::root());
|
||||||
|
let program = deno_ast::fold_program(
|
||||||
|
swc::ast::Program::Module(module),
|
||||||
|
options,
|
||||||
|
cm,
|
||||||
|
&comments,
|
||||||
|
top_level_mark,
|
||||||
|
&diagnostics,
|
||||||
|
)?;
|
||||||
|
let module = match program {
|
||||||
|
swc::ast::Program::Module(module) => module,
|
||||||
|
_ => unreachable!(),
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok((source_file, module))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn swc_err_to_diagnostic(
|
||||||
|
source_map: &SourceMap,
|
||||||
|
specifier: &ModuleSpecifier,
|
||||||
|
err: SwcError,
|
||||||
|
) -> Diagnostic {
|
||||||
|
let location = source_map.lookup_char_pos(err.span().lo);
|
||||||
|
Diagnostic {
|
||||||
|
specifier: specifier.to_string(),
|
||||||
|
span: err.span(),
|
||||||
|
display_position: LineAndColumnDisplay {
|
||||||
|
line_number: location.line,
|
||||||
|
column_number: location.col_display + 1,
|
||||||
|
},
|
||||||
|
kind: err.into_kind(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
 /// A resolver implementation for swc that resolves specifiers from the graph.
 struct BundleResolver<'a>(&'a ModuleGraph);

@@ -589,8 +684,8 @@ pub(crate) fn bundle(
 ) -> Result<(String, Option<String>), AnyError> {
   let globals = swc::common::Globals::new();
   deno_ast::swc::common::GLOBALS.set(&globals, || {
-    let emit_options: ast::EmitOptions = options.ts_config.into();
-    let source_map_config = ast::SourceMapConfig {
+    let emit_options: deno_ast::EmitOptions = options.ts_config.into();
+    let source_map_config = deno_ast::SourceMapConfig {
       inline_sources: emit_options.inline_sources,
     };

@@ -609,7 +704,7 @@ pub(crate) fn bundle(
     };
     // This hook will rewrite the `import.meta` when bundling to give a consistent
     // behavior between bundled and unbundled code.
-    let hook = Box::new(ast::BundleHook);
+    let hook = Box::new(BundleHook);
     let mut bundler = swc::bundler::Bundler::new(
       &globals,
       cm.clone(),
|
@ -630,12 +725,21 @@ pub(crate) fn bundle(
|
||||||
let mut srcmap = Vec::new();
|
let mut srcmap = Vec::new();
|
||||||
{
|
{
|
||||||
let cfg = swc::codegen::Config { minify: false };
|
let cfg = swc::codegen::Config { minify: false };
|
||||||
let wr = Box::new(swc::codegen::text_writer::JsWriter::new(
|
let mut wr = Box::new(swc::codegen::text_writer::JsWriter::new(
|
||||||
cm.clone(),
|
cm.clone(),
|
||||||
"\n",
|
"\n",
|
||||||
&mut buf,
|
&mut buf,
|
||||||
Some(&mut srcmap),
|
Some(&mut srcmap),
|
||||||
));
|
));
|
||||||
|
|
||||||
|
if options.emit_ignore_directives {
|
||||||
|
// write leading comments in bundled file
|
||||||
|
use swc::codegen::text_writer::WriteJs;
|
||||||
|
use swc::common::source_map::DUMMY_SP;
|
||||||
|
let cmt = IGNORE_DIRECTIVES.join("\n") + "\n";
|
||||||
|
wr.write_comment(DUMMY_SP, &cmt)?;
|
||||||
|
}
|
||||||
|
|
||||||
let mut emitter = swc::codegen::Emitter {
|
let mut emitter = swc::codegen::Emitter {
|
||||||
cfg,
|
cfg,
|
||||||
cm: cm.clone(),
|
cm: cm.clone(),
|
||||||
|
@@ -705,11 +809,10 @@ pub(crate) fn emit(
     if is_valid && !needs_reload {
       continue;
     }
-    let (emit, maybe_map) =
-      ast::transpile(&module.parsed_source, &emit_options)?;
+    let transpiled_source = module.parsed_source.transpile(&emit_options)?;
     emit_count += 1;
-    cache.set(CacheType::Emit, &module.specifier, emit)?;
-    if let Some(map) = maybe_map {
+    cache.set(CacheType::Emit, &module.specifier, transpiled_source.text)?;
+    if let Some(map) = transpiled_source.source_map {
       cache.set(CacheType::SourceMap, &module.specifier, map)?;
     }
     if !is_valid {
@@ -844,6 +947,80 @@ pub(crate) fn to_file_map(
   files
 }

+/// This contains the logic for Deno to rewrite the `import.meta` when bundling.
+pub struct BundleHook;
+
+impl Hook for BundleHook {
+  fn get_import_meta_props(
+    &self,
+    span: Span,
+    module_record: &ModuleRecord,
+  ) -> Result<Vec<deno_ast::swc::ast::KeyValueProp>, AnyError> {
+    use deno_ast::swc::ast;
+
+    Ok(vec![
+      ast::KeyValueProp {
+        key: ast::PropName::Ident(ast::Ident::new("url".into(), span)),
+        value: Box::new(ast::Expr::Lit(ast::Lit::Str(ast::Str {
+          span,
+          value: module_record.file_name.to_string().into(),
+          kind: ast::StrKind::Synthesized,
+          has_escape: false,
+        }))),
+      },
+      ast::KeyValueProp {
+        key: ast::PropName::Ident(ast::Ident::new("main".into(), span)),
+        value: Box::new(if module_record.is_entry {
+          ast::Expr::Member(ast::MemberExpr {
+            span,
+            obj: Box::new(ast::Expr::MetaProp(ast::MetaPropExpr {
+              span,
+              kind: ast::MetaPropKind::ImportMeta,
+            })),
+            prop: ast::MemberProp::Ident(ast::Ident::new("main".into(), span)),
+          })
+        } else {
+          ast::Expr::Lit(ast::Lit::Bool(ast::Bool { span, value: false }))
+        }),
+      },
+    ])
+  }
+}
+
+impl From<config_file::TsConfig> for deno_ast::EmitOptions {
+  fn from(config: config_file::TsConfig) -> Self {
+    let options: config_file::EmitConfigOptions =
+      serde_json::from_value(config.0).unwrap();
+    let imports_not_used_as_values =
+      match options.imports_not_used_as_values.as_str() {
+        "preserve" => deno_ast::ImportsNotUsedAsValues::Preserve,
+        "error" => deno_ast::ImportsNotUsedAsValues::Error,
+        _ => deno_ast::ImportsNotUsedAsValues::Remove,
+      };
+    let (transform_jsx, jsx_automatic, jsx_development) =
+      match options.jsx.as_str() {
+        "react" => (true, false, false),
+        "react-jsx" => (true, true, false),
+        "react-jsxdev" => (true, true, true),
+        _ => (false, false, false),
+      };
+    deno_ast::EmitOptions {
+      emit_metadata: options.emit_decorator_metadata,
+      imports_not_used_as_values,
+      inline_source_map: options.inline_source_map,
+      inline_sources: options.inline_sources,
+      source_map: options.source_map,
+      jsx_automatic,
+      jsx_development,
+      jsx_factory: options.jsx_factory,
+      jsx_fragment_factory: options.jsx_fragment_factory,
+      jsx_import_source: options.jsx_import_source,
+      transform_jsx,
+      var_decl_imports: false,
+    }
+  }
+}
+
 #[cfg(test)]
 mod tests {
   use super::*;
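A standalone sketch (not the cli code itself; the helper name is made up) of how the "jsx" compiler-option string maps onto the three emit flags used in the conversion above:

```rust
// Returns (transform_jsx, jsx_automatic, jsx_development) for a given "jsx" setting.
fn jsx_flags(jsx: &str) -> (bool, bool, bool) {
  match jsx {
    "react" => (true, false, false),
    "react-jsx" => (true, true, false),
    "react-jsxdev" => (true, true, true),
    _ => (false, false, false),
  }
}

fn main() {
  assert_eq!(jsx_flags("react-jsx"), (true, true, false));
  assert_eq!(jsx_flags("preserve"), (false, false, false));
}
```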
@@ -60,7 +60,7 @@ where
 {
     let result = watch_future.await;
     if let Err(err) = result {
-      let msg = format!("{}: {}", colors::red_bold("error"), err.to_string(),);
+      let msg = format!("{}: {}", colors::red_bold("error"), err);
       eprintln!("{}", msg);
     }
   }
cli/flags.rs (555 changed lines): diff suppressed because it is too large.
|
@@ -26,7 +26,7 @@ impl FromStr for BarePort {
   }
 }

-pub fn validator(host_and_port: String) -> Result<(), String> {
+pub fn validator(host_and_port: &str) -> Result<(), String> {
   if Url::parse(&format!("deno://{}", host_and_port)).is_ok()
     || host_and_port.parse::<IpAddr>().is_ok()
     || host_and_port.parse::<BarePort>().is_ok()

@@ -53,9 +53,9 @@ pub fn parse(paths: Vec<String>) -> clap::Result<Vec<String>> {
       out.push(format!("{}:{}", host, port.0));
     }
   } else {
-    return Err(clap::Error::with_description(
-      &format!("Bad host:port pair: {}", host_and_port),
+    return Err(clap::Error::raw(
       clap::ErrorKind::InvalidValue,
+      format!("Bad host:port pair: {}", host_and_port),
     ));
   }
 }
@@ -81,7 +81,7 @@ fn format_frame(frame: &JsStackFrame) -> String {
   if frame.is_promise_all {
     result += &italic_bold(&format!(
       "Promise.all (index {})",
-      frame.promise_index.unwrap_or_default().to_string()
+      frame.promise_index.unwrap_or_default()
     ))
     .to_string();
     return result;
@@ -348,6 +348,20 @@ pub fn specifier_parent(specifier: &ModuleSpecifier) -> ModuleSpecifier {
   specifier
 }

+/// This function checks if input path has trailing slash or not. If input path
+/// has trailing slash it will return true else it will return false.
+pub fn path_has_trailing_slash(path: &Path) -> bool {
+  if let Some(path_str) = path.to_str() {
+    if cfg!(windows) {
+      path_str.ends_with('\\')
+    } else {
+      path_str.ends_with('/')
+    }
+  } else {
+    false
+  }
+}
+
 #[cfg(test)]
 mod tests {
   use super::*;
@@ -691,4 +705,29 @@ mod tests {
     assert_eq!(result.to_string(), expected);
     }
   }

+  #[test]
+  fn test_path_has_trailing_slash() {
+    #[cfg(not(windows))]
+    {
+      run_test("/Users/johndoe/Desktop/deno-project/target/", true);
+      run_test(r"/Users/johndoe/deno-project/target//", true);
+      run_test("/Users/johndoe/Desktop/deno-project", false);
+      run_test(r"/Users/johndoe/deno-project\", false);
+    }
+
+    #[cfg(windows)]
+    {
+      run_test(r"C:\test\deno-project\", true);
+      run_test(r"C:\test\deno-project\\", true);
+      run_test(r"C:\test\file.txt", false);
+      run_test(r"C:\test\file.txt/", false);
+    }
+
+    fn run_test(path_str: &str, expected: bool) {
+      let path = Path::new(path_str);
+      let result = path_has_trailing_slash(path);
+      assert_eq!(result, expected);
+    }
+  }
 }
@@ -288,7 +288,7 @@ impl GraphData {
         if !range.specifier.as_str().contains("$deno") {
           return Some(Err(custom_error(
             get_error_class_name(&error.clone().into()),
-            format!("{}\n at {}", error.to_string(), range),
+            format!("{}\n at {}", error, range),
           )));
         }
         return Some(Err(error.clone().into()));

@@ -307,7 +307,7 @@ impl GraphData {
         if !range.specifier.as_str().contains("$deno") {
           return Some(Err(custom_error(
             get_error_class_name(&error.clone().into()),
-            format!("{}\n at {}", error.to_string(), range),
+            format!("{}\n at {}", error, range),
           )));
         }
         return Some(Err(error.clone().into()));

@@ -323,7 +323,7 @@ impl GraphData {
         if !range.specifier.as_str().contains("$deno") {
           return Some(Err(custom_error(
             get_error_class_name(&error.clone().into()),
-            format!("{}\n at {}", error.to_string(), range),
+            format!("{}\n at {}", error, range),
           )));
         }
         return Some(Err(error.clone().into()));
@@ -80,6 +80,7 @@ impl CacheServer {
         maybe_resolver,
         None,
         None,
+        None,
       )
       .await;
@@ -157,7 +157,7 @@ impl DenoTestCollector {

 impl Visit for DenoTestCollector {
   fn visit_call_expr(&mut self, node: &ast::CallExpr) {
-    if let ast::ExprOrSuper::Expr(callee_expr) = &node.callee {
+    if let ast::Callee::Expr(callee_expr) = &node.callee {
       match callee_expr.as_ref() {
         ast::Expr::Ident(ident) => {
           if self.test_vars.contains(&ident.sym.to_string()) {

@@ -165,13 +165,11 @@ impl Visit for DenoTestCollector {
           }
         }
         ast::Expr::Member(member_expr) => {
-          if let ast::Expr::Ident(ns_prop_ident) = member_expr.prop.as_ref() {
+          if let ast::MemberProp::Ident(ns_prop_ident) = &member_expr.prop {
             if ns_prop_ident.sym.to_string() == "test" {
-              if let ast::ExprOrSuper::Expr(obj_expr) = &member_expr.obj {
-                if let ast::Expr::Ident(ident) = obj_expr.as_ref() {
-                  if ident.sym.to_string() == "Deno" {
-                    self.check_call_expr(node, &ns_prop_ident.span);
-                  }
-                }
+              if let ast::Expr::Ident(ident) = member_expr.obj.as_ref() {
+                if ident.sym.to_string() == "Deno" {
+                  self.check_call_expr(node, &ns_prop_ident.span);
+                }
               }
             }
           }

@@ -219,16 +217,12 @@ impl Visit for DenoTestCollector {
         }
         // Identify variable assignments where the init is `Deno.test`
         ast::Expr::Member(member_expr) => {
-          if let ast::ExprOrSuper::Expr(expr) = &member_expr.obj {
-            if let ast::Expr::Ident(obj_ident) = expr.as_ref() {
-              if obj_ident.sym.to_string() == "Deno" {
-                if let ast::Expr::Ident(prop_ident) =
-                  &member_expr.prop.as_ref()
-                {
-                  if prop_ident.sym.to_string() == "test" {
-                    if let ast::Pat::Ident(binding_ident) = &decl.name {
-                      self.test_vars.insert(binding_ident.id.sym.to_string());
-                    }
-                  }
+          if let ast::Expr::Ident(obj_ident) = member_expr.obj.as_ref() {
+            if obj_ident.sym.to_string() == "Deno" {
+              if let ast::MemberProp::Ident(prop_ident) = &member_expr.prop {
+                if prop_ident.sym.to_string() == "test" {
+                  if let ast::Pat::Ident(binding_ident) = &decl.name {
+                    self.test_vars.insert(binding_ident.id.sym.to_string());
+                  }
                 }
               }
             }
           }
@@ -428,18 +428,10 @@ fn relative_specifier(
       }
     }
     if parts.is_empty() {
-      format!(
-        "./{}{}",
-        last_a,
-        specifier[Position::AfterPath..].to_string()
-      )
+      format!("./{}{}", last_a, &specifier[Position::AfterPath..])
     } else {
       parts.push(last_a);
-      format!(
-        "{}{}",
-        parts.join("/"),
-        specifier[Position::AfterPath..].to_string()
-      )
+      format!("{}{}", parts.join("/"), &specifier[Position::AfterPath..])
     }
   } else {
     specifier[Position::BeforePath..].into()
@@ -588,8 +588,8 @@ pub(crate) fn to_hover_text(
     "blob" => "_(a blob url)_".to_string(),
     _ => format!(
       "{}​{}",
-      specifier[..url::Position::AfterScheme].to_string(),
-      specifier[url::Position::AfterScheme..].to_string()
+      &specifier[..url::Position::AfterScheme],
+      &specifier[url::Position::AfterScheme..],
     )
     .replace('@', "​@"),
   },
@@ -62,6 +62,7 @@ use crate::file_fetcher::get_source_from_data_url;
 use crate::fs_util;
 use crate::logger;
 use crate::lsp::logging::lsp_log;
+use crate::proc_state::import_map_from_text;
 use crate::tools::fmt::format_file;
 use crate::tools::fmt::format_parsed_source;

@@ -495,12 +496,9 @@ impl Inner {
           )
         })?
       };
-      let import_map = Arc::new(ImportMap::from_json(
-        &import_map_url.to_string(),
-        &import_map_json,
-      )?);
+      let import_map = import_map_from_text(&import_map_url, &import_map_json)?;
       self.maybe_import_map_uri = Some(import_map_url);
-      self.maybe_import_map = Some(import_map);
+      self.maybe_import_map = Some(Arc::new(import_map));
     } else {
       self.maybe_import_map = None;
     }
cli/main.rs (27 changed lines):

@@ -1,6 +1,5 @@
 // Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.

-mod ast;
 mod auth_tokens;
 mod cache;
 mod checksum;

@@ -378,7 +377,14 @@ async fn compile_command(
   let ps = ProcState::build(flags.clone()).await?;
   let deno_dir = &ps.dir;

-  let output = compile_flags.output.or_else(|| {
+  let output = compile_flags.output.and_then(|output| {
+    if fs_util::path_has_trailing_slash(&output) {
+      let infer_file_name = infer_name_from_url(&module_specifier).map(PathBuf::from)?;
+      Some(output.join(infer_file_name))
+    } else {
+      Some(output)
+    }
+  }).or_else(|| {
     infer_name_from_url(&module_specifier).map(PathBuf::from)
   }).ok_or_else(|| generic_error(
     "An executable name was not provided. One could not be inferred from the URL. Aborting.",

@@ -468,6 +474,7 @@ async fn info_command(
     maybe_resolver,
     maybe_locker,
     None,
+    None,
   )
   .await;

@@ -647,6 +654,7 @@ async fn create_graph_and_maybe_check(
      maybe_resolver,
      maybe_locker,
      None,
+      None,
    )
    .await,
  );

@@ -725,6 +733,7 @@ fn bundle_module_graph(
    emit::BundleOptions {
      bundle_type: emit::BundleType::Module,
      ts_config,
+      emit_ignore_directives: true,
    },
  )
 }

@@ -991,6 +1000,7 @@ async fn run_with_watch(flags: Flags, script: String) -> Result<i32, AnyError> {
      maybe_resolver,
      maybe_locker,
      None,
+      None,
    )
    .await;
    let check_js = ps

@@ -1366,7 +1376,11 @@ fn unwrap_or_exit<T>(result: Result<T, AnyError>) -> T {
   match result {
     Ok(value) => value,
     Err(error) => {
-      eprintln!("{}: {:?}", colors::red_bold("error"), error);
+      eprintln!(
+        "{}: {}",
+        colors::red_bold("error"),
+        format!("{:?}", error).trim_start_matches("error: ")
+      );
       std::process::exit(1);
     }
   }

@@ -1394,11 +1408,10 @@ pub fn main() {
   let flags = match flags::flags_from_vec(args) {
     Ok(flags) => flags,
     Err(err @ clap::Error { .. })
-      if err.kind == clap::ErrorKind::HelpDisplayed
-        || err.kind == clap::ErrorKind::VersionDisplayed =>
+      if err.kind == clap::ErrorKind::DisplayHelp
+        || err.kind == clap::ErrorKind::DisplayVersion =>
     {
-      err.write_to(&mut std::io::stdout()).unwrap();
-      std::io::stdout().write_all(b"\n").unwrap();
+      err.print().unwrap();
       std::process::exit(0);
     }
     Err(err) => unwrap_or_exit(Err(AnyError::from(err))),
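The compile_command change above treats an --output value with a trailing path separator as a directory and joins the inferred executable name onto it. A standalone sketch of that resolution logic (the function and names here are illustrative, not the cli code):

```rust
use std::path::{Path, PathBuf};

// If the output ends in a path separator, treat it as a directory and append the
// inferred name; otherwise use it as-is; with no output, fall back to the inferred name.
fn resolve_output(output: Option<PathBuf>, inferred: Option<&str>) -> Option<PathBuf> {
  output
    .and_then(|output| {
      if output.to_str().map_or(false, |s| s.ends_with('/') || s.ends_with('\\')) {
        inferred.map(|name| output.join(name))
      } else {
        Some(output)
      }
    })
    .or_else(|| inferred.map(PathBuf::from))
}

fn main() {
  assert_eq!(
    resolve_output(Some(PathBuf::from("build/")), Some("app")),
    Some(Path::new("build/").join("app"))
  );
  assert_eq!(resolve_output(None, Some("app")), Some(PathBuf::from("app")));
}
```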
@@ -7,6 +7,7 @@ use crate::emit;
 use crate::errors::get_error_class_name;
 use crate::flags;
 use crate::graph_util::graph_valid;
+use crate::proc_state::import_map_from_text;
 use crate::proc_state::ProcState;
 use crate::resolver::ImportMapResolver;
 use crate::resolver::JsxResolver;

@@ -24,7 +25,6 @@ use deno_core::Extension;
 use deno_core::ModuleSpecifier;
 use deno_core::OpState;
 use deno_runtime::permissions::Permissions;
-use import_map::ImportMap;
 use serde::Deserialize;
 use serde::Serialize;
 use std::cell::RefCell;

@@ -174,8 +174,8 @@ async fn op_emit(
       .with_context(|| {
         format!("Bad URL (\"{}\") for import map.", import_map_str)
       })?;
-    let import_map = if let Some(value) = args.import_map {
-      ImportMap::from_json(import_map_specifier.as_str(), &value.to_string())?
+    let import_map_source = if let Some(value) = args.import_map {
+      Arc::new(value.to_string())
     } else {
       let file = ps
         .file_fetcher

@@ -187,8 +187,10 @@ async fn op_emit(
           import_map_specifier, e
         ))
       })?;
-      ImportMap::from_json(import_map_specifier.as_str(), &file.source)?
+      file.source
     };
+    let import_map =
+      import_map_from_text(&import_map_specifier, &import_map_source)?;
     Some(ImportMapResolver::new(Arc::new(import_map)))
   } else if args.import_map.is_some() {
     return Err(generic_error("An importMap was specified, but no importMapPath was provided, which is required."));

@@ -217,6 +219,7 @@ async fn op_emit(
       maybe_resolver,
       None,
       None,
+      None,
     )
     .await,
   );

@@ -285,6 +288,7 @@ async fn op_emit(
     emit::BundleOptions {
       bundle_type: bundle.into(),
       ts_config,
+      emit_ignore_directives: true,
     },
   )?;
   let mut files = HashMap::new();
@@ -51,6 +51,7 @@ use deno_runtime::deno_web::BlobStore;
 use deno_runtime::inspector_server::InspectorServer;
 use deno_runtime::permissions::Permissions;
 use import_map::ImportMap;
+use log::warn;
 use std::collections::HashSet;
 use std::env;
 use std::fs::File;

@@ -225,7 +226,7 @@ impl ProcState {
           import_map_specifier
         ))?;
         let import_map =
-          ImportMap::from_json(import_map_specifier.as_str(), &file.source)?;
+          import_map_from_text(&import_map_specifier, &file.source)?;
         Some(Arc::new(import_map))
       }
     };

@@ -377,10 +378,9 @@ impl ProcState {
     let graph_data = self.graph_data.read();
     let found_specifier = graph_data.follow_redirect(specifier);
     match graph_data.get(&found_specifier) {
-      Some(_) if !self.reload => Box::pin(futures::future::ready((
-        specifier.clone(),
-        Err(anyhow!("")),
-      ))),
+      Some(_) if !self.reload => {
+        Box::pin(futures::future::ready(Err(anyhow!(""))))
+      }
       _ => self.inner.load(specifier, is_dynamic),
     }
   }

@@ -398,6 +398,7 @@ impl ProcState {
       maybe_resolver,
       maybe_locker,
       None,
+      None,
     )
     .await;
     // If there was a locker, validate the integrity of all the modules in the

@@ -671,6 +672,25 @@ impl SourceMapGetter for ProcState {
   }
 }

+pub fn import_map_from_text(
+  specifier: &Url,
+  json_text: &str,
+) -> Result<ImportMap, AnyError> {
+  let result = ImportMap::from_json_with_diagnostics(specifier, json_text)?;
+  if !result.diagnostics.is_empty() {
+    warn!(
+      "Import map diagnostics:\n{}",
+      result
+        .diagnostics
+        .into_iter()
+        .map(|d| format!(" - {}", d))
+        .collect::<Vec<_>>()
+        .join("\n")
+    )
+  }
+  Ok(result.import_map)
+}
|
|
||||||
fn source_map_from_code(code: String) -> Option<Vec<u8>> {
|
fn source_map_from_code(code: String) -> Option<Vec<u8>> {
|
||||||
let lines: Vec<&str> = code.split('\n').collect();
|
let lines: Vec<&str> = code.split('\n').collect();
|
||||||
if let Some(last_line) = lines.last() {
|
if let Some(last_line) = lines.last() {
|
||||||
|
|
|
@@ -31,7 +31,7 @@ impl Resolver for ImportMapResolver {
   ) -> Result<ModuleSpecifier, AnyError> {
     self
       .0
-      .resolve(specifier, referrer.as_str())
+      .resolve(specifier, referrer)
       .map_err(|err| err.into())
   }
 }
|
@@ -442,3 +442,8 @@ itest!(bundle_export_specifier_with_alias {
   args: "bundle bundle/file_tests-fixture16.ts",
   output: "bundle/fixture16.out",
 });
+
+itest!(bundle_ignore_directives {
+  args: "bundle subdir/mod1.ts",
+  output: "bundle_ignore_directives.test.out",
+});
|
|
|
@ -1,5 +1,6 @@
|
||||||
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||||
|
|
||||||
|
use std::fs::File;
|
||||||
use std::process::Command;
|
use std::process::Command;
|
||||||
use tempfile::TempDir;
|
use tempfile::TempDir;
|
||||||
use test_util as util;
|
use test_util as util;
|
||||||
|
@ -129,7 +130,7 @@ fn standalone_error() {
|
||||||
.unwrap();
|
.unwrap();
|
||||||
assert!(!output.status.success());
|
assert!(!output.status.success());
|
||||||
assert_eq!(output.stdout, b"");
|
assert_eq!(output.stdout, b"");
|
||||||
let expected_stderr = "error: Error: boom!\n at boom (file://$deno$/bundle.js:2:11)\n at foo (file://$deno$/bundle.js:5:5)\n at file://$deno$/bundle.js:7:1\n";
|
let expected_stderr = "error: Error: boom!\n at boom (file://$deno$/bundle.js:6:11)\n at foo (file://$deno$/bundle.js:9:5)\n at file://$deno$/bundle.js:11:1\n";
|
||||||
let stderr = String::from_utf8(output.stderr).unwrap();
|
let stderr = String::from_utf8(output.stderr).unwrap();
|
||||||
assert_eq!(stderr, expected_stderr);
|
assert_eq!(stderr, expected_stderr);
|
||||||
}
|
}
|
||||||
|
@ -233,6 +234,73 @@ fn standalone_compiler_ops() {
|
||||||
assert_eq!(output.stdout, b"Hello, Compiler API!\n");
|
assert_eq!(output.stdout, b"Hello, Compiler API!\n");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn compile_with_directory_output_flag() {
|
||||||
|
let dir = TempDir::new().expect("tempdir fail");
|
||||||
|
let output_path = if cfg!(windows) {
|
||||||
|
dir.path().join(r"args\random\")
|
||||||
|
} else {
|
||||||
|
dir.path().join("args/random/")
|
||||||
|
};
|
||||||
|
let output = util::deno_cmd()
|
||||||
|
.current_dir(util::testdata_path())
|
||||||
|
.arg("compile")
|
||||||
|
.arg("--unstable")
|
||||||
|
.arg("--output")
|
||||||
|
.arg(&output_path)
|
||||||
|
.arg("./standalone_compiler_ops.ts")
|
||||||
|
.stdout(std::process::Stdio::piped())
|
||||||
|
.spawn()
|
||||||
|
.unwrap()
|
||||||
|
.wait_with_output()
|
||||||
|
.unwrap();
|
||||||
|
assert!(output.status.success());
|
||||||
|
let exe = if cfg!(windows) {
|
||||||
|
output_path.join("standalone_compiler_ops.exe")
|
||||||
|
} else {
|
||||||
|
output_path.join("standalone_compiler_ops")
|
||||||
|
};
|
||||||
|
assert!(&exe.exists());
|
||||||
|
let output = Command::new(exe)
|
||||||
|
.stdout(std::process::Stdio::piped())
|
||||||
|
.stderr(std::process::Stdio::piped())
|
||||||
|
.spawn()
|
||||||
|
.unwrap()
|
||||||
|
.wait_with_output()
|
||||||
|
.unwrap();
|
||||||
|
assert!(output.status.success());
|
||||||
|
assert_eq!(output.stdout, b"Hello, Compiler API!\n");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn compile_with_file_exists_error() {
|
||||||
|
let dir = TempDir::new().expect("tempdir fail");
|
||||||
|
let output_path = if cfg!(windows) {
|
||||||
|
dir.path().join(r"args\")
|
||||||
|
} else {
|
||||||
|
dir.path().join("args/")
|
||||||
|
};
|
||||||
|
let file_path = dir.path().join("args");
|
||||||
|
File::create(&file_path).expect("cannot create file");
|
||||||
|
let output = util::deno_cmd()
|
||||||
|
.current_dir(util::testdata_path())
|
||||||
|
.arg("compile")
|
||||||
|
.arg("--unstable")
|
||||||
|
.arg("--output")
|
||||||
|
.arg(&output_path)
|
||||||
|
.arg("./028_args.ts")
|
||||||
|
.stderr(std::process::Stdio::piped())
|
||||||
|
.spawn()
|
||||||
|
.unwrap()
|
||||||
|
.wait_with_output()
|
||||||
|
.unwrap();
|
||||||
|
assert!(!output.status.success());
|
||||||
|
let expected_stderr =
|
||||||
|
format!("Could not compile: {:?} is a file.\n", &file_path);
|
||||||
|
let stderr = String::from_utf8(output.stderr).unwrap();
|
||||||
|
assert!(stderr.contains(&expected_stderr));
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn compile_with_directory_exists_error() {
|
fn compile_with_directory_exists_error() {
|
||||||
let dir = TempDir::new().expect("tempdir fail");
|
let dir = TempDir::new().expect("tempdir fail");
|
||||||
|
|
|
@ -97,3 +97,83 @@ fn run_coverage_text(test_name: &str, extension: &str) {
|
||||||
|
|
||||||
assert!(output.status.success());
|
assert!(output.status.success());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn multifile_coverage() {
|
||||||
|
let deno_dir = TempDir::new().expect("tempdir fail");
|
||||||
|
let tempdir = TempDir::new().expect("tempdir fail");
|
||||||
|
let tempdir = tempdir.path().join("cov");
|
||||||
|
|
||||||
|
let status = util::deno_cmd_with_deno_dir(deno_dir.path())
|
||||||
|
.current_dir(util::testdata_path())
|
||||||
|
.arg("test")
|
||||||
|
.arg("--quiet")
|
||||||
|
.arg("--unstable")
|
||||||
|
.arg(format!("--coverage={}", tempdir.to_str().unwrap()))
|
||||||
|
.arg("coverage/multifile/")
|
||||||
|
.stdout(std::process::Stdio::piped())
|
||||||
|
.stderr(std::process::Stdio::inherit())
|
||||||
|
.status()
|
||||||
|
.expect("failed to spawn test runner");
|
||||||
|
|
||||||
|
assert!(status.success());
|
||||||
|
|
||||||
|
let output = util::deno_cmd_with_deno_dir(deno_dir.path())
|
||||||
|
.current_dir(util::testdata_path())
|
||||||
|
.arg("coverage")
|
||||||
|
.arg("--unstable")
|
||||||
|
.arg(format!("{}/", tempdir.to_str().unwrap()))
|
||||||
|
.stdout(std::process::Stdio::piped())
|
||||||
|
.stderr(std::process::Stdio::piped())
|
||||||
|
.output()
|
||||||
|
.expect("failed to spawn coverage reporter");
|
||||||
|
|
||||||
|
// Verify there's no "Check" being printed
|
||||||
|
assert!(output.stderr.is_empty());
|
||||||
|
|
||||||
|
let actual =
|
||||||
|
util::strip_ansi_codes(std::str::from_utf8(&output.stdout).unwrap())
|
||||||
|
.to_string();
|
||||||
|
|
||||||
|
let expected = fs::read_to_string(
|
||||||
|
util::testdata_path().join("coverage/multifile/expected.out"),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
if !util::wildcard_match(&expected, &actual) {
|
||||||
|
println!("OUTPUT\n{}\nOUTPUT", actual);
|
||||||
|
println!("EXPECTED\n{}\nEXPECTED", expected);
|
||||||
|
panic!("pattern match failed");
|
||||||
|
}
|
||||||
|
|
||||||
|
assert!(output.status.success());
|
||||||
|
|
||||||
|
let output = util::deno_cmd_with_deno_dir(deno_dir.path())
|
||||||
|
.current_dir(util::testdata_path())
|
||||||
|
.arg("coverage")
|
||||||
|
.arg("--quiet")
|
||||||
|
.arg("--unstable")
|
||||||
|
.arg("--lcov")
|
||||||
|
.arg(format!("{}/", tempdir.to_str().unwrap()))
|
||||||
|
.stdout(std::process::Stdio::piped())
|
||||||
|
.stderr(std::process::Stdio::inherit())
|
||||||
|
.output()
|
||||||
|
.expect("failed to spawn coverage reporter");
|
||||||
|
|
||||||
|
let actual =
|
||||||
|
util::strip_ansi_codes(std::str::from_utf8(&output.stdout).unwrap())
|
||||||
|
.to_string();
|
||||||
|
|
||||||
|
let expected = fs::read_to_string(
|
||||||
|
util::testdata_path().join("coverage/multifile/expected.lcov"),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
if !util::wildcard_match(&expected, &actual) {
|
||||||
|
println!("OUTPUT\n{}\nOUTPUT", actual);
|
||||||
|
println!("EXPECTED\n{}\nEXPECTED", expected);
|
||||||
|
panic!("pattern match failed");
|
||||||
|
}
|
||||||
|
|
||||||
|
assert!(output.status.success());
|
||||||
|
}
|
||||||
|
|
|
@ -1087,6 +1087,34 @@ fn typecheck_declarations_unstable() {
|
||||||
assert!(output.status.success());
|
assert!(output.status.success());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn typecheck_core() {
|
||||||
|
let deno_dir = TempDir::new().expect("tempdir fail");
|
||||||
|
let test_file = deno_dir.path().join("test_deno_core_types.ts");
|
||||||
|
std::fs::write(
|
||||||
|
&test_file,
|
||||||
|
format!(
|
||||||
|
"import \"{}\";",
|
||||||
|
deno_core::resolve_path(
|
||||||
|
util::root_path()
|
||||||
|
.join("core/lib.deno_core.d.ts")
|
||||||
|
.to_str()
|
||||||
|
.unwrap()
|
||||||
|
)
|
||||||
|
.unwrap()
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
let output = util::deno_cmd_with_deno_dir(deno_dir.path())
|
||||||
|
.arg("run")
|
||||||
|
.arg(test_file.to_str().unwrap())
|
||||||
|
.output()
|
||||||
|
.unwrap();
|
||||||
|
println!("stdout: {}", String::from_utf8(output.stdout).unwrap());
|
||||||
|
println!("stderr: {}", String::from_utf8(output.stderr).unwrap());
|
||||||
|
assert!(output.status.success());
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn js_unit_tests_lint() {
|
fn js_unit_tests_lint() {
|
||||||
let status = util::deno_cmd()
|
let status = util::deno_cmd()
|
||||||
|
|
6
cli/tests/testdata/bundle_ignore_directives.test.out
vendored
Normal file
6
cli/tests/testdata/bundle_ignore_directives.test.out
vendored
Normal file
|
@ -0,0 +1,6 @@
|
||||||
|
[WILDCARD]
|
||||||
|
// deno-fmt-ignore-file
|
||||||
|
// deno-lint-ignore-file
|
||||||
|
// This code was bundled using `deno bundle` and it's not recommended to edit it manually
|
||||||
|
|
||||||
|
[WILDCARD]
|
2
cli/tests/testdata/compat/import_map.json
vendored
2
cli/tests/testdata/compat/import_map.json
vendored
|
@ -1,5 +1,5 @@
|
||||||
{
|
{
|
||||||
"imports": {
|
"imports": {
|
||||||
"std/": "https://deno.land/std@0.120.0/"
|
"std/": "https://deno.land/std@0.121.0/"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
import { sortBy } from "std/collections/sort_by.ts";
|
import { sortBy } from "std/collections/sort_by.ts";
|
||||||
import { findSingle } from "https://deno.land/std@0.120.0/collections/find_single.ts";
|
import { findSingle } from "https://deno.land/std@0.121.0/collections/find_single.ts";
|
||||||
import os from "node:os";
|
import os from "node:os";
|
||||||
|
|
||||||
console.log(sortBy([2, 3, 1], (it) => it));
|
console.log(sortBy([2, 3, 1], (it) => it));
|
||||||
|
|
13
cli/tests/testdata/compiler_api_test.ts
vendored
13
cli/tests/testdata/compiler_api_test.ts
vendored
|
@ -418,10 +418,21 @@ Deno.test({
|
||||||
"/b.ts": `export const b = "b";`,
|
"/b.ts": `export const b = "b";`,
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
const ignoreDirecives = [
|
||||||
|
"// deno-fmt-ignore-file",
|
||||||
|
"// deno-lint-ignore-file",
|
||||||
|
"// This code was bundled using `deno bundle` and it's not recommended to edit it manually",
|
||||||
|
"",
|
||||||
|
"",
|
||||||
|
].join("\n");
|
||||||
assert(diagnostics);
|
assert(diagnostics);
|
||||||
assertEquals(diagnostics.length, 0);
|
assertEquals(diagnostics.length, 0);
|
||||||
assertEquals(Object.keys(files).length, 2);
|
assertEquals(Object.keys(files).length, 2);
|
||||||
assert(files["deno:///bundle.js"].startsWith("(function() {\n"));
|
assert(
|
||||||
|
files["deno:///bundle.js"].startsWith(
|
||||||
|
ignoreDirecives + "(function() {\n",
|
||||||
|
),
|
||||||
|
);
|
||||||
assert(files["deno:///bundle.js"].endsWith("})();\n"));
|
assert(files["deno:///bundle.js"].endsWith("})();\n"));
|
||||||
assert(files["deno:///bundle.js.map"]);
|
assert(files["deno:///bundle.js.map"]);
|
||||||
},
|
},
|
||||||
|
|
8
cli/tests/testdata/coverage/multifile/a_test.js
vendored
Normal file
8
cli/tests/testdata/coverage/multifile/a_test.js
vendored
Normal file
|
@ -0,0 +1,8 @@
|
||||||
|
import { test } from "./mod.js";
|
||||||
|
|
||||||
|
Deno.test({
|
||||||
|
name: "bugrepo a",
|
||||||
|
fn: () => {
|
||||||
|
test(true);
|
||||||
|
},
|
||||||
|
});
|
8
cli/tests/testdata/coverage/multifile/b_test.js
vendored
Normal file
8
cli/tests/testdata/coverage/multifile/b_test.js
vendored
Normal file
|
@ -0,0 +1,8 @@
|
||||||
|
import { test } from "./mod.js";
|
||||||
|
|
||||||
|
Deno.test({
|
||||||
|
name: "bugrepo b",
|
||||||
|
fn: () => {
|
||||||
|
test(false);
|
||||||
|
},
|
||||||
|
});
|
18
cli/tests/testdata/coverage/multifile/expected.lcov
vendored
Normal file
18
cli/tests/testdata/coverage/multifile/expected.lcov
vendored
Normal file
|
@ -0,0 +1,18 @@
|
||||||
|
SF:[WILDCARD]mod.js
|
||||||
|
FN:1,test
|
||||||
|
FNDA:2,test
|
||||||
|
FNF:1
|
||||||
|
FNH:1
|
||||||
|
BRDA:2,1,0,1
|
||||||
|
BRF:1
|
||||||
|
BRH:1
|
||||||
|
DA:1,2
|
||||||
|
DA:2,4
|
||||||
|
DA:3,5
|
||||||
|
DA:4,5
|
||||||
|
DA:5,5
|
||||||
|
DA:6,4
|
||||||
|
DA:7,1
|
||||||
|
LH:7
|
||||||
|
LF:7
|
||||||
|
end_of_record
|
1
cli/tests/testdata/coverage/multifile/expected.out
vendored
Normal file
1
cli/tests/testdata/coverage/multifile/expected.out
vendored
Normal file
|
@ -0,0 +1 @@
|
||||||
|
cover [WILDCARD]/multifile/mod.js ... 100.000% (7/7)
|
6
cli/tests/testdata/coverage/multifile/mod.js
vendored
Normal file
6
cli/tests/testdata/coverage/multifile/mod.js
vendored
Normal file
|
@ -0,0 +1,6 @@
|
||||||
|
export function test(a) {
|
||||||
|
if (a) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
return 1;
|
||||||
|
}
|
|
@ -1444,6 +1444,15 @@ Deno.test(async function testAesGcmEncrypt() {
|
||||||
// deno-fmt-ignore
|
// deno-fmt-ignore
|
||||||
new Uint8Array([50,223,112,178,166,156,255,110,125,138,95,141,82,47,14,164,134,247,22]),
|
new Uint8Array([50,223,112,178,166,156,255,110,125,138,95,141,82,47,14,164,134,247,22]),
|
||||||
);
|
);
|
||||||
|
|
||||||
|
const plainText = await crypto.subtle.decrypt(
|
||||||
|
{ name: "AES-GCM", iv, additionalData: new Uint8Array() },
|
||||||
|
key,
|
||||||
|
cipherText,
|
||||||
|
);
|
||||||
|
assert(plainText instanceof ArrayBuffer);
|
||||||
|
assertEquals(plainText.byteLength, 3);
|
||||||
|
assertEquals(new Uint8Array(plainText), data);
|
||||||
});
|
});
|
||||||
|
|
||||||
async function roundTripSecretJwk(
|
async function roundTripSecretJwk(
|
||||||
|
@ -1561,3 +1570,61 @@ Deno.test(async function testSecretJwkBase64Url() {
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
Deno.test(async function testAESWrapKey() {
|
||||||
|
const key = await crypto.subtle.generateKey(
|
||||||
|
{
|
||||||
|
name: "AES-KW",
|
||||||
|
length: 128,
|
||||||
|
},
|
||||||
|
true,
|
||||||
|
["wrapKey", "unwrapKey"],
|
||||||
|
);
|
||||||
|
|
||||||
|
const hmacKey = await crypto.subtle.generateKey(
|
||||||
|
{
|
||||||
|
name: "HMAC",
|
||||||
|
hash: "SHA-256",
|
||||||
|
length: 128,
|
||||||
|
},
|
||||||
|
true,
|
||||||
|
["sign"],
|
||||||
|
);
|
||||||
|
|
||||||
|
//round-trip
|
||||||
|
// wrap-unwrap-export compare
|
||||||
|
const wrappedKey = await crypto.subtle.wrapKey(
|
||||||
|
"raw",
|
||||||
|
hmacKey,
|
||||||
|
key,
|
||||||
|
{
|
||||||
|
name: "AES-KW",
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
assert(wrappedKey instanceof ArrayBuffer);
|
||||||
|
assertEquals(wrappedKey.byteLength, 16 + 8); // 8 = 'auth tag'
|
||||||
|
|
||||||
|
const unwrappedKey = await crypto.subtle.unwrapKey(
|
||||||
|
"raw",
|
||||||
|
wrappedKey,
|
||||||
|
key,
|
||||||
|
{
|
||||||
|
name: "AES-KW",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "HMAC",
|
||||||
|
hash: "SHA-256",
|
||||||
|
},
|
||||||
|
true,
|
||||||
|
["sign"],
|
||||||
|
);
|
||||||
|
|
||||||
|
assert(unwrappedKey instanceof CryptoKey);
|
||||||
|
assertEquals((unwrappedKey.algorithm as HmacKeyAlgorithm).length, 128);
|
||||||
|
|
||||||
|
const hmacKeyBytes = await crypto.subtle.exportKey("raw", hmacKey);
|
||||||
|
const unwrappedKeyBytes = await crypto.subtle.exportKey("raw", unwrappedKey);
|
||||||
|
|
||||||
|
assertEquals(new Uint8Array(hmacKeyBytes), new Uint8Array(unwrappedKeyBytes));
|
||||||
|
});
|
||||||
|
|
58
cli/tools/coverage/json_types.rs
Normal file
58
cli/tools/coverage/json_types.rs
Normal file
|
@ -0,0 +1,58 @@
|
||||||
|
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||||
|
|
||||||
|
use serde::Deserialize;
|
||||||
|
use serde::Serialize;
|
||||||
|
|
||||||
|
#[derive(Debug, Eq, PartialEq, Serialize, Deserialize, Clone)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
pub struct CoverageRange {
|
||||||
|
/// Start byte index.
|
||||||
|
pub start_offset: usize,
|
||||||
|
/// End byte index.
|
||||||
|
pub end_offset: usize,
|
||||||
|
pub count: i64,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Eq, PartialEq, Serialize, Deserialize, Clone)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
pub struct FunctionCoverage {
|
||||||
|
pub function_name: String,
|
||||||
|
pub ranges: Vec<CoverageRange>,
|
||||||
|
pub is_block_coverage: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Eq, PartialEq, Serialize, Deserialize, Clone)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
pub struct ScriptCoverage {
|
||||||
|
pub script_id: String,
|
||||||
|
pub url: String,
|
||||||
|
pub functions: Vec<FunctionCoverage>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
pub struct StartPreciseCoverageParameters {
|
||||||
|
pub call_count: bool,
|
||||||
|
pub detailed: bool,
|
||||||
|
pub allow_triggered_updates: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
pub struct StartPreciseCoverageReturnObject {
|
||||||
|
pub timestamp: f64,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
pub struct TakePreciseCoverageReturnObject {
|
||||||
|
pub result: Vec<ScriptCoverage>,
|
||||||
|
pub timestamp: f64,
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO(bartlomieju): remove me
|
||||||
|
#[derive(Eq, PartialEq, Clone, Debug, Serialize, Deserialize)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
pub struct ProcessCoverage {
|
||||||
|
pub result: Vec<ScriptCoverage>,
|
||||||
|
}
|
cli/tools/coverage/merge.rs (new file, 840 lines):

@@ -0,0 +1,840 @@
|
||||||
|
// Forked from https://github.com/demurgos/v8-coverage/tree/d0ca18da8740198681e0bc68971b0a6cdb11db3e/rust
|
||||||
|
// Copyright 2021 Charles Samborski. All rights reserved. MIT license.
|
||||||
|
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||||
|
|
||||||
|
use super::json_types::CoverageRange;
|
||||||
|
use super::json_types::FunctionCoverage;
|
||||||
|
use super::json_types::ProcessCoverage;
|
||||||
|
use super::json_types::ScriptCoverage;
|
||||||
|
use super::range_tree::RangeTree;
|
||||||
|
use super::range_tree::RangeTreeArena;
|
||||||
|
use std::collections::BTreeMap;
|
||||||
|
use std::collections::BTreeSet;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::iter::Peekable;
|
||||||
|
|
||||||
|
pub fn merge_processes(
|
||||||
|
mut processes: Vec<ProcessCoverage>,
|
||||||
|
) -> Option<ProcessCoverage> {
|
||||||
|
if processes.len() <= 1 {
|
||||||
|
return processes.pop();
|
||||||
|
}
|
||||||
|
let mut url_to_scripts: BTreeMap<String, Vec<ScriptCoverage>> =
|
||||||
|
BTreeMap::new();
|
||||||
|
for process_cov in processes {
|
||||||
|
for script_cov in process_cov.result {
|
||||||
|
url_to_scripts
|
||||||
|
.entry(script_cov.url.clone())
|
||||||
|
.or_insert_with(Vec::new)
|
||||||
|
.push(script_cov);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let result: Vec<ScriptCoverage> = url_to_scripts
|
||||||
|
.into_iter()
|
||||||
|
.enumerate()
|
||||||
|
.map(|(script_id, (_, scripts))| (script_id, scripts))
|
||||||
|
.map(|(script_id, scripts)| {
|
||||||
|
let mut merged: ScriptCoverage = merge_scripts(scripts.to_vec()).unwrap();
|
||||||
|
merged.script_id = script_id.to_string();
|
||||||
|
merged
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
Some(ProcessCoverage { result })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn merge_scripts(
|
||||||
|
mut scripts: Vec<ScriptCoverage>,
|
||||||
|
) -> Option<ScriptCoverage> {
|
||||||
|
if scripts.len() <= 1 {
|
||||||
|
return scripts.pop();
|
||||||
|
}
|
||||||
|
let (script_id, url) = {
|
||||||
|
let first: &ScriptCoverage = &scripts[0];
|
||||||
|
(first.script_id.clone(), first.url.clone())
|
||||||
|
};
|
||||||
|
let mut range_to_funcs: BTreeMap<Range, Vec<FunctionCoverage>> =
|
||||||
|
BTreeMap::new();
|
||||||
|
for script_cov in scripts {
|
||||||
|
for func_cov in script_cov.functions {
|
||||||
|
let root_range = {
|
||||||
|
let root_range_cov: &CoverageRange = &func_cov.ranges[0];
|
||||||
|
Range {
|
||||||
|
start: root_range_cov.start_offset,
|
||||||
|
end: root_range_cov.end_offset,
|
||||||
|
}
|
||||||
|
};
|
||||||
|
range_to_funcs
|
||||||
|
.entry(root_range)
|
||||||
|
.or_insert_with(Vec::new)
|
||||||
|
.push(func_cov);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let functions: Vec<FunctionCoverage> = range_to_funcs
|
||||||
|
.into_iter()
|
||||||
|
.map(|(_, funcs)| merge_functions(funcs).unwrap())
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
Some(ScriptCoverage {
|
||||||
|
script_id,
|
||||||
|
url,
|
||||||
|
functions,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Eq, PartialEq, Hash, Copy, Clone, Debug)]
|
||||||
|
struct Range {
|
||||||
|
start: usize,
|
||||||
|
end: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Ord for Range {
|
||||||
|
fn cmp(&self, other: &Self) -> ::std::cmp::Ordering {
|
||||||
|
if self.start != other.start {
|
||||||
|
self.start.cmp(&other.start)
|
||||||
|
} else {
|
||||||
|
other.end.cmp(&self.end)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PartialOrd for Range {
|
||||||
|
fn partial_cmp(&self, other: &Self) -> Option<::std::cmp::Ordering> {
|
||||||
|
if self.start != other.start {
|
||||||
|
self.start.partial_cmp(&other.start)
|
||||||
|
} else {
|
||||||
|
other.end.partial_cmp(&self.end)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn merge_functions(
|
||||||
|
mut funcs: Vec<FunctionCoverage>,
|
||||||
|
) -> Option<FunctionCoverage> {
|
||||||
|
if funcs.len() <= 1 {
|
||||||
|
return funcs.pop();
|
||||||
|
}
|
||||||
|
let function_name = funcs[0].function_name.clone();
|
||||||
|
let rta_capacity: usize =
|
||||||
|
funcs.iter().fold(0, |acc, func| acc + func.ranges.len());
|
||||||
|
let rta = RangeTreeArena::with_capacity(rta_capacity);
|
||||||
|
let mut trees: Vec<&mut RangeTree> = Vec::new();
|
||||||
|
for func in funcs {
|
||||||
|
if let Some(tree) = RangeTree::from_sorted_ranges(&rta, &func.ranges) {
|
||||||
|
trees.push(tree);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let merged =
|
||||||
|
RangeTree::normalize(&rta, merge_range_trees(&rta, trees).unwrap());
|
||||||
|
let ranges = merged.to_ranges();
|
||||||
|
let is_block_coverage: bool = !(ranges.len() == 1 && ranges[0].count == 0);
|
||||||
|
|
||||||
|
Some(FunctionCoverage {
|
||||||
|
function_name,
|
||||||
|
ranges,
|
||||||
|
is_block_coverage,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn merge_range_trees<'a>(
|
||||||
|
rta: &'a RangeTreeArena<'a>,
|
||||||
|
mut trees: Vec<&'a mut RangeTree<'a>>,
|
||||||
|
) -> Option<&'a mut RangeTree<'a>> {
|
||||||
|
if trees.len() <= 1 {
|
||||||
|
return trees.pop();
|
||||||
|
}
|
||||||
|
let (start, end) = {
|
||||||
|
let first = &trees[0];
|
||||||
|
(first.start, first.end)
|
||||||
|
};
|
||||||
|
let delta: i64 = trees.iter().fold(0, |acc, tree| acc + tree.delta);
|
||||||
|
let children = merge_range_tree_children(rta, trees);
|
||||||
|
|
||||||
|
Some(rta.alloc(RangeTree::new(start, end, delta, children)))
|
||||||
|
}
|
||||||
|
|
||||||
|
struct StartEvent<'a> {
|
||||||
|
offset: usize,
|
||||||
|
trees: Vec<(usize, &'a mut RangeTree<'a>)>,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn into_start_events<'a>(trees: Vec<&'a mut RangeTree<'a>>) -> Vec<StartEvent> {
|
||||||
|
let mut result: BTreeMap<usize, Vec<(usize, &'a mut RangeTree<'a>)>> =
|
||||||
|
BTreeMap::new();
|
||||||
|
for (parent_index, tree) in trees.into_iter().enumerate() {
|
||||||
|
for child in tree.children.drain(..) {
|
||||||
|
result
|
||||||
|
.entry(child.start)
|
||||||
|
.or_insert_with(Vec::new)
|
||||||
|
.push((parent_index, child));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
result
|
||||||
|
.into_iter()
|
||||||
|
.map(|(offset, trees)| StartEvent { offset, trees })
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
struct StartEventQueue<'a> {
|
||||||
|
pending: Option<StartEvent<'a>>,
|
||||||
|
queue: Peekable<::std::vec::IntoIter<StartEvent<'a>>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> StartEventQueue<'a> {
|
||||||
|
pub fn new(queue: Vec<StartEvent<'a>>) -> StartEventQueue<'a> {
|
||||||
|
StartEventQueue {
|
||||||
|
pending: None,
|
||||||
|
queue: queue.into_iter().peekable(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn set_pending_offset(&mut self, offset: usize) {
|
||||||
|
self.pending = Some(StartEvent {
|
||||||
|
offset,
|
||||||
|
trees: Vec::new(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn push_pending_tree(
|
||||||
|
&mut self,
|
||||||
|
tree: (usize, &'a mut RangeTree<'a>),
|
||||||
|
) {
|
||||||
|
self.pending = self.pending.take().map(|mut start_event| {
|
||||||
|
start_event.trees.push(tree);
|
||||||
|
start_event
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> Iterator for StartEventQueue<'a> {
|
||||||
|
type Item = StartEvent<'a>;
|
||||||
|
|
||||||
|
fn next(&mut self) -> Option<<Self as Iterator>::Item> {
|
||||||
|
    let pending_offset: Option<usize> = match &self.pending {
      Some(ref start_event) if !start_event.trees.is_empty() => {
        Some(start_event.offset)
      }
      _ => None,
    };

    match pending_offset {
      Some(pending_offset) => {
        let queue_offset =
          self.queue.peek().map(|start_event| start_event.offset);
        match queue_offset {
          None => self.pending.take(),
          Some(queue_offset) => {
            if pending_offset < queue_offset {
              self.pending.take()
            } else {
              let mut result = self.queue.next().unwrap();
              if pending_offset == queue_offset {
                let pending_trees = self.pending.take().unwrap().trees;
                result.trees.extend(pending_trees.into_iter())
              }
              Some(result)
            }
          }
        }
      }
      None => self.queue.next(),
    }
  }
}

fn merge_range_tree_children<'a>(
  rta: &'a RangeTreeArena<'a>,
  parent_trees: Vec<&'a mut RangeTree<'a>>,
) -> Vec<&'a mut RangeTree<'a>> {
  let mut flat_children: Vec<Vec<&'a mut RangeTree<'a>>> =
    Vec::with_capacity(parent_trees.len());
  let mut wrapped_children: Vec<Vec<&'a mut RangeTree<'a>>> =
    Vec::with_capacity(parent_trees.len());
  let mut open_range: Option<Range> = None;

  for _parent_tree in parent_trees.iter() {
    flat_children.push(Vec::new());
    wrapped_children.push(Vec::new());
  }

  let mut start_event_queue =
    StartEventQueue::new(into_start_events(parent_trees));

  let mut parent_to_nested: HashMap<usize, Vec<&'a mut RangeTree<'a>>> =
    HashMap::new();

  while let Some(event) = start_event_queue.next() {
    open_range = if let Some(open_range) = open_range {
      if open_range.end <= event.offset {
        for (parent_index, nested) in parent_to_nested {
          wrapped_children[parent_index].push(rta.alloc(RangeTree::new(
            open_range.start,
            open_range.end,
            0,
            nested,
          )));
        }
        parent_to_nested = HashMap::new();
        None
      } else {
        Some(open_range)
      }
    } else {
      None
    };

    match open_range {
      Some(open_range) => {
        for (parent_index, tree) in event.trees {
          let child = if tree.end > open_range.end {
            let (left, right) = RangeTree::split(rta, tree, open_range.end);
            start_event_queue.push_pending_tree((parent_index, right));
            left
          } else {
            tree
          };
          parent_to_nested
            .entry(parent_index)
            .or_insert_with(Vec::new)
            .push(child);
        }
      }
      None => {
        let mut open_range_end: usize = event.offset + 1;
        for (_, ref tree) in &event.trees {
          open_range_end = if tree.end > open_range_end {
            tree.end
          } else {
            open_range_end
          };
        }
        for (parent_index, tree) in event.trees {
          if tree.end == open_range_end {
            flat_children[parent_index].push(tree);
            continue;
          }
          parent_to_nested
            .entry(parent_index)
            .or_insert_with(Vec::new)
            .push(tree);
        }
        start_event_queue.set_pending_offset(open_range_end);
        open_range = Some(Range {
          start: event.offset,
          end: open_range_end,
        });
      }
    }
  }
  if let Some(open_range) = open_range {
    for (parent_index, nested) in parent_to_nested {
      wrapped_children[parent_index].push(rta.alloc(RangeTree::new(
        open_range.start,
        open_range.end,
        0,
        nested,
      )));
    }
  }

  let child_forests: Vec<Vec<&'a mut RangeTree<'a>>> = flat_children
    .into_iter()
    .zip(wrapped_children.into_iter())
    .map(|(flat, wrapped)| merge_children_lists(flat, wrapped))
    .collect();

  let events = get_child_events_from_forests(&child_forests);

  let mut child_forests: Vec<
    Peekable<::std::vec::IntoIter<&'a mut RangeTree<'a>>>,
  > = child_forests
    .into_iter()
    .map(|forest| forest.into_iter().peekable())
    .collect();

  let mut result: Vec<&'a mut RangeTree<'a>> = Vec::new();
  for event in events.iter() {
    let mut matching_trees: Vec<&'a mut RangeTree<'a>> = Vec::new();
    for (_parent_index, children) in child_forests.iter_mut().enumerate() {
      let next_tree: Option<&'a mut RangeTree<'a>> = {
        if children.peek().map_or(false, |tree| tree.start == *event) {
          children.next()
        } else {
          None
        }
      };
      if let Some(next_tree) = next_tree {
        matching_trees.push(next_tree);
      }
    }
    if let Some(merged) = merge_range_trees(rta, matching_trees) {
      result.push(merged);
    }
  }

  result
}

fn get_child_events_from_forests<'a>(
  forests: &[Vec<&'a mut RangeTree<'a>>],
) -> BTreeSet<usize> {
  let mut event_set: BTreeSet<usize> = BTreeSet::new();
  for forest in forests {
    for tree in forest {
      event_set.insert(tree.start);
      event_set.insert(tree.end);
    }
  }
  event_set
}

// TODO: itertools?
// https://play.integer32.com/?gist=ad2cd20d628e647a5dbdd82e68a15cb6&version=stable&mode=debug&edition=2015
fn merge_children_lists<'a>(
  a: Vec<&'a mut RangeTree<'a>>,
  b: Vec<&'a mut RangeTree<'a>>,
) -> Vec<&'a mut RangeTree<'a>> {
  let mut merged: Vec<&'a mut RangeTree<'a>> = Vec::new();
  let mut a = a.into_iter();
  let mut b = b.into_iter();
  let mut next_a = a.next();
  let mut next_b = b.next();
  loop {
    match (next_a, next_b) {
      (Some(tree_a), Some(tree_b)) => {
        if tree_a.start < tree_b.start {
          merged.push(tree_a);
          next_a = a.next();
          next_b = Some(tree_b);
        } else {
          merged.push(tree_b);
          next_a = Some(tree_a);
          next_b = b.next();
        }
      }
      (Some(tree_a), None) => {
        merged.push(tree_a);
        merged.extend(a);
        break;
      }
      (None, Some(tree_b)) => {
        merged.push(tree_b);
        merged.extend(b);
        break;
      }
      (None, None) => break,
    }
  }

  merged
}

#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
// use test_generator::test_resources;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn empty() {
|
||||||
|
let inputs: Vec<ProcessCoverage> = Vec::new();
|
||||||
|
let expected: Option<ProcessCoverage> = None;
|
||||||
|
|
||||||
|
assert_eq!(merge_processes(inputs), expected);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn two_flat_trees() {
|
||||||
|
let inputs: Vec<ProcessCoverage> = vec![
|
||||||
|
ProcessCoverage {
|
||||||
|
result: vec![ScriptCoverage {
|
||||||
|
script_id: String::from("0"),
|
||||||
|
url: String::from("/lib.js"),
|
||||||
|
functions: vec![FunctionCoverage {
|
||||||
|
function_name: String::from("lib"),
|
||||||
|
is_block_coverage: true,
|
||||||
|
ranges: vec![CoverageRange {
|
||||||
|
start_offset: 0,
|
||||||
|
end_offset: 9,
|
||||||
|
count: 1,
|
||||||
|
}],
|
||||||
|
}],
|
||||||
|
}],
|
||||||
|
},
|
||||||
|
ProcessCoverage {
|
||||||
|
result: vec![ScriptCoverage {
|
||||||
|
script_id: String::from("0"),
|
||||||
|
url: String::from("/lib.js"),
|
||||||
|
functions: vec![FunctionCoverage {
|
||||||
|
function_name: String::from("lib"),
|
||||||
|
is_block_coverage: true,
|
||||||
|
ranges: vec![CoverageRange {
|
||||||
|
start_offset: 0,
|
||||||
|
end_offset: 9,
|
||||||
|
count: 2,
|
||||||
|
}],
|
||||||
|
}],
|
||||||
|
}],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
let expected: Option<ProcessCoverage> = Some(ProcessCoverage {
|
||||||
|
result: vec![ScriptCoverage {
|
||||||
|
script_id: String::from("0"),
|
||||||
|
url: String::from("/lib.js"),
|
||||||
|
functions: vec![FunctionCoverage {
|
||||||
|
function_name: String::from("lib"),
|
||||||
|
is_block_coverage: true,
|
||||||
|
ranges: vec![CoverageRange {
|
||||||
|
start_offset: 0,
|
||||||
|
end_offset: 9,
|
||||||
|
count: 3,
|
||||||
|
}],
|
||||||
|
}],
|
||||||
|
}],
|
||||||
|
});
|
||||||
|
|
||||||
|
assert_eq!(merge_processes(inputs), expected);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn two_trees_with_matching_children() {
|
||||||
|
let inputs: Vec<ProcessCoverage> = vec![
|
||||||
|
ProcessCoverage {
|
||||||
|
result: vec![ScriptCoverage {
|
||||||
|
script_id: String::from("0"),
|
||||||
|
url: String::from("/lib.js"),
|
||||||
|
functions: vec![FunctionCoverage {
|
||||||
|
function_name: String::from("lib"),
|
||||||
|
is_block_coverage: true,
|
||||||
|
ranges: vec![
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 0,
|
||||||
|
end_offset: 9,
|
||||||
|
count: 10,
|
||||||
|
},
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 3,
|
||||||
|
end_offset: 6,
|
||||||
|
count: 1,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}],
|
||||||
|
}],
|
||||||
|
},
|
||||||
|
ProcessCoverage {
|
||||||
|
result: vec![ScriptCoverage {
|
||||||
|
script_id: String::from("0"),
|
||||||
|
url: String::from("/lib.js"),
|
||||||
|
functions: vec![FunctionCoverage {
|
||||||
|
function_name: String::from("lib"),
|
||||||
|
is_block_coverage: true,
|
||||||
|
ranges: vec![
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 0,
|
||||||
|
end_offset: 9,
|
||||||
|
count: 20,
|
||||||
|
},
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 3,
|
||||||
|
end_offset: 6,
|
||||||
|
count: 2,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}],
|
||||||
|
}],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
let expected: Option<ProcessCoverage> = Some(ProcessCoverage {
|
||||||
|
result: vec![ScriptCoverage {
|
||||||
|
script_id: String::from("0"),
|
||||||
|
url: String::from("/lib.js"),
|
||||||
|
functions: vec![FunctionCoverage {
|
||||||
|
function_name: String::from("lib"),
|
||||||
|
is_block_coverage: true,
|
||||||
|
ranges: vec![
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 0,
|
||||||
|
end_offset: 9,
|
||||||
|
count: 30,
|
||||||
|
},
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 3,
|
||||||
|
end_offset: 6,
|
||||||
|
count: 3,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}],
|
||||||
|
}],
|
||||||
|
});
|
||||||
|
|
||||||
|
assert_eq!(merge_processes(inputs), expected);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn two_trees_with_partially_overlapping_children() {
|
||||||
|
let inputs: Vec<ProcessCoverage> = vec![
|
||||||
|
ProcessCoverage {
|
||||||
|
result: vec![ScriptCoverage {
|
||||||
|
script_id: String::from("0"),
|
||||||
|
url: String::from("/lib.js"),
|
||||||
|
functions: vec![FunctionCoverage {
|
||||||
|
function_name: String::from("lib"),
|
||||||
|
is_block_coverage: true,
|
||||||
|
ranges: vec![
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 0,
|
||||||
|
end_offset: 9,
|
||||||
|
count: 10,
|
||||||
|
},
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 2,
|
||||||
|
end_offset: 5,
|
||||||
|
count: 1,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}],
|
||||||
|
}],
|
||||||
|
},
|
||||||
|
ProcessCoverage {
|
||||||
|
result: vec![ScriptCoverage {
|
||||||
|
script_id: String::from("0"),
|
||||||
|
url: String::from("/lib.js"),
|
||||||
|
functions: vec![FunctionCoverage {
|
||||||
|
function_name: String::from("lib"),
|
||||||
|
is_block_coverage: true,
|
||||||
|
ranges: vec![
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 0,
|
||||||
|
end_offset: 9,
|
||||||
|
count: 20,
|
||||||
|
},
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 4,
|
||||||
|
end_offset: 7,
|
||||||
|
count: 2,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}],
|
||||||
|
}],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
let expected: Option<ProcessCoverage> = Some(ProcessCoverage {
|
||||||
|
result: vec![ScriptCoverage {
|
||||||
|
script_id: String::from("0"),
|
||||||
|
url: String::from("/lib.js"),
|
||||||
|
functions: vec![FunctionCoverage {
|
||||||
|
function_name: String::from("lib"),
|
||||||
|
is_block_coverage: true,
|
||||||
|
ranges: vec![
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 0,
|
||||||
|
end_offset: 9,
|
||||||
|
count: 30,
|
||||||
|
},
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 2,
|
||||||
|
end_offset: 5,
|
||||||
|
count: 21,
|
||||||
|
},
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 4,
|
||||||
|
end_offset: 5,
|
||||||
|
count: 3,
|
||||||
|
},
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 5,
|
||||||
|
end_offset: 7,
|
||||||
|
count: 12,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}],
|
||||||
|
}],
|
||||||
|
});
|
||||||
|
|
||||||
|
assert_eq!(merge_processes(inputs), expected);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn two_trees_with_with_complementary_children_summing_to_the_same_count() {
|
||||||
|
let inputs: Vec<ProcessCoverage> = vec![
|
||||||
|
ProcessCoverage {
|
||||||
|
result: vec![ScriptCoverage {
|
||||||
|
script_id: String::from("0"),
|
||||||
|
url: String::from("/lib.js"),
|
||||||
|
functions: vec![FunctionCoverage {
|
||||||
|
function_name: String::from("lib"),
|
||||||
|
is_block_coverage: true,
|
||||||
|
ranges: vec![
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 0,
|
||||||
|
end_offset: 9,
|
||||||
|
count: 1,
|
||||||
|
},
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 1,
|
||||||
|
end_offset: 8,
|
||||||
|
count: 6,
|
||||||
|
},
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 1,
|
||||||
|
end_offset: 5,
|
||||||
|
count: 5,
|
||||||
|
},
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 5,
|
||||||
|
end_offset: 8,
|
||||||
|
count: 7,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}],
|
||||||
|
}],
|
||||||
|
},
|
||||||
|
ProcessCoverage {
|
||||||
|
result: vec![ScriptCoverage {
|
||||||
|
script_id: String::from("0"),
|
||||||
|
url: String::from("/lib.js"),
|
||||||
|
functions: vec![FunctionCoverage {
|
||||||
|
function_name: String::from("lib"),
|
||||||
|
is_block_coverage: true,
|
||||||
|
ranges: vec![
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 0,
|
||||||
|
end_offset: 9,
|
||||||
|
count: 4,
|
||||||
|
},
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 1,
|
||||||
|
end_offset: 8,
|
||||||
|
count: 8,
|
||||||
|
},
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 1,
|
||||||
|
end_offset: 5,
|
||||||
|
count: 9,
|
||||||
|
},
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 5,
|
||||||
|
end_offset: 8,
|
||||||
|
count: 7,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}],
|
||||||
|
}],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
let expected: Option<ProcessCoverage> = Some(ProcessCoverage {
|
||||||
|
result: vec![ScriptCoverage {
|
||||||
|
script_id: String::from("0"),
|
||||||
|
url: String::from("/lib.js"),
|
||||||
|
functions: vec![FunctionCoverage {
|
||||||
|
function_name: String::from("lib"),
|
||||||
|
is_block_coverage: true,
|
||||||
|
ranges: vec![
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 0,
|
||||||
|
end_offset: 9,
|
||||||
|
count: 5,
|
||||||
|
},
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 1,
|
||||||
|
end_offset: 8,
|
||||||
|
count: 14,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}],
|
||||||
|
}],
|
||||||
|
});
|
||||||
|
|
||||||
|
assert_eq!(merge_processes(inputs), expected);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn merges_a_similar_sliding_chain_a_bc() {
|
||||||
|
let inputs: Vec<ProcessCoverage> = vec![
|
||||||
|
ProcessCoverage {
|
||||||
|
result: vec![ScriptCoverage {
|
||||||
|
script_id: String::from("0"),
|
||||||
|
url: String::from("/lib.js"),
|
||||||
|
functions: vec![FunctionCoverage {
|
||||||
|
function_name: String::from("lib"),
|
||||||
|
is_block_coverage: true,
|
||||||
|
ranges: vec![
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 0,
|
||||||
|
end_offset: 7,
|
||||||
|
count: 10,
|
||||||
|
},
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 0,
|
||||||
|
end_offset: 4,
|
||||||
|
count: 1,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}],
|
||||||
|
}],
|
||||||
|
},
|
||||||
|
ProcessCoverage {
|
||||||
|
result: vec![ScriptCoverage {
|
||||||
|
script_id: String::from("0"),
|
||||||
|
url: String::from("/lib.js"),
|
||||||
|
functions: vec![FunctionCoverage {
|
||||||
|
function_name: String::from("lib"),
|
||||||
|
is_block_coverage: true,
|
||||||
|
ranges: vec![
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 0,
|
||||||
|
end_offset: 7,
|
||||||
|
count: 20,
|
||||||
|
},
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 1,
|
||||||
|
end_offset: 6,
|
||||||
|
count: 11,
|
||||||
|
},
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 2,
|
||||||
|
end_offset: 5,
|
||||||
|
count: 2,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}],
|
||||||
|
}],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
let expected: Option<ProcessCoverage> = Some(ProcessCoverage {
|
||||||
|
result: vec![ScriptCoverage {
|
||||||
|
script_id: String::from("0"),
|
||||||
|
url: String::from("/lib.js"),
|
||||||
|
functions: vec![FunctionCoverage {
|
||||||
|
function_name: String::from("lib"),
|
||||||
|
is_block_coverage: true,
|
||||||
|
ranges: vec![
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 0,
|
||||||
|
end_offset: 7,
|
||||||
|
count: 30,
|
||||||
|
},
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 0,
|
||||||
|
end_offset: 6,
|
||||||
|
count: 21,
|
||||||
|
},
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 1,
|
||||||
|
end_offset: 5,
|
||||||
|
count: 12,
|
||||||
|
},
|
||||||
|
CoverageRange {
|
||||||
|
start_offset: 2,
|
||||||
|
end_offset: 4,
|
||||||
|
count: 3,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}],
|
||||||
|
}],
|
||||||
|
});
|
||||||
|
|
||||||
|
assert_eq!(merge_processes(inputs), expected);
|
||||||
|
}
|
||||||
|
}
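For orientation, the merge module above is driven by wrapping each process's ScriptCoverage list in a ProcessCoverage and folding the wrappers together with merge_processes, the same pattern the cover_files change further down applies. A minimal sketch only, assuming the json_types structs and merge_processes above are in scope; merge_two_runs and the one-wrapper-per-run shape are illustrative, not part of the commit:

// Hypothetical helper: merge coverage collected by two separate runs.
fn merge_two_runs(
  run_a: Vec<ScriptCoverage>,
  run_b: Vec<ScriptCoverage>,
) -> Vec<ScriptCoverage> {
  // One ProcessCoverage per run; merge_processes folds them together,
  // summing counts for matching scripts, functions and ranges.
  let inputs = vec![
    ProcessCoverage { result: run_a },
    ProcessCoverage { result: run_b },
  ];
  // None is returned when there is nothing to merge (e.g. no inputs).
  merge_processes(inputs)
    .map(|merged| merged.result)
    .unwrap_or_default()
}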
@@ -17,8 +17,6 @@ use deno_core::serde_json;
 use deno_core::url::Url;
 use deno_core::LocalInspectorSession;
 use regex::Regex;
-use serde::Deserialize;
-use serde::Serialize;
 use sourcemap::SourceMap;
 use std::fs;
 use std::fs::File;
@@ -28,52 +26,11 @@ use std::path::PathBuf;
 use text_lines::TextLines;
 use uuid::Uuid;
 
-#[derive(Debug, Serialize, Deserialize, Clone)]
-#[serde(rename_all = "camelCase")]
-struct CoverageRange {
-  /// Start byte index.
-  start_offset: usize,
-  /// End byte index.
-  end_offset: usize,
-  count: usize,
-}
-
-#[derive(Debug, Serialize, Deserialize, Clone)]
-#[serde(rename_all = "camelCase")]
-struct FunctionCoverage {
-  function_name: String,
-  ranges: Vec<CoverageRange>,
-  is_block_coverage: bool,
-}
-
-#[derive(Debug, Serialize, Deserialize, Clone)]
-#[serde(rename_all = "camelCase")]
-struct ScriptCoverage {
-  script_id: String,
-  url: String,
-  functions: Vec<FunctionCoverage>,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-struct StartPreciseCoverageParameters {
-  call_count: bool,
-  detailed: bool,
-  allow_triggered_updates: bool,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-struct StartPreciseCoverageReturnObject {
-  timestamp: f64,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-struct TakePreciseCoverageReturnObject {
-  result: Vec<ScriptCoverage>,
-  timestamp: f64,
-}
+mod json_types;
+mod merge;
+mod range_tree;
+
+use json_types::*;
 
 pub struct CoverageCollector {
   pub dir: PathBuf,
@@ -175,21 +132,21 @@ struct BranchCoverageItem {
   line_index: usize,
   block_number: usize,
   branch_number: usize,
-  taken: Option<usize>,
+  taken: Option<i64>,
   is_hit: bool,
 }
 
 struct FunctionCoverageItem {
   name: String,
   line_index: usize,
-  execution_count: usize,
+  execution_count: i64,
 }
 
 struct CoverageReport {
   url: ModuleSpecifier,
   named_functions: Vec<FunctionCoverageItem>,
   branches: Vec<BranchCoverageItem>,
-  found_lines: Vec<(usize, usize)>,
+  found_lines: Vec<(usize, i64)>,
 }
 
 fn generate_coverage_report(
@@ -353,7 +310,7 @@ fn generate_coverage_report(
       results.into_iter()
     })
     .flatten()
-    .collect::<Vec<(usize, usize)>>();
+    .collect::<Vec<(usize, i64)>>();
 
   found_lines.sort_unstable_by_key(|(index, _)| *index);
   // combine duplicated lines
@@ -369,7 +326,7 @@ fn generate_coverage_report(
       .into_iter()
       .enumerate()
      .map(|(index, count)| (index, count))
-      .collect::<Vec<(usize, usize)>>()
+      .collect::<Vec<(usize, i64)>>()
   };
 
   coverage_report
@@ -553,38 +510,7 @@ fn collect_coverages(
   for file_path in file_paths {
     let json = fs::read_to_string(file_path.as_path())?;
     let new_coverage: ScriptCoverage = serde_json::from_str(&json)?;
+    coverages.push(new_coverage);
-    let existing_coverage =
-      coverages.iter_mut().find(|x| x.url == new_coverage.url);
-
-    if let Some(existing_coverage) = existing_coverage {
-      for new_function in new_coverage.functions {
-        let existing_function = existing_coverage
-          .functions
-          .iter_mut()
-          .find(|x| x.function_name == new_function.function_name);
-
-        if let Some(existing_function) = existing_function {
-          for new_range in new_function.ranges {
-            let existing_range =
-              existing_function.ranges.iter_mut().find(|x| {
-                x.start_offset == new_range.start_offset
-                  && x.end_offset == new_range.end_offset
-              });
-
-            if let Some(existing_range) = existing_range {
-              existing_range.count += new_range.count;
-            } else {
-              existing_function.ranges.push(new_range);
-            }
-          }
-        } else {
-          existing_coverage.functions.push(new_function);
-        }
-      }
-    } else {
-      coverages.push(new_coverage);
-    }
   }
 
   coverages.sort_by_key(|k| k.url.clone());
@@ -632,6 +558,18 @@ pub async fn cover_files(
     coverage_flags.exclude,
   );
 
+  let proc_coverages: Vec<_> = script_coverages
+    .into_iter()
+    .map(|cov| ProcessCoverage { result: vec![cov] })
+    .collect();
+
+  let script_coverages = if let Some(c) = merge::merge_processes(proc_coverages)
+  {
+    c.result
+  } else {
+    vec![]
+  };
+
   let reporter_kind = if coverage_flags.lcov {
     CoverageReporterKind::Lcov
   } else {
207
cli/tools/coverage/range_tree.rs
Normal file
|
@ -0,0 +1,207 @@
|
||||||
|
// Forked from https://github.com/demurgos/v8-coverage/tree/d0ca18da8740198681e0bc68971b0a6cdb11db3e/rust
|
||||||
|
// Copyright 2021 Charles Samborski. All rights reserved. MIT license.
|
||||||
|
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
|
||||||
|
|
||||||
|
use super::json_types::CoverageRange;
|
||||||
|
use std::iter::Peekable;
|
||||||
|
use typed_arena::Arena;
|
||||||
|
|
||||||
|
pub struct RangeTreeArena<'a>(Arena<RangeTree<'a>>);
|
||||||
|
|
||||||
|
impl<'a> RangeTreeArena<'a> {
|
||||||
|
#[cfg(test)]
|
||||||
|
pub fn new() -> Self {
|
||||||
|
RangeTreeArena(Arena::new())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_capacity(n: usize) -> Self {
|
||||||
|
RangeTreeArena(Arena::with_capacity(n))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::mut_from_ref)]
|
||||||
|
pub fn alloc(&'a self, value: RangeTree<'a>) -> &'a mut RangeTree<'a> {
|
||||||
|
self.0.alloc(value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Eq, PartialEq, Debug)]
|
||||||
|
pub struct RangeTree<'a> {
|
||||||
|
pub start: usize,
|
||||||
|
pub end: usize,
|
||||||
|
pub delta: i64,
|
||||||
|
pub children: Vec<&'a mut RangeTree<'a>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'rt> RangeTree<'rt> {
|
||||||
|
pub fn new<'a>(
|
||||||
|
start: usize,
|
||||||
|
end: usize,
|
||||||
|
delta: i64,
|
||||||
|
children: Vec<&'a mut RangeTree<'a>>,
|
||||||
|
) -> RangeTree<'a> {
|
||||||
|
RangeTree {
|
||||||
|
start,
|
||||||
|
end,
|
||||||
|
delta,
|
||||||
|
children,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn split<'a>(
|
||||||
|
rta: &'a RangeTreeArena<'a>,
|
||||||
|
tree: &'a mut RangeTree<'a>,
|
||||||
|
value: usize,
|
||||||
|
) -> (&'a mut RangeTree<'a>, &'a mut RangeTree<'a>) {
|
||||||
|
let mut left_children: Vec<&'a mut RangeTree<'a>> = Vec::new();
|
||||||
|
let mut right_children: Vec<&'a mut RangeTree<'a>> = Vec::new();
|
||||||
|
for child in tree.children.iter_mut() {
|
||||||
|
if child.end <= value {
|
||||||
|
left_children.push(child);
|
||||||
|
} else if value <= child.start {
|
||||||
|
right_children.push(child);
|
||||||
|
} else {
|
||||||
|
let (left_child, right_child) = Self::split(rta, child, value);
|
||||||
|
left_children.push(left_child);
|
||||||
|
right_children.push(right_child);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let left = RangeTree::new(tree.start, value, tree.delta, left_children);
|
||||||
|
let right = RangeTree::new(value, tree.end, tree.delta, right_children);
|
||||||
|
(rta.alloc(left), rta.alloc(right))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn normalize<'a>(
|
||||||
|
rta: &'a RangeTreeArena<'a>,
|
||||||
|
tree: &'a mut RangeTree<'a>,
|
||||||
|
) -> &'a mut RangeTree<'a> {
|
||||||
|
tree.children = {
|
||||||
|
let mut children: Vec<&'a mut RangeTree<'a>> = Vec::new();
|
||||||
|
let mut chain: Vec<&'a mut RangeTree<'a>> = Vec::new();
|
||||||
|
for child in tree.children.drain(..) {
|
||||||
|
let is_chain_end: bool =
|
||||||
|
match chain.last().map(|tree| (tree.delta, tree.end)) {
|
||||||
|
Some((delta, chain_end)) => {
|
||||||
|
(delta, chain_end) != (child.delta, child.start)
|
||||||
|
}
|
||||||
|
None => false,
|
||||||
|
};
|
||||||
|
if is_chain_end {
|
||||||
|
let mut chain_iter = chain.drain(..);
|
||||||
|
let mut head: &'a mut RangeTree<'a> = chain_iter.next().unwrap();
|
||||||
|
for tree in chain_iter {
|
||||||
|
head.end = tree.end;
|
||||||
|
for sub_child in tree.children.drain(..) {
|
||||||
|
sub_child.delta += tree.delta - head.delta;
|
||||||
|
head.children.push(sub_child);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
children.push(RangeTree::normalize(rta, head));
|
||||||
|
}
|
||||||
|
chain.push(child)
|
||||||
|
}
|
||||||
|
if !chain.is_empty() {
|
||||||
|
let mut chain_iter = chain.drain(..);
|
||||||
|
let mut head: &'a mut RangeTree<'a> = chain_iter.next().unwrap();
|
||||||
|
for tree in chain_iter {
|
||||||
|
head.end = tree.end;
|
||||||
|
for sub_child in tree.children.drain(..) {
|
||||||
|
sub_child.delta += tree.delta - head.delta;
|
||||||
|
head.children.push(sub_child);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
children.push(RangeTree::normalize(rta, head));
|
||||||
|
}
|
||||||
|
|
||||||
|
if children.len() == 1
|
||||||
|
&& children[0].start == tree.start
|
||||||
|
&& children[0].end == tree.end
|
||||||
|
{
|
||||||
|
let normalized = children.remove(0);
|
||||||
|
normalized.delta += tree.delta;
|
||||||
|
return normalized;
|
||||||
|
}
|
||||||
|
|
||||||
|
children
|
||||||
|
};
|
||||||
|
|
||||||
|
tree
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn to_ranges(&self) -> Vec<CoverageRange> {
|
||||||
|
let mut ranges: Vec<CoverageRange> = Vec::new();
|
||||||
|
let mut stack: Vec<(&RangeTree, i64)> = vec![(self, 0)];
|
||||||
|
while let Some((cur, parent_count)) = stack.pop() {
|
||||||
|
let count: i64 = parent_count + cur.delta;
|
||||||
|
ranges.push(CoverageRange {
|
||||||
|
start_offset: cur.start,
|
||||||
|
end_offset: cur.end,
|
||||||
|
count,
|
||||||
|
});
|
||||||
|
for child in cur.children.iter().rev() {
|
||||||
|
stack.push((child, count))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ranges
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_sorted_ranges<'a>(
|
||||||
|
rta: &'a RangeTreeArena<'a>,
|
||||||
|
ranges: &[CoverageRange],
|
||||||
|
) -> Option<&'a mut RangeTree<'a>> {
|
||||||
|
Self::from_sorted_ranges_inner(
|
||||||
|
rta,
|
||||||
|
&mut ranges.iter().peekable(),
|
||||||
|
::std::usize::MAX,
|
||||||
|
0,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn from_sorted_ranges_inner<'a, 'b, 'c: 'b>(
|
||||||
|
rta: &'a RangeTreeArena<'a>,
|
||||||
|
ranges: &'b mut Peekable<impl Iterator<Item = &'c CoverageRange>>,
|
||||||
|
parent_end: usize,
|
||||||
|
parent_count: i64,
|
||||||
|
) -> Option<&'a mut RangeTree<'a>> {
|
||||||
|
let has_range: bool = match ranges.peek() {
|
||||||
|
None => false,
|
||||||
|
Some(range) => range.start_offset < parent_end,
|
||||||
|
};
|
||||||
|
if !has_range {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
let range = ranges.next().unwrap();
|
||||||
|
let start: usize = range.start_offset;
|
||||||
|
let end: usize = range.end_offset;
|
||||||
|
let count: i64 = range.count;
|
||||||
|
let delta: i64 = count - parent_count;
|
||||||
|
let mut children: Vec<&mut RangeTree> = Vec::new();
|
||||||
|
while let Some(child) =
|
||||||
|
Self::from_sorted_ranges_inner(rta, ranges, end, count)
|
||||||
|
{
|
||||||
|
children.push(child);
|
||||||
|
}
|
||||||
|
Some(rta.alloc(RangeTree::new(start, end, delta, children)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|

#[cfg(test)]
mod tests {
  use super::*;

  #[test]
  fn from_sorted_ranges_empty() {
    let rta = RangeTreeArena::new();
    let inputs: Vec<CoverageRange> = vec![CoverageRange {
      start_offset: 0,
      end_offset: 9,
      count: 1,
    }];
    let actual: Option<&mut RangeTree> =
      RangeTree::from_sorted_ranges(&rta, &inputs);
    let expected: Option<&mut RangeTree> =
      Some(rta.alloc(RangeTree::new(0, 9, 1, Vec::new())));

    assert_eq!(actual, expected);
  }
}

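As a rough illustration of the round trip these helpers give (a sketch only; RangeTreeArena, RangeTree and CoverageRange are the types defined above, and the offsets and counts are made up): sorted CoverageRanges become a delta-encoded tree, and to_ranges re-applies the parent counts on the way back out.

// Sketch: parse sorted ranges into a tree, then flatten it back.
fn range_tree_round_trip() {
  let rta = RangeTreeArena::with_capacity(8);
  let ranges = vec![
    CoverageRange { start_offset: 0, end_offset: 10, count: 2 },
    CoverageRange { start_offset: 3, end_offset: 6, count: 5 },
  ];
  let tree = RangeTree::from_sorted_ranges(&rta, &ranges)
    .expect("non-empty input yields a tree");
  // The nested range is stored as a delta from its parent: 5 - 2 = 3.
  assert_eq!(tree.children[0].delta, 3);
  // to_ranges walks the tree and restores the absolute counts.
  let restored = tree.to_ranges();
  assert_eq!(restored.len(), 2);
  assert_eq!((restored[1].start_offset, restored[1].count), (3, 5));
}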
@ -30,10 +30,10 @@ struct StubDocLoader;
|
||||||
impl Loader for StubDocLoader {
|
impl Loader for StubDocLoader {
|
||||||
fn load(
|
fn load(
|
||||||
&mut self,
|
&mut self,
|
||||||
specifier: &ModuleSpecifier,
|
_specifier: &ModuleSpecifier,
|
||||||
_is_dynamic: bool,
|
_is_dynamic: bool,
|
||||||
) -> LoadFuture {
|
) -> LoadFuture {
|
||||||
Box::pin(future::ready((specifier.clone(), Ok(None))))
|
Box::pin(future::ready(Ok(None)))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -50,7 +50,7 @@ impl Resolver for DocResolver {
|
||||||
) -> Result<ModuleSpecifier, AnyError> {
|
) -> Result<ModuleSpecifier, AnyError> {
|
||||||
if let Some(import_map) = &self.import_map {
|
if let Some(import_map) = &self.import_map {
|
||||||
return import_map
|
return import_map
|
||||||
.resolve(specifier, referrer.as_str())
|
.resolve(specifier, referrer)
|
||||||
.map_err(AnyError::from);
|
.map_err(AnyError::from);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -74,18 +74,16 @@ impl Loader for DocLoader {
|
||||||
let specifier = specifier.clone();
|
let specifier = specifier.clone();
|
||||||
let ps = self.ps.clone();
|
let ps = self.ps.clone();
|
||||||
async move {
|
async move {
|
||||||
let result = ps
|
ps.file_fetcher
|
||||||
.file_fetcher
|
|
||||||
.fetch(&specifier, &mut Permissions::allow_all())
|
.fetch(&specifier, &mut Permissions::allow_all())
|
||||||
.await
|
.await
|
||||||
.map(|file| {
|
.map(|file| {
|
||||||
Some(LoadResponse {
|
Some(LoadResponse {
|
||||||
specifier: specifier.clone(),
|
specifier,
|
||||||
content: file.source.clone(),
|
content: file.source.clone(),
|
||||||
maybe_headers: file.maybe_headers,
|
maybe_headers: file.maybe_headers,
|
||||||
})
|
})
|
||||||
});
|
})
|
||||||
(specifier.clone(), result)
|
|
||||||
}
|
}
|
||||||
.boxed_local()
|
.boxed_local()
|
||||||
}
|
}
|
||||||
|
@ -113,6 +111,7 @@ pub async fn print_docs(
|
||||||
None,
|
None,
|
||||||
None,
|
None,
|
||||||
None,
|
None,
|
||||||
|
None,
|
||||||
)
|
)
|
||||||
.await;
|
.await;
|
||||||
let doc_parser =
|
let doc_parser =
|
||||||
|
@ -152,6 +151,7 @@ pub async fn print_docs(
|
||||||
Some(&resolver),
|
Some(&resolver),
|
||||||
None,
|
None,
|
||||||
None,
|
None,
|
||||||
|
None,
|
||||||
)
|
)
|
||||||
.await;
|
.await;
|
||||||
let doc_parser =
|
let doc_parser =
|
||||||
|
|
|
@ -46,11 +46,12 @@ fn validate_name(exec_name: &str) -> Result<(), AnyError> {
|
||||||
/// One compatible with cmd & powershell with a .cmd extension
|
/// One compatible with cmd & powershell with a .cmd extension
|
||||||
/// A second compatible with git bash / MINGW64
|
/// A second compatible with git bash / MINGW64
|
||||||
/// Generate batch script to satisfy that.
|
/// Generate batch script to satisfy that.
|
||||||
fn generate_executable_file(
|
fn generate_executable_file(shim_data: &ShimData) -> Result<(), AnyError> {
|
||||||
mut file_path: PathBuf,
|
let args: Vec<String> = shim_data
|
||||||
args: Vec<String>,
|
.args
|
||||||
) -> Result<(), AnyError> {
|
.iter()
|
||||||
let args: Vec<String> = args.iter().map(|c| format!("\"{}\"", c)).collect();
|
.map(|c| format!("\"{}\"", c))
|
||||||
|
.collect();
|
||||||
let template = format!(
|
let template = format!(
|
||||||
"% generated by deno install %\n@deno {} %*\n",
|
"% generated by deno install %\n@deno {} %*\n",
|
||||||
args
|
args
|
||||||
|
@ -59,12 +60,11 @@ fn generate_executable_file(
|
||||||
.collect::<Vec<_>>()
|
.collect::<Vec<_>>()
|
||||||
.join(" ")
|
.join(" ")
|
||||||
);
|
);
|
||||||
let mut file = File::create(&file_path)?;
|
let mut file = File::create(&shim_data.file_path)?;
|
||||||
file.write_all(template.as_bytes())?;
|
file.write_all(template.as_bytes())?;
|
||||||
|
|
||||||
// write file for bash
|
// write file for bash
|
||||||
// create filepath without extensions
|
// create filepath without extensions
|
||||||
file_path.set_extension("");
|
|
||||||
let template = format!(
|
let template = format!(
|
||||||
r#"#!/bin/sh
|
r#"#!/bin/sh
|
||||||
# generated by deno install
|
# generated by deno install
|
||||||
|
@ -72,19 +72,17 @@ deno {} "$@"
|
||||||
"#,
|
"#,
|
||||||
args.join(" "),
|
args.join(" "),
|
||||||
);
|
);
|
||||||
let mut file = File::create(&file_path)?;
|
let mut file = File::create(&shim_data.file_path.with_extension(""))?;
|
||||||
file.write_all(template.as_bytes())?;
|
file.write_all(template.as_bytes())?;
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(windows))]
|
#[cfg(not(windows))]
|
||||||
fn generate_executable_file(
|
fn generate_executable_file(shim_data: &ShimData) -> Result<(), AnyError> {
|
||||||
file_path: PathBuf,
|
|
||||||
args: Vec<String>,
|
|
||||||
) -> Result<(), AnyError> {
|
|
||||||
use shell_escape::escape;
|
use shell_escape::escape;
|
||||||
let args: Vec<String> = args
|
let args: Vec<String> = shim_data
|
||||||
.into_iter()
|
.args
|
||||||
|
.iter()
|
||||||
.map(|c| escape(c.into()).into_owned())
|
.map(|c| escape(c.into()).into_owned())
|
||||||
.collect();
|
.collect();
|
||||||
let template = format!(
|
let template = format!(
|
||||||
|
@ -94,12 +92,12 @@ exec deno {} "$@"
|
||||||
"#,
|
"#,
|
||||||
args.join(" "),
|
args.join(" "),
|
||||||
);
|
);
|
||||||
let mut file = File::create(&file_path)?;
|
let mut file = File::create(&shim_data.file_path)?;
|
||||||
file.write_all(template.as_bytes())?;
|
file.write_all(template.as_bytes())?;
|
||||||
let _metadata = fs::metadata(&file_path)?;
|
let _metadata = fs::metadata(&shim_data.file_path)?;
|
||||||
let mut permissions = _metadata.permissions();
|
let mut permissions = _metadata.permissions();
|
||||||
permissions.set_mode(0o755);
|
permissions.set_mode(0o755);
|
||||||
fs::set_permissions(&file_path, permissions)?;
|
fs::set_permissions(&shim_data.file_path, permissions)?;
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -195,27 +193,73 @@ pub fn install(
|
||||||
flags: Flags,
|
flags: Flags,
|
||||||
install_flags: InstallFlags,
|
install_flags: InstallFlags,
|
||||||
) -> Result<(), AnyError> {
|
) -> Result<(), AnyError> {
|
||||||
let root = if let Some(root) = install_flags.root {
|
let shim_data = resolve_shim_data(&flags, &install_flags)?;
|
||||||
canonicalize_path(&root)?
|
|
||||||
} else {
|
|
||||||
get_installer_root()?
|
|
||||||
};
|
|
||||||
let installation_dir = root.join("bin");
|
|
||||||
|
|
||||||
// ensure directory exists
|
// ensure directory exists
|
||||||
if let Ok(metadata) = fs::metadata(&installation_dir) {
|
if let Ok(metadata) = fs::metadata(&shim_data.installation_dir) {
|
||||||
if !metadata.is_dir() {
|
if !metadata.is_dir() {
|
||||||
return Err(generic_error("Installation path is not a directory"));
|
return Err(generic_error("Installation path is not a directory"));
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
fs::create_dir_all(&installation_dir)?;
|
fs::create_dir_all(&shim_data.installation_dir)?;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
if shim_data.file_path.exists() && !install_flags.force {
|
||||||
|
return Err(generic_error(
|
||||||
|
"Existing installation found. Aborting (Use -f to overwrite).",
|
||||||
|
));
|
||||||
|
};
|
||||||
|
|
||||||
|
generate_executable_file(&shim_data)?;
|
||||||
|
for (path, contents) in shim_data.extra_files {
|
||||||
|
fs::write(path, contents)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
println!("✅ Successfully installed {}", shim_data.name);
|
||||||
|
println!("{}", shim_data.file_path.display());
|
||||||
|
if cfg!(windows) {
|
||||||
|
let display_path = shim_data.file_path.with_extension("");
|
||||||
|
println!("{} (shell)", display_path.display());
|
||||||
|
}
|
||||||
|
let installation_dir_str = shim_data.installation_dir.to_string_lossy();
|
||||||
|
|
||||||
|
if !is_in_path(&shim_data.installation_dir) {
|
||||||
|
println!("ℹ️ Add {} to PATH", installation_dir_str);
|
||||||
|
if cfg!(windows) {
|
||||||
|
println!(" set PATH=%PATH%;{}", installation_dir_str);
|
||||||
|
} else {
|
||||||
|
println!(" export PATH=\"{}:$PATH\"", installation_dir_str);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
struct ShimData {
|
||||||
|
name: String,
|
||||||
|
installation_dir: PathBuf,
|
||||||
|
file_path: PathBuf,
|
||||||
|
args: Vec<String>,
|
||||||
|
extra_files: Vec<(PathBuf, String)>,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn resolve_shim_data(
|
||||||
|
flags: &Flags,
|
||||||
|
install_flags: &InstallFlags,
|
||||||
|
) -> Result<ShimData, AnyError> {
|
||||||
|
let root = if let Some(root) = &install_flags.root {
|
||||||
|
canonicalize_path(root)?
|
||||||
|
} else {
|
||||||
|
get_installer_root()?
|
||||||
|
};
|
||||||
|
let installation_dir = root.join("bin");
|
||||||
|
|
||||||
// Check if module_url is remote
|
// Check if module_url is remote
|
||||||
let module_url = resolve_url_or_path(&install_flags.module_url)?;
|
let module_url = resolve_url_or_path(&install_flags.module_url)?;
|
||||||
|
|
||||||
let name = install_flags
|
let name = install_flags
|
||||||
.name
|
.name
|
||||||
|
.clone()
|
||||||
.or_else(|| infer_name_from_url(&module_url));
|
.or_else(|| infer_name_from_url(&module_url));
|
||||||
|
|
||||||
let name = match name {
|
let name = match name {
|
||||||
|
@ -232,12 +276,6 @@ pub fn install(
|
||||||
file_path = file_path.with_extension("cmd");
|
file_path = file_path.with_extension("cmd");
|
||||||
}
|
}
|
||||||
|
|
||||||
if file_path.exists() && !install_flags.force {
|
|
||||||
return Err(generic_error(
|
|
||||||
"Existing installation found. Aborting (Use -f to overwrite).",
|
|
||||||
));
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut extra_files: Vec<(PathBuf, String)> = vec![];
|
let mut extra_files: Vec<(PathBuf, String)> = vec![];
|
||||||
|
|
||||||
let mut executable_args = vec!["run".to_string()];
|
let mut executable_args = vec!["run".to_string()];
|
||||||
|
@ -246,9 +284,9 @@ pub fn install(
|
||||||
executable_args.push("--location".to_string());
|
executable_args.push("--location".to_string());
|
||||||
executable_args.push(url.to_string());
|
executable_args.push(url.to_string());
|
||||||
}
|
}
|
||||||
if let Some(ca_file) = flags.ca_file {
|
if let Some(ca_file) = &flags.ca_file {
|
||||||
executable_args.push("--cert".to_string());
|
executable_args.push("--cert".to_string());
|
||||||
executable_args.push(ca_file)
|
executable_args.push(ca_file.to_owned())
|
||||||
}
|
}
|
||||||
if let Some(log_level) = flags.log_level {
|
if let Some(log_level) = flags.log_level {
|
||||||
if log_level == Level::Error {
|
if log_level == Level::Error {
|
||||||
|
@ -290,6 +328,10 @@ pub fn install(
|
||||||
executable_args.push("--cached-only".to_string());
|
executable_args.push("--cached-only".to_string());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if flags.prompt {
|
||||||
|
executable_args.push("--prompt".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
if !flags.v8_flags.is_empty() {
|
if !flags.v8_flags.is_empty() {
|
||||||
executable_args.push(format!("--v8-flags={}", flags.v8_flags.join(",")));
|
executable_args.push(format!("--v8-flags={}", flags.v8_flags.join(",")));
|
||||||
}
|
}
|
||||||
|
@ -300,20 +342,20 @@ pub fn install(
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(inspect) = flags.inspect {
|
if let Some(inspect) = flags.inspect {
|
||||||
executable_args.push(format!("--inspect={}", inspect.to_string()));
|
executable_args.push(format!("--inspect={}", inspect));
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(inspect_brk) = flags.inspect_brk {
|
if let Some(inspect_brk) = flags.inspect_brk {
|
||||||
executable_args.push(format!("--inspect-brk={}", inspect_brk.to_string()));
|
executable_args.push(format!("--inspect-brk={}", inspect_brk));
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(import_map_path) = flags.import_map_path {
|
if let Some(import_map_path) = &flags.import_map_path {
|
||||||
let import_map_url = resolve_url_or_path(&import_map_path)?;
|
let import_map_url = resolve_url_or_path(import_map_path)?;
|
||||||
executable_args.push("--import-map".to_string());
|
executable_args.push("--import-map".to_string());
|
||||||
executable_args.push(import_map_url.to_string());
|
executable_args.push(import_map_url.to_string());
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(config_path) = flags.config_path {
|
if let Some(config_path) = &flags.config_path {
|
||||||
let mut copy_path = file_path.clone();
|
let mut copy_path = file_path.clone();
|
||||||
copy_path.set_extension("tsconfig.json");
|
copy_path.set_extension("tsconfig.json");
|
||||||
executable_args.push("--config".to_string());
|
executable_args.push("--config".to_string());
|
||||||
|
@ -321,7 +363,7 @@ pub fn install(
|
||||||
extra_files.push((copy_path, fs::read_to_string(config_path)?));
|
extra_files.push((copy_path, fs::read_to_string(config_path)?));
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(lock_path) = flags.lock {
|
if let Some(lock_path) = &flags.lock {
|
||||||
let mut copy_path = file_path.clone();
|
let mut copy_path = file_path.clone();
|
||||||
copy_path.set_extension("lock.json");
|
copy_path.set_extension("lock.json");
|
||||||
executable_args.push("--lock".to_string());
|
executable_args.push("--lock".to_string());
|
||||||
|
@ -332,29 +374,13 @@ pub fn install(
|
||||||
executable_args.push(module_url.to_string());
|
executable_args.push(module_url.to_string());
|
||||||
executable_args.extend_from_slice(&install_flags.args);
|
executable_args.extend_from_slice(&install_flags.args);
|
||||||
|
|
||||||
generate_executable_file(file_path.to_owned(), executable_args)?;
|
Ok(ShimData {
|
||||||
for (path, contents) in extra_files {
|
name,
|
||||||
fs::write(path, contents)?;
|
installation_dir,
|
||||||
}
|
file_path,
|
||||||
|
args: executable_args,
|
||||||
println!("✅ Successfully installed {}", name);
|
extra_files,
|
||||||
println!("{}", file_path.to_string_lossy());
|
})
|
||||||
if cfg!(windows) {
|
|
||||||
file_path.set_extension("");
|
|
||||||
println!("{} (shell)", file_path.to_string_lossy());
|
|
||||||
}
|
|
||||||
let installation_dir_str = installation_dir.to_string_lossy();
|
|
||||||
|
|
||||||
if !is_in_path(&installation_dir) {
|
|
||||||
println!("ℹ️ Add {} to PATH", installation_dir_str);
|
|
||||||
if cfg!(windows) {
|
|
||||||
println!(" set PATH=%PATH%;{}", installation_dir_str);
|
|
||||||
} else {
|
|
||||||
println!(" export PATH=\"{}:$PATH\"", installation_dir_str);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_in_path(dir: &Path) -> bool {
|
fn is_in_path(dir: &Path) -> bool {
|
||||||
|
@ -480,6 +506,16 @@ mod tests {
|
||||||
)
|
)
|
||||||
.expect("Install failed");
|
.expect("Install failed");
|
||||||
|
|
||||||
|
if let Some(home) = original_home {
|
||||||
|
env::set_var("HOME", home);
|
||||||
|
}
|
||||||
|
if let Some(user_profile) = original_user_profile {
|
||||||
|
env::set_var("USERPROFILE", user_profile);
|
||||||
|
}
|
||||||
|
if let Some(install_root) = original_install_root {
|
||||||
|
env::set_var("DENO_INSTALL_ROOT", install_root);
|
||||||
|
}
|
||||||
|
|
||||||
let mut file_path = temp_dir.path().join(".deno/bin/echo_test");
|
let mut file_path = temp_dir.path().join(".deno/bin/echo_test");
|
||||||
assert!(file_path.exists());
|
assert!(file_path.exists());
|
||||||
|
|
||||||
|
@ -498,15 +534,6 @@ mod tests {
|
||||||
} else {
|
} else {
|
||||||
assert!(content.contains(r#"run 'http://localhost:4545/echo_server.ts'"#));
|
assert!(content.contains(r#"run 'http://localhost:4545/echo_server.ts'"#));
|
||||||
}
|
}
|
||||||
if let Some(home) = original_home {
|
|
||||||
env::set_var("HOME", home);
|
|
||||||
}
|
|
||||||
if let Some(user_profile) = original_user_profile {
|
|
||||||
env::set_var("USERPROFILE", user_profile);
|
|
||||||
}
|
|
||||||
if let Some(install_root) = original_install_root {
|
|
||||||
env::set_var("DENO_INSTALL_ROOT", install_root);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
@ -551,104 +578,65 @@ mod tests {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn install_inferred_name() {
|
fn install_inferred_name() {
|
||||||
let temp_dir = TempDir::new().expect("tempdir fail");
|
let shim_data = resolve_shim_data(
|
||||||
let bin_dir = temp_dir.path().join("bin");
|
&Flags::default(),
|
||||||
std::fs::create_dir(&bin_dir).unwrap();
|
&InstallFlags {
|
||||||
|
|
||||||
install(
|
|
||||||
Flags::default(),
|
|
||||||
InstallFlags {
|
|
||||||
module_url: "http://localhost:4545/echo_server.ts".to_string(),
|
module_url: "http://localhost:4545/echo_server.ts".to_string(),
|
||||||
args: vec![],
|
args: vec![],
|
||||||
name: None,
|
name: None,
|
||||||
root: Some(temp_dir.path().to_path_buf()),
|
root: Some(env::temp_dir()),
|
||||||
force: false,
|
force: false,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
.expect("Install failed");
|
.unwrap();
|
||||||
|
|
||||||
let mut file_path = bin_dir.join("echo_server");
|
assert_eq!(shim_data.name, "echo_server");
|
||||||
if cfg!(windows) {
|
assert_eq!(
|
||||||
file_path = file_path.with_extension("cmd");
|
shim_data.args,
|
||||||
}
|
vec!["run", "http://localhost:4545/echo_server.ts",]
|
||||||
|
);
|
||||||
assert!(file_path.exists());
|
|
||||||
let content = fs::read_to_string(file_path).unwrap();
|
|
||||||
if cfg!(windows) {
|
|
||||||
assert!(
|
|
||||||
content.contains(r#""run" "http://localhost:4545/echo_server.ts""#)
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
assert!(content.contains(r#"run 'http://localhost:4545/echo_server.ts'"#));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn install_inferred_name_from_parent() {
|
fn install_inferred_name_from_parent() {
|
||||||
let temp_dir = TempDir::new().expect("tempdir fail");
|
let shim_data = resolve_shim_data(
|
||||||
let bin_dir = temp_dir.path().join("bin");
|
&Flags::default(),
|
||||||
std::fs::create_dir(&bin_dir).unwrap();
|
&InstallFlags {
|
||||||
|
|
||||||
install(
|
|
||||||
Flags::default(),
|
|
||||||
InstallFlags {
|
|
||||||
module_url: "http://localhost:4545/subdir/main.ts".to_string(),
|
module_url: "http://localhost:4545/subdir/main.ts".to_string(),
|
||||||
args: vec![],
|
args: vec![],
|
||||||
name: None,
|
name: None,
|
||||||
root: Some(temp_dir.path().to_path_buf()),
|
root: Some(env::temp_dir()),
|
||||||
force: false,
|
force: false,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
.expect("Install failed");
|
.unwrap();
|
||||||
|
|
||||||
let mut file_path = bin_dir.join("subdir");
|
assert_eq!(shim_data.name, "subdir");
|
||||||
if cfg!(windows) {
|
assert_eq!(
|
||||||
file_path = file_path.with_extension("cmd");
|
shim_data.args,
|
||||||
}
|
vec!["run", "http://localhost:4545/subdir/main.ts",]
|
||||||
|
);
|
||||||
assert!(file_path.exists());
|
|
||||||
let content = fs::read_to_string(file_path).unwrap();
|
|
||||||
if cfg!(windows) {
|
|
||||||
assert!(
|
|
||||||
content.contains(r#""run" "http://localhost:4545/subdir/main.ts""#)
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
assert!(content.contains(r#"run 'http://localhost:4545/subdir/main.ts'"#));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn install_custom_dir_option() {
|
fn install_custom_dir_option() {
|
||||||
let temp_dir = TempDir::new().expect("tempdir fail");
|
let shim_data = resolve_shim_data(
|
||||||
let bin_dir = temp_dir.path().join("bin");
|
&Flags::default(),
|
||||||
std::fs::create_dir(&bin_dir).unwrap();
|
&InstallFlags {
|
||||||
|
|
||||||
install(
|
|
||||||
Flags::default(),
|
|
||||||
InstallFlags {
|
|
||||||
module_url: "http://localhost:4545/echo_server.ts".to_string(),
|
module_url: "http://localhost:4545/echo_server.ts".to_string(),
|
||||||
args: vec![],
|
args: vec![],
|
||||||
name: Some("echo_test".to_string()),
|
name: Some("echo_test".to_string()),
|
||||||
root: Some(temp_dir.path().to_path_buf()),
|
root: Some(env::temp_dir()),
|
||||||
force: false,
|
force: false,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
.expect("Install failed");
|
.unwrap();
|
||||||
|
|
||||||
let mut file_path = bin_dir.join("echo_test");
|
assert_eq!(shim_data.name, "echo_test");
|
||||||
if cfg!(windows) {
|
assert_eq!(
|
||||||
file_path = file_path.with_extension("cmd");
|
shim_data.args,
|
||||||
}
|
vec!["run", "http://localhost:4545/echo_server.ts",]
|
||||||
|
);
|
||||||
assert!(file_path.exists());
|
|
||||||
let content = fs::read_to_string(file_path).unwrap();
|
|
||||||
if cfg!(windows) {
|
|
||||||
assert!(
|
|
||||||
content.contains(r#""run" "http://localhost:4545/echo_server.ts""#)
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
assert!(content.contains(r#"run 'http://localhost:4545/echo_server.ts'"#));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
@ -660,9 +648,9 @@ mod tests {
|
||||||
let original_install_root = env::var_os("DENO_INSTALL_ROOT");
|
let original_install_root = env::var_os("DENO_INSTALL_ROOT");
|
||||||
env::set_var("DENO_INSTALL_ROOT", temp_dir.path().to_path_buf());
|
env::set_var("DENO_INSTALL_ROOT", temp_dir.path().to_path_buf());
|
||||||
|
|
||||||
install(
|
let shim_data = resolve_shim_data(
|
||||||
Flags::default(),
|
&Flags::default(),
|
||||||
InstallFlags {
|
&InstallFlags {
|
||||||
module_url: "http://localhost:4545/echo_server.ts".to_string(),
|
module_url: "http://localhost:4545/echo_server.ts".to_string(),
|
||||||
args: vec![],
|
args: vec![],
|
||||||
name: Some("echo_test".to_string()),
|
name: Some("echo_test".to_string()),
|
||||||
|
@ -670,100 +658,102 @@ mod tests {
|
||||||
force: false,
|
force: false,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
.expect("Install failed");
|
.unwrap();
|
||||||
|
|
||||||
let mut file_path = bin_dir.join("echo_test");
|
|
||||||
if cfg!(windows) {
|
|
||||||
file_path = file_path.with_extension("cmd");
|
|
||||||
}
|
|
||||||
|
|
||||||
assert!(file_path.exists());
|
|
||||||
let content = fs::read_to_string(file_path).unwrap();
|
|
||||||
if cfg!(windows) {
|
|
||||||
assert!(
|
|
||||||
content.contains(r#""run" "http://localhost:4545/echo_server.ts""#)
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
assert!(content.contains(r#"run 'http://localhost:4545/echo_server.ts'"#));
|
|
||||||
}
|
|
||||||
if let Some(install_root) = original_install_root {
|
if let Some(install_root) = original_install_root {
|
||||||
env::set_var("DENO_INSTALL_ROOT", install_root);
|
env::set_var("DENO_INSTALL_ROOT", install_root);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
fs::canonicalize(shim_data.installation_dir).unwrap(),
|
||||||
|
fs::canonicalize(bin_dir).unwrap()
|
||||||
|
);
|
||||||
|
assert_eq!(shim_data.name, "echo_test");
|
||||||
|
assert_eq!(
|
||||||
|
shim_data.args,
|
||||||
|
vec!["run", "http://localhost:4545/echo_server.ts",]
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn install_with_flags() {
|
fn install_with_flags() {
|
||||||
let temp_dir = TempDir::new().expect("tempdir fail");
|
let shim_data = resolve_shim_data(
|
||||||
let bin_dir = temp_dir.path().join("bin");
|
&Flags {
|
||||||
std::fs::create_dir(&bin_dir).unwrap();
|
|
||||||
|
|
||||||
install(
|
|
||||||
Flags {
|
|
||||||
allow_net: Some(vec![]),
|
allow_net: Some(vec![]),
|
||||||
allow_read: Some(vec![]),
|
allow_read: Some(vec![]),
|
||||||
check: CheckFlag::None,
|
check: CheckFlag::None,
|
||||||
log_level: Some(Level::Error),
|
log_level: Some(Level::Error),
|
||||||
..Flags::default()
|
..Flags::default()
|
||||||
},
|
},
|
||||||
InstallFlags {
|
&InstallFlags {
|
||||||
module_url: "http://localhost:4545/echo_server.ts".to_string(),
|
module_url: "http://localhost:4545/echo_server.ts".to_string(),
|
||||||
args: vec!["--foobar".to_string()],
|
args: vec!["--foobar".to_string()],
|
||||||
name: Some("echo_test".to_string()),
|
name: Some("echo_test".to_string()),
|
||||||
root: Some(temp_dir.path().to_path_buf()),
|
root: Some(env::temp_dir()),
|
||||||
force: false,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
.expect("Install failed");
|
|
||||||
|
|
||||||
let mut file_path = bin_dir.join("echo_test");
|
|
||||||
if cfg!(windows) {
|
|
||||||
file_path = file_path.with_extension("cmd");
|
|
||||||
}
|
|
||||||
|
|
||||||
assert!(file_path.exists());
|
|
||||||
let content = fs::read_to_string(file_path).unwrap();
|
|
||||||
if cfg!(windows) {
|
assert!(content.contains(r#""run" "--allow-read" "--allow-net" "--quiet" "--no-check" "http://localhost:4545/echo_server.ts" "--foobar""#));
} else {
assert!(content.contains(r#"run --allow-read --allow-net --quiet --no-check 'http://localhost:4545/echo_server.ts' --foobar"#));
}
}

#[test]
fn install_allow_all() {
let temp_dir = TempDir::new().expect("tempdir fail");
let bin_dir = temp_dir.path().join("bin");
std::fs::create_dir(&bin_dir).unwrap();

install(
Flags {
allow_all: true,
..Flags::default()
},
InstallFlags {
module_url: "http://localhost:4545/echo_server.ts".to_string(),
args: vec![],
name: Some("echo_test".to_string()),
root: Some(temp_dir.path().to_path_buf()),
force: false,
},
)
.unwrap();

let mut file_path = bin_dir.join("echo_test");
assert_eq!(shim_data.name, "echo_test");
if cfg!(windows) {
assert_eq!(
file_path = file_path.with_extension("cmd");
shim_data.args,
}
vec![
"run",
"--allow-read",
"--allow-net",
"--quiet",
"--no-check",
"http://localhost:4545/echo_server.ts",
"--foobar",
]
);
}

let content = fs::read_to_string(file_path).unwrap();
#[test]
if cfg!(windows) {
fn install_prompt() {
assert!(content.contains(
let shim_data = resolve_shim_data(
r#""run" "--allow-all" "http://localhost:4545/echo_server.ts""#
&Flags {
));
prompt: true,
} else {
..Flags::default()
assert!(content
},
.contains(r#"run --allow-all 'http://localhost:4545/echo_server.ts'"#));
&InstallFlags {
}
module_url: "http://localhost:4545/echo_server.ts".to_string(),
args: vec![],
name: Some("echo_test".to_string()),
root: Some(env::temp_dir()),
force: false,
},
)
.unwrap();

assert_eq!(
shim_data.args,
vec!["run", "--prompt", "http://localhost:4545/echo_server.ts",]
);
}

#[test]
fn install_allow_all() {
let shim_data = resolve_shim_data(
&Flags {
allow_all: true,
..Flags::default()
},
&InstallFlags {
module_url: "http://localhost:4545/echo_server.ts".to_string(),
args: vec![],
name: Some("echo_test".to_string()),
root: Some(env::temp_dir()),
force: false,
},
)
.unwrap();

assert_eq!(
shim_data.args,
vec!["run", "--allow-all", "http://localhost:4545/echo_server.ts",]
);
}
}

#[test]

@ -999,12 +989,12 @@ mod tests {

let mut expected_string = format!(
"--import-map '{}' 'http://localhost:4545/cat.ts'",
import_map_url.to_string()
import_map_url
);
if cfg!(windows) {
expected_string = format!(
"\"--import-map\" \"{}\" \"http://localhost:4545/cat.ts\"",
import_map_url.to_string()
import_map_url
);
}
@ -1,10 +1,9 @@
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.

use crate::ast::transpile;
use crate::ast::Diagnostics;
use crate::ast::ImportsNotUsedAsValues;
use crate::colors;
use crate::lsp::ReplLanguageServer;
use deno_ast::DiagnosticsError;
use deno_ast::ImportsNotUsedAsValues;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::serde_json::json;

@ -184,7 +183,7 @@ impl ReplSession {
Some(diagnostic) => {
Ok(EvaluationOutput::Error(format_diagnostic(diagnostic)))
}
None => match err.downcast_ref::<Diagnostics>() {
None => match err.downcast_ref::<DiagnosticsError>() {
Some(diagnostics) => Ok(EvaluationOutput::Error(
diagnostics
.0

@ -311,9 +310,8 @@ impl ReplSession {
scope_analysis: false,
})?;

let transpiled_src = transpile(
let transpiled_src = parsed_module
&parsed_module,
.transpile(&deno_ast::EmitOptions {
&crate::ast::EmitOptions {
emit_metadata: false,
source_map: false,
inline_source_map: false,

@ -326,10 +324,9 @@ impl ReplSession {
jsx_factory: "React.createElement".into(),
jsx_fragment_factory: "React.Fragment".into(),
jsx_import_source: None,
repl_imports: true,
var_decl_imports: true,
},
})?
)?
.text;
.0;

let value = self
.evaluate_expression(&format!(
@ -136,14 +136,14 @@ pub async fn write_standalone_binary(
let output = match target {
Some(target) => {
if target.contains("windows") {
PathBuf::from(output.display().to_string() + ".exe")
output.with_extension("exe")
} else {
output
}
}
None => {
if cfg!(windows) && output.extension().unwrap_or_default() != "exe" {
PathBuf::from(output.display().to_string() + ".exe")
output.with_extension("exe")
} else {
output
}

@ -175,7 +175,14 @@ pub async fn write_standalone_binary(
// Remove file if it was indeed a deno compiled binary, to avoid corruption
// (see https://github.com/denoland/deno/issues/10310)
std::fs::remove_file(&output)?;
} else {
let output_base = &output.parent().unwrap();
if output_base.exists() && output_base.is_file() {
bail!("Could not compile: {:?} is a file.", &output_base);
}
tokio::fs::create_dir_all(output_base).await?;
}

tokio::fs::write(&output, final_bin).await?;
#[cfg(unix)]
{
@ -1,6 +1,5 @@
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.

use crate::ast::Location;
use crate::cache;
use crate::cache::CacherLoader;
use crate::colors;

@ -268,11 +267,7 @@ impl PrettyTestReporter {
print!("{}", " ".repeat(description.level));
}

println!(
println!("{} {}", status, colors::gray(human_elapsed(elapsed.into())));
"{} {}",
status,
colors::gray(human_elapsed(elapsed.into())).to_string()
);

if let Some(error_text) = result.error() {
for line in error_text.lines() {

@ -358,11 +353,7 @@ impl TestReporter for PrettyTestReporter {
print!(" ");
}

println!(
println!("{} {}", status, colors::gray(human_elapsed(elapsed.into())));
"{} {}",
status,
colors::gray(human_elapsed(elapsed.into())).to_string()
);
}

fn report_step_wait(&mut self, description: &TestStepDescription) {

@ -535,9 +526,10 @@ async fn test_specifier(
}

fn extract_files_from_regex_blocks(
location: &Location,
specifier: &ModuleSpecifier,
source: &str,
media_type: MediaType,
file_line_index: usize,
blocks_regex: &Regex,
lines_regex: &Regex,
) -> Result<Vec<File>, AnyError> {

@ -594,9 +586,9 @@ fn extract_files_from_regex_blocks(

let file_specifier = deno_core::resolve_url_or_path(&format!(
"{}${}-{}{}",
location.specifier,
specifier,
location.line + line_offset,
file_line_index + line_offset + 1,
location.line + line_offset + line_count,
file_line_index + line_offset + line_count + 1,
file_media_type.as_ts_extension(),
))
.unwrap();

@ -642,12 +634,11 @@ fn extract_files_from_source_comments(
true
})
.flat_map(|comment| {
let location = Location::from_pos(&parsed_source, comment.span.lo);

extract_files_from_regex_blocks(
&location,
specifier,
&comment.text,
media_type,
parsed_source.source().line_index(comment.span.lo),
&blocks_regex,
&lines_regex,
)

@ -663,19 +654,14 @@ fn extract_files_from_fenced_blocks(
source: &str,
media_type: MediaType,
) -> Result<Vec<File>, AnyError> {
let location = Location {
specifier: specifier.to_string(),
line: 1,
col: 0,
};

let blocks_regex = Regex::new(r"```([^\r\n]*)\r?\n([\S\s]*?)```")?;
let lines_regex = Regex::new(r"(?:\# ?)?(.*)")?;

extract_files_from_regex_blocks(
&location,
specifier,
source,
media_type,
/* file line index */ 0,
&blocks_regex,
&lines_regex,
)

@ -1150,6 +1136,7 @@ pub async fn run_tests_with_watch(
maybe_resolver,
maybe_locker,
None,
None,
)
.await;
graph_valid(&graph, !no_check, check_js)?;
24
cli/tsc.rs

@ -51,11 +51,25 @@ pub static SHARED_GLOBALS_LIB: &str =
pub static WINDOW_LIB: &str = include_str!("dts/lib.deno.window.d.ts");
pub static UNSTABLE_NS_LIB: &str = include_str!("dts/lib.deno.unstable.d.ts");

pub static COMPILER_SNAPSHOT: &[u8] =
pub static COMPILER_SNAPSHOT: Lazy<Box<[u8]>> = Lazy::new(
include_bytes!(concat!(env!("OUT_DIR"), "/COMPILER_SNAPSHOT.bin"));
#[cold]
#[inline(never)]
|| {
static COMPRESSED_COMPILER_SNAPSHOT: &[u8] =
include_bytes!(concat!(env!("OUT_DIR"), "/COMPILER_SNAPSHOT.bin"));

zstd::block::decompress(
&COMPRESSED_COMPILER_SNAPSHOT[4..],
u32::from_le_bytes(COMPRESSED_COMPILER_SNAPSHOT[0..4].try_into().unwrap())
as usize,
)
.unwrap()
.into_boxed_slice()
},
);

pub fn compiler_snapshot() -> Snapshot {
Snapshot::Static(COMPILER_SNAPSHOT)
Snapshot::Static(&*COMPILER_SNAPSHOT)
}

macro_rules! inc {

@ -675,7 +689,7 @@ mod tests {
})
})
.map_err(|err| err.into());
Box::pin(future::ready((specifier.clone(), response)))
Box::pin(future::ready(response))
}
}

@ -697,6 +711,7 @@ mod tests {
None,
None,
None,
None,
)
.await;
State::new(

@ -723,6 +738,7 @@ mod tests {
None,
None,
None,
None,
)
.await;
let config = TsConfig::new(json!({
@ -1,7 +1,7 @@
# Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
[package]
name = "deno_core"
version = "0.113.0"
version = "0.114.0"
authors = ["the Deno authors"]
edition = "2021"
license = "MIT"

@ -23,7 +23,7 @@ parking_lot = "0.11.1"
pin-project = "1.0.7"
serde = { version = "1.0.129", features = ["derive"] }
serde_json = { version = "1.0.66", features = ["preserve_order"] }
serde_v8 = { version = "0.24.0", path = "../serde_v8" }
serde_v8 = { version = "0.25.0", path = "../serde_v8" }
url = { version = "2.2.2", features = ["serde"] }
v8 = "0.37.0"

@ -372,6 +372,7 @@ impl ErrWithV8Handle {
}
}

#[allow(clippy::non_send_fields_in_send_ty)]
unsafe impl Send for ErrWithV8Handle {}
unsafe impl Sync for ErrWithV8Handle {}

@ -43,7 +43,7 @@ impl fmt::Display for ModuleResolutionError {
specifier,
match maybe_referrer {
Some(referrer) => format!(" from \"{}\"", referrer),
None => format!(""),
None => String::new(),
}
),
}
@ -2,7 +2,7 @@

[package]
name = "deno_broadcast_channel"
version = "0.25.0"
version = "0.26.0"
authors = ["the Deno authors"]
edition = "2021"
license = "MIT"

@ -15,6 +15,6 @@ path = "lib.rs"

[dependencies]
async-trait = "0.1"
deno_core = { version = "0.113.0", path = "../../core" }
deno_core = { version = "0.114.0", path = "../../core" }
tokio = { version = "1.10.1", features = ["full"] }
uuid = { version = "0.8.2", features = ["v4"] }

@ -2,7 +2,7 @@

[package]
name = "deno_console"
version = "0.31.0"
version = "0.32.0"
authors = ["the Deno authors"]
edition = "2021"
license = "MIT"

@ -14,4 +14,4 @@ description = "Implementation of Console API for Deno"
path = "lib.rs"

[dependencies]
deno_core = { version = "0.113.0", path = "../../core" }
deno_core = { version = "0.114.0", path = "../../core" }
@ -133,6 +133,7 @@
"decrypt": {
"RSA-OAEP": "RsaOaepParams",
"AES-CBC": "AesCbcParams",
"AES-GCM": "AesGcmParams",
"AES-CTR": "AesCtrParams",
},
"get key length": {

@ -145,12 +146,10 @@
"PBKDF2": null,
},
"wrapKey": {
// TODO(@littledivy): Enable this once implemented.
"AES-KW": null,
// "AES-KW": "AesKeyWrapParams",
},
"unwrapKey": {
// TODO(@littledivy): Enable this once implemented.
"AES-KW": null,
// "AES-KW": "AesKeyWrapParams",
},
};

@ -633,6 +632,66 @@
// 4.
return cipherText.buffer;
}
case "AES-GCM": {
normalizedAlgorithm.iv = copyBuffer(normalizedAlgorithm.iv);

// 1.
if (normalizedAlgorithm.tagLength === undefined) {
normalizedAlgorithm.tagLength = 128;
} else if (
!ArrayPrototypeIncludes(
[32, 64, 96, 104, 112, 120, 128],
normalizedAlgorithm.tagLength,
)
) {
throw new DOMException(
"Invalid tag length",
"OperationError",
);
}

// 2.
if (data.byteLength < normalizedAlgorithm.tagLength / 8) {
throw new DOMException(
"Tag length overflows ciphertext",
"OperationError",
);
}

// 3. We only support 96-bit nonce for now.
if (normalizedAlgorithm.iv.byteLength !== 12) {
throw new DOMException(
"Initialization vector length not supported",
"NotSupportedError",
);
}

// 4.
if (normalizedAlgorithm.additionalData !== undefined) {
if (normalizedAlgorithm.additionalData.byteLength > (2 ** 64) - 1) {
throw new DOMException(
"Additional data too large",
"OperationError",
);
}
normalizedAlgorithm.additionalData = copyBuffer(
normalizedAlgorithm.additionalData,
);
}

// 5-8.
const plaintext = await core.opAsync("op_crypto_decrypt", {
key: keyData,
algorithm: "AES-GCM",
length: key[_algorithm].length,
iv: normalizedAlgorithm.iv,
additionalData: normalizedAlgorithm.additionalData,
tagLength: normalizedAlgorithm.tagLength,
}, data);

// 9.
return plaintext.buffer;
}
default:
throw new DOMException("Not implemented", "NotSupportedError");
}

@ -1271,14 +1330,30 @@
if (
supportedAlgorithms["wrapKey"][normalizedAlgorithm.name] !== undefined
) {
// TODO(@littledivy): Implement this for AES-KW.
const handle = wrappingKey[_handle];
throw new DOMException(
const keyData = WeakMapPrototypeGet(KEY_STORE, handle);
"Not implemented",
"NotSupportedError",
switch (normalizedAlgorithm.name) {
);
case "AES-KW": {
const cipherText = await core.opSync("op_crypto_wrap_key", {
key: keyData,
algorithm: normalizedAlgorithm.name,
}, bytes);

// 4.
return cipherText.buffer;
}
default: {
throw new DOMException(
"Not implemented",
"NotSupportedError",
);
}
}
} else if (
supportedAlgorithms["encrypt"][normalizedAlgorithm.name] !== undefined
) {
// must construct a new key, since keyUsages is ["wrapKey"] and not ["encrypt"]
return await encrypt(
normalizedAlgorithm,
constructKey(

@ -1391,14 +1466,31 @@
if (
supportedAlgorithms["unwrapKey"][normalizedAlgorithm.name] !== undefined
) {
// TODO(@littledivy): Implement this for AES-KW.
const handle = unwrappingKey[_handle];
throw new DOMException(
const keyData = WeakMapPrototypeGet(KEY_STORE, handle);
"Not implemented",
"NotSupportedError",
switch (normalizedAlgorithm.name) {
);
case "AES-KW": {
const plainText = await core.opSync("op_crypto_unwrap_key", {
key: keyData,
algorithm: normalizedAlgorithm.name,
}, wrappedKey);

// 4.
key = plainText.buffer;
break;
}
default: {
throw new DOMException(
"Not implemented",
"NotSupportedError",
);
}
}
} else if (
supportedAlgorithms["decrypt"][normalizedAlgorithm.name] !== undefined
) {
// must construct a new key, since keyUsages is ["unwrapKey"] and not ["decrypt"]
key = await this.decrypt(
normalizedAlgorithm,
constructKey(
@ -2,7 +2,7 @@

[package]
name = "deno_crypto"
version = "0.45.0"
version = "0.46.0"
authors = ["the Deno authors"]
edition = "2021"
license = "MIT"

@ -16,11 +16,12 @@ path = "lib.rs"
[dependencies]
aes = "0.7.5"
aes-gcm = "0.9.4"
aes-kw = { version = "0.1", features = ["alloc"] }
base64 = "0.13.0"
block-modes = "0.8.1"
ctr = "0.8.0"
deno_core = { version = "0.113.0", path = "../../core" }
deno_core = { version = "0.114.0", path = "../../core" }
deno_web = { version = "0.62.0", path = "../web" }
deno_web = { version = "0.63.0", path = "../web" }
elliptic-curve = { version = "0.10.6", features = ["std", "pem"] }
num-traits = "0.2.14"
once_cell = "=1.9.0"
@ -2,8 +2,16 @@ use std::cell::RefCell;
use std::rc::Rc;

use crate::shared::*;
use aes::cipher::generic_array::GenericArray;
use aes::Aes192;
use aes::BlockEncrypt;
use aes::NewBlockCipher;
use aes_gcm::AeadCore;
use aes_gcm::AeadInPlace;
use aes_gcm::Aes128Gcm;
use aes_gcm::Aes256Gcm;
use aes_gcm::NewAead;
use aes_gcm::Nonce;
use block_modes::BlockMode;
use ctr::cipher::NewCipher;
use ctr::cipher::StreamCipher;

@ -17,6 +25,7 @@ use deno_core::error::type_error;
use deno_core::error::AnyError;
use deno_core::OpState;
use deno_core::ZeroCopyBuf;
use elliptic_curve::consts::U12;
use rsa::pkcs1::FromRsaPrivateKey;
use rsa::PaddingScheme;
use serde::Deserialize;

@ -56,8 +65,19 @@ pub enum DecryptAlgorithm {
ctr_length: usize,
key_length: usize,
},
#[serde(rename = "AES-GCM", rename_all = "camelCase")]
AesGcm {
#[serde(with = "serde_bytes")]
iv: Vec<u8>,
#[serde(with = "serde_bytes")]
additional_data: Option<Vec<u8>>,
length: usize,
tag_length: usize,
},
}

type Aes192Gcm = aes_gcm::AesGcm<Aes192, U12>;

pub async fn op_crypto_decrypt(
_state: Rc<RefCell<OpState>>,
opts: DecryptOptions,

@ -76,6 +96,12 @@ pub async fn op_crypto_decrypt(
ctr_length,
key_length,
} => decrypt_aes_ctr(key, key_length, &counter, ctr_length, &data),
DecryptAlgorithm::AesGcm {
iv,
additional_data,
length,
tag_length,
} => decrypt_aes_gcm(key, length, tag_length, iv, additional_data, &data),
};
let buf = tokio::task::spawn_blocking(fun).await.unwrap()?;
Ok(buf.into())

@ -195,6 +221,30 @@ where
Ok(plaintext)
}

fn decrypt_aes_gcm_gen<B>(
key: &[u8],
tag: &GenericArray<u8, <B as AeadCore>::TagSize>,
nonce: &GenericArray<u8, <B as AeadCore>::NonceSize>,
additional_data: Vec<u8>,
plaintext: &mut [u8],
) -> Result<(), AnyError>
where
B: AeadInPlace + NewAead,
{
let cipher =
B::new_from_slice(key).map_err(|_| operation_error("Decryption failed"))?;
cipher
.decrypt_in_place_detached(
nonce,
additional_data.as_slice(),
plaintext,
tag,
)
.map_err(|_| operation_error("Decryption failed"))?;

Ok(())
}

fn decrypt_aes_ctr(
key: RawKeyData,
key_length: usize,

@ -228,3 +278,53 @@ fn decrypt_aes_ctr(
)),
}
}

fn decrypt_aes_gcm(
key: RawKeyData,
length: usize,
tag_length: usize,
iv: Vec<u8>,
additional_data: Option<Vec<u8>>,
data: &[u8],
) -> Result<Vec<u8>, AnyError> {
let key = key.as_secret_key()?;
let additional_data = additional_data.unwrap_or_default();

// Fixed 96-bit nonce
if iv.len() != 12 {
return Err(type_error("iv length not equal to 12"));
}

let nonce = Nonce::from_slice(&iv);

let sep = data.len() - (tag_length / 8);
let tag = &data[sep..];
// The actual ciphertext, called plaintext because it is reused in place.
let mut plaintext = data[..sep].to_vec();
match length {
128 => decrypt_aes_gcm_gen::<Aes128Gcm>(
key,
tag.into(),
nonce,
additional_data,
&mut plaintext,
)?,
192 => decrypt_aes_gcm_gen::<Aes192Gcm>(
key,
tag.into(),
nonce,
additional_data,
&mut plaintext,
)?,
256 => decrypt_aes_gcm_gen::<Aes256Gcm>(
key,
tag.into(),
nonce,
additional_data,
&mut plaintext,
)?,
_ => return Err(type_error("invalid length")),
};

Ok(plaintext)
}
14
ext/crypto/lib.deno_crypto.d.ts
vendored

@ -264,6 +264,7 @@ interface SubtleCrypto {
| AlgorithmIdentifier
| RsaOaepParams
| AesCbcParams
| AesGcmParams
| AesCtrParams,
key: CryptoKey,
data: BufferSource,

@ -293,7 +294,11 @@ interface SubtleCrypto {
format: KeyFormat,
key: CryptoKey,
wrappingKey: CryptoKey,
wrapAlgorithm: AlgorithmIdentifier | RsaOaepParams,
wrapAlgorithm:
| AlgorithmIdentifier
| RsaOaepParams
| AesCbcParams
| AesCtrParams,
): Promise<ArrayBuffer>;
unwrapKey(
format: KeyFormat,

@ -302,12 +307,13 @@ interface SubtleCrypto {
unwrapAlgorithm:
| AlgorithmIdentifier
| RsaOaepParams
| AesCbcParams,
| AesCbcParams
| AesCtrParams,
unwrappedKeyAlgorithm:
| AlgorithmIdentifier
| RsaHashedImportParams
| HmacImportParams
| AesKeyAlgorithm,
| RsaHashedImportParams
| EcImportParams,
extractable: boolean,
keyUsages: KeyUsage[],
): Promise<CryptoKey>;
@ -1,5 +1,9 @@
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.

use aes_kw::KekAes128;
use aes_kw::KekAes192;
use aes_kw::KekAes256;

use deno_core::error::custom_error;
use deno_core::error::not_supported;
use deno_core::error::type_error;

@ -11,6 +15,7 @@ use deno_core::Extension;
use deno_core::OpState;
use deno_core::ZeroCopyBuf;
use serde::Deserialize;
use shared::operation_error;

use std::cell::RefCell;
use std::num::NonZeroU32;

@ -47,6 +52,7 @@ use sha2::Digest;
use sha2::Sha256;
use sha2::Sha384;
use sha2::Sha512;
use std::convert::TryFrom;
use std::path::PathBuf;

pub use rand; // Re-export rand

@ -68,6 +74,7 @@ use crate::key::Algorithm;
use crate::key::CryptoHash;
use crate::key::CryptoNamedCurve;
use crate::key::HkdfOutput;
use crate::shared::RawKeyData;
use crate::shared::ID_MFG1;
use crate::shared::ID_P_SPECIFIED;
use crate::shared::ID_SHA1_OID;

@ -95,6 +102,8 @@ pub fn init(maybe_seed: Option<u64>) -> Extension {
("op_crypto_decrypt", op_async(op_crypto_decrypt)),
("op_crypto_subtle_digest", op_async(op_crypto_subtle_digest)),
("op_crypto_random_uuid", op_sync(op_crypto_random_uuid)),
("op_crypto_wrap_key", op_sync(op_crypto_wrap_key)),
("op_crypto_unwrap_key", op_sync(op_crypto_unwrap_key)),
])
.state(move |state| {
if let Some(seed) = maybe_seed {

@ -815,6 +824,72 @@ pub async fn op_crypto_subtle_digest(
Ok(output)
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct WrapUnwrapKeyArg {
key: RawKeyData,
algorithm: Algorithm,
}

pub fn op_crypto_wrap_key(
_state: &mut OpState,
args: WrapUnwrapKeyArg,
data: ZeroCopyBuf,
) -> Result<ZeroCopyBuf, AnyError> {
let algorithm = args.algorithm;

match algorithm {
Algorithm::AesKw => {
let key = args.key.as_secret_key()?;

if data.len() % 8 != 0 {
return Err(type_error("Data must be multiple of 8 bytes"));
}

let wrapped_key = match key.len() {
16 => KekAes128::new(key.into()).wrap_vec(&data),
24 => KekAes192::new(key.into()).wrap_vec(&data),
32 => KekAes256::new(key.into()).wrap_vec(&data),
_ => return Err(type_error("Invalid key length")),
}
.map_err(|_| operation_error("encryption error"))?;

Ok(wrapped_key.into())
}
_ => Err(type_error("Unsupported algorithm")),
}
}

pub fn op_crypto_unwrap_key(
_state: &mut OpState,
args: WrapUnwrapKeyArg,
data: ZeroCopyBuf,
) -> Result<ZeroCopyBuf, AnyError> {
let algorithm = args.algorithm;
match algorithm {
Algorithm::AesKw => {
let key = args.key.as_secret_key()?;

if data.len() % 8 != 0 {
return Err(type_error("Data must be multiple of 8 bytes"));
}

let unwrapped_key = match key.len() {
16 => KekAes128::new(key.into()).unwrap_vec(&data),
24 => KekAes192::new(key.into()).unwrap_vec(&data),
32 => KekAes256::new(key.into()).unwrap_vec(&data),
_ => return Err(type_error("Invalid key length")),
}
.map_err(|_| {
operation_error("decryption error - integrity check failed")
})?;

Ok(unwrapped_key.into())
}
_ => Err(type_error("Unsupported algorithm")),
}
}

pub fn get_declaration() -> PathBuf {
PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("lib.deno_crypto.d.ts")
}
@ -2,7 +2,7 @@

[package]
name = "deno_fetch"
version = "0.54.0"
version = "0.55.0"
authors = ["the Deno authors"]
edition = "2021"
license = "MIT"

@ -16,8 +16,8 @@ path = "lib.rs"
[dependencies]
bytes = "1.1.0"
data-url = "0.1.0"
deno_core = { version = "0.113.0", path = "../../core" }
deno_core = { version = "0.114.0", path = "../../core" }
deno_tls = { version = "0.18.0", path = "../tls" }
deno_tls = { version = "0.19.0", path = "../tls" }
dyn-clone = "1"
http = "0.2.4"
reqwest = { version = "0.11.7", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli"] }
@ -142,6 +142,84 @@
}
}

function prepareArgs(types, args) {
const parameters = [];
const buffers = [];

for (let i = 0; i < types.length; i++) {
const type = types[i];
const arg = args[i];

if (type === "pointer") {
if (
arg?.buffer instanceof ArrayBuffer &&
arg.byteLength !== undefined
) {
parameters.push(buffers.length);
buffers.push(arg);
} else if (arg instanceof UnsafePointer) {
parameters.push(packU64(arg.value));
buffers.push(undefined);
} else if (arg === null) {
parameters.push(null);
buffers.push(undefined);
} else {
throw new TypeError(
"Invalid ffi arg value, expected TypedArray, UnsafePointer or null",
);
}
} else {
parameters.push(arg);
}
}

return { parameters, buffers };
}

class UnsafeFnPointer {
pointer;
definition;

constructor(pointer, definition) {
this.pointer = pointer;
this.definition = definition;
}

call(...args) {
const { parameters, buffers } = prepareArgs(
this.definition.parameters,
args,
);
if (this.definition.nonblocking) {
const promise = core.opAsync("op_ffi_call_ptr_nonblocking", {
pointer: packU64(this.pointer.value),
def: this.definition,
parameters,
buffers,
});

if (this.definition.result === "pointer") {
return promise.then((value) => new UnsafePointer(unpackU64(value)));
}

return promise;
} else {
const result = core.opSync("op_ffi_call_ptr", {
pointer: packU64(this.pointer.value),
def: this.definition,
parameters,
buffers,
});

if (this.definition.result === "pointer") {
return new UnsafePointer(unpackU64(result));
}

return result;
}
}
}

class DynamicLibrary {
#rid;
symbols = {};

@ -154,35 +232,7 @@
const types = symbols[symbol].parameters;

this.symbols[symbol] = (...args) => {
const parameters = [];
const { parameters, buffers } = prepareArgs(types, args);
const buffers = [];

for (let i = 0; i < types.length; i++) {
const type = types[i];
const arg = args[i];

if (type === "pointer") {
if (
arg?.buffer instanceof ArrayBuffer &&
arg.byteLength !== undefined
) {
parameters.push(buffers.length);
buffers.push(arg);
} else if (arg instanceof UnsafePointer) {
parameters.push(packU64(arg.value));
buffers.push(undefined);
} else if (arg === null) {
parameters.push(null);
buffers.push(undefined);
} else {
throw new TypeError(
"Invalid ffi arg value, expected TypedArray, UnsafePointer or null",
);
}
} else {
parameters.push(arg);
}
}

if (isNonBlocking) {
const promise = core.opAsync("op_ffi_call_nonblocking", {

@ -228,5 +278,10 @@
return new DynamicLibrary(pathFromURL(path), symbols);
}

window.__bootstrap.ffi = { dlopen, UnsafePointer, UnsafePointerView };
window.__bootstrap.ffi = {
dlopen,
UnsafePointer,
UnsafePointerView,
UnsafeFnPointer,
};
})(this);
@ -2,7 +2,7 @@

[package]
name = "deno_ffi"
version = "0.18.0"
version = "0.19.0"
authors = ["the Deno authors"]
edition = "2021"
license = "MIT"

@ -14,7 +14,7 @@ description = "Dynamic library ffi for deno"
path = "lib.rs"

[dependencies]
deno_core = { version = "0.113.0", path = "../../core" }
deno_core = { version = "0.114.0", path = "../../core" }
dlopen = "0.1.8"
libffi = "2.0.0"
serde = { version = "1.0.129", features = ["derive"] }
@ -57,6 +57,7 @@ struct Symbol {
result_type: NativeType,
}

#[allow(clippy::non_send_fields_in_send_ty)]
unsafe impl Send for Symbol {}
unsafe impl Sync for Symbol {}

@ -78,18 +79,21 @@ impl Resource for DynamicLibraryResource {
impl DynamicLibraryResource {
fn register(
&mut self,
symbol: String,
name: String,
foreign_fn: ForeignFunction,
) -> Result<(), AnyError> {
let symbol = match &foreign_fn.name {
Some(symbol) => symbol,
None => &name,
};
// By default, Err returned by this function does not tell
// which symbol wasn't exported. So we'll modify the error
// message to include the name of symbol.
let fn_ptr = match unsafe { self.lib.symbol::<*const c_void>(&symbol) } {
let fn_ptr = match unsafe { self.lib.symbol::<*const c_void>(symbol) } {
Ok(value) => Ok(value),
Err(err) => Err(generic_error(format!(
"Failed to register symbol {}: {}",
symbol,
symbol, err
err.to_string()
))),
}?;
let ptr = libffi::middle::CodePtr::from_ptr(fn_ptr as _);

@ -103,7 +107,7 @@ impl DynamicLibraryResource {
);

self.symbols.insert(
symbol,
name,
Symbol {
cif,
ptr,

@ -126,6 +130,11 @@ pub fn init<P: FfiPermissions + 'static>(unstable: bool) -> Extension {
("op_ffi_load", op_sync(op_ffi_load::<P>)),
("op_ffi_call", op_sync(op_ffi_call)),
("op_ffi_call_nonblocking", op_async(op_ffi_call_nonblocking)),
("op_ffi_call_ptr", op_sync(op_ffi_call_ptr)),
(
"op_ffi_call_ptr_nonblocking",
op_async(op_ffi_call_ptr_nonblocking),
),
("op_ffi_ptr_of", op_sync(op_ffi_ptr_of::<P>)),
("op_ffi_buf_copy_into", op_sync(op_ffi_buf_copy_into::<P>)),
("op_ffi_cstr_read", op_sync(op_ffi_cstr_read::<P>)),

@ -319,7 +328,7 @@ fn value_as_f64(value: Value) -> Result<f64, AnyError> {
}
}

#[derive(Serialize, Deserialize, Debug)]
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
struct U32x2(u32, u32);

impl From<u64> for U32x2 {

@ -337,6 +346,7 @@ impl From<U32x2> for u64 {
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct ForeignFunction {
name: Option<String>,
parameters: Vec<NativeType>,
result: NativeType,
}

@ -464,6 +474,49 @@ struct FfiCallArgs {
buffers: Vec<Option<ZeroCopyBuf>>,
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct FfiCallPtrArgs {
pointer: U32x2,
def: ForeignFunction,
parameters: Vec<Value>,
buffers: Vec<Option<ZeroCopyBuf>>,
}

impl From<FfiCallPtrArgs> for FfiCallArgs {
fn from(args: FfiCallPtrArgs) -> Self {
FfiCallArgs {
rid: 0,
symbol: String::new(),
parameters: args.parameters,
buffers: args.buffers,
}
}
}

impl FfiCallPtrArgs {
fn get_symbol(&self) -> Symbol {
let fn_ptr: u64 = self.pointer.into();
let ptr = libffi::middle::CodePtr::from_ptr(fn_ptr as _);
let cif = libffi::middle::Cif::new(
self
.def
.parameters
.clone()
.into_iter()
.map(libffi::middle::Type::from),
self.def.result.into(),
);

Symbol {
cif,
ptr,
parameter_types: self.def.parameters.clone(),
result_type: self.def.result,
}
}
}

fn ffi_call(args: FfiCallArgs, symbol: &Symbol) -> Result<Value, AnyError> {
let buffers: Vec<Option<&[u8]>> = args
.buffers

@ -558,6 +611,26 @@ fn ffi_call(args: FfiCallArgs, symbol: &Symbol) -> Result<Value, AnyError> {
})
}

fn op_ffi_call_ptr(
_state: &mut deno_core::OpState,
args: FfiCallPtrArgs,
_: (),
) -> Result<Value, AnyError> {
let symbol = args.get_symbol();
ffi_call(args.into(), &symbol)
}

async fn op_ffi_call_ptr_nonblocking(
_state: Rc<RefCell<deno_core::OpState>>,
args: FfiCallPtrArgs,
_: (),
) -> Result<Value, AnyError> {
let symbol = args.get_symbol();
tokio::task::spawn_blocking(move || ffi_call(args.into(), &symbol))
.await
.unwrap()
}

fn op_ffi_call(
state: &mut deno_core::OpState,
args: FfiCallArgs,
@ -2,7 +2,7 @@

[package]
name = "deno_http"
version = "0.23.0"
version = "0.24.0"
authors = ["the Deno authors"]
edition = "2021"
license = "MIT"

@ -16,8 +16,8 @@ path = "lib.rs"
[dependencies]
base64 = "0.13.0"
bytes = "1"
deno_core = { version = "0.113.0", path = "../../core" }
deno_core = { version = "0.114.0", path = "../../core" }
deno_websocket = { version = "0.36.0", path = "../websocket" }
deno_websocket = { version = "0.37.0", path = "../websocket" }
hyper = { version = "0.14.9", features = ["server", "stream", "http1", "http2", "runtime"] }
ring = "0.16.20"
serde = { version = "1.0.129", features = ["derive"] }

@ -2,7 +2,7 @@

[package]
name = "deno_net"
version = "0.23.0"
version = "0.24.0"
authors = ["the Deno authors"]
edition = "2021"
license = "MIT"

@ -14,8 +14,8 @@ description = "Networking for Deno"
path = "lib.rs"

[dependencies]
deno_core = { version = "0.113.0", path = "../../core" }
deno_core = { version = "0.114.0", path = "../../core" }
deno_tls = { version = "0.18.0", path = "../tls" }
deno_tls = { version = "0.19.0", path = "../tls" }
log = "0.4.14"
serde = { version = "1.0.129", features = ["derive"] }
tokio = { version = "1.10.1", features = ["full"] }

@ -2,7 +2,7 @@

[package]
name = "deno_timers"
version = "0.29.0"
version = "0.30.0"
authors = ["the Deno authors"]
edition = "2021"
license = "MIT"

@ -14,14 +14,14 @@ description = "Timers API implementation for Deno"
path = "lib.rs"

[dependencies]
deno_core = { version = "0.113.0", path = "../../core" }
deno_core = { version = "0.114.0", path = "../../core" }
tokio = { version = "1.10.1", features = ["full"] }

[dev-dependencies]
deno_bench_util = { version = "0.25.0", path = "../../bench_util" }
deno_bench_util = { version = "0.26.0", path = "../../bench_util" }
deno_url = { version = "0.31.0", path = "../url" }
deno_url = { version = "0.32.0", path = "../url" }
deno_web = { version = "0.62.0", path = "../web" }
deno_web = { version = "0.63.0", path = "../web" }
deno_webidl = { version = "0.31.0", path = "../webidl" }
deno_webidl = { version = "0.32.0", path = "../webidl" }

[[bench]]
name = "timers_ops"

@ -2,7 +2,7 @@

[package]
name = "deno_tls"
version = "0.18.0"
version = "0.19.0"
authors = ["the Deno authors"]
edition = "2021"
license = "MIT"

@ -14,7 +14,7 @@ description = "TLS for Deno"
path = "lib.rs"

[dependencies]
deno_core = { version = "0.113.0", path = "../../core" }
deno_core = { version = "0.114.0", path = "../../core" }
once_cell = "=1.9.0"
rustls = { version = "0.20", features = ["dangerous_configuration"] }
rustls-native-certs = "0.6.1"

@ -2,7 +2,7 @@

[package]
name = "deno_url"
version = "0.31.0"
version = "0.32.0"
authors = ["the Deno authors"]
edition = "2021"
license = "MIT"

@ -14,14 +14,14 @@ description = "URL API implementation for Deno"
path = "lib.rs"

[dependencies]
deno_core = { version = "0.113.0", path = "../../core" }
deno_core = { version = "0.114.0", path = "../../core" }
serde = { version = "1.0.129", features = ["derive"] }
serde_repr = "0.1.7"
urlpattern = "0.1.2"
urlpattern = "0.1.3"

[dev-dependencies]
deno_bench_util = { version = "0.25.0", path = "../../bench_util" }
deno_bench_util = { version = "0.26.0", path = "../../bench_util" }
deno_webidl = { version = "0.31.0", path = "../webidl" }
deno_webidl = { version = "0.32.0", path = "../webidl" }

[[bench]]
name = "url_ops"
@ -29,8 +29,10 @@
* @returns {string}
*/
function atob(data) {
const prefix = "Failed to execute 'atob'";
webidl.requiredArguments(arguments.length, 1, { prefix });
data = webidl.converters.DOMString(data, {
prefix: "Failed to execute 'atob'",
prefix,
context: "Argument 1",
});
@ -279,6 +279,7 @@
|
||||||
const _pullAlgorithm = Symbol("[[pullAlgorithm]]");
|
const _pullAlgorithm = Symbol("[[pullAlgorithm]]");
|
||||||
const _pulling = Symbol("[[pulling]]");
|
const _pulling = Symbol("[[pulling]]");
|
||||||
const _pullSteps = Symbol("[[PullSteps]]");
|
const _pullSteps = Symbol("[[PullSteps]]");
|
||||||
|
const _releaseSteps = Symbol("[[ReleaseSteps]]");
|
||||||
const _queue = Symbol("[[queue]]");
|
const _queue = Symbol("[[queue]]");
|
||||||
const _queueTotalSize = Symbol("[[queueTotalSize]]");
|
const _queueTotalSize = Symbol("[[queueTotalSize]]");
|
||||||
   const _readable = Symbol("[[readable]]");

@@ -800,12 +801,19 @@
           "The BYOB request's buffer has been detached and so cannot be filled with an enqueued chunk",
         );
       }
+      readableByteStreamControllerInvalidateBYOBRequest(controller);
       firstPendingPullInto.buffer = transferArrayBuffer(
         firstPendingPullInto.buffer,
       );
+      if (firstPendingPullInto.readerType === "none") {
+        readableByteStreamControllerEnqueueDetachedPullIntoToQueue(
+          controller,
+          firstPendingPullInto,
+        );
+      }
     }
-    readableByteStreamControllerInvalidateBYOBRequest(controller);
     if (readableStreamHasDefaultReader(stream)) {
+      readableByteStreamControllerProcessReadRequestsUsingQueue(controller);
       if (readableStreamGetNumReadRequests(stream) === 0) {
         assert(controller[_pendingPullIntos].length === 0);
         readableByteStreamControllerEnqueueChunkToQueue(
@@ -866,6 +874,54 @@
     controller[_queueTotalSize] += byteLength;
   }
 
+  /**
+   * @param {ReadableByteStreamController} controller
+   * @param {ArrayBufferLike} buffer
+   * @param {number} byteOffset
+   * @param {number} byteLength
+   * @returns {void}
+   */
+  function readableByteStreamControllerEnqueueClonedChunkToQueue(
+    controller,
+    buffer,
+    byteOffset,
+    byteLength,
+  ) {
+    let cloneResult;
+    try {
+      cloneResult = buffer.slice(byteOffset, byteOffset + byteLength);
+    } catch (e) {
+      readableByteStreamControllerError(controller, e);
+    }
+    readableByteStreamControllerEnqueueChunkToQueue(
+      controller,
+      cloneResult,
+      0,
+      byteLength,
+    );
+  }
+
+  /**
+   * @param {ReadableByteStreamController} controller
+   * @param {PullIntoDescriptor} pullIntoDescriptor
+   * @returns {void}
+   */
+  function readableByteStreamControllerEnqueueDetachedPullIntoToQueue(
+    controller,
+    pullIntoDescriptor,
+  ) {
+    assert(pullIntoDescriptor.readerType === "none");
+    if (pullIntoDescriptor.bytesFilled > 0) {
+      readableByteStreamControllerEnqueueClonedChunkToQueue(
+        controller,
+        pullIntoDescriptor.buffer,
+        pullIntoDescriptor.byteOffset,
+        pullIntoDescriptor.bytesFilled,
+      );
+    }
+    readableByteStreamControllerShiftPendingPullInto(controller);
+  }
+
   /**
    * @param {ReadableByteStreamController} controller
    * @returns {ReadableStreamBYOBRequest | null}
@@ -1000,10 +1056,11 @@
     readableStreamClose(stream);
     const reader = stream[_reader];
     if (reader !== undefined && isReadableStreamBYOBReader(reader)) {
-      for (const readIntoRequest of reader[_readIntoRequests]) {
+      const readIntoRequests = reader[_readIntoRequests];
+      reader[_readIntoRequests] = [];
+      for (const readIntoRequest of readIntoRequests) {
        readIntoRequest.closeSteps(undefined);
       }
-      reader[_readIntoRequests] = [];
     }
     /** @type {Promise<void>} */
     const sourceCancelPromise = stream[_controller][_cancelSteps](reason);
@@ -1026,10 +1083,10 @@
     if (isReadableStreamDefaultReader(reader)) {
       /** @type {Array<ReadRequest<R>>} */
       const readRequests = reader[_readRequests];
+      reader[_readRequests] = [];
       for (const readRequest of readRequests) {
         readRequest.closeSteps();
       }
-      reader[_readRequests] = [];
     }
     // This promise can be double resolved.
     // See: https://github.com/whatwg/streams/issues/1100
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param {ReadableStreamBYOBReader} reader
|
||||||
|
*/
|
||||||
|
function readableStreamBYOBReaderRelease(reader) {
|
||||||
|
readableStreamReaderGenericRelease(reader);
|
||||||
|
const e = new TypeError("The reader was released.");
|
||||||
|
readableStreamBYOBReaderErrorReadIntoRequests(reader, e);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param {ReadableStreamBYOBReader} reader
|
||||||
|
* @param {any} e
|
||||||
|
*/
|
||||||
|
function readableStreamDefaultReaderErrorReadRequests(reader, e) {
|
||||||
|
const readRequests = reader[_readRequests];
|
||||||
|
reader[_readRequests] = [];
|
||||||
|
for (const readRequest of readRequests) {
|
||||||
|
readRequest.errorSteps(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {ReadableByteStreamController} controller
|
* @param {ReadableByteStreamController} controller
|
||||||
*/
|
*/
|
||||||
|
@ -1250,6 +1328,25 @@
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
/**
|
||||||
|
* @param {ReadableByteStreamController} controller
|
||||||
|
*/
|
||||||
|
function readableByteStreamControllerProcessReadRequestsUsingQueue(
|
||||||
|
controller,
|
||||||
|
) {
|
||||||
|
const reader = controller[_stream][_reader];
|
||||||
|
assert(isReadableStreamDefaultReader(reader));
|
||||||
|
while (reader[_readRequests].length !== 0) {
|
||||||
|
if (controller[_queueTotalSize] === 0) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const readRequest = ArrayPrototypeShift(reader[_readRequests]);
|
||||||
|
readableByteStreamControllerFillReadRequestFromQueue(
|
||||||
|
controller,
|
||||||
|
readRequest,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {ReadableByteStreamController} controller
|
* @param {ReadableByteStreamController} controller
|
||||||
|
@@ -1401,6 +1498,16 @@
       bytesWritten,
       pullIntoDescriptor,
     );
+    if (pullIntoDescriptor.readerType === "none") {
+      readableByteStreamControllerEnqueueDetachedPullIntoToQueue(
+        controller,
+        pullIntoDescriptor,
+      );
+      readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(
+        controller,
+      );
+      return;
+    }
     if (pullIntoDescriptor.bytesFilled < pullIntoDescriptor.elementSize) {
       return;
     }
@@ -1410,16 +1517,11 @@
     if (remainderSize > 0) {
       const end = pullIntoDescriptor.byteOffset +
         pullIntoDescriptor.bytesFilled;
-      // We dont have access to CloneArrayBuffer, so we use .slice(). End is non-inclusive, as the spec says.
-      const remainder = pullIntoDescriptor.buffer.slice(
-        end - remainderSize,
-        end,
-      );
-      readableByteStreamControllerEnqueueChunkToQueue(
+      readableByteStreamControllerEnqueueClonedChunkToQueue(
         controller,
-        remainder,
-        0,
-        remainder.byteLength,
+        pullIntoDescriptor.buffer,
+        end - remainderSize,
+        remainderSize,
       );
     }
     pullIntoDescriptor.bytesFilled -= remainderSize;
@@ -1484,6 +1586,9 @@
     firstDescriptor,
   ) {
     assert(firstDescriptor.bytesFilled === 0);
+    if (firstDescriptor.readerType === "none") {
+      readableByteStreamControllerShiftPendingPullInto(controller);
+    }
     const stream = controller[_stream];
     if (readableStreamHasBYOBReader(stream)) {
       while (readableStreamGetNumReadIntoRequests(stream) > 0) {
@@ -1507,6 +1612,7 @@
     pullIntoDescriptor,
   ) {
     assert(stream[_state] !== "errored");
+    assert(pullIntoDescriptor.readerType !== "none");
     let done = false;
     if (stream[_state] === "closed") {
       assert(pullIntoDescriptor.bytesFilled === 0);
|
||||||
return ready;
|
return ready;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param {ReadableByteStreamController} controller
|
||||||
|
* @param {ReadRequest} readRequest
|
||||||
|
* @returns {void}
|
||||||
|
*/
|
||||||
|
function readableByteStreamControllerFillReadRequestFromQueue(
|
||||||
|
controller,
|
||||||
|
readRequest,
|
||||||
|
) {
|
||||||
|
assert(controller[_queueTotalSize] > 0);
|
||||||
|
const entry = ArrayPrototypeShift(controller[_queue]);
|
||||||
|
controller[_queueTotalSize] -= entry.byteLength;
|
||||||
|
readableByteStreamControllerHandleQueueDrain(controller);
|
||||||
|
const view = new Uint8Array(
|
||||||
|
entry.buffer,
|
||||||
|
entry.byteOffset,
|
||||||
|
entry.byteLength,
|
||||||
|
);
|
||||||
|
readRequest.chunkSteps(view);
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {ReadableByteStreamController} controller
|
* @param {ReadableByteStreamController} controller
|
||||||
* @param {number} size
|
* @param {number} size
|
||||||
|
@ -1708,6 +1835,16 @@
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @template R
|
||||||
|
* @param {ReadableStreamDefaultReader<R>} reader
|
||||||
|
*/
|
||||||
|
function readableStreamDefaultReaderRelease(reader) {
|
||||||
|
readableStreamReaderGenericRelease(reader);
|
||||||
|
const e = new TypeError("The reader was released.");
|
||||||
|
readableStreamDefaultReaderErrorReadRequests(reader, e);
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @template R
|
* @template R
|
||||||
* @param {ReadableStream<R>} stream
|
* @param {ReadableStream<R>} stream
|
||||||
|
@ -1727,18 +1864,10 @@
|
||||||
closedPromise.reject(e);
|
closedPromise.reject(e);
|
||||||
setPromiseIsHandledToTrue(closedPromise.promise);
|
setPromiseIsHandledToTrue(closedPromise.promise);
|
||||||
if (isReadableStreamDefaultReader(reader)) {
|
if (isReadableStreamDefaultReader(reader)) {
|
||||||
/** @type {Array<ReadRequest<R>>} */
|
readableStreamDefaultReaderErrorReadRequests(reader, e);
|
||||||
const readRequests = reader[_readRequests];
|
|
||||||
for (const readRequest of readRequests) {
|
|
||||||
readRequest.errorSteps(e);
|
|
||||||
}
|
|
||||||
reader[_readRequests] = [];
|
|
||||||
} else {
|
} else {
|
||||||
assert(isReadableStreamBYOBReader(reader));
|
assert(isReadableStreamBYOBReader(reader));
|
||||||
for (const readIntoRequest of reader[_readIntoRequests]) {
|
readableStreamBYOBReaderErrorReadIntoRequests(reader, e);
|
||||||
readIntoRequest.errorSteps(e);
|
|
||||||
}
|
|
||||||
reader[_readIntoRequests] = [];
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -2104,7 +2233,7 @@
      */
     function finalize(isError, error) {
       writableStreamDefaultWriterRelease(writer);
-      readableStreamReaderGenericRelease(reader);
+      readableStreamDefaultReaderRelease(reader);
 
       if (signal !== undefined) {
         signal[remove](abortAlgorithm);
@@ -2154,9 +2283,10 @@
   * @param {ReadableStreamGenericReader<R> | ReadableStreamBYOBReader} reader
   */
   function readableStreamReaderGenericRelease(reader) {
-    assert(reader[_stream] !== undefined);
-    assert(reader[_stream][_reader] === reader);
-    if (reader[_stream][_state] === "readable") {
+    const stream = reader[_stream];
+    assert(stream !== undefined);
+    assert(stream[_reader] === reader);
+    if (stream[_state] === "readable") {
       reader[_closedPromise].reject(
         new TypeError(
           "Reader was released and can no longer be used to monitor the stream's closedness.",
@@ -2171,10 +2301,23 @@
       );
     }
     setPromiseIsHandledToTrue(reader[_closedPromise].promise);
-    reader[_stream][_reader] = undefined;
+    stream[_controller][_releaseSteps]();
+    stream[_reader] = undefined;
     reader[_stream] = undefined;
   }
 
+  /**
+   * @param {ReadableStreamBYOBReader} reader
+   * @param {any} e
+   */
+  function readableStreamBYOBReaderErrorReadIntoRequests(reader, e) {
+    const readIntoRequests = reader[_readIntoRequests];
+    reader[_readIntoRequests] = [];
+    for (const readIntoRequest of readIntoRequests) {
+      readIntoRequest.errorSteps(e);
+    }
+  }
+
   /**
    * @template R
   * @param {ReadableStream<R>} stream
@@ -2381,7 +2524,7 @@
     function pullWithDefaultReader() {
       if (isReadableStreamBYOBReader(reader)) {
         assert(reader[_readIntoRequests].length === 0);
-        readableStreamReaderGenericRelease(reader);
+        readableStreamBYOBReaderRelease(reader);
         reader = acquireReadableStreamDefaultReader(stream);
         forwardReaderError(reader);
       }
@@ -2446,7 +2589,7 @@
     function pullWithBYOBReader(view, forBranch2) {
       if (isReadableStreamDefaultReader(reader)) {
        assert(reader[_readRequests].length === 0);
-        readableStreamReaderGenericRelease(reader);
+        readableStreamDefaultReaderRelease(reader);
        reader = acquireReadableStreamBYOBReader(stream);
        forwardReaderError(reader);
      }
@@ -3982,11 +4125,11 @@
        promise.resolve(createIteratorResult(chunk, false));
       },
       closeSteps() {
-        readableStreamReaderGenericRelease(reader);
+        readableStreamDefaultReaderRelease(reader);
        promise.resolve(createIteratorResult(undefined, true));
       },
       errorSteps(e) {
-        readableStreamReaderGenericRelease(reader);
+        readableStreamDefaultReaderRelease(reader);
        promise.reject(e);
       },
     };
@@ -4006,11 +4149,11 @@
      assert(reader[_readRequests].length === 0);
      if (this[_preventCancel] === false) {
        const result = readableStreamReaderGenericCancel(reader, arg);
-        readableStreamReaderGenericRelease(reader);
+        readableStreamDefaultReaderRelease(reader);
        await result;
        return createIteratorResult(arg, true);
      }
-      readableStreamReaderGenericRelease(reader);
+      readableStreamDefaultReaderRelease(reader);
      return createIteratorResult(undefined, true);
    },
  }, asyncIteratorPrototype);
@ -4417,12 +4560,7 @@
|
||||||
if (this[_stream] === undefined) {
|
if (this[_stream] === undefined) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if (this[_readRequests].length) {
|
readableStreamDefaultReaderRelease(this);
|
||||||
throw new TypeError(
|
|
||||||
"There are pending read requests, so the reader cannot be release.",
|
|
||||||
);
|
|
||||||
}
|
|
||||||
readableStreamReaderGenericRelease(this);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
get closed() {
|
get closed() {
|
||||||
|
@ -4544,12 +4682,7 @@
|
||||||
if (this[_stream] === undefined) {
|
if (this[_stream] === undefined) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if (this[_readIntoRequests].length !== 0) {
|
readableStreamBYOBReaderRelease(this);
|
||||||
throw new TypeError(
|
|
||||||
"There are pending read requests, so the reader cannot be released.",
|
|
||||||
);
|
|
||||||
}
|
|
||||||
readableStreamReaderGenericRelease(this);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
get closed() {
|
get closed() {
|
||||||
|
@ -4794,15 +4927,7 @@
|
||||||
assert(readableStreamHasDefaultReader(stream));
|
assert(readableStreamHasDefaultReader(stream));
|
||||||
if (this[_queueTotalSize] > 0) {
|
if (this[_queueTotalSize] > 0) {
|
||||||
assert(readableStreamGetNumReadRequests(stream) === 0);
|
assert(readableStreamGetNumReadRequests(stream) === 0);
|
||||||
const entry = ArrayPrototypeShift(this[_queue]);
|
readableByteStreamControllerFillReadRequestFromQueue(this, readRequest);
|
||||||
this[_queueTotalSize] -= entry.byteLength;
|
|
||||||
readableByteStreamControllerHandleQueueDrain(this);
|
|
||||||
const view = new Uint8Array(
|
|
||||||
entry.buffer,
|
|
||||||
entry.byteOffset,
|
|
||||||
entry.byteLength,
|
|
||||||
);
|
|
||||||
readRequest.chunkSteps(view);
|
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const autoAllocateChunkSize = this[_autoAllocateChunkSize];
|
const autoAllocateChunkSize = this[_autoAllocateChunkSize];
|
||||||
|
@@ -4830,6 +4955,15 @@
      readableStreamAddReadRequest(stream, readRequest);
      readableByteStreamControllerCallPullIfNeeded(this);
    }
+
+    [_releaseSteps]() {
+      if (this[_pendingPullIntos].length !== 0) {
+        /** @type {PullIntoDescriptor} */
+        const firstPendingPullInto = this[_pendingPullIntos][0];
+        firstPendingPullInto.readerType = "none";
+        this[_pendingPullIntos] = [firstPendingPullInto];
+      }
+    }
  }

  webidl.configurePrototype(ReadableByteStreamController);
@@ -4944,6 +5078,10 @@
        readableStreamDefaultControllerCallPullIfNeeded(this);
      }
    }
+
+    [_releaseSteps]() {
+      return;
+    }
  }

  webidl.configurePrototype(ReadableStreamDefaultController);

2 ext/web/06_streams_types.d.ts vendored
@@ -33,7 +33,7 @@ interface PullIntoDescriptor {
  elementSize: number;
  // deno-lint-ignore no-explicit-any
  viewConstructor: any;
-  readerType: "default" | "byob";
+  readerType: "default" | "byob" | "none";
}

interface ReadableByteStreamQueueEntry {
@@ -2,7 +2,7 @@
 
 [package]
 name = "deno_web"
-version = "0.62.0"
+version = "0.63.0"
 authors = ["the Deno authors"]
 edition = "2021"
 license = "MIT"
@@ -16,7 +16,7 @@ path = "lib.rs"
 [dependencies]
 async-trait = "0.1.51"
 base64 = "0.13.0"
-deno_core = { version = "0.113.0", path = "../../core" }
+deno_core = { version = "0.114.0", path = "../../core" }
 encoding_rs = "0.8.29"
 serde = "1.0.129"
 tokio = { version = "1.10.1", features = ["full"] }
@@ -2,7 +2,7 @@
 
 [package]
 name = "deno_webgpu"
-version = "0.32.0"
+version = "0.33.0"
 authors = ["the Deno authors"]
 edition = "2021"
 license = "MIT"
@@ -14,7 +14,7 @@ description = "WebGPU implementation for Deno"
 path = "lib.rs"
 
 [dependencies]
-deno_core = { version = "0.113.0", path = "../../core" }
+deno_core = { version = "0.114.0", path = "../../core" }
 serde = { version = "1.0.129", features = ["derive"] }
 tokio = { version = "1.10.1", features = ["full"] }
 wgpu-core = { version = "0.10.1", features = ["trace"] }
@@ -2,7 +2,7 @@
 
 [package]
 name = "deno_webidl"
-version = "0.31.0"
+version = "0.32.0"
 authors = ["the Deno authors"]
 edition = "2021"
 license = "MIT"
@@ -14,4 +14,4 @@ description = "WebIDL implementation for Deno"
 path = "lib.rs"
 
 [dependencies]
-deno_core = { version = "0.113.0", path = "../../core" }
+deno_core = { version = "0.114.0", path = "../../core" }
@@ -2,7 +2,7 @@
 
 [package]
 name = "deno_websocket"
-version = "0.36.0"
+version = "0.37.0"
 authors = ["the Deno authors"]
 edition = "2021"
 license = "MIT"
@@ -14,8 +14,8 @@ description = "Implementation of WebSocket API for Deno"
 path = "lib.rs"
 
 [dependencies]
-deno_core = { version = "0.113.0", path = "../../core" }
-deno_tls = { version = "0.18.0", path = "../tls" }
+deno_core = { version = "0.114.0", path = "../../core" }
+deno_tls = { version = "0.19.0", path = "../tls" }
 http = "0.2.4"
 hyper = { version = "0.14.12" }
 serde = { version = "1.0.129", features = ["derive"] }
@@ -334,7 +334,7 @@ where
     .map_err(|err| {
       DomExceptionNetworkError::new(&format!(
         "failed to connect to WebSocket: {}",
-        err.to_string()
+        err
       ))
     })?;
 
@@ -2,7 +2,7 @@
 
 [package]
 name = "deno_webstorage"
-version = "0.26.0"
+version = "0.27.0"
 authors = ["the Deno authors"]
 edition = "2021"
 license = "MIT"
@@ -14,7 +14,7 @@ description = "Implementation of WebStorage API for Deno"
 path = "lib.rs"
 
 [dependencies]
-deno_core = { version = "0.113.0", path = "../../core" }
-deno_web = { version = "0.62.0", path = "../web" }
+deno_core = { version = "0.114.0", path = "../../core" }
+deno_web = { version = "0.63.0", path = "../web" }
 rusqlite = { version = "0.25.3", features = ["unlock_notify", "bundled"] }
 serde = { version = "1.0.129", features = ["derive"] }
@@ -2,7 +2,7 @@
 
 [package]
 name = "deno_runtime"
-version = "0.39.0"
+version = "0.40.0"
 authors = ["the Deno authors"]
 edition = "2021"
 license = "MIT"
@@ -22,44 +22,46 @@ name = "hello_runtime"
 path = "examples/hello_runtime.rs"
 
 [build-dependencies]
-deno_broadcast_channel = { version = "0.25.0", path = "../ext/broadcast_channel" }
-deno_console = { version = "0.31.0", path = "../ext/console" }
-deno_core = { version = "0.113.0", path = "../core" }
-deno_crypto = { version = "0.45.0", path = "../ext/crypto" }
-deno_fetch = { version = "0.54.0", path = "../ext/fetch" }
-deno_ffi = { version = "0.18.0", path = "../ext/ffi" }
-deno_http = { version = "0.23.0", path = "../ext/http" }
-deno_net = { version = "0.23.0", path = "../ext/net" }
-deno_timers = { version = "0.29.0", path = "../ext/timers" }
-deno_tls = { version = "0.18.0", path = "../ext/tls" }
-deno_url = { version = "0.31.0", path = "../ext/url" }
-deno_web = { version = "0.62.0", path = "../ext/web" }
-deno_webgpu = { version = "0.32.0", path = "../ext/webgpu" }
-deno_webidl = { version = "0.31.0", path = "../ext/webidl" }
-deno_websocket = { version = "0.36.0", path = "../ext/websocket" }
-deno_webstorage = { version = "0.26.0", path = "../ext/webstorage" }
+deno_broadcast_channel = { version = "0.26.0", path = "../ext/broadcast_channel" }
+deno_console = { version = "0.32.0", path = "../ext/console" }
+deno_core = { version = "0.114.0", path = "../core" }
+deno_crypto = { version = "0.46.0", path = "../ext/crypto" }
+deno_fetch = { version = "0.55.0", path = "../ext/fetch" }
+deno_ffi = { version = "0.19.0", path = "../ext/ffi" }
+deno_http = { version = "0.24.0", path = "../ext/http" }
+deno_net = { version = "0.24.0", path = "../ext/net" }
+deno_timers = { version = "0.30.0", path = "../ext/timers" }
+deno_tls = { version = "0.19.0", path = "../ext/tls" }
+deno_url = { version = "0.32.0", path = "../ext/url" }
+deno_web = { version = "0.63.0", path = "../ext/web" }
+deno_webgpu = { version = "0.33.0", path = "../ext/webgpu" }
+deno_webidl = { version = "0.32.0", path = "../ext/webidl" }
+deno_websocket = { version = "0.37.0", path = "../ext/websocket" }
+deno_webstorage = { version = "0.27.0", path = "../ext/webstorage" }
 
+lzzzz = '=0.8.0'
+
 [target.'cfg(windows)'.build-dependencies]
 winres = "0.1.11"
 winapi = "0.3.9"
 
 [dependencies]
-deno_broadcast_channel = { version = "0.25.0", path = "../ext/broadcast_channel" }
-deno_console = { version = "0.31.0", path = "../ext/console" }
-deno_core = { version = "0.113.0", path = "../core" }
-deno_crypto = { version = "0.45.0", path = "../ext/crypto" }
-deno_fetch = { version = "0.54.0", path = "../ext/fetch" }
-deno_ffi = { version = "0.18.0", path = "../ext/ffi" }
-deno_http = { version = "0.23.0", path = "../ext/http" }
-deno_net = { version = "0.23.0", path = "../ext/net" }
-deno_timers = { version = "0.29.0", path = "../ext/timers" }
-deno_tls = { version = "0.18.0", path = "../ext/tls" }
-deno_url = { version = "0.31.0", path = "../ext/url" }
-deno_web = { version = "0.62.0", path = "../ext/web" }
-deno_webgpu = { version = "0.32.0", path = "../ext/webgpu" }
-deno_webidl = { version = "0.31.0", path = "../ext/webidl" }
-deno_websocket = { version = "0.36.0", path = "../ext/websocket" }
-deno_webstorage = { version = "0.26.0", path = "../ext/webstorage" }
+deno_broadcast_channel = { version = "0.26.0", path = "../ext/broadcast_channel" }
+deno_console = { version = "0.32.0", path = "../ext/console" }
+deno_core = { version = "0.114.0", path = "../core" }
+deno_crypto = { version = "0.46.0", path = "../ext/crypto" }
+deno_fetch = { version = "0.55.0", path = "../ext/fetch" }
+deno_ffi = { version = "0.19.0", path = "../ext/ffi" }
+deno_http = { version = "0.24.0", path = "../ext/http" }
+deno_net = { version = "0.24.0", path = "../ext/net" }
+deno_timers = { version = "0.30.0", path = "../ext/timers" }
+deno_tls = { version = "0.19.0", path = "../ext/tls" }
+deno_url = { version = "0.32.0", path = "../ext/url" }
+deno_web = { version = "0.63.0", path = "../ext/web" }
+deno_webgpu = { version = "0.33.0", path = "../ext/webgpu" }
+deno_webidl = { version = "0.32.0", path = "../ext/webidl" }
+deno_websocket = { version = "0.37.0", path = "../ext/websocket" }
+deno_webstorage = { version = "0.27.0", path = "../ext/webstorage" }
 
 atty = "0.2.14"
 dlopen = "0.1.8"
@@ -70,6 +72,7 @@ http = "0.2.4"
 hyper = { version = "0.14.12", features = ["server", "stream", "http1", "http2", "runtime"] }
 libc = "0.2.106"
 log = "0.4.14"
+lzzzz = '=0.8.0'
 netif = "0.1.0"
 notify = "=5.0.0-pre.12"
 once_cell = "=1.9.0"
@@ -37,7 +37,32 @@ mod not_docs {
   let snapshot = js_runtime.snapshot();
   let snapshot_slice: &[u8] = &*snapshot;
   println!("Snapshot size: {}", snapshot_slice.len());
-  std::fs::write(&snapshot_path, snapshot_slice).unwrap();
+
+  let compressed_snapshot_with_size = {
+    let mut vec = vec![];
+
+    vec.extend_from_slice(
+      &u32::try_from(snapshot.len())
+        .expect("snapshot larger than 4gb")
+        .to_le_bytes(),
+    );
+
+    lzzzz::lz4_hc::compress_to_vec(
+      snapshot_slice,
+      &mut vec,
+      lzzzz::lz4_hc::CLEVEL_MAX,
+    )
+    .expect("snapshot compression failed");
+
+    vec
+  };
+
+  println!(
+    "Snapshot compressed size: {}",
+    compressed_snapshot_with_size.len()
+  );
+
+  std::fs::write(&snapshot_path, compressed_snapshot_with_size).unwrap();
   println!("Snapshot written to: {} ", snapshot_path.display());
 }
 
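The hunk above changes the on-disk snapshot format: a 4-byte little-endian length of the uncompressed snapshot, followed by an LZ4-HC frame. A minimal sketch of that framing under the same assumptions, using the lzzzz calls that appear in the diff; the helper name is illustrative and not part of the repository:

// Sketch: frame a snapshot as [u32 LE uncompressed length][LZ4-HC payload].
// `compress_snapshot` is an illustrative name, not a function in this repo.
fn compress_snapshot(snapshot: &[u8]) -> Vec<u8> {
  let mut out = Vec::new();
  // Record the uncompressed size so the reader can allocate exactly once.
  out.extend_from_slice(
    &u32::try_from(snapshot.len())
      .expect("snapshot larger than 4gb")
      .to_le_bytes(),
  );
  // Append the LZ4-HC compressed payload after the length prefix.
  lzzzz::lz4_hc::compress_to_vec(snapshot, &mut out, lzzzz::lz4_hc::CLEVEL_MAX)
    .expect("snapshot compression failed");
  out
}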
@@ -152,11 +152,7 @@ fn handle_ws_request(
     _ => http::Response::builder()
       .status(http::StatusCode::BAD_REQUEST)
       .body("Not a valid Websocket Request".into()),
-  });
-
-  if resp.is_err() {
-    return resp;
-  }
+  })?;
 
   let (parts, _) = req.into_parts();
   let req = http::Request::from_parts(parts, body);
@@ -193,7 +189,7 @@ fn handle_ws_request(
     pump_websocket_messages(websocket, inbound_tx, outbound_rx).await;
   });
 
-  resp
+  Ok(resp)
 }
 
 fn handle_json_request(
@@ -1,14 +1,36 @@
 // Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
 use deno_core::Snapshot;
 use log::debug;
+use once_cell::sync::Lazy;
 
-pub static CLI_SNAPSHOT: &[u8] =
-  include_bytes!(concat!(env!("OUT_DIR"), "/CLI_SNAPSHOT.bin"));
+pub static CLI_SNAPSHOT: Lazy<Box<[u8]>> = Lazy::new(
+  #[allow(clippy::uninit_vec)]
+  #[cold]
+  #[inline(never)]
+  || {
+    static COMPRESSED_CLI_SNAPSHOT: &[u8] =
+      include_bytes!(concat!(env!("OUT_DIR"), "/CLI_SNAPSHOT.bin"));
+
+    let size =
+      u32::from_le_bytes(COMPRESSED_CLI_SNAPSHOT[0..4].try_into().unwrap())
+        as usize;
+    let mut vec = Vec::with_capacity(size);
+
+    // SAFETY: vec is allocated with exact snapshot size (+ alignment)
+    // SAFETY: non zeroed bytes are overwritten with decompressed snapshot
+    unsafe {
+      vec.set_len(size);
+    }
+
+    lzzzz::lz4::decompress(&COMPRESSED_CLI_SNAPSHOT[4..], &mut vec).unwrap();
+
+    vec.into_boxed_slice()
+  },
+);
 
 pub fn deno_isolate_init() -> Snapshot {
   debug!("Deno isolate init with snapshots.");
-  let data = CLI_SNAPSHOT;
-  Snapshot::Static(data)
+  Snapshot::Static(&*CLI_SNAPSHOT)
 }
 
 #[cfg(test)]
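The runtime side mirrors the framing written by the build script: read the 4-byte length prefix, allocate exactly that much space, and decompress into it. A minimal sketch of the same idea without the `Vec::set_len` optimization used above; the helper name is illustrative, and only the lzzzz calls and the framing come from the diff:

// Sketch: decode [u32 LE uncompressed length][LZ4 payload] back into bytes.
// `decompress_snapshot` is an illustrative name, not a function in this repo.
fn decompress_snapshot(framed: &[u8]) -> Box<[u8]> {
  let size = u32::from_le_bytes(framed[0..4].try_into().unwrap()) as usize;
  // Zero-filling is the safe alternative to the `set_len` trick above;
  // every byte is overwritten by the decompressor anyway.
  let mut vec = vec![0u8; size];
  lzzzz::lz4::decompress(&framed[4..], &mut vec).unwrap();
  vec.into_boxed_slice()
}

Keeping the decompressed snapshot behind `once_cell::sync::Lazy` means the cost is paid once, on first isolate creation, while the binary embeds only the smaller compressed bytes.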
@@ -138,6 +138,7 @@
      dlopen: __bootstrap.ffi.dlopen,
      UnsafePointer: __bootstrap.ffi.UnsafePointer,
      UnsafePointerView: __bootstrap.ffi.UnsafePointerView,
+      UnsafeFnPointer: __bootstrap.ffi.UnsafeFnPointer,
      flock: __bootstrap.fs.flock,
      flockSync: __bootstrap.fs.flockSync,
      funlock: __bootstrap.fs.funlock,
@@ -1918,7 +1918,7 @@ fn permission_prompt(message: &str) -> bool {
     if success != TRUE {
       panic!(
         "Error flushing console input buffer: {}",
-        std::io::Error::last_os_error().to_string()
+        std::io::Error::last_os_error()
       )
     }
   }
@@ -1941,7 +1941,7 @@ fn permission_prompt(message: &str) -> bool {
     if success != TRUE {
       panic!(
         "Error emulating enter key press: {}",
-        std::io::Error::last_os_error().to_string()
+        std::io::Error::last_os_error()
       )
     }
   }
@@ -1954,7 +1954,7 @@ fn permission_prompt(message: &str) -> bool {
     if success != TRUE {
       panic!(
        "Error peeking console input buffer: {}",
-        std::io::Error::last_os_error().to_string()
+        std::io::Error::last_os_error()
      )
    }
    events_read == 0
@@ -1,7 +1,7 @@
 # Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
 [package]
 name = "serde_v8"
-version = "0.24.0"
+version = "0.25.0"
 authors = ["the Deno authors"]
 edition = "2021"
 license = "MIT"
@@ -1,5 +1,6 @@
 // Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
 
+use std::os::raw::c_void;
 use std::thread::sleep;
 use std::time::Duration;
 
@@ -101,3 +102,13 @@ pub extern "C" fn nonblocking_buffer(ptr: *const u8, len: usize) {
   let buf = unsafe { std::slice::from_raw_parts(ptr, len) };
   assert_eq!(buf, vec![1, 2, 3, 4, 5, 6, 7, 8]);
 }
+
+#[no_mangle]
+pub extern "C" fn get_add_u32_ptr() -> *const c_void {
+  add_u32 as *const c_void
+}
+
+#[no_mangle]
+pub extern "C" fn get_sleep_blocking_ptr() -> *const c_void {
+  sleep_blocking as *const c_void
+}
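The new exports above hand existing Rust test functions to the FFI tests as raw pointers; the underlying trick is just casting a function item to `*const c_void`. A small self-contained sketch of that pattern, with an illustrative `add_u32` defined locally (mirroring the test symbol the diff refers to):

use std::os::raw::c_void;

#[no_mangle]
pub extern "C" fn add_u32(a: u32, b: u32) -> u32 {
  a + b
}

#[no_mangle]
pub extern "C" fn get_add_u32_ptr() -> *const c_void {
  // A fn item coerces to a C-compatible function pointer, which can then be
  // cast to a raw pointer and handed across the FFI boundary; the caller can
  // wrap it again on the other side (for example with Deno.UnsafeFnPointer).
  add_u32 as *const c_void
}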
@@ -86,7 +86,7 @@ remote.symbols.method14(null);
 remote.symbols.method14(0);
 
 // @ts-expect-error: Invalid argument
-remote.symbols.method15(null);
+remote.symbols.method15(0);
 remote.symbols.method15(new Uint16Array(1));
 remote.symbols.method15({} as Deno.UnsafePointer);
 
@@ -109,3 +109,15 @@ const result4 = remote.symbols.method19();
 // @ts-expect-error: Invalid argument
 result4.then((_0: Deno.TypedArray) => {});
 result4.then((_1: Deno.UnsafePointer) => {});
+
+const ptr = new Deno.UnsafePointer(0n);
+const fnptr = new Deno.UnsafeFnPointer(
+  ptr,
+  {
+    parameters: ["u32", "pointer"],
+    result: "void",
+  } as const,
+);
+// @ts-expect-error: Invalid argument
+fnptr.call(null, null);
+fnptr.call(0, null);
Some files were not shown because too many files have changed in this diff.