mirror of https://github.com/denoland/deno.git
synced 2024-12-24 08:09:08 -05:00

refactor(lsp): use deno_ast and cache swc ASTs (#11780)

This commit is contained in:
parent a5bcf7033e
commit 2c2e3ec1ca

39 changed files with 1037 additions and 1758 deletions

Cargo.lock (generated), 130 changed lines

@@ -556,6 +556,7 @@ dependencies = [
"chrono",
"clap",
"data-url",
"deno_ast",
"deno_broadcast_channel",
"deno_console",
"deno_core",

@@ -597,6 +598,7 @@ dependencies = [
"nix",
"notify",
"num_cpus",
"once_cell",
"os_pipe",
"percent-encoding",
"pin-project",

@@ -609,9 +611,6 @@ dependencies = [
"serde",
"shell-escape",
"sourcemap",
"swc_bundler",
"swc_common",
"swc_ecmascript",
"tempfile",
"termcolor",
"test_util",

@@ -648,6 +647,23 @@ dependencies = [
"make-cmd",
]

[[package]]
name = "deno_ast"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2322427a9a5ac45c586231457a10715268fdac24ec89d41ba14a347db99fe770"
dependencies = [
"data-url",
"dprint-swc-ecma-ast-view",
"serde",
"swc_atoms",
"swc_bundler",
"swc_common",
"swc_ecmascript",
"text_lines",
"url",
]

[[package]]
name = "deno_bench_util"
version = "0.10.0"

@@ -714,19 +730,18 @@ dependencies = [

[[package]]
name = "deno_doc"
version = "0.12.1"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23c860b2359120f1565aafd4e9e5eddf7fcbdb70dc55a97719c00d327570ffce"
checksum = "f1456673bbb10085534055a40004090f081af391bd21f117f1d9ee611adb1a53"
dependencies = [
"cfg-if 1.0.0",
"deno_ast",
"deno_graph",
"futures",
"lazy_static",
"regex",
"serde",
"serde_json",
"swc_common",
"swc_ecmascript",
"termcolor",
]

@@ -760,23 +775,21 @@ dependencies = [

[[package]]
name = "deno_graph"
version = "0.3.1"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec6c70108e13d63f6fa51975f0557d5c0fec80a247c3e51f2a215ef6614b53dc"
checksum = "3c9e150f0e8a39fb5c4c2bc019b052dca18fd648c75674949db6f5d35f5abe72"
dependencies = [
"anyhow",
"cfg-if 1.0.0",
"data-url",
"deno_ast",
"futures",
"lazy_static",
"regex",
"ring",
"serde",
"serde_json",
"swc_common",
"swc_ecmascript",
"termcolor",
"text_lines",
"url",
]

@@ -797,22 +810,19 @@ dependencies = [

[[package]]
name = "deno_lint"
version = "0.14.0"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0cbb4f64c7884703198d8fb0f67a900e4dfbc456efaabebc419a4d3612da064"
checksum = "e7af2b640545a0b60268b1dfc4b78d78585e89446b7b20c5e0406bff872f2a01"
dependencies = [
"anyhow",
"deno_ast",
"derive_more",
"dprint-swc-ecma-ast-view",
"if_chain",
"log",
"once_cell",
"regex",
"serde",
"serde_json",
"swc_atoms",
"swc_common",
"swc_ecmascript",
]

[[package]]

@@ -1068,9 +1078,9 @@ dependencies = [

[[package]]
name = "dprint-plugin-typescript"
version = "0.54.0"
version = "0.55.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2249bbc5f46daecd4de4157cec4c4a118724024a213bea8a53ac693047959291"
checksum = "0f893075573bad180d06bcc673996a128b09e9ab608c45e40c379fd733f85636"
dependencies = [
"dprint-core",
"dprint-swc-ecma-ast-view",

@@ -1082,9 +1092,9 @@ dependencies = [

[[package]]
name = "dprint-swc-ecma-ast-view"
version = "0.33.1"
version = "0.35.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c44a27f32f2bc9347d08e4b8f47db055f4df9b8d9e1236cc2036a9e95707ba7b"
checksum = "3c561abeae30e338748557e2c85e7c73621877eba137951207a3fd32306d9ce2"
dependencies = [
"bumpalo",
"fnv",

@@ -3378,9 +3388,9 @@ dependencies = [

[[package]]
name = "swc_bundler"
version = "0.56.0"
version = "0.58.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e00938122669f1358a570dad80626ac9007053967aa3e4066440c7946609784"
checksum = "3d42b9e0dff902d05ea17813dc3537a29641a35634864d55ee91215c7cf5430f"
dependencies = [
"ahash 0.7.4",
"anyhow",

@@ -3407,9 +3417,9 @@ dependencies = [

[[package]]
name = "swc_common"
version = "0.11.9"
version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a873d7284ebc53a9051f41068dc2cb979e399a4a1fab25d9c0dee9f8db4d1f5"
checksum = "7ca21695d45b5374d7eafedda065de3cab2337a4707642302f71caaa4c0d338a"
dependencies = [
"ahash 0.7.4",
"ast_node",

@@ -3428,13 +3438,14 @@ dependencies = [
"swc_eq_ignore_macros",
"swc_visit",
"unicode-width",
"url",
]

[[package]]
name = "swc_ecma_ast"
version = "0.51.1"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0541aee098b52870ef6181deae2fbe3f3025605f2e6f27b3993e6f66607a46a1"
checksum = "aa0efb0e13ba6545e2b86336937e1641594f78c48484b85c2dc9582eaccb41e1"
dependencies = [
"is-macro",
"num-bigint",

@@ -3446,9 +3457,9 @@ dependencies = [

[[package]]
name = "swc_ecma_codegen"
version = "0.69.1"
version = "0.70.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58e7b482064bc6386168de843b85fddb6b70fc2cd86323962821642a253fa427"
checksum = "39e1c6d22c400be1e512321ab8190747d74e0aebfedbea3991011ef1ee767f67"
dependencies = [
"bitflags",
"num-bigint",

@@ -3475,9 +3486,9 @@ dependencies = [

[[package]]
name = "swc_ecma_dep_graph"
version = "0.38.1"
version = "0.39.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "630fe1a1464a64c64cdc9ca8c0d0a8358c66e1ca79cb8acf577e429dff56a104"
checksum = "f4361e5514224618db7aed966268fd6afe2b9a04d353197a3ab3cefc272c7c6a"
dependencies = [
"swc_atoms",
"swc_common",

@@ -3487,9 +3498,9 @@ dependencies = [

[[package]]
name = "swc_ecma_loader"
version = "0.17.1"
version = "0.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e9796ff40909d124ac382bf89fd94bb3a108e1f6a5c7786e3dd54b9fb579dff"
checksum = "e9695aa0b1394a1954da965a00a6a9624aa1c9f49148f72f1c01f5bd9c39d74c"
dependencies = [
"anyhow",
"fxhash",

@@ -3502,9 +3513,9 @@ dependencies = [

[[package]]
name = "swc_ecma_parser"
version = "0.69.1"
version = "0.70.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7eb1f1c556118750c7871bef5c9f37cd9cac9f5d6479b922aa026cebf1fac18"
checksum = "bd061e1df02f5cf2a8ec788d79a9c5bd90b4deb3e59c849a325dce6ca8725ef8"
dependencies = [
"either",
"enum_kind",

@@ -3523,9 +3534,9 @@ dependencies = [

[[package]]
name = "swc_ecma_transforms"
version = "0.69.0"
version = "0.71.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9214e4c1349c7cbdaa364fb9f67c4db3b028425be605e05fb7c387af1e788113"
checksum = "b0b10595701693c55154e129b7d78c142f0391363a9855bb465ca5c757e7e43a"
dependencies = [
"swc_atoms",
"swc_common",

@@ -3543,9 +3554,9 @@ dependencies = [

[[package]]
name = "swc_ecma_transforms_base"
version = "0.30.1"
version = "0.31.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b0aa724a347be8c2a14e3debdec192531a865388595114d7685f123e9780731"
checksum = "5e3c5519bcd00912e149d5d163468fd219fe143abc2ed642da1c0f8c97efc58a"
dependencies = [
"fxhash",
"once_cell",

@@ -3562,9 +3573,9 @@ dependencies = [

[[package]]
name = "swc_ecma_transforms_classes"
version = "0.16.0"
version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f55f90a828f89127ebf063e60b3ad97c3e6c339999e9304e350f72cd3187e5c"
checksum = "4fab5a6996e92cd9afcd4c9e0288d18ab6ce1265c0fccfdc050c75267f362f01"
dependencies = [
"swc_atoms",
"swc_common",

@@ -3576,9 +3587,9 @@ dependencies = [

[[package]]
name = "swc_ecma_transforms_optimization"
version = "0.39.0"
version = "0.41.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c28f88330eed13b1e58522ae42acd09d516ed65d3bf80b64d0bc794ffc4627b5"
checksum = "4b7c74c58f17c4ec08709e2d2847e9d836803dfeb7606debd50bee82061bcbe6"
dependencies = [
"dashmap",
"fxhash",

@@ -3598,9 +3609,9 @@ dependencies = [

[[package]]
name = "swc_ecma_transforms_proposal"
version = "0.36.0"
version = "0.38.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f926eb4c5e1526f52da2b56d66649d64a0f77da417c30d144789fa7eb741007"
checksum = "503d521523c2d399ab198dbd2e19adc62ffb3093c18398a31e5f7bec224c1b35"
dependencies = [
"either",
"fxhash",

@@ -3618,9 +3629,9 @@ dependencies = [

[[package]]
name = "swc_ecma_transforms_react"
version = "0.37.0"
version = "0.39.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2ce5289df2c034b45edf839cc3a356dcd035102f5e591b1eb5c37204e3007006"
checksum = "128c04b250a62a5d44b4e91e065688cc691c7906cd98d5a90ad9b82c4d393dc2"
dependencies = [
"base64 0.13.0",
"dashmap",

@@ -3641,9 +3652,9 @@ dependencies = [

[[package]]
name = "swc_ecma_transforms_typescript"
version = "0.38.0"
version = "0.40.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "86c22e8077c8fa6225d5d2ab4fcef223d155c1fa30304c06aaf0e6b6934396f3"
checksum = "b0dad652b9e8071c869277527df626e5e0d5213d3cb939c32c872a3531af1d81"
dependencies = [
"fxhash",
"serde",

@@ -3658,9 +3669,9 @@ dependencies = [

[[package]]
name = "swc_ecma_utils"
version = "0.43.1"
version = "0.44.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7755b2d35e93fc371186335d0cc65b1cc647c113b60e1a44ab8f679bf09521d6"
checksum = "0435a50d1c728a65b2f84a20b6997e977ce39a445b379c8eb936133682a7febe"
dependencies = [
"once_cell",
"scoped-tls",

@@ -3673,9 +3684,9 @@ dependencies = [

[[package]]
name = "swc_ecma_visit"
version = "0.37.1"
version = "0.38.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "253528a42ad8a646ff7904e3770464f014331f7647467166a8ad92725910d85c"
checksum = "b007dfbb41e090fd9d5704d86c9b56a73b6a5b201adf2aed14715a003917df04"
dependencies = [
"num-bigint",
"swc_atoms",

@@ -3686,9 +3697,9 @@ dependencies = [

[[package]]
name = "swc_ecmascript"
version = "0.60.0"
version = "0.63.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2cb79d3e236ce0118e370f75a37a85ccc527338cf8d4697a7d23419711a6169f"
checksum = "8a4a6b2c048fb7740fd84c4974049d31e2b5ba423c580b2794fad2efd7fdfa4e"
dependencies = [
"swc_ecma_ast",
"swc_ecma_codegen",

@@ -3850,9 +3861,12 @@ checksum = "288cb548dbe72b652243ea797201f3d481a0609a967980fcc5b2315ea811560a"

[[package]]
name = "text_lines"
version = "0.1.2"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "116279ecd8fa26fbdcf20c79ee6f85a5ce325a953486e11e71c51670bdaa308f"
checksum = "f3b748c1c41162300bfc1748c7458ea66a45aabff1d9202a3267a95db40c7b7c"
dependencies = [
"serde",
]

[[package]]
name = "textwrap"

@@ -43,10 +43,11 @@ winapi = "0.3.9"
winres = "0.1.11"

[dependencies]
deno_ast = { version = "0.1.6", features = ["bundler", "codegen", "dep_graph", "module_specifier", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
deno_core = { version = "0.98.0", path = "../core" }
deno_doc = "0.12.1"
deno_graph = "0.3.1"
deno_lint = { version = "0.14.0", features = ["docs"] }
deno_doc = "0.13.0"
deno_graph = "0.4.0"
deno_lint = { version = "0.15.0", features = ["docs"] }
deno_runtime = { version = "0.24.0", path = "../runtime" }
deno_tls = { version = "0.3.0", path = "../ext/tls" }

@@ -58,7 +59,7 @@ data-url = "0.1.0"
dissimilar = "1.0.2"
dprint-plugin-json = "0.13.0"
dprint-plugin-markdown = "0.10.0"
dprint-plugin-typescript = "0.54.0"
dprint-plugin-typescript = "0.55.0"
encoding_rs = "0.8.28"
env_logger = "0.8.4"
fancy-regex = "0.7.1"

@@ -73,6 +74,7 @@ log = { version = "0.4.14", features = ["serde"] }
lspower = "1.1.0"
notify = "5.0.0-pre.12"
num_cpus = "1.13.0"
once_cell = "1.8.0"
percent-encoding = "2.1.0"
pin-project = "1.0.8"
rand = { version = "0.8.4", features = ["small_rng"] }

@@ -84,9 +86,6 @@ semver-parser = "0.10.2"
serde = { version = "1.0.129", features = ["derive"] }
shell-escape = "0.1.5"
sourcemap = "6.0.1"
swc_bundler = "0.56.0"
swc_common = { version = "0.11.9", features = ["sourcemap"] }
swc_ecmascript = { version = "0.60.0", features = ["codegen", "dep_graph", "parser", "proposal", "react", "transforms", "typescript", "visit"] }
tempfile = "3.2.0"
termcolor = "1.1.2"
text-size = "1.1.0"
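
Not part of the diff: with the direct swc_* dependencies dropped from Cargo.toml above, cli code now reaches swc through deno_ast's re-export. A minimal sketch of the resulting import pattern, using only paths that appear in the changed source files below:

// Hypothetical illustration only; each path mirrors one used in this commit.
use deno_ast::swc::ast::Module; // previously: use swc_ecmascript::ast::Module;
use deno_ast::swc::common::SourceMap; // previously: use swc_common::SourceMap;
use deno_ast::MediaType; // previously: use crate::media_type::MediaType;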

@@ -1,15 +1,18 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
use deno_ast::swc::bundler::Hook;
use deno_ast::swc::bundler::ModuleRecord;
use deno_ast::swc::common::Span;
use deno_core::error::AnyError;

pub struct BundleHook;

impl swc_bundler::Hook for BundleHook {
impl Hook for BundleHook {
fn get_import_meta_props(
&self,
span: swc_common::Span,
module_record: &swc_bundler::ModuleRecord,
) -> Result<Vec<swc_ecmascript::ast::KeyValueProp>, AnyError> {
use swc_ecmascript::ast;
span: Span,
module_record: &ModuleRecord,
) -> Result<Vec<deno_ast::swc::ast::KeyValueProp>, AnyError> {
use deno_ast::swc::ast;

// we use custom file names, and swc "wraps" these in `<` and `>` so, we
// want to strip those back out.

@@ -1,106 +0,0 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.

use std::cell::RefCell;
use std::rc::Rc;
use std::sync::Arc;
use swc_common::comments::Comment;
use swc_common::comments::Comments;
use swc_common::comments::SingleThreadedComments;
use swc_common::comments::SingleThreadedCommentsMapInner;
use swc_common::BytePos;

/// An implementation of swc's `Comments` that implements `Sync`
/// to support being used in multi-threaded code. This implementation
/// is immutable and should you need mutability you may create a copy
/// by converting it to an swc `SingleThreadedComments`.
#[derive(Clone, Debug)]
pub struct MultiThreadedComments {
leading: Arc<SingleThreadedCommentsMapInner>,
trailing: Arc<SingleThreadedCommentsMapInner>,
}

impl MultiThreadedComments {
pub fn from_single_threaded(comments: SingleThreadedComments) -> Self {
let (leading, trailing) = comments.take_all();
let leading = Arc::new(Rc::try_unwrap(leading).unwrap().into_inner());
let trailing = Arc::new(Rc::try_unwrap(trailing).unwrap().into_inner());
MultiThreadedComments { leading, trailing }
}

pub fn as_single_threaded(&self) -> SingleThreadedComments {
let leading = Rc::new(RefCell::new((*self.leading).to_owned()));
let trailing = Rc::new(RefCell::new((*self.trailing).to_owned()));
SingleThreadedComments::from_leading_and_trailing(leading, trailing)
}

/// Gets a vector of all the comments sorted by position.
pub fn get_vec(&self) -> Vec<Comment> {
let mut comments = self
.leading
.values()
.chain(self.trailing.values())
.flatten()
.cloned()
.collect::<Vec<_>>();
comments.sort_by_key(|comment| comment.span.lo);
comments
}
}

impl Comments for MultiThreadedComments {
fn has_leading(&self, pos: BytePos) -> bool {
self.leading.contains_key(&pos)
}

fn get_leading(&self, pos: BytePos) -> Option<Vec<Comment>> {
self.leading.get(&pos).cloned()
}

fn has_trailing(&self, pos: BytePos) -> bool {
self.trailing.contains_key(&pos)
}

fn get_trailing(&self, pos: BytePos) -> Option<Vec<Comment>> {
self.trailing.get(&pos).cloned()
}

fn add_leading(&self, _pos: BytePos, _cmt: Comment) {
panic_readonly();
}

fn add_leading_comments(&self, _pos: BytePos, _comments: Vec<Comment>) {
panic_readonly();
}

fn move_leading(&self, _from: BytePos, _to: BytePos) {
panic_readonly();
}

fn take_leading(&self, _pos: BytePos) -> Option<Vec<Comment>> {
panic_readonly();
}

fn add_trailing(&self, _pos: BytePos, _cmt: Comment) {
panic_readonly();
}

fn add_trailing_comments(&self, _pos: BytePos, _comments: Vec<Comment>) {
panic_readonly();
}

fn move_trailing(&self, _from: BytePos, _to: BytePos) {
panic_readonly();
}

fn take_trailing(&self, _pos: BytePos) -> Option<Vec<Comment>> {
panic_readonly();
}

fn add_pure_comment(&self, _pos: BytePos) {
panic_readonly();
}
}

fn panic_readonly() -> ! {
panic!("MultiThreadedComments do not support write operations")
}
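
Not part of the diff: the file removed above wrapped swc comments in an immutable, Sync-friendly form. A minimal round-trip sketch, assuming only the APIs visible in the removed code (the wrapper is no longer needed because the deno_ast ParsedSource used elsewhere in this commit exposes comments directly):

// Sketch only; assumes the removed MultiThreadedComments type above is in scope.
use swc_common::comments::SingleThreadedComments;

fn round_trip(single: SingleThreadedComments) {
  // Wrap into the immutable, Sync-friendly form...
  let multi = MultiThreadedComments::from_single_threaded(single);
  // ...read all comments sorted by position...
  let _all = multi.get_vec();
  // ...and convert back when swc needs a mutable comments store.
  let _mutable: SingleThreadedComments = multi.as_single_threaded();
}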
cli/ast/mod.rs, 419 changed lines

@@ -1,62 +1,44 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.

use crate::config_file;
use crate::media_type::MediaType;
use crate::text_encoding::strip_bom;

use deno_ast::get_syntax;
use deno_ast::swc::ast::Module;
use deno_ast::swc::ast::Program;
use deno_ast::swc::codegen::text_writer::JsWriter;
use deno_ast::swc::codegen::Node;
use deno_ast::swc::common::chain;
use deno_ast::swc::common::comments::SingleThreadedComments;
use deno_ast::swc::common::BytePos;
use deno_ast::swc::common::FileName;
use deno_ast::swc::common::Globals;
use deno_ast::swc::common::SourceMap;
use deno_ast::swc::common::Spanned;
use deno_ast::swc::parser::lexer::Lexer;
use deno_ast::swc::parser::StringInput;
use deno_ast::swc::transforms::fixer;
use deno_ast::swc::transforms::helpers;
use deno_ast::swc::transforms::hygiene;
use deno_ast::swc::transforms::pass::Optional;
use deno_ast::swc::transforms::proposals;
use deno_ast::swc::transforms::react;
use deno_ast::swc::transforms::typescript;
use deno_ast::swc::visit::FoldWith;
use deno_ast::Diagnostic;
use deno_ast::LineAndColumnDisplay;
use deno_ast::MediaType;
use deno_ast::ParsedSource;
use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_core::serde_json;
use deno_core::ModuleSpecifier;
use std::error::Error;
use std::fmt;
use std::ops::Range;
use std::rc::Rc;
use std::sync::Arc;
use swc_common::chain;
use swc_common::comments::Comment;
use swc_common::comments::CommentKind;
use swc_common::comments::Comments;
use swc_common::comments::SingleThreadedComments;
use swc_common::BytePos;
use swc_common::FileName;
use swc_common::Globals;
use swc_common::SourceFile;
use swc_common::SourceMap;
use swc_common::Span;
use swc_common::Spanned;
use swc_ecmascript::ast::Module;
use swc_ecmascript::ast::Program;
use swc_ecmascript::codegen::text_writer::JsWriter;
use swc_ecmascript::codegen::Node;
use swc_ecmascript::dep_graph::analyze_dependencies;
use swc_ecmascript::dep_graph::DependencyDescriptor;
use swc_ecmascript::parser::lexer::Lexer;
use swc_ecmascript::parser::token::Token;
use swc_ecmascript::parser::EsConfig;
use swc_ecmascript::parser::JscTarget;
use swc_ecmascript::parser::StringInput;
use swc_ecmascript::parser::Syntax;
use swc_ecmascript::parser::TsConfig;
use swc_ecmascript::transforms::fixer;
use swc_ecmascript::transforms::helpers;
use swc_ecmascript::transforms::hygiene;
use swc_ecmascript::transforms::pass::Optional;
use swc_ecmascript::transforms::proposals;
use swc_ecmascript::transforms::react;
use swc_ecmascript::transforms::typescript;
use swc_ecmascript::visit::FoldWith;

mod bundle_hook;
mod comments;
mod source_file_info;
mod transforms;

pub use bundle_hook::BundleHook;
use comments::MultiThreadedComments;
use source_file_info::SourceFileInfo;

static TARGET: JscTarget = JscTarget::Es2020;

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Location {

@@ -65,9 +47,29 @@ pub struct Location {
pub col: usize,
}

impl From<swc_common::Loc> for Location {
fn from(swc_loc: swc_common::Loc) -> Self {
use swc_common::FileName::*;
impl Location {
pub fn from_pos(parsed_source: &ParsedSource, pos: BytePos) -> Self {
Location::from_line_and_column(
parsed_source.specifier().to_string(),
parsed_source.source().line_and_column_index(pos),
)
}

pub fn from_line_and_column(
specifier: String,
line_and_column: deno_ast::LineAndColumnIndex,
) -> Self {
Location {
specifier,
line: line_and_column.line_index + 1,
col: line_and_column.column_index,
}
}
}

impl From<deno_ast::swc::common::Loc> for Location {
fn from(swc_loc: deno_ast::swc::common::Loc) -> Self {
use deno_ast::swc::common::FileName::*;

let filename = match &swc_loc.file.name {
Real(path_buf) => path_buf.to_string_lossy().to_string(),

@@ -78,7 +80,7 @@ impl From<swc_common::Loc> for Location {
Location {
specifier: filename,
line: swc_loc.line,
col: swc_loc.col_display,
col: swc_loc.col.0,
}
}
}

@@ -95,60 +97,6 @@ impl std::fmt::Display for Location {
}
}

/// A diagnostic from the AST parser.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Diagnostic {
pub location: Location,
pub message: String,
}

impl Error for Diagnostic {}

impl fmt::Display for Diagnostic {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{} at {}", self.message, self.location)
}
}

fn get_es_config(jsx: bool) -> EsConfig {
EsConfig {
class_private_methods: true,
class_private_props: true,
class_props: true,
dynamic_import: true,
export_default_from: true,
export_namespace_from: true,
import_meta: true,
jsx,
nullish_coalescing: true,
num_sep: true,
optional_chaining: true,
top_level_await: true,
..EsConfig::default()
}
}

fn get_ts_config(tsx: bool, dts: bool) -> TsConfig {
TsConfig {
decorators: true,
dts,
dynamic_import: true,
tsx,
..TsConfig::default()
}
}

pub fn get_syntax(media_type: &MediaType) -> Syntax {
match media_type {
MediaType::JavaScript => Syntax::Es(get_es_config(false)),
MediaType::Jsx => Syntax::Es(get_es_config(true)),
MediaType::TypeScript => Syntax::Typescript(get_ts_config(false, false)),
MediaType::Dts => Syntax::Typescript(get_ts_config(false, true)),
MediaType::Tsx => Syntax::Typescript(get_ts_config(true, false)),
_ => Syntax::Es(get_es_config(false)),
}
}

#[derive(Debug, Clone)]
pub enum ImportsNotUsedAsValues {
Remove,

@@ -246,62 +194,20 @@ fn strip_config_from_emit_options(
}
}

/// A logical structure to hold the value of a parsed module for further
/// processing.
#[derive(Clone)]
pub struct ParsedModule {
info: Arc<SourceFileInfo>,
comments: MultiThreadedComments,
pub module: Module,
}

impl fmt::Debug for ParsedModule {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("ParsedModule")
.field("comments", &self.comments)
.field("module", &self.module)
.finish()
}
}

impl ParsedModule {
/// Return a vector of dependencies for the module.
pub fn analyze_dependencies(&self) -> Vec<DependencyDescriptor> {
analyze_dependencies(&self.module, &self.comments)
}

/// Get the module's leading comments, where triple slash directives might
/// be located.
pub fn get_leading_comments(&self) -> Vec<Comment> {
self
.comments
.get_leading(self.module.span.lo)
.unwrap_or_else(Vec::new)
}

/// Get the module's comments sorted by position.
pub fn get_comments(&self) -> Vec<Comment> {
self.comments.get_vec()
}

/// Get a location for a given position within the module.
pub fn get_location(&self, pos: BytePos) -> Location {
self.info.get_location(pos)
}

/// Transform a TypeScript file into a JavaScript file, based on the supplied
/// options.
///
/// The result is a tuple of the code and optional source map as strings.
pub fn transpile(
self,
parsed_source: &ParsedSource,
options: &EmitOptions,
) -> Result<(String, Option<String>), AnyError> {
let program = Program::Module(self.module);
let program: Program = (*parsed_source.program()).clone();
let source_map = Rc::new(SourceMap::default());
let file_name = FileName::Custom(self.info.specifier.clone());
source_map.new_source_file(file_name, self.info.text.clone());
let comments = self.comments.as_single_threaded(); // needs to be mutable
let file_name = FileName::Custom(parsed_source.specifier().to_string());
source_map
.new_source_file(file_name, parsed_source.source().text().to_string());
let comments = parsed_source.comments().as_single_threaded(); // needs to be mutable

let jsx_pass = react::react(
source_map.clone(),

@@ -332,7 +238,7 @@ impl ParsedModule {
hygiene(),
);

let program = swc_common::GLOBALS.set(&Globals::new(), || {
let program = deno_ast::swc::common::GLOBALS.set(&Globals::new(), || {
helpers::HELPERS.set(&helpers::Helpers::new(false), || {
program.fold_with(&mut passes)
})

@@ -347,8 +253,8 @@ impl ParsedModule {
&mut buf,
Some(&mut src_map_buf),
));
let config = swc_ecmascript::codegen::Config { minify: false };
let mut emitter = swc_ecmascript::codegen::Emitter {
let config = deno_ast::swc::codegen::Config { minify: false };
let mut emitter = deno_ast::swc::codegen::Emitter {
cfg: config,
comments: Some(&comments),
cm: source_map.clone(),

@@ -374,116 +280,37 @@ impl ParsedModule {
}
Ok((src, map))
}
}

/// For a given specifier, source, and media type, parse the text of the
/// module and return a representation which can be further processed.
///
/// # Arguments
///
/// - `specifier` - The module specifier for the module.
/// - `source` - The source code for the module.
/// - `media_type` - The media type for the module.
///
// NOTE(bartlomieju): `specifier` has `&str` type instead of
// `&ModuleSpecifier` because runtime compiler APIs don't
// require valid module specifiers
pub fn parse(
specifier: &str,
source: &str,
media_type: &MediaType,
) -> Result<ParsedModule, AnyError> {
let source = strip_bom(source);
let info = SourceFileInfo::new(specifier, source);
let input =
StringInput::new(source, BytePos(0), BytePos(source.len() as u32));
let (comments, module) =
parse_string_input(input, media_type).map_err(|err| Diagnostic {
location: info.get_location(err.span().lo),
message: err.into_kind().msg().to_string(),
})?;

Ok(ParsedModule {
info: Arc::new(info),
comments: MultiThreadedComments::from_single_threaded(comments),
module,
})
}

pub enum TokenOrComment {
Token(Token),
Comment { kind: CommentKind, text: String },
}

pub struct LexedItem {
pub span: Span,
pub inner: TokenOrComment,
}

impl LexedItem {
pub fn span_as_range(&self) -> Range<usize> {
self.span.lo.0 as usize..self.span.hi.0 as usize
}
}

fn flatten_comments(
comments: SingleThreadedComments,
) -> impl Iterator<Item = Comment> {
let (leading, trailing) = comments.take_all();
let mut comments = (*leading).clone().into_inner();
comments.extend((*trailing).clone().into_inner());
comments.into_iter().flat_map(|el| el.1)
}

pub fn lex(source: &str, media_type: &MediaType) -> Vec<LexedItem> {
let comments = SingleThreadedComments::default();
let lexer = Lexer::new(
get_syntax(media_type),
TARGET,
StringInput::new(source, BytePos(0), BytePos(source.len() as u32)),
Some(&comments),
);

let mut tokens: Vec<LexedItem> = lexer
.map(|token| LexedItem {
span: token.span,
inner: TokenOrComment::Token(token.token),
})
.collect();

tokens.extend(flatten_comments(comments).map(|comment| LexedItem {
span: comment.span,
inner: TokenOrComment::Comment {
kind: comment.kind,
text: comment.text,
},
}));

tokens.sort_by_key(|item| item.span.lo.0);

tokens
}

/// A low level function which transpiles a source module into an swc
/// SourceFile.
pub fn transpile_module(
specifier: &str,
source: &str,
media_type: &MediaType,
media_type: MediaType,
emit_options: &EmitOptions,
globals: &Globals,
cm: Rc<SourceMap>,
) -> Result<(Rc<SourceFile>, Module), AnyError> {
) -> Result<(Rc<deno_ast::swc::common::SourceFile>, Module), AnyError> {
let source = strip_bom(source);
let source_file = cm.new_source_file(
FileName::Custom(specifier.to_string()),
source.to_string(),
);
let input = StringInput::from(&*source_file);
let (comments, module) =
parse_string_input(input, media_type).map_err(|err| Diagnostic {
location: cm.lookup_char_pos(err.span().lo).into(),
let comments = SingleThreadedComments::default();
let syntax = get_syntax(media_type);
let lexer = Lexer::new(syntax, deno_ast::TARGET, input, Some(&comments));
let mut parser = deno_ast::swc::parser::Parser::new_from(lexer);
let module = parser.parse_module().map_err(|err| {
let location = cm.lookup_char_pos(err.span().lo);
Diagnostic {
display_position: LineAndColumnDisplay {
line_number: location.line,
column_number: location.col_display + 1,
},
specifier: specifier.to_string(),
message: err.into_kind().msg().to_string(),
}
})?;

let jsx_pass = react::react(

@@ -511,7 +338,7 @@ pub fn transpile_module(
fixer(Some(&comments)),
);

let module = swc_common::GLOBALS.set(globals, || {
let module = deno_ast::swc::common::GLOBALS.set(globals, || {
helpers::HELPERS.set(&helpers::Helpers::new(false), || {
module.fold_with(&mut passes)
})

@@ -520,69 +347,12 @@ pub fn transpile_module(
Ok((source_file, module))
}

fn parse_string_input(
input: StringInput,
media_type: &MediaType,
) -> Result<
(SingleThreadedComments, Module),
swc_ecmascript::parser::error::Error,
> {
let syntax = get_syntax(media_type);
let comments = SingleThreadedComments::default();
let lexer = Lexer::new(syntax, TARGET, input, Some(&comments));
let mut parser = swc_ecmascript::parser::Parser::new_from(lexer);
let module = parser.parse_module()?;

Ok((comments, module))
}

#[cfg(test)]
mod tests {
use super::*;
use std::collections::HashMap;
use swc_common::BytePos;
use swc_ecmascript::dep_graph::DependencyKind;

#[test]
fn test_parsed_module_analyze_dependencies() {
let specifier = resolve_url_or_path("https://deno.land/x/mod.js").unwrap();
let source = "import * as bar from './test.ts';\nconst foo = await import('./foo.ts');";
let parsed_module =
parse(specifier.as_str(), source, &MediaType::JavaScript)
.expect("could not parse module");
let actual = parsed_module.analyze_dependencies();
assert_eq!(
actual,
vec![
DependencyDescriptor {
kind: DependencyKind::Import,
is_dynamic: false,
leading_comments: Vec::new(),
span: Span::new(BytePos(0), BytePos(33), Default::default()),
specifier: "./test.ts".into(),
specifier_span: Span::new(
BytePos(21),
BytePos(32),
Default::default()
),
import_assertions: HashMap::default(),
},
DependencyDescriptor {
kind: DependencyKind::Import,
is_dynamic: true,
leading_comments: Vec::new(),
span: Span::new(BytePos(52), BytePos(70), Default::default()),
specifier: "./foo.ts".into(),
specifier_span: Span::new(
BytePos(59),
BytePos(69),
Default::default()
),
import_assertions: HashMap::default(),
}
]
);
}
use deno_ast::parse_module;
use deno_ast::ParseParams;
use deno_ast::SourceTextInfo;

#[test]
fn test_transpile() {

@@ -605,10 +375,15 @@ mod tests {
}
}
"#;
let module = parse(specifier.as_str(), source, &MediaType::TypeScript)
let module = parse_module(ParseParams {
specifier: specifier.as_str().to_string(),
source: SourceTextInfo::from_string(source.to_string()),
media_type: deno_ast::MediaType::TypeScript,
capture_tokens: false,
maybe_syntax: None,
})
.expect("could not parse module");
let (code, maybe_map) = module
.transpile(&EmitOptions::default())
let (code, maybe_map) = transpile(&module, &EmitOptions::default())
.expect("could not strip types");
assert!(code.starts_with("var D;\n(function(D) {\n"));
assert!(

@@ -628,10 +403,15 @@ mod tests {
}
}
"#;
let module = parse(specifier.as_str(), source, &MediaType::Tsx)
let module = parse_module(ParseParams {
specifier: specifier.as_str().to_string(),
source: SourceTextInfo::from_string(source.to_string()),
media_type: deno_ast::MediaType::Tsx,
capture_tokens: false,
maybe_syntax: None,
})
.expect("could not parse module");
let (code, _) = module
.transpile(&EmitOptions::default())
let (code, _) = transpile(&module, &EmitOptions::default())
.expect("could not strip types");
assert!(code.contains("React.createElement(\"div\", null"));
}

@@ -658,10 +438,15 @@ mod tests {
}
}
"#;
let module = parse(specifier.as_str(), source, &MediaType::TypeScript)
let module = parse_module(ParseParams {
specifier: specifier.as_str().to_string(),
source: SourceTextInfo::from_string(source.to_string()),
media_type: deno_ast::MediaType::TypeScript,
capture_tokens: false,
maybe_syntax: None,
})
.expect("could not parse module");
let (code, _) = module
.transpile(&EmitOptions::default())
let (code, _) = transpile(&module, &EmitOptions::default())
.expect("could not strip types");
assert!(code.contains("_applyDecoratedDescriptor("));
}
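
Not part of the diff: the updated tests above show the new parsing flow end to end. A condensed sketch of that flow, using only the deno_ast items and the cli-local transpile/EmitOptions helpers that appear in this file (the specifier and source text here are hypothetical):

// Sketch only; mirrors the calls made in the updated tests above.
use deno_ast::{parse_module, ParseParams, SourceTextInfo};

fn parse_and_transpile() {
  let parsed = parse_module(ParseParams {
    specifier: "file:///mod.ts".to_string(),
    source: SourceTextInfo::from_string("enum D { A, B }".to_string()),
    media_type: deno_ast::MediaType::TypeScript,
    capture_tokens: false,
    maybe_syntax: None,
  })
  .expect("could not parse module");
  // `transpile` and `EmitOptions` are the cli-local helpers from this module.
  let (_code, _maybe_map) =
    transpile(&parsed, &EmitOptions::default()).expect("could not strip types");
}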

@@ -1,130 +0,0 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.

use super::Location;

use swc_common::BytePos;

pub struct SourceFileInfo {
pub specifier: String,
pub text: String,
line_start_byte_positions: Vec<BytePos>,
}

impl SourceFileInfo {
pub fn new(specifier: &str, text: &str) -> SourceFileInfo {
SourceFileInfo {
line_start_byte_positions: get_line_start_positions(text),
specifier: specifier.to_string(),
text: text.to_string(),
}
}

pub fn get_location(&self, pos: BytePos) -> Location {
let line_index = self.get_line_index_at_pos(pos);
let col = self.get_column_on_line_index_at_pos(line_index, pos);

Location {
specifier: self.specifier.clone(),
// todo(dsherret): this is temporarily 1-indexed in order to have
// the same behaviour as swc, but we should change this to be 0-indexed
// in order to be the same as the LSP.
line: line_index + 1,
col,
}
}

fn get_line_index_at_pos(&self, pos: BytePos) -> usize {
match self.line_start_byte_positions.binary_search(&pos) {
Ok(index) => index,
Err(insert_index) => insert_index - 1,
}
}

fn get_column_on_line_index_at_pos(
&self,
line_index: usize,
pos: BytePos,
) -> usize {
assert!(line_index < self.line_start_byte_positions.len());
let pos = pos.0 as usize;
let line_start_pos = self.line_start_byte_positions[line_index].0 as usize;
let line_end_pos = self
.line_start_byte_positions
.get(line_index + 1)
// may include line feed chars at the end, but in that case the pos should be less
.map(|p| p.0 as usize)
.unwrap_or_else(|| self.text.len());
let line_text = &self.text[line_start_pos..line_end_pos];

if pos < line_start_pos {
panic!(
"byte position {} was less than the start line position of {}",
pos, line_start_pos
);
} else if pos > line_end_pos {
panic!(
"byte position {} exceeded the end line position of {}",
pos, line_end_pos
);
} else if pos == line_end_pos {
line_text.chars().count()
} else {
line_text
.char_indices()
.position(|(c_pos, _)| line_start_pos + c_pos >= pos)
.unwrap()
}
}
}

fn get_line_start_positions(text: &str) -> Vec<BytePos> {
let mut result = vec![BytePos(0)];
for (pos, c) in text.char_indices() {
if c == '\n' {
let line_start_pos = BytePos((pos + 1) as u32);
result.push(line_start_pos);
}
}
result
}

#[cfg(test)]
mod test {
use super::SourceFileInfo;
use crate::ast::Location;

use swc_common::BytePos;

#[test]
fn should_provide_locations() {
let text = "12\n3\r\n4\n5";
let specifier = "file:///file.ts";
let info = SourceFileInfo::new(specifier, text);
assert_pos_line_and_col(&info, 0, 1, 0); // 1
assert_pos_line_and_col(&info, 1, 1, 1); // 2
assert_pos_line_and_col(&info, 2, 1, 2); // \n
assert_pos_line_and_col(&info, 3, 2, 0); // 3
assert_pos_line_and_col(&info, 4, 2, 1); // \r
assert_pos_line_and_col(&info, 5, 2, 2); // \n
assert_pos_line_and_col(&info, 6, 3, 0); // 4
assert_pos_line_and_col(&info, 7, 3, 1); // \n
assert_pos_line_and_col(&info, 8, 4, 0); // 5
assert_pos_line_and_col(&info, 9, 4, 1); // <EOF>
}

fn assert_pos_line_and_col(
info: &SourceFileInfo,
pos: u32,
line: usize,
col: usize,
) {
assert_eq!(
info.get_location(BytePos(pos)),
Location {
specifier: info.specifier.clone(),
line,
col,
}
);
}
}

@@ -1,7 +1,7 @@
use swc_common::DUMMY_SP;
use swc_ecmascript::ast as swc_ast;
use swc_ecmascript::visit::noop_fold_type;
use swc_ecmascript::visit::Fold;
use deno_ast::swc::ast as swc_ast;
use deno_ast::swc::common::DUMMY_SP;
use deno_ast::swc::visit::noop_fold_type;
use deno_ast::swc::visit::Fold;

/// Transforms import declarations to variable declarations
/// with a dynamic import. This is used to provide import

@@ -15,7 +15,7 @@ impl Fold for DownlevelImportsFolder {
&mut self,
module_item: swc_ast::ModuleItem,
) -> swc_ast::ModuleItem {
use swc_ecmascript::ast::*;
use deno_ast::swc::ast::*;

match &module_item {
ModuleItem::ModuleDecl(ModuleDecl::Import(import_decl)) => {

@@ -117,7 +117,7 @@ impl Fold for StripExportsFolder {
&mut self,
module_item: swc_ast::ModuleItem,
) -> swc_ast::ModuleItem {
use swc_ecmascript::ast::*;
use deno_ast::swc::ast::*;

match module_item {
ModuleItem::ModuleDecl(ModuleDecl::ExportAll(export_all)) => {

@@ -249,18 +249,18 @@ fn create_assignment(key: String) -> swc_ast::ObjectPatProp {

#[cfg(test)]
mod test {
use deno_ast::swc::ast::Module;
use deno_ast::swc::codegen::text_writer::JsWriter;
use deno_ast::swc::codegen::Node;
use deno_ast::swc::common::FileName;
use deno_ast::swc::common::SourceMap;
use deno_ast::swc::parser::Parser;
use deno_ast::swc::parser::StringInput;
use deno_ast::swc::parser::Syntax;
use deno_ast::swc::parser::TsConfig;
use deno_ast::swc::visit::Fold;
use deno_ast::swc::visit::FoldWith;
use std::rc::Rc;
use swc_common::FileName;
use swc_common::SourceMap;
use swc_ecmascript::ast::Module;
use swc_ecmascript::codegen::text_writer::JsWriter;
use swc_ecmascript::codegen::Node;
use swc_ecmascript::parser::Parser;
use swc_ecmascript::parser::StringInput;
use swc_ecmascript::parser::Syntax;
use swc_ecmascript::parser::TsConfig;
use swc_ecmascript::visit::Fold;
use swc_ecmascript::visit::FoldWith;

use super::*;

@@ -450,8 +450,8 @@ mod test {
{
let writer =
Box::new(JsWriter::new(source_map.clone(), "\n", &mut buf, None));
let config = swc_ecmascript::codegen::Config { minify: false };
let mut emitter = swc_ecmascript::codegen::Emitter {
let config = deno_ast::swc::codegen::Config { minify: false };
let mut emitter = deno_ast::swc::codegen::Emitter {
cfg: config,
comments: None,
cm: source_map,

@@ -9,8 +9,8 @@
//! Diagnostics are compile-time type errors, whereas JsErrors are runtime
//! exceptions.

use crate::ast::Diagnostic;
use crate::import_map::ImportMapError;
use deno_ast::Diagnostic;
use deno_core::error::AnyError;

fn get_import_map_error_class(_: &ImportMapError) -> &'static str {

@@ -6,10 +6,10 @@ use crate::http_cache::HttpCache;
use crate::http_util::fetch_once;
use crate::http_util::FetchOnceArgs;
use crate::http_util::FetchOnceResult;
use crate::media_type::MediaType;
use crate::text_encoding;
use crate::version::get_user_agent;
use data_url::DataUrl;
use deno_ast::MediaType;
use deno_core::error::custom_error;
use deno_core::error::generic_error;
use deno_core::error::uri_error;

@@ -52,7 +52,7 @@ pub struct File {
/// The resolved media type for the file.
pub media_type: MediaType,
/// The source of the file as a string.
pub source: String,
pub source: Arc<String>,
/// The _final_ specifier for the file. The requested specifier and the final
/// specifier maybe different for remote files that have been redirected.
pub specifier: ModuleSpecifier,

@@ -137,7 +137,7 @@ fn fetch_local(specifier: &ModuleSpecifier) -> Result<File, AnyError> {
local,
maybe_types: None,
media_type,
source,
source: Arc::new(source),
specifier: specifier.clone(),
maybe_headers: None,
})

@@ -275,7 +275,7 @@ impl FileFetcher {
local,
maybe_types,
media_type,
source,
source: Arc::new(source),
specifier: specifier.clone(),
maybe_headers: Some(headers.clone()),
})

@@ -367,7 +367,7 @@ impl FileFetcher {
local,
maybe_types: None,
media_type,
source,
source: Arc::new(source),
specifier: specifier.clone(),
maybe_headers: None,
})

@@ -429,7 +429,7 @@ impl FileFetcher {
local,
maybe_types: None,
media_type,
source,
source: Arc::new(source),
specifier: specifier.clone(),
maybe_headers: None,
})

@@ -673,7 +673,7 @@ mod tests {
let url_str = format!("http://127.0.0.1:4545/encoding/{}", fixture);
let specifier = resolve_url(&url_str).unwrap();
let (file, headers) = test_fetch_remote(&specifier).await;
assert_eq!(file.source, expected);
assert_eq!(file.source.as_str(), expected);
assert_eq!(file.media_type, MediaType::TypeScript);
assert_eq!(
headers.get("content-type").unwrap(),

@@ -685,7 +685,7 @@ mod tests {
let p = test_util::testdata_path().join(format!("encoding/{}.ts", charset));
let specifier = resolve_url_or_path(p.to_str().unwrap()).unwrap();
let (file, _) = test_fetch(&specifier).await;
assert_eq!(file.source, expected);
assert_eq!(file.source.as_str(), expected);
}

#[test]

@@ -898,7 +898,7 @@ mod tests {
local,
maybe_types: None,
media_type: MediaType::TypeScript,
source: "some source code".to_string(),
source: Arc::new("some source code".to_string()),
specifier: specifier.clone(),
maybe_headers: None,
};

@@ -928,7 +928,7 @@ mod tests {
let maybe_file = file_fetcher.get_source(&specifier);
assert!(maybe_file.is_some());
let file = maybe_file.unwrap();
assert_eq!(file.source, "export const redirect = 1;\n");
assert_eq!(file.source.as_str(), "export const redirect = 1;\n");
assert_eq!(
file.specifier,
resolve_url("http://localhost:4545/subdir/redirects/redirect1.js")

@@ -955,7 +955,7 @@ mod tests {
assert!(result.is_ok());
let file = result.unwrap();
assert_eq!(
file.source,
file.source.as_str(),
"export const a = \"a\";\n\nexport enum A {\n A,\n B,\n C,\n}\n"
);
assert_eq!(file.media_type, MediaType::TypeScript);

@@ -987,7 +987,7 @@ mod tests {
assert!(result.is_ok());
let file = result.unwrap();
assert_eq!(
file.source,
file.source.as_str(),
"export const a = \"a\";\n\nexport enum A {\n A,\n B,\n C,\n}\n"
);
assert_eq!(file.media_type, MediaType::TypeScript);

@@ -1010,7 +1010,7 @@ mod tests {
assert!(result.is_ok());
let file = result.unwrap();
assert_eq!(
file.source,
file.source.as_str(),
"export { printHello } from \"./print_hello.ts\";\n"
);
assert_eq!(file.media_type, MediaType::TypeScript);

@@ -1033,7 +1033,7 @@ mod tests {
assert!(result.is_ok());
let file = result.unwrap();
assert_eq!(
file.source,
file.source.as_str(),
"export { printHello } from \"./print_hello.ts\";\n"
);
// This validates that when using the cached value, because we modified

@@ -1054,7 +1054,7 @@ mod tests {
assert!(result.is_ok());
let file = result.unwrap();
assert_eq!(
file.source,
file.source.as_str(),
"export { printHello } from \"./print_hello.ts\";\n"
);
assert_eq!(file.media_type, MediaType::Json);

@@ -1077,7 +1077,7 @@ mod tests {
assert!(result.is_ok());
let file = result.unwrap();
assert_eq!(
file.source,
file.source.as_str(),
"export { printHello } from \"./print_hello.ts\";\n"
);
assert_eq!(file.media_type, MediaType::TypeScript);

@@ -1497,7 +1497,7 @@ mod tests {
.await;
assert!(result.is_ok());
let file = result.unwrap();
assert_eq!(file.source, r#"console.log("hello deno");"#);
assert_eq!(file.source.as_str(), r#"console.log("hello deno");"#);

fs::write(fixture_path, r#"console.log("goodbye deno");"#)
.expect("could not write file");

@@ -1506,7 +1506,7 @@ mod tests {
.await;
assert!(result.is_ok());
let file = result.unwrap();
assert_eq!(file.source, r#"console.log("goodbye deno");"#);
assert_eq!(file.source.as_str(), r#"console.log("goodbye deno");"#);
}

#[tokio::test]

@@ -1,9 +1,8 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.

use crate::colors;
use crate::media_type::serialize_media_type;
use crate::media_type::MediaType;

use deno_ast::MediaType;
use deno_core::resolve_url;
use deno_core::serde::Serialize;
use deno_core::ModuleSpecifier;

@@ -77,10 +76,7 @@ pub struct ModuleGraphInfoMod {
pub maybe_type_dependency: Option<ModuleGraphInfoDep>,
#[serde(skip_serializing_if = "Option::is_none")]
pub size: Option<usize>,
#[serde(
skip_serializing_if = "Option::is_none",
serialize_with = "serialize_media_type"
)]
#[serde(skip_serializing_if = "Option::is_none")]
pub media_type: Option<MediaType>,
#[serde(skip_serializing_if = "Option::is_none")]
pub local: Option<PathBuf>,
@ -4,14 +4,22 @@ use super::language_server;
|
|||
use super::tsc;
|
||||
|
||||
use crate::ast;
|
||||
use crate::ast::Location;
|
||||
use crate::import_map::ImportMap;
|
||||
use crate::lsp::documents::DocumentData;
|
||||
use crate::media_type::MediaType;
|
||||
use crate::module_graph::parse_deno_types;
|
||||
use crate::module_graph::parse_ts_reference;
|
||||
use crate::module_graph::TypeScriptReference;
|
||||
use crate::tools::lint::create_linter;
|
||||
|
||||
use deno_ast::swc::ast as swc_ast;
|
||||
use deno_ast::swc::common::DUMMY_SP;
|
||||
use deno_ast::swc::visit::Node;
|
||||
use deno_ast::swc::visit::Visit;
|
||||
use deno_ast::swc::visit::VisitWith;
|
||||
use deno_ast::Diagnostic;
|
||||
use deno_ast::MediaType;
|
||||
use deno_ast::SourceTextInfo;
|
||||
use deno_core::error::anyhow;
|
||||
use deno_core::error::custom_error;
|
||||
use deno_core::error::AnyError;
|
||||
|
@ -29,11 +37,6 @@ use regex::Regex;
|
|||
use std::cmp::Ordering;
|
||||
use std::collections::HashMap;
|
||||
use std::fmt;
|
||||
use swc_common::DUMMY_SP;
|
||||
use swc_ecmascript::ast as swc_ast;
|
||||
use swc_ecmascript::visit::Node;
|
||||
use swc_ecmascript::visit::Visit;
|
||||
use swc_ecmascript::visit::VisitWith;
|
||||
|
||||
lazy_static::lazy_static! {
|
||||
/// Diagnostic error codes which actually are the same, and so when grouping
|
||||
|
@ -131,17 +134,12 @@ fn as_lsp_range(range: &deno_lint::diagnostic::Range) -> Range {
|
|||
}
|
||||
|
||||
pub fn get_lint_references(
|
||||
specifier: &ModuleSpecifier,
|
||||
media_type: &MediaType,
|
||||
source_code: &str,
|
||||
parsed_source: &deno_ast::ParsedSource,
|
||||
) -> Result<Vec<Reference>, AnyError> {
|
||||
let syntax = ast::get_syntax(media_type);
|
||||
let syntax = deno_ast::get_syntax(parsed_source.media_type());
|
||||
let lint_rules = rules::get_recommended_rules();
|
||||
let linter = create_linter(syntax, lint_rules);
|
||||
// TODO(@kitsonk) we should consider caching the swc source file versions for
|
||||
// reuse by other processes
|
||||
let (_, lint_diagnostics) =
|
||||
linter.lint(specifier.to_string(), source_code.to_string())?;
|
||||
let lint_diagnostics = linter.lint_with_ast(parsed_source);
|
||||
|
||||
Ok(
|
||||
lint_diagnostics
|
||||
|
@ -281,27 +279,34 @@ pub fn resolve_import(
|
|||
|
||||
pub fn parse_module(
|
||||
specifier: &ModuleSpecifier,
|
||||
source: &str,
|
||||
media_type: &MediaType,
|
||||
) -> Result<ast::ParsedModule, AnyError> {
|
||||
ast::parse(&specifier.to_string(), source, media_type)
|
||||
source: SourceTextInfo,
|
||||
media_type: MediaType,
|
||||
) -> Result<deno_ast::ParsedSource, Diagnostic> {
|
||||
deno_ast::parse_module(deno_ast::ParseParams {
|
||||
specifier: specifier.as_str().to_string(),
|
||||
source,
|
||||
media_type,
|
||||
// capture the tokens for linting and formatting
|
||||
capture_tokens: true,
|
||||
maybe_syntax: None,
|
||||
})
|
||||
}
|
||||
|
||||
// TODO(@kitsonk) a lot of this logic is duplicated in module_graph.rs in
// Module::parse() and should be refactored out to a common function.
pub fn analyze_dependencies(
specifier: &ModuleSpecifier,
media_type: &MediaType,
parsed_module: &ast::ParsedModule,
media_type: MediaType,
parsed_source: &deno_ast::ParsedSource,
maybe_import_map: &Option<ImportMap>,
) -> (HashMap<String, Dependency>, Option<ResolvedDependency>) {
let mut maybe_type = None;
let mut dependencies = HashMap::<String, Dependency>::new();

// Parse leading comments for supported triple slash references.
for comment in parsed_module.get_leading_comments().iter() {
for comment in parsed_source.get_leading_comments().iter() {
if let Some((ts_reference, span)) = parse_ts_reference(comment) {
let loc = parsed_module.get_location(span.lo);
let loc = parsed_source.source().line_and_column_index(span.lo);
match ts_reference {
TypeScriptReference::Path(import) => {
let dep = dependencies.entry(import.clone()).or_default();

@ -310,20 +315,19 @@ pub fn analyze_dependencies(
|
|||
dep.maybe_code = Some(resolved_import);
|
||||
dep.maybe_code_specifier_range = Some(Range {
|
||||
start: Position {
|
||||
line: (loc.line - 1) as u32,
|
||||
character: loc.col as u32,
|
||||
line: loc.line_index as u32,
|
||||
character: loc.column_index as u32,
|
||||
},
|
||||
end: Position {
|
||||
line: (loc.line - 1) as u32,
|
||||
character: (loc.col + import.chars().count() + 2) as u32,
|
||||
line: loc.line_index as u32,
|
||||
character: (loc.column_index + import.chars().count() + 2) as u32,
|
||||
},
|
||||
});
|
||||
}
|
||||
TypeScriptReference::Types(import) => {
|
||||
let resolved_import =
|
||||
resolve_import(&import, specifier, maybe_import_map);
|
||||
if media_type == &MediaType::JavaScript
|
||||
|| media_type == &MediaType::Jsx
|
||||
if media_type == MediaType::JavaScript || media_type == MediaType::Jsx
|
||||
{
|
||||
maybe_type = Some(resolved_import.clone());
|
||||
}
|
||||
|
@ -331,12 +335,12 @@ pub fn analyze_dependencies(
|
|||
dep.maybe_type = Some(resolved_import);
|
||||
dep.maybe_type_specifier_range = Some(Range {
|
||||
start: Position {
|
||||
line: (loc.line - 1) as u32,
|
||||
character: loc.col as u32,
|
||||
line: loc.line_index as u32,
|
||||
character: loc.column_index as u32,
|
||||
},
|
||||
end: Position {
|
||||
line: (loc.line - 1) as u32,
|
||||
character: (loc.col + import.chars().count() + 2) as u32,
|
||||
line: loc.line_index as u32,
|
||||
character: (loc.column_index + import.chars().count() + 2) as u32,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
@ -345,9 +349,9 @@ pub fn analyze_dependencies(
|
|||
}
|
||||
|
||||
// Parse ES and type only imports
|
||||
let descriptors = parsed_module.analyze_dependencies();
|
||||
let descriptors = deno_graph::analyze_dependencies(parsed_source);
|
||||
for desc in descriptors.into_iter().filter(|desc| {
|
||||
desc.kind != swc_ecmascript::dep_graph::DependencyKind::Require
|
||||
desc.kind != deno_ast::swc::dep_graph::DependencyKind::Require
|
||||
}) {
|
||||
let resolved_import =
|
||||
resolve_import(&desc.specifier, specifier, maybe_import_map);
|
||||
|
@ -359,7 +363,7 @@ pub fn analyze_dependencies(
|
|||
(
|
||||
resolve_import(deno_types, specifier, maybe_import_map),
|
||||
deno_types.clone(),
|
||||
parsed_module.get_location(span.lo)
|
||||
parsed_source.source().line_and_column_index(span.lo)
|
||||
)
|
||||
})
|
||||
} else {
|
||||
|
@ -368,16 +372,20 @@ pub fn analyze_dependencies(
|
|||
|
||||
let dep = dependencies.entry(desc.specifier.to_string()).or_default();
|
||||
dep.is_dynamic = desc.is_dynamic;
|
||||
let start = parsed_module.get_location(desc.specifier_span.lo);
|
||||
let end = parsed_module.get_location(desc.specifier_span.hi);
|
||||
let start = parsed_source
|
||||
.source()
|
||||
.line_and_column_index(desc.specifier_span.lo);
|
||||
let end = parsed_source
|
||||
.source()
|
||||
.line_and_column_index(desc.specifier_span.hi);
|
||||
let range = Range {
|
||||
start: Position {
|
||||
line: (start.line - 1) as u32,
|
||||
character: start.col as u32,
|
||||
line: start.line_index as u32,
|
||||
character: start.column_index as u32,
|
||||
},
|
||||
end: Position {
|
||||
line: (end.line - 1) as u32,
|
||||
character: end.col as u32,
|
||||
line: end.line_index as u32,
|
||||
character: end.column_index as u32,
|
||||
},
|
||||
};
|
||||
dep.maybe_code_specifier_range = Some(range);
|
||||
|
@ -388,12 +396,15 @@ pub fn analyze_dependencies(
|
|||
{
|
||||
dep.maybe_type_specifier_range = Some(Range {
|
||||
start: Position {
|
||||
line: (loc.line - 1) as u32,
|
||||
character: (loc.col + 1) as u32,
|
||||
line: loc.line_index as u32,
|
||||
// +1 to skip quote
|
||||
character: (loc.column_index + 1) as u32,
|
||||
},
|
||||
end: Position {
|
||||
line: (loc.line - 1) as u32,
|
||||
character: (loc.col + 1 + specifier.chars().count()) as u32,
|
||||
line: loc.line_index as u32,
|
||||
// +1 to skip quote
|
||||
character: (loc.column_index + 1 + specifier.chars().count())
|
||||
as u32,
|
||||
},
|
||||
});
|
||||
dep.maybe_type = Some(resolved_dependency);
|
||||
|
@ -692,14 +703,12 @@ impl CodeActionCollection {
|
|||
})
|
||||
.unwrap();
|
||||
|
||||
let line_content = if let Some(doc) = document {
|
||||
doc
|
||||
.content_line(diagnostic.range.start.line as usize)
|
||||
.ok()
|
||||
.flatten()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let line_content = document.map(|d| {
|
||||
d.source()
|
||||
.text_info()
|
||||
.line_text(diagnostic.range.start.line as usize)
|
||||
.to_string()
|
||||
});
|
||||
|
||||
let mut changes = HashMap::new();
|
||||
changes.insert(
|
||||
|
@ -1021,14 +1030,14 @@ impl DependencyRanges {
|
|||
|
||||
struct DependencyRangeCollector<'a> {
|
||||
import_ranges: DependencyRanges,
|
||||
parsed_module: &'a ast::ParsedModule,
|
||||
parsed_source: &'a deno_ast::ParsedSource,
|
||||
}
|
||||
|
||||
impl<'a> DependencyRangeCollector<'a> {
|
||||
pub fn new(parsed_module: &'a ast::ParsedModule) -> Self {
|
||||
pub fn new(parsed_source: &'a deno_ast::ParsedSource) -> Self {
|
||||
Self {
|
||||
import_ranges: DependencyRanges::default(),
|
||||
parsed_module,
|
||||
parsed_source,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1043,8 +1052,8 @@ impl<'a> Visit for DependencyRangeCollector<'a> {
|
|||
node: &swc_ast::ImportDecl,
|
||||
_parent: &dyn Node,
|
||||
) {
|
||||
let start = self.parsed_module.get_location(node.src.span.lo);
|
||||
let end = self.parsed_module.get_location(node.src.span.hi);
|
||||
let start = Location::from_pos(self.parsed_source, node.src.span.lo);
|
||||
let end = Location::from_pos(self.parsed_source, node.src.span.hi);
|
||||
self.import_ranges.0.push(DependencyRange {
|
||||
range: narrow_range(get_range_from_location(&start, &end)),
|
||||
specifier: node.src.value.to_string(),
|
||||
|
@ -1057,8 +1066,8 @@ impl<'a> Visit for DependencyRangeCollector<'a> {
|
|||
_parent: &dyn Node,
|
||||
) {
|
||||
if let Some(src) = &node.src {
|
||||
let start = self.parsed_module.get_location(src.span.lo);
|
||||
let end = self.parsed_module.get_location(src.span.hi);
|
||||
let start = Location::from_pos(self.parsed_source, src.span.lo);
|
||||
let end = Location::from_pos(self.parsed_source, src.span.hi);
|
||||
self.import_ranges.0.push(DependencyRange {
|
||||
range: narrow_range(get_range_from_location(&start, &end)),
|
||||
specifier: src.value.to_string(),
|
||||
|
@ -1071,8 +1080,8 @@ impl<'a> Visit for DependencyRangeCollector<'a> {
|
|||
node: &swc_ast::ExportAll,
|
||||
_parent: &dyn Node,
|
||||
) {
|
||||
let start = self.parsed_module.get_location(node.src.span.lo);
|
||||
let end = self.parsed_module.get_location(node.src.span.hi);
|
||||
let start = Location::from_pos(self.parsed_source, node.src.span.lo);
|
||||
let end = Location::from_pos(self.parsed_source, node.src.span.hi);
|
||||
self.import_ranges.0.push(DependencyRange {
|
||||
range: narrow_range(get_range_from_location(&start, &end)),
|
||||
specifier: node.src.value.to_string(),
|
||||
|
@ -1084,8 +1093,8 @@ impl<'a> Visit for DependencyRangeCollector<'a> {
|
|||
node: &swc_ast::TsImportType,
|
||||
_parent: &dyn Node,
|
||||
) {
|
||||
let start = self.parsed_module.get_location(node.arg.span.lo);
|
||||
let end = self.parsed_module.get_location(node.arg.span.hi);
|
||||
let start = Location::from_pos(self.parsed_source, node.arg.span.lo);
|
||||
let end = Location::from_pos(self.parsed_source, node.arg.span.hi);
|
||||
self.import_ranges.0.push(DependencyRange {
|
||||
range: narrow_range(get_range_from_location(&start, &end)),
|
||||
specifier: node.arg.value.to_string(),
|
||||
|
@ -1096,11 +1105,11 @@ impl<'a> Visit for DependencyRangeCollector<'a> {
|
|||
/// Analyze a document for import ranges, which then can be used to identify if
|
||||
/// a particular position within the document as inside an import range.
|
||||
pub fn analyze_dependency_ranges(
|
||||
parsed_module: &ast::ParsedModule,
|
||||
parsed_source: &deno_ast::ParsedSource,
|
||||
) -> Result<DependencyRanges, AnyError> {
|
||||
let mut collector = DependencyRangeCollector::new(parsed_module);
|
||||
parsed_module
|
||||
.module
|
||||
let mut collector = DependencyRangeCollector::new(parsed_source);
|
||||
parsed_source
|
||||
.module()
|
||||
.visit_with(&swc_ast::Invalid { span: DUMMY_SP }, &mut collector);
|
||||
Ok(collector.take())
|
||||
}
|
||||
|
@ -1202,8 +1211,13 @@ mod tests {
|
|||
fn test_get_lint_references() {
|
||||
let specifier = resolve_url("file:///a.ts").expect("bad specifier");
|
||||
let source = "const foo = 42;";
|
||||
let actual =
|
||||
get_lint_references(&specifier, &MediaType::TypeScript, source).unwrap();
|
||||
let parsed_module = parse_module(
|
||||
&specifier,
|
||||
SourceTextInfo::from_string(source.to_string()),
|
||||
MediaType::TypeScript,
|
||||
)
|
||||
.unwrap();
|
||||
let actual = get_lint_references(&parsed_module).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
actual,
|
||||
|
@ -1246,11 +1260,15 @@ mod tests {
|
|||
// @deno-types="https://deno.land/x/types/react/index.d.ts";
|
||||
import React from "https://cdn.skypack.dev/react";
|
||||
"#;
|
||||
let parsed_module =
|
||||
parse_module(&specifier, source, &MediaType::TypeScript).unwrap();
|
||||
let parsed_module = parse_module(
|
||||
&specifier,
|
||||
SourceTextInfo::from_string(source.to_string()),
|
||||
MediaType::TypeScript,
|
||||
)
|
||||
.unwrap();
|
||||
let (actual, maybe_type) = analyze_dependencies(
|
||||
&specifier,
|
||||
&MediaType::TypeScript,
|
||||
MediaType::TypeScript,
|
||||
&parsed_module,
|
||||
&None,
|
||||
);
|
||||
|
@ -1338,7 +1356,12 @@ mod tests {
|
|||
let source =
|
||||
"import * as a from \"./b.ts\";\nexport * as a from \"./c.ts\";\n";
|
||||
let media_type = MediaType::TypeScript;
|
||||
let parsed_module = parse_module(&specifier, source, &media_type).unwrap();
|
||||
let parsed_module = parse_module(
|
||||
&specifier,
|
||||
SourceTextInfo::from_string(source.to_string()),
|
||||
media_type,
|
||||
)
|
||||
.unwrap();
|
||||
let result = analyze_dependency_ranges(&parsed_module);
|
||||
assert!(result.is_ok());
|
||||
let actual = result.unwrap();
|
||||
|
|
|
@ -1,11 +1,18 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.

use super::analysis;
use super::config::Config;
use super::config::WorkspaceSettings;
use super::language_server;
use super::text::LineIndex;
use super::tsc;
use crate::ast::ParsedModule;
use super::tsc::NavigationTree;

use deno_core::error::anyhow;
use deno_ast::swc::ast;
use deno_ast::swc::common::Span;
use deno_ast::swc::visit::Node;
use deno_ast::swc::visit::Visit;
use deno_ast::swc::visit::VisitWith;
use deno_ast::ParsedSource;
use deno_core::error::AnyError;
use deno_core::resolve_url;
use deno_core::serde::Deserialize;

@ -18,11 +25,6 @@ use regex::Regex;
use std::cell::RefCell;
use std::collections::HashSet;
use std::rc::Rc;
use swc_common::Span;
use swc_ecmascript::ast;
use swc_ecmascript::visit::Node;
use swc_ecmascript::visit::Visit;
use swc_ecmascript::visit::VisitWith;

lazy_static::lazy_static! {
static ref ABSTRACT_MODIFIER: Regex = Regex::new(r"\babstract\b").unwrap();

@ -44,24 +46,24 @@ pub struct CodeLensData {
pub specifier: ModuleSpecifier,
}

fn span_to_range(span: &Span, parsed_module: &ParsedModule) -> lsp::Range {
let start = parsed_module.get_location(span.lo);
let end = parsed_module.get_location(span.hi);
fn span_to_range(span: &Span, parsed_source: &ParsedSource) -> lsp::Range {
let start = parsed_source.source().line_and_column_index(span.lo);
let end = parsed_source.source().line_and_column_index(span.hi);
lsp::Range {
start: lsp::Position {
line: (start.line - 1) as u32,
character: start.col as u32,
line: start.line_index as u32,
character: start.column_index as u32,
},
end: lsp::Position {
line: (end.line - 1) as u32,
character: end.col as u32,
line: end.line_index as u32,
character: end.column_index as u32,
},
}
}

struct DenoTestCollector<'a> {
code_lenses: Vec<lsp::CodeLens>,
parsed_module: &'a ParsedModule,
parsed_source: &'a ParsedSource,
specifier: ModuleSpecifier,
test_vars: HashSet<String>,
}

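The removal of the "- 1" adjustments above relies on deno_ast reporting zero-based positions. A small sketch (not from the commit) of that assumption, using SourceTextInfo directly and assuming the text is anchored at byte position 0, as the new DocumentSource does explicitly:

use deno_ast::swc::common::BytePos;
use deno_ast::SourceTextInfo;

fn zero_based_example() {
  let info = SourceTextInfo::from_string("a\nbc".to_string());
  let loc = info.line_and_column_index(BytePos(2)); // the position of "b"
  assert_eq!(loc.line_index, 1); // already zero-based, so no "- 1" for LSP
  assert_eq!(loc.column_index, 0); // zero-based column
}
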
@ -69,18 +71,18 @@ struct DenoTestCollector<'a> {
|
|||
impl<'a> DenoTestCollector<'a> {
|
||||
pub fn new(
|
||||
specifier: ModuleSpecifier,
|
||||
parsed_module: &'a ParsedModule,
|
||||
parsed_source: &'a ParsedSource,
|
||||
) -> Self {
|
||||
Self {
|
||||
code_lenses: Vec::new(),
|
||||
parsed_module,
|
||||
parsed_source,
|
||||
specifier,
|
||||
test_vars: HashSet::new(),
|
||||
}
|
||||
}
|
||||
|
||||
fn add_code_lens<N: AsRef<str>>(&mut self, name: N, span: &Span) {
|
||||
let range = span_to_range(span, self.parsed_module);
|
||||
let range = span_to_range(span, self.parsed_source);
|
||||
self.code_lenses.push(lsp::CodeLens {
|
||||
range,
|
||||
command: Some(lsp::Command {
|
||||
|
@ -370,36 +372,37 @@ pub(crate) async fn resolve_code_lens(
|
|||
|
||||
pub(crate) async fn collect(
|
||||
specifier: &ModuleSpecifier,
|
||||
language_server: &mut language_server::Inner,
|
||||
parsed_source: Option<&ParsedSource>,
|
||||
config: &Config,
|
||||
line_index: &LineIndex,
|
||||
navigation_tree: &NavigationTree,
|
||||
) -> Result<Vec<lsp::CodeLens>, AnyError> {
|
||||
let mut code_lenses = collect_test(specifier, language_server)?;
|
||||
code_lenses.extend(collect_tsc(specifier, language_server).await?);
|
||||
let mut code_lenses = collect_test(specifier, parsed_source, config)?;
|
||||
code_lenses.extend(
|
||||
collect_tsc(
|
||||
specifier,
|
||||
&config.get_workspace_settings(),
|
||||
line_index,
|
||||
navigation_tree,
|
||||
)
|
||||
.await?,
|
||||
);
|
||||
|
||||
Ok(code_lenses)
|
||||
}
|
||||
|
||||
fn collect_test(
|
||||
specifier: &ModuleSpecifier,
|
||||
language_server: &mut language_server::Inner,
|
||||
parsed_source: Option<&ParsedSource>,
|
||||
config: &Config,
|
||||
) -> Result<Vec<lsp::CodeLens>, AnyError> {
|
||||
if language_server.config.specifier_code_lens_test(specifier) {
|
||||
let source = language_server
|
||||
.get_text_content(specifier)
|
||||
.ok_or_else(|| anyhow!("Missing text content: {}", specifier))?;
|
||||
let media_type = language_server
|
||||
.get_media_type(specifier)
|
||||
.ok_or_else(|| anyhow!("Missing media type: {}", specifier))?;
|
||||
// we swallow parsed errors, as they are meaningless here.
|
||||
// TODO(@kitsonk) consider caching previous code_lens results to return if
|
||||
// there is a parse error to avoid issues of lenses popping in and out
|
||||
if let Ok(parsed_module) =
|
||||
analysis::parse_module(specifier, &source, &media_type)
|
||||
{
|
||||
if config.specifier_code_lens_test(specifier) {
|
||||
if let Some(parsed_source) = parsed_source {
|
||||
let mut collector =
|
||||
DenoTestCollector::new(specifier.clone(), &parsed_module);
|
||||
parsed_module.module.visit_with(
|
||||
DenoTestCollector::new(specifier.clone(), parsed_source);
|
||||
parsed_source.module().visit_with(
|
||||
&ast::Invalid {
|
||||
span: swc_common::DUMMY_SP,
|
||||
span: deno_ast::swc::common::DUMMY_SP,
|
||||
},
|
||||
&mut collector,
|
||||
);
|
||||
|
@ -412,13 +415,10 @@ fn collect_test(
|
|||
/// Return tsc navigation tree code lenses.
|
||||
async fn collect_tsc(
|
||||
specifier: &ModuleSpecifier,
|
||||
language_server: &mut language_server::Inner,
|
||||
workspace_settings: &WorkspaceSettings,
|
||||
line_index: &LineIndex,
|
||||
navigation_tree: &NavigationTree,
|
||||
) -> Result<Vec<lsp::CodeLens>, AnyError> {
|
||||
let workspace_settings = language_server.config.get_workspace_settings();
|
||||
let line_index = language_server
|
||||
.get_line_index_sync(specifier)
|
||||
.ok_or_else(|| anyhow!("Missing line index."))?;
|
||||
let navigation_tree = language_server.get_navigation_tree(specifier).await?;
|
||||
let code_lenses = Rc::new(RefCell::new(Vec::new()));
|
||||
navigation_tree.walk(&|i, mp| {
|
||||
let mut code_lenses = code_lenses.borrow_mut();
|
||||
|
@ -428,7 +428,7 @@ async fn collect_tsc(
|
|||
let source = CodeLensSource::Implementations;
|
||||
match i.kind {
|
||||
tsc::ScriptElementKind::InterfaceElement => {
|
||||
code_lenses.push(i.to_code_lens(&line_index, specifier, &source));
|
||||
code_lenses.push(i.to_code_lens(line_index, specifier, &source));
|
||||
}
|
||||
tsc::ScriptElementKind::ClassElement
|
||||
| tsc::ScriptElementKind::MemberFunctionElement
|
||||
|
@ -436,7 +436,7 @@ async fn collect_tsc(
|
|||
| tsc::ScriptElementKind::MemberGetAccessorElement
|
||||
| tsc::ScriptElementKind::MemberSetAccessorElement => {
|
||||
if ABSTRACT_MODIFIER.is_match(&i.kind_modifiers) {
|
||||
code_lenses.push(i.to_code_lens(&line_index, specifier, &source));
|
||||
code_lenses.push(i.to_code_lens(line_index, specifier, &source));
|
||||
}
|
||||
}
|
||||
_ => (),
|
||||
|
@ -448,31 +448,31 @@ async fn collect_tsc(
|
|||
let source = CodeLensSource::References;
|
||||
if let Some(parent) = &mp {
|
||||
if parent.kind == tsc::ScriptElementKind::EnumElement {
|
||||
code_lenses.push(i.to_code_lens(&line_index, specifier, &source));
|
||||
code_lenses.push(i.to_code_lens(line_index, specifier, &source));
|
||||
}
|
||||
}
|
||||
match i.kind {
|
||||
tsc::ScriptElementKind::FunctionElement => {
|
||||
if workspace_settings.code_lens.references_all_functions {
|
||||
code_lenses.push(i.to_code_lens(&line_index, specifier, &source));
|
||||
code_lenses.push(i.to_code_lens(line_index, specifier, &source));
|
||||
}
|
||||
}
|
||||
tsc::ScriptElementKind::ConstElement
|
||||
| tsc::ScriptElementKind::LetElement
|
||||
| tsc::ScriptElementKind::VariableElement => {
|
||||
if EXPORT_MODIFIER.is_match(&i.kind_modifiers) {
|
||||
code_lenses.push(i.to_code_lens(&line_index, specifier, &source));
|
||||
code_lenses.push(i.to_code_lens(line_index, specifier, &source));
|
||||
}
|
||||
}
|
||||
tsc::ScriptElementKind::ClassElement => {
|
||||
if i.text != "<class>" {
|
||||
code_lenses.push(i.to_code_lens(&line_index, specifier, &source));
|
||||
code_lenses.push(i.to_code_lens(line_index, specifier, &source));
|
||||
}
|
||||
}
|
||||
tsc::ScriptElementKind::InterfaceElement
|
||||
| tsc::ScriptElementKind::TypeElement
|
||||
| tsc::ScriptElementKind::EnumElement => {
|
||||
code_lenses.push(i.to_code_lens(&line_index, specifier, &source));
|
||||
code_lenses.push(i.to_code_lens(line_index, specifier, &source));
|
||||
}
|
||||
tsc::ScriptElementKind::LocalFunctionElement
|
||||
| tsc::ScriptElementKind::MemberGetAccessorElement
|
||||
|
@ -485,11 +485,8 @@ async fn collect_tsc(
|
|||
tsc::ScriptElementKind::ClassElement
|
||||
| tsc::ScriptElementKind::InterfaceElement
|
||||
| tsc::ScriptElementKind::TypeElement => {
|
||||
code_lenses.push(i.to_code_lens(
|
||||
&line_index,
|
||||
specifier,
|
||||
&source,
|
||||
));
|
||||
code_lenses
|
||||
.push(i.to_code_lens(line_index, specifier, &source));
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
|
@ -505,8 +502,10 @@ async fn collect_tsc(
|
|||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use deno_ast::MediaType;
|
||||
use deno_ast::SourceTextInfo;
|
||||
|
||||
use super::*;
|
||||
use crate::media_type::MediaType;
|
||||
|
||||
#[test]
|
||||
fn test_deno_test_collector() {
|
||||
|
@ -519,13 +518,16 @@ mod tests {
|
|||
|
||||
Deno.test("test b", function anotherTest() {});
|
||||
"#;
|
||||
let parsed_module =
|
||||
analysis::parse_module(&specifier, source, &MediaType::TypeScript)
|
||||
let parsed_module = crate::lsp::analysis::parse_module(
|
||||
&specifier,
|
||||
SourceTextInfo::from_string(source.to_string()),
|
||||
MediaType::TypeScript,
|
||||
)
|
||||
.unwrap();
|
||||
let mut collector = DenoTestCollector::new(specifier, &parsed_module);
|
||||
parsed_module.module.visit_with(
|
||||
parsed_module.module().visit_with(
|
||||
&ast::Invalid {
|
||||
span: swc_common::DUMMY_SP,
|
||||
span: deno_ast::swc::common::DUMMY_SP,
|
||||
},
|
||||
&mut collector,
|
||||
);
|
||||
|
|
|
@ -403,10 +403,11 @@ mod tests {
|
|||
use crate::lsp::documents::DocumentCache;
|
||||
use crate::lsp::documents::LanguageId;
|
||||
use crate::lsp::sources::Sources;
|
||||
use crate::media_type::MediaType;
|
||||
use deno_ast::MediaType;
|
||||
use deno_core::resolve_url;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
use tempfile::TempDir;
|
||||
|
||||
fn mock_state_snapshot(
|
||||
|
@ -418,17 +419,28 @@ mod tests {
|
|||
for (specifier, source, version, language_id) in fixtures {
|
||||
let specifier =
|
||||
resolve_url(specifier).expect("failed to create specifier");
|
||||
documents.open(specifier.clone(), *version, language_id.clone(), source);
|
||||
documents.open(
|
||||
specifier.clone(),
|
||||
*version,
|
||||
*language_id,
|
||||
Arc::new(source.to_string()),
|
||||
);
|
||||
let media_type = MediaType::from(&specifier);
|
||||
let parsed_module =
|
||||
analysis::parse_module(&specifier, source, &media_type).unwrap();
|
||||
let parsed_module = documents
|
||||
.get(&specifier)
|
||||
.unwrap()
|
||||
.source()
|
||||
.module()
|
||||
.map(|r| r.as_ref())
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
let (deps, _) = analysis::analyze_dependencies(
|
||||
&specifier,
|
||||
&media_type,
|
||||
&parsed_module,
|
||||
media_type,
|
||||
parsed_module,
|
||||
&None,
|
||||
);
|
||||
let dep_ranges = analysis::analyze_dependency_ranges(&parsed_module).ok();
|
||||
let dep_ranges = analysis::analyze_dependency_ranges(parsed_module).ok();
|
||||
documents
|
||||
.set_dependencies(&specifier, Some(deps), dep_ranges)
|
||||
.unwrap();
|
||||
|
|
|
@ -7,7 +7,6 @@ use super::sources::Sources;
|
|||
use super::tsc;
|
||||
|
||||
use crate::diagnostics;
|
||||
use crate::media_type::MediaType;
|
||||
use crate::tokio_util::create_basic_runtime;
|
||||
|
||||
use analysis::ResolvedDependency;
|
||||
|
@ -327,23 +326,29 @@ async fn generate_lint_diagnostics(
|
|||
.lock()
|
||||
.await
|
||||
.get_version(specifier, &DiagnosticSource::DenoLint);
|
||||
let media_type = MediaType::from(specifier);
|
||||
if version != current_version {
|
||||
if let Ok(Some(source_code)) = documents.content(specifier) {
|
||||
if let Ok(references) = analysis::get_lint_references(
|
||||
specifier,
|
||||
&media_type,
|
||||
&source_code,
|
||||
) {
|
||||
let diagnostics =
|
||||
references.into_iter().map(|r| r.to_diagnostic()).collect();
|
||||
diagnostics_vec.push((specifier.clone(), version, diagnostics));
|
||||
let module = documents
|
||||
.get(specifier)
|
||||
.map(|d| d.source().module())
|
||||
.flatten();
|
||||
let diagnostics = match module {
|
||||
Some(Ok(module)) => {
|
||||
if let Ok(references) = analysis::get_lint_references(module) {
|
||||
references
|
||||
.into_iter()
|
||||
.map(|r| r.to_diagnostic())
|
||||
.collect::<Vec<_>>()
|
||||
} else {
|
||||
diagnostics_vec.push((specifier.clone(), version, Vec::new()));
|
||||
Vec::new()
|
||||
}
|
||||
} else {
|
||||
}
|
||||
Some(Err(_)) => Vec::new(),
|
||||
None => {
|
||||
error!("Missing file contents for: {}", specifier);
|
||||
Vec::new()
|
||||
}
|
||||
};
|
||||
diagnostics_vec.push((specifier.clone(), version, diagnostics));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
76
cli/lsp/document_source.rs
Normal file

@ -0,0 +1,76 @@
use deno_ast::swc::common::BytePos;
use deno_ast::Diagnostic;
use deno_ast::MediaType;
use deno_ast::ParsedSource;
use deno_ast::SourceTextInfo;
use deno_core::ModuleSpecifier;
use once_cell::sync::OnceCell;
use std::sync::Arc;

use super::analysis;
use super::text::LineIndex;

#[derive(Debug)]
struct DocumentSourceInner {
specifier: ModuleSpecifier,
media_type: MediaType,
text_info: SourceTextInfo,
parsed_module: OnceCell<Result<ParsedSource, Diagnostic>>,
line_index: LineIndex,
}

/// Immutable information about a document.
#[derive(Debug, Clone)]
pub struct DocumentSource {
inner: Arc<DocumentSourceInner>,
}

impl DocumentSource {
pub fn new(
specifier: &ModuleSpecifier,
media_type: MediaType,
text: Arc<String>,
line_index: LineIndex,
) -> Self {
Self {
inner: Arc::new(DocumentSourceInner {
specifier: specifier.clone(),
media_type,
text_info: SourceTextInfo::new(BytePos(0), text),
parsed_module: OnceCell::new(),
line_index,
}),
}
}

pub fn text_info(&self) -> &SourceTextInfo {
&self.inner.text_info
}

pub fn line_index(&self) -> &LineIndex {
&self.inner.line_index
}

pub fn module(&self) -> Option<&Result<ParsedSource, Diagnostic>> {
let is_parsable = matches!(
self.inner.media_type,
MediaType::JavaScript
| MediaType::Jsx
| MediaType::TypeScript
| MediaType::Tsx
| MediaType::Dts,
);
if is_parsable {
// lazily parse the module
Some(self.inner.parsed_module.get_or_init(|| {
analysis::parse_module(
&self.inner.specifier,
self.inner.text_info.clone(),
self.inner.media_type,
)
}))
} else {
None
}
}
}

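A brief sketch (not part of the diff) of how this new type is meant to be used, based only on the constructor and module() shown above; DocumentSource and LineIndex refer to the types in this file and super::text, and the specifier and text are placeholders:

use std::sync::Arc;
use deno_ast::MediaType;
use deno_core::resolve_url;

fn document_source_example() {
  let specifier = resolve_url("file:///example.ts").unwrap();
  let text = Arc::new("export const a = 1;".to_string());
  let line_index = LineIndex::new(&text);
  let source =
    DocumentSource::new(&specifier, MediaType::TypeScript, text, line_index);
  // the first call to module() parses and caches the result on the OnceCell;
  // later calls reuse the cached swc AST
  if let Some(Ok(parsed)) = source.module() {
    let _swc_module = parsed.module();
  }
}
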
@ -1,24 +1,24 @@
|
|||
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use super::analysis;
|
||||
use super::document_source::DocumentSource;
|
||||
use super::text::LineIndex;
|
||||
use super::tsc;
|
||||
|
||||
use crate::media_type::MediaType;
|
||||
|
||||
use deno_ast::MediaType;
|
||||
use deno_core::error::custom_error;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::error::Context;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use lspower::lsp;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::ops::Range;
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
|
||||
/// A representation of the language id sent from the LSP client, which is used
|
||||
/// to determine how the document is handled within the language server.
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Copy)]
|
||||
pub enum LanguageId {
|
||||
JavaScript,
|
||||
Jsx,
|
||||
|
@ -81,11 +81,10 @@ impl IndexValid {
|
|||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct DocumentData {
|
||||
bytes: Option<Vec<u8>>,
|
||||
source: DocumentSource,
|
||||
dependencies: Option<HashMap<String, analysis::Dependency>>,
|
||||
dependency_ranges: Option<analysis::DependencyRanges>,
|
||||
pub(crate) language_id: LanguageId,
|
||||
line_index: Option<LineIndex>,
|
||||
maybe_navigation_tree: Option<tsc::NavigationTree>,
|
||||
specifier: ModuleSpecifier,
|
||||
version: Option<i32>,
|
||||
|
@ -96,14 +95,19 @@ impl DocumentData {
|
|||
specifier: ModuleSpecifier,
|
||||
version: i32,
|
||||
language_id: LanguageId,
|
||||
source: &str,
|
||||
source_text: Arc<String>,
|
||||
) -> Self {
|
||||
let line_index = LineIndex::new(&source_text);
|
||||
Self {
|
||||
bytes: Some(source.as_bytes().to_owned()),
|
||||
source: DocumentSource::new(
|
||||
&specifier,
|
||||
MediaType::from(&language_id),
|
||||
source_text,
|
||||
line_index,
|
||||
),
|
||||
dependencies: None,
|
||||
dependency_ranges: None,
|
||||
language_id,
|
||||
line_index: Some(LineIndex::new(source)),
|
||||
maybe_navigation_tree: None,
|
||||
specifier,
|
||||
version: Some(version),
|
||||
|
@ -114,59 +118,39 @@ impl DocumentData {
|
|||
&mut self,
|
||||
content_changes: Vec<lsp::TextDocumentContentChangeEvent>,
|
||||
) -> Result<(), AnyError> {
|
||||
if self.bytes.is_none() {
|
||||
return Ok(());
|
||||
}
|
||||
let content = &mut String::from_utf8(self.bytes.clone().unwrap())
|
||||
.context("unable to parse bytes to string")?;
|
||||
let mut line_index = if let Some(line_index) = &self.line_index {
|
||||
line_index.clone()
|
||||
} else {
|
||||
LineIndex::new(content)
|
||||
};
|
||||
let mut content = self.source.text_info().text_str().to_string();
|
||||
let mut line_index = self.source.line_index().clone();
|
||||
let mut index_valid = IndexValid::All;
|
||||
for change in content_changes {
|
||||
if let Some(range) = change.range {
|
||||
if !index_valid.covers(range.start.line) {
|
||||
line_index = LineIndex::new(content);
|
||||
line_index = LineIndex::new(&content);
|
||||
}
|
||||
index_valid = IndexValid::UpTo(range.start.line);
|
||||
let range = line_index.get_text_range(range)?;
|
||||
content.replace_range(Range::<usize>::from(range), &change.text);
|
||||
} else {
|
||||
*content = change.text;
|
||||
content = change.text;
|
||||
index_valid = IndexValid::UpTo(0);
|
||||
}
|
||||
}
|
||||
self.bytes = Some(content.as_bytes().to_owned());
|
||||
self.line_index = if index_valid == IndexValid::All {
|
||||
Some(line_index)
|
||||
let line_index = if index_valid == IndexValid::All {
|
||||
line_index
|
||||
} else {
|
||||
Some(LineIndex::new(content))
|
||||
LineIndex::new(&content)
|
||||
};
|
||||
self.source = DocumentSource::new(
|
||||
&self.specifier,
|
||||
MediaType::from(&self.language_id),
|
||||
Arc::new(content),
|
||||
line_index,
|
||||
);
|
||||
self.maybe_navigation_tree = None;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn content(&self) -> Result<Option<String>, AnyError> {
|
||||
if let Some(bytes) = &self.bytes {
|
||||
Ok(Some(
|
||||
String::from_utf8(bytes.clone())
|
||||
.context("cannot decode bytes to string")?,
|
||||
))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn content_line(&self, line: usize) -> Result<Option<String>, AnyError> {
|
||||
let content = self.content().ok().flatten();
|
||||
if let Some(content) = content {
|
||||
let lines = content.lines().into_iter().collect::<Vec<&str>>();
|
||||
Ok(Some(lines[line].to_string()))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
pub fn source(&self) -> &DocumentSource {
|
||||
&self.source
|
||||
}
|
||||
|
||||
/// Determines if a position within the document is within a dependency range
|
||||
|
@ -223,7 +207,7 @@ impl DocumentCache {
|
|||
specifier: &ModuleSpecifier,
|
||||
version: i32,
|
||||
content_changes: Vec<lsp::TextDocumentContentChangeEvent>,
|
||||
) -> Result<Option<String>, AnyError> {
|
||||
) -> Result<(), AnyError> {
|
||||
if !self.contains_key(specifier) {
|
||||
return Err(custom_error(
|
||||
"NotFound",
|
||||
|
@ -237,7 +221,7 @@ impl DocumentCache {
|
|||
let doc = self.docs.get_mut(specifier).unwrap();
|
||||
doc.apply_content_changes(content_changes)?;
|
||||
doc.version = Some(version);
|
||||
doc.content()
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn close(&mut self, specifier: &ModuleSpecifier) {
|
||||
|
@ -249,15 +233,15 @@ impl DocumentCache {
|
|||
self.docs.contains_key(specifier)
|
||||
}
|
||||
|
||||
pub fn content(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
) -> Result<Option<String>, AnyError> {
|
||||
if let Some(doc) = self.docs.get(specifier) {
|
||||
doc.content()
|
||||
} else {
|
||||
Ok(None)
|
||||
pub fn get(&self, specifier: &ModuleSpecifier) -> Option<&DocumentData> {
|
||||
self.docs.get(specifier)
|
||||
}
|
||||
|
||||
pub fn content(&self, specifier: &ModuleSpecifier) -> Option<Arc<String>> {
|
||||
self
|
||||
.docs
|
||||
.get(specifier)
|
||||
.map(|d| d.source().text_info().text())
|
||||
}
|
||||
|
||||
// For a given specifier, get all open documents which directly or indirectly
|
||||
|
@ -282,13 +266,6 @@ impl DocumentCache {
|
|||
.flatten()
|
||||
}
|
||||
|
||||
pub fn get_language_id(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
) -> Option<LanguageId> {
|
||||
self.docs.get(specifier).map(|doc| doc.language_id.clone())
|
||||
}
|
||||
|
||||
pub fn get_navigation_tree(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
|
@ -349,8 +326,10 @@ impl DocumentCache {
|
|||
}
|
||||
|
||||
pub fn line_index(&self, specifier: &ModuleSpecifier) -> Option<LineIndex> {
|
||||
let doc = self.docs.get(specifier)?;
|
||||
doc.line_index.clone()
|
||||
self
|
||||
.docs
|
||||
.get(specifier)
|
||||
.map(|d| d.source().line_index().clone())
|
||||
}
|
||||
|
||||
pub fn open(
|
||||
|
@ -358,7 +337,7 @@ impl DocumentCache {
|
|||
specifier: ModuleSpecifier,
|
||||
version: i32,
|
||||
language_id: LanguageId,
|
||||
source: &str,
|
||||
source: Arc<String>,
|
||||
) {
|
||||
self.docs.insert(
|
||||
specifier.clone(),
|
||||
|
@ -489,7 +468,7 @@ mod tests {
|
|||
specifier.clone(),
|
||||
1,
|
||||
LanguageId::TypeScript,
|
||||
"console.log(\"Hello Deno\");\n",
|
||||
Arc::new("console.log(\"Hello Deno\");\n".to_string()),
|
||||
);
|
||||
assert!(document_cache.contains_key(&specifier));
|
||||
assert!(!document_cache.contains_key(&missing_specifier));
|
||||
|
@ -503,7 +482,7 @@ mod tests {
|
|||
specifier.clone(),
|
||||
1,
|
||||
LanguageId::TypeScript,
|
||||
"console.log(\"Hello deno\");\n",
|
||||
Arc::new("console.log(\"Hello deno\");\n".to_string()),
|
||||
);
|
||||
document_cache
|
||||
.change(
|
||||
|
@ -527,8 +506,9 @@ mod tests {
|
|||
.expect("failed to make changes");
|
||||
let actual = document_cache
|
||||
.content(&specifier)
|
||||
.expect("failed to get content");
|
||||
assert_eq!(actual, Some("console.log(\"Hello Deno\");\n".to_string()));
|
||||
.expect("failed to get content")
|
||||
.to_string();
|
||||
assert_eq!(actual, "console.log(\"Hello Deno\");\n");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -539,7 +519,7 @@ mod tests {
|
|||
specifier.clone(),
|
||||
1,
|
||||
LanguageId::TypeScript,
|
||||
"console.log(\"Hello 🦕\");\n",
|
||||
Arc::new("console.log(\"Hello 🦕\");\n".to_string()),
|
||||
);
|
||||
document_cache
|
||||
.change(
|
||||
|
@ -563,8 +543,9 @@ mod tests {
|
|||
.expect("failed to make changes");
|
||||
let actual = document_cache
|
||||
.content(&specifier)
|
||||
.expect("failed to get content");
|
||||
assert_eq!(actual, Some("console.log(\"Hello Deno\");\n".to_string()));
|
||||
.expect("failed to get content")
|
||||
.to_string();
|
||||
assert_eq!(actual, "console.log(\"Hello Deno\");\n");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -576,7 +557,7 @@ mod tests {
|
|||
specifier.clone(),
|
||||
1,
|
||||
"typescript".parse().unwrap(),
|
||||
"console.log(\"hello world\");\n",
|
||||
Arc::new("console.log(\"hello world\");\n".to_string()),
|
||||
);
|
||||
assert!(document_cache.is_diagnosable(&specifier));
|
||||
let specifier = resolve_url("file:///a/file.rs").unwrap();
|
||||
|
@ -584,7 +565,7 @@ mod tests {
|
|||
specifier.clone(),
|
||||
1,
|
||||
"rust".parse().unwrap(),
|
||||
"pub mod a;",
|
||||
Arc::new("pub mod a;".to_string()),
|
||||
);
|
||||
assert!(!document_cache.is_diagnosable(&specifier));
|
||||
let specifier =
|
||||
|
|
|
@ -58,9 +58,8 @@ use crate::deno_dir;
|
|||
use crate::fs_util;
|
||||
use crate::import_map::ImportMap;
|
||||
use crate::logger;
|
||||
use crate::media_type::MediaType;
|
||||
use crate::tools::fmt::format_file;
|
||||
use crate::tools::fmt::get_typescript_config;
|
||||
use crate::tools::fmt::format_parsed_module;
|
||||
|
||||
pub const REGISTRIES_PATH: &str = "registries";
|
||||
const SOURCES_PATH: &str = "deps";
|
||||
|
@ -165,19 +164,17 @@ impl Inner {
|
|||
|
||||
/// Analyzes dependencies of a document that has been opened in the editor and
|
||||
/// sets the dependencies property on the document.
|
||||
fn analyze_dependencies(
|
||||
&mut self,
|
||||
specifier: &ModuleSpecifier,
|
||||
media_type: &MediaType,
|
||||
source: &str,
|
||||
) {
|
||||
if let Ok(parsed_module) =
|
||||
analysis::parse_module(specifier, source, media_type)
|
||||
fn analyze_dependencies(&mut self, specifier: &ModuleSpecifier) {
|
||||
if let Some(Ok(parsed_module)) = self
|
||||
.documents
|
||||
.get(specifier)
|
||||
.map(|d| d.source().module())
|
||||
.flatten()
|
||||
{
|
||||
let (mut deps, _) = analysis::analyze_dependencies(
|
||||
specifier,
|
||||
media_type,
|
||||
&parsed_module,
|
||||
parsed_module.media_type(),
|
||||
parsed_module,
|
||||
&self.maybe_import_map,
|
||||
);
|
||||
for (_, dep) in deps.iter_mut() {
|
||||
|
@ -188,7 +185,7 @@ impl Inner {
|
|||
}
|
||||
}
|
||||
}
|
||||
let dep_ranges = analysis::analyze_dependency_ranges(&parsed_module).ok();
|
||||
let dep_ranges = analysis::analyze_dependency_ranges(parsed_module).ok();
|
||||
if let Err(err) =
|
||||
self
|
||||
.documents
|
||||
|
@ -202,18 +199,14 @@ impl Inner {
|
|||
/// Analyzes all dependencies for all documents that have been opened in the
|
||||
/// editor and sets the dependencies property on the documents.
|
||||
fn analyze_dependencies_all(&mut self) {
|
||||
let docs: Vec<(ModuleSpecifier, String, MediaType)> = self
|
||||
let specifiers = self
|
||||
.documents
|
||||
.docs
|
||||
.iter()
|
||||
.filter_map(|(s, doc)| {
|
||||
let source = doc.content().ok().flatten()?;
|
||||
let media_type = MediaType::from(&doc.language_id);
|
||||
Some((s.clone(), source, media_type))
|
||||
})
|
||||
.collect();
|
||||
for (specifier, source, media_type) in docs {
|
||||
self.analyze_dependencies(&specifier, &media_type, &source);
|
||||
.keys()
|
||||
.map(ToOwned::to_owned)
|
||||
.collect::<Vec<_>>();
|
||||
for specifier in specifiers {
|
||||
self.analyze_dependencies(&specifier);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -281,30 +274,19 @@ impl Inner {
|
|||
pub(crate) fn get_text_content(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
) -> Option<String> {
|
||||
) -> Option<Arc<String>> {
|
||||
if specifier.scheme() == "asset" {
|
||||
self
|
||||
.assets
|
||||
.get(specifier)
|
||||
.map(|o| o.clone().map(|a| a.text))?
|
||||
} else if self.documents.contains_key(specifier) {
|
||||
self.documents.content(specifier).unwrap()
|
||||
self.documents.content(specifier)
|
||||
} else {
|
||||
self.sources.get_source(specifier)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn get_media_type(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
) -> Option<MediaType> {
|
||||
if specifier.scheme() == "asset" || self.documents.contains_key(specifier) {
|
||||
Some(MediaType::from(specifier))
|
||||
} else {
|
||||
self.sources.get_media_type(specifier)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn get_navigation_tree(
|
||||
&mut self,
|
||||
specifier: &ModuleSpecifier,
|
||||
|
@ -789,20 +771,15 @@ impl Inner {
|
|||
params.text_document.language_id, params.text_document.uri
|
||||
);
|
||||
}
|
||||
let media_type = MediaType::from(&language_id);
|
||||
self.documents.open(
|
||||
specifier.clone(),
|
||||
params.text_document.version,
|
||||
language_id,
|
||||
¶ms.text_document.text,
|
||||
Arc::new(params.text_document.text),
|
||||
);
|
||||
|
||||
if self.documents.is_diagnosable(&specifier) {
|
||||
self.analyze_dependencies(
|
||||
&specifier,
|
||||
&media_type,
|
||||
¶ms.text_document.text,
|
||||
);
|
||||
self.analyze_dependencies(&specifier);
|
||||
self
|
||||
.diagnostics_server
|
||||
.invalidate(self.documents.dependents(&specifier))
|
||||
|
@ -822,12 +799,9 @@ impl Inner {
|
|||
params.text_document.version,
|
||||
params.content_changes,
|
||||
) {
|
||||
Ok(Some(source)) => {
|
||||
Ok(()) => {
|
||||
if self.documents.is_diagnosable(&specifier) {
|
||||
let media_type = MediaType::from(
|
||||
&self.documents.get_language_id(&specifier).unwrap(),
|
||||
);
|
||||
self.analyze_dependencies(&specifier, &media_type, &source);
|
||||
self.analyze_dependencies(&specifier);
|
||||
self
|
||||
.diagnostics_server
|
||||
.invalidate(self.documents.dependents(&specifier))
|
||||
|
@ -837,7 +811,6 @@ impl Inner {
|
|||
}
|
||||
}
|
||||
}
|
||||
Ok(_) => error!("No content returned from change."),
|
||||
Err(err) => error!("{}", err),
|
||||
}
|
||||
self.performance.measure(mark);
|
||||
|
@ -1021,16 +994,11 @@ impl Inner {
|
|||
return Ok(None);
|
||||
}
|
||||
let mark = self.performance.mark("formatting", Some(¶ms));
|
||||
let file_text = self
|
||||
.documents
|
||||
.content(&specifier)
|
||||
.map_err(|_| {
|
||||
let document_data = self.documents.get(&specifier).ok_or_else(|| {
|
||||
LspError::invalid_params(
|
||||
"The specified file could not be found in memory.",
|
||||
)
|
||||
})?
|
||||
.unwrap();
|
||||
let line_index = self.documents.line_index(&specifier);
|
||||
})?;
|
||||
let file_path =
|
||||
if let Ok(file_path) = params.text_document.uri.to_file_path() {
|
||||
file_path
|
||||
|
@ -1038,14 +1006,28 @@ impl Inner {
|
|||
PathBuf::from(params.text_document.uri.path())
|
||||
};
|
||||
|
||||
// TODO(lucacasonato): handle error properly
|
||||
let source = document_data.source().clone();
|
||||
let text_edits = tokio::task::spawn_blocking(move || {
|
||||
let config = get_typescript_config();
|
||||
match format_file(&file_path, &file_text, config) {
|
||||
let format_result = match source.module() {
|
||||
Some(Ok(parsed_module)) => Ok(format_parsed_module(parsed_module)),
|
||||
Some(Err(err)) => Err(err.to_string()),
|
||||
None => {
|
||||
// it's not a js/ts file, so attempt to format its contents
|
||||
format_file(&file_path, source.text_info().text_str())
|
||||
}
|
||||
};
|
||||
|
||||
match format_result {
|
||||
Ok(new_text) => {
|
||||
Some(text::get_edits(&file_text, &new_text, line_index))
|
||||
let line_index = source.line_index();
|
||||
Some(text::get_edits(
|
||||
source.text_info().text_str(),
|
||||
&new_text,
|
||||
line_index,
|
||||
))
|
||||
}
|
||||
Err(err) => {
|
||||
// TODO(lucacasonato): handle error properly
|
||||
warn!("Format error: {}", err);
|
||||
None
|
||||
}
|
||||
|
@ -1257,7 +1239,7 @@ impl Inner {
|
|||
Some("deno-lint") => code_actions
|
||||
.add_deno_lint_ignore_action(
|
||||
&specifier,
|
||||
self.documents.docs.get(&specifier),
|
||||
self.documents.get(&specifier),
|
||||
diagnostic,
|
||||
)
|
||||
.map_err(|err| {
|
||||
|
@ -1436,8 +1418,34 @@ impl Inner {
|
|||
}
|
||||
|
||||
let mark = self.performance.mark("code_lens", Some(¶ms));
|
||||
let code_lenses =
|
||||
code_lens::collect(&specifier, self).await.map_err(|err| {
|
||||
let navigation_tree =
|
||||
self.get_navigation_tree(&specifier).await.map_err(|err| {
|
||||
error!("Error getting code lenses for \"{}\": {}", specifier, err);
|
||||
LspError::internal_error()
|
||||
})?;
|
||||
let parsed_module = self
|
||||
.documents
|
||||
.get(&specifier)
|
||||
.map(|d| d.source().module())
|
||||
.flatten()
|
||||
.map(|m| m.as_ref().ok())
|
||||
.flatten();
|
||||
let line_index = self.get_line_index_sync(&specifier).ok_or_else(|| {
|
||||
error!(
|
||||
"Error getting code lenses for \"{}\": Missing line index",
|
||||
specifier
|
||||
);
|
||||
LspError::internal_error()
|
||||
})?;
|
||||
let code_lenses = code_lens::collect(
|
||||
&specifier,
|
||||
parsed_module,
|
||||
&self.config,
|
||||
&line_index,
|
||||
&navigation_tree,
|
||||
)
|
||||
.await
|
||||
.map_err(|err| {
|
||||
error!("Error getting code lenses for \"{}\": {}", specifier, err);
|
||||
LspError::internal_error()
|
||||
})?;
|
||||
|
@ -2606,11 +2614,7 @@ impl Inner {
|
|||
// now that we have dependencies loaded, we need to re-analyze them and
|
||||
// invalidate some diagnostics
|
||||
if self.documents.contains_key(&referrer) {
|
||||
if let Some(source) = self.documents.content(&referrer).unwrap() {
|
||||
let media_type =
|
||||
MediaType::from(&self.documents.get_language_id(&referrer).unwrap());
|
||||
self.analyze_dependencies(&referrer, &media_type, &source);
|
||||
}
|
||||
self.analyze_dependencies(&referrer);
|
||||
self.diagnostics_server.invalidate(vec![referrer]).await;
|
||||
}
|
||||
|
||||
|
@ -2728,7 +2732,7 @@ impl Inner {
|
|||
.await
|
||||
.map_err(|_| LspError::internal_error())?
|
||||
{
|
||||
Some(asset.text)
|
||||
Some(asset.text.to_string())
|
||||
} else {
|
||||
error!("Missing asset: {}", specifier);
|
||||
None
|
||||
|
@ -2736,7 +2740,7 @@ impl Inner {
|
|||
}
|
||||
_ => {
|
||||
if let Some(source) = self.sources.get_source(&specifier) {
|
||||
Some(source)
|
||||
Some(source.to_string())
|
||||
} else {
|
||||
error!("The cached source was not found: {}", specifier);
|
||||
None
|
||||
|
|
|
@ -10,6 +10,7 @@ mod code_lens;
|
|||
mod completions;
|
||||
mod config;
|
||||
mod diagnostics;
|
||||
mod document_source;
|
||||
mod documents;
|
||||
pub(crate) mod language_server;
|
||||
mod lsp_custom;
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use super::analysis;
|
||||
use super::document_source::DocumentSource;
|
||||
use super::text::LineIndex;
|
||||
use super::tsc;
|
||||
use super::urls::INVALID_SPECIFIER;
|
||||
|
@ -13,12 +14,12 @@ use crate::flags::Flags;
|
|||
use crate::http_cache;
|
||||
use crate::http_cache::HttpCache;
|
||||
use crate::import_map::ImportMap;
|
||||
use crate::media_type::MediaType;
|
||||
use crate::module_graph::GraphBuilder;
|
||||
use crate::program_state::ProgramState;
|
||||
use crate::specifier_handler::FetchHandler;
|
||||
use crate::text_encoding;
|
||||
|
||||
use deno_ast::MediaType;
|
||||
use deno_core::error::anyhow;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::parking_lot::Mutex;
|
||||
|
@ -118,12 +119,11 @@ fn resolve_specifier(
|
|||
struct Metadata {
|
||||
dependencies: Option<HashMap<String, analysis::Dependency>>,
|
||||
length_utf16: usize,
|
||||
line_index: LineIndex,
|
||||
maybe_navigation_tree: Option<tsc::NavigationTree>,
|
||||
maybe_types: Option<analysis::ResolvedDependency>,
|
||||
maybe_warning: Option<String>,
|
||||
media_type: MediaType,
|
||||
source: String,
|
||||
source: DocumentSource,
|
||||
specifier: ModuleSpecifier,
|
||||
version: String,
|
||||
}
|
||||
|
@ -133,12 +133,16 @@ impl Default for Metadata {
|
|||
Self {
|
||||
dependencies: None,
|
||||
length_utf16: 0,
|
||||
line_index: LineIndex::default(),
|
||||
maybe_navigation_tree: None,
|
||||
maybe_types: None,
|
||||
maybe_warning: None,
|
||||
media_type: MediaType::default(),
|
||||
source: String::default(),
|
||||
source: DocumentSource::new(
|
||||
&INVALID_SPECIFIER,
|
||||
MediaType::default(),
|
||||
Arc::new(String::default()),
|
||||
LineIndex::default(),
|
||||
),
|
||||
specifier: INVALID_SPECIFIER.clone(),
|
||||
version: String::default(),
|
||||
}
|
||||
|
@ -148,49 +152,52 @@ impl Default for Metadata {
|
|||
impl Metadata {
|
||||
fn new(
|
||||
specifier: &ModuleSpecifier,
|
||||
source: &str,
|
||||
source: Arc<String>,
|
||||
version: &str,
|
||||
media_type: &MediaType,
|
||||
media_type: MediaType,
|
||||
maybe_warning: Option<String>,
|
||||
maybe_import_map: &Option<ImportMap>,
|
||||
) -> Self {
|
||||
let (dependencies, maybe_types) = if let Ok(parsed_module) =
|
||||
analysis::parse_module(specifier, source, media_type)
|
||||
{
|
||||
let line_index = LineIndex::new(&source);
|
||||
let document_source =
|
||||
DocumentSource::new(specifier, media_type, source, line_index);
|
||||
let (dependencies, maybe_types) =
|
||||
if let Some(Ok(parsed_module)) = document_source.module() {
|
||||
let (deps, maybe_types) = analysis::analyze_dependencies(
|
||||
specifier,
|
||||
media_type,
|
||||
&parsed_module,
|
||||
parsed_module,
|
||||
maybe_import_map,
|
||||
);
|
||||
(Some(deps), maybe_types)
|
||||
} else {
|
||||
(None, None)
|
||||
};
|
||||
let line_index = LineIndex::new(source);
|
||||
|
||||
Self {
|
||||
dependencies,
|
||||
length_utf16: source.encode_utf16().count(),
|
||||
line_index,
|
||||
length_utf16: document_source
|
||||
.text_info()
|
||||
.text_str()
|
||||
.encode_utf16()
|
||||
.count(),
|
||||
maybe_navigation_tree: None,
|
||||
maybe_types,
|
||||
maybe_warning,
|
||||
media_type: media_type.to_owned(),
|
||||
source: source.to_string(),
|
||||
source: document_source,
|
||||
specifier: specifier.clone(),
|
||||
version: version.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
fn refresh(&mut self, maybe_import_map: &Option<ImportMap>) {
|
||||
let (dependencies, maybe_types) = if let Ok(parsed_module) =
|
||||
analysis::parse_module(&self.specifier, &self.source, &self.media_type)
|
||||
{
|
||||
let (dependencies, maybe_types) =
|
||||
if let Some(Ok(parsed_module)) = self.source.module() {
|
||||
let (deps, maybe_types) = analysis::analyze_dependencies(
|
||||
&self.specifier,
|
||||
&self.media_type,
|
||||
&parsed_module,
|
||||
self.media_type,
|
||||
parsed_module,
|
||||
maybe_import_map,
|
||||
);
|
||||
(Some(deps), maybe_types)
|
||||
|
@ -265,7 +272,7 @@ impl Sources {
|
|||
self.0.lock().get_script_version(specifier)
|
||||
}
|
||||
|
||||
pub fn get_source(&self, specifier: &ModuleSpecifier) -> Option<String> {
|
||||
pub fn get_source(&self, specifier: &ModuleSpecifier) -> Option<Arc<String>> {
|
||||
self.0.lock().get_source(specifier)
|
||||
}
|
||||
|
||||
|
@ -344,7 +351,7 @@ impl Inner {
|
|||
let specifier =
|
||||
resolve_specifier(specifier, &mut self.redirects, &self.http_cache)?;
|
||||
let metadata = self.get_metadata(&specifier)?;
|
||||
Some(metadata.line_index)
|
||||
Some(metadata.source.line_index().clone())
|
||||
}
|
||||
|
||||
fn get_maybe_types(
|
||||
|
@ -406,9 +413,9 @@ impl Inner {
|
|||
};
|
||||
let mut metadata = Metadata::new(
|
||||
specifier,
|
||||
&source,
|
||||
Arc::new(source),
|
||||
&version,
|
||||
&media_type,
|
||||
media_type,
|
||||
maybe_warning,
|
||||
&self.maybe_import_map,
|
||||
);
|
||||
|
@ -455,11 +462,11 @@ impl Inner {
|
|||
Some(metadata.version)
|
||||
}
|
||||
|
||||
fn get_source(&mut self, specifier: &ModuleSpecifier) -> Option<String> {
|
||||
fn get_source(&mut self, specifier: &ModuleSpecifier) -> Option<Arc<String>> {
|
||||
let specifier =
|
||||
resolve_specifier(specifier, &mut self.redirects, &self.http_cache)?;
|
||||
let metadata = self.get_metadata(&specifier)?;
|
||||
Some(metadata.source)
|
||||
Some(metadata.source.text_info().text())
|
||||
}
|
||||
|
||||
fn resolution_result(
|
||||
|
@ -602,7 +609,7 @@ mod tests {
|
|||
resolve_path(&tests.join("001_hello.js").to_string_lossy()).unwrap();
|
||||
let actual = sources.get_source(&specifier);
|
||||
assert!(actual.is_some());
|
||||
let actual = actual.unwrap();
|
||||
let actual = actual.unwrap().to_string();
|
||||
assert_eq!(actual, "console.log(\"Hello World\");\n");
|
||||
}
|
||||
|
||||
|
|
|
@ -210,21 +210,12 @@ impl LineIndex {
|
|||
|
||||
/// Compare two strings and return a vector of text edit records which are
|
||||
/// supported by the Language Server Protocol.
|
||||
pub fn get_edits(
|
||||
a: &str,
|
||||
b: &str,
|
||||
maybe_line_index: Option<LineIndex>,
|
||||
) -> Vec<TextEdit> {
|
||||
pub fn get_edits(a: &str, b: &str, line_index: &LineIndex) -> Vec<TextEdit> {
|
||||
if a == b {
|
||||
return vec![];
|
||||
}
|
||||
let chunks = diff(a, b);
|
||||
let mut text_edits = Vec::<TextEdit>::new();
|
||||
let line_index = if let Some(line_index) = maybe_line_index {
|
||||
line_index
|
||||
} else {
|
||||
LineIndex::new(a)
|
||||
};
|
||||
let mut iter = chunks.iter().peekable();
|
||||
let mut a_pos = TextSize::from(0);
|
||||
loop {
|
||||
|
@ -575,7 +566,7 @@ const C: char = \"メ メ\";
|
|||
fn test_get_edits() {
|
||||
let a = "abcdefg";
|
||||
let b = "a\nb\nchije\nfg\n";
|
||||
let actual = get_edits(a, b, None);
|
||||
let actual = get_edits(a, b, &LineIndex::new(a));
|
||||
assert_eq!(
|
||||
actual,
|
||||
vec![
|
||||
|
@ -613,7 +604,7 @@ const C: char = \"メ メ\";
|
|||
fn test_get_edits_mbc() {
|
||||
let a = "const bar = \"👍🇺🇸😃\";\nconsole.log('hello deno')\n";
|
||||
let b = "const bar = \"👍🇺🇸😃\";\nconsole.log(\"hello deno\");\n";
|
||||
let actual = get_edits(a, b, None);
|
||||
let actual = get_edits(a, b, &LineIndex::new(a));
|
||||
assert_eq!(
|
||||
actual,
|
||||
vec![
|
||||
|
|
|
@ -18,11 +18,11 @@ use super::text::LineIndex;
|
|||
use super::urls::INVALID_SPECIFIER;
|
||||
|
||||
use crate::config_file::TsConfig;
|
||||
use crate::media_type::MediaType;
|
||||
use crate::tokio_util::create_basic_runtime;
|
||||
use crate::tsc;
|
||||
use crate::tsc::ResolveArgs;
|
||||
|
||||
use deno_ast::MediaType;
|
||||
use deno_core::error::anyhow;
|
||||
use deno_core::error::custom_error;
|
||||
use deno_core::error::AnyError;
|
||||
|
@ -45,6 +45,7 @@ use lspower::lsp;
|
|||
use regex::Captures;
|
||||
use regex::Regex;
|
||||
use std::collections::HashSet;
|
||||
use std::sync::Arc;
|
||||
use std::thread;
|
||||
use std::{borrow::Cow, cmp};
|
||||
use std::{collections::HashMap, path::Path};
|
||||
|
@ -111,7 +112,7 @@ impl TsServer {
|
|||
/// from static assets built into Rust, or static assets built into tsc.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct AssetDocument {
|
||||
pub text: String,
|
||||
pub text: Arc<String>,
|
||||
pub length: usize,
|
||||
pub line_index: LineIndex,
|
||||
pub maybe_navigation_tree: Option<NavigationTree>,
|
||||
|
@ -121,7 +122,7 @@ impl AssetDocument {
|
|||
pub fn new<T: AsRef<str>>(text: T) -> Self {
|
||||
let text = text.as_ref();
|
||||
Self {
|
||||
text: text.to_string(),
|
||||
text: Arc::new(text.to_string()),
|
||||
length: text.encode_utf16().count(),
|
||||
line_index: LineIndex::new(text),
|
||||
maybe_navigation_tree: None,
|
||||
|
@ -2057,7 +2058,7 @@ fn cache_snapshot(
|
|||
state
|
||||
.state_snapshot
|
||||
.documents
|
||||
.content(specifier)?
|
||||
.content(specifier)
|
||||
.ok_or_else(|| {
|
||||
anyhow!("Specifier unexpectedly doesn't have content: {}", specifier)
|
||||
})?
|
||||
|
@ -2068,7 +2069,7 @@ fn cache_snapshot(
|
|||
};
|
||||
state
|
||||
.snapshots
|
||||
.insert((specifier.clone(), version.into()), content);
|
||||
.insert((specifier.clone(), version.into()), content.to_string());
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -2235,17 +2236,16 @@ fn op_get_text(
|
|||
let specifier = state.normalize_specifier(args.specifier)?;
|
||||
let content =
|
||||
if let Some(Some(content)) = state.state_snapshot.assets.get(&specifier) {
|
||||
content.text.clone()
|
||||
content.text.as_str()
|
||||
} else {
|
||||
cache_snapshot(state, &specifier, args.version.clone())?;
|
||||
state
|
||||
.snapshots
|
||||
.get(&(specifier, args.version.into()))
|
||||
.unwrap()
|
||||
.clone()
|
||||
};
|
||||
state.state_snapshot.performance.measure(mark);
|
||||
Ok(text::slice(&content, args.start..args.end).to_string())
|
||||
Ok(text::slice(content, args.start..args.end).to_string())
|
||||
}
|
||||
|
||||
fn op_load(
|
||||
|
@ -2259,7 +2259,7 @@ fn op_load(
|
|||
let specifier = state.normalize_specifier(args.specifier)?;
|
||||
let result = state.state_snapshot.sources.get_source(&specifier);
|
||||
state.state_snapshot.performance.measure(mark);
|
||||
Ok(result)
|
||||
Ok(result.map(|t| t.to_string()))
|
||||
}
|
||||
|
||||
fn op_resolve(
|
||||
|
@ -2908,19 +2908,24 @@ mod tests {
|
|||
for (specifier, source, version, language_id) in fixtures {
|
||||
let specifier =
|
||||
resolve_url(specifier).expect("failed to create specifier");
|
||||
documents.open(specifier.clone(), *version, language_id.clone(), source);
|
||||
documents.open(
|
||||
specifier.clone(),
|
||||
*version,
|
||||
*language_id,
|
||||
Arc::new(source.to_string()),
|
||||
);
|
||||
let media_type = MediaType::from(&specifier);
|
||||
if let Ok(parsed_module) =
|
||||
analysis::parse_module(&specifier, source, &media_type)
|
||||
if let Some(Ok(parsed_module)) =
|
||||
documents.get(&specifier).unwrap().source().module()
|
||||
{
|
||||
let (deps, _) = analysis::analyze_dependencies(
|
||||
&specifier,
|
||||
&media_type,
|
||||
&parsed_module,
|
||||
media_type,
|
||||
parsed_module,
|
||||
&None,
|
||||
);
|
||||
let dep_ranges =
|
||||
analysis::analyze_dependency_ranges(&parsed_module).ok();
|
||||
analysis::analyze_dependency_ranges(parsed_module).ok();
|
||||
documents
|
||||
.set_dependencies(&specifier, Some(deps), dep_ranges)
|
||||
.unwrap();
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use crate::file_fetcher::map_content_type;
|
||||
use crate::media_type::MediaType;
|
||||
|
||||
use data_url::DataUrl;
|
||||
use deno_ast::MediaType;
|
||||
use deno_core::error::uri_error;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::url::Position;
|
||||
|
|
|
@@ -23,7 +23,6 @@ mod info;
mod lockfile;
mod logger;
mod lsp;
mod media_type;
mod module_graph;
mod module_loader;
mod ops;

@@ -58,12 +57,12 @@ use crate::flags::RunFlags;
use crate::flags::TestFlags;
use crate::flags::UpgradeFlags;
use crate::fmt_errors::PrettyJsError;
use crate::media_type::MediaType;
use crate::module_loader::CliModuleLoader;
use crate::program_state::ProgramState;
use crate::source_maps::apply_source_map;
use crate::specifier_handler::FetchHandler;
use crate::tools::installer::infer_name_from_url;
use deno_ast::MediaType;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::futures::future::FutureExt;
@@ -597,7 +596,7 @@ async fn eval_command(
    } else {
      MediaType::Jsx
    },
    source: String::from_utf8(source_code)?,
    source: Arc::new(String::from_utf8(source_code)?),
    specifier: main_module.clone(),
    maybe_headers: None,
  };

@@ -850,7 +849,7 @@ async fn run_from_stdin(flags: Flags) -> Result<(), AnyError> {
    local: main_module.clone().to_file_path().unwrap(),
    maybe_types: None,
    media_type: MediaType::TypeScript,
    source: String::from_utf8(source)?,
    source: Arc::new(String::from_utf8(source)?),
    specifier: main_module.clone(),
    maybe_headers: None,
  };
@@ -1,422 +0,0 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.

use data_url::DataUrl;
use deno_core::serde::Serialize;
use deno_core::serde::Serializer;
use deno_core::ModuleSpecifier;
use std::fmt;
use std::path::Path;
use std::path::PathBuf;

// Warning! The values in this enum are duplicated in tsc/99_main_compiler.js
// Update carefully!
#[allow(non_camel_case_types)]
#[repr(i32)]
#[derive(Clone, Copy, Ord, PartialOrd, Eq, PartialEq, Debug)]
pub enum MediaType {
  JavaScript = 0,
  Jsx = 1,
  TypeScript = 2,
  Dts = 3,
  Tsx = 4,
  Json = 5,
  Wasm = 6,
  TsBuildInfo = 7,
  SourceMap = 8,
  Unknown = 9,
}

impl fmt::Display for MediaType {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    let value = match self {
      MediaType::JavaScript => "JavaScript",
      MediaType::Jsx => "JSX",
      MediaType::TypeScript => "TypeScript",
      MediaType::Dts => "Dts",
      MediaType::Tsx => "TSX",
      MediaType::Json => "Json",
      MediaType::Wasm => "Wasm",
      MediaType::TsBuildInfo => "TsBuildInfo",
      MediaType::SourceMap => "SourceMap",
      MediaType::Unknown => "Unknown",
    };
    write!(f, "{}", value)
  }
}

impl<'a> From<&'a Path> for MediaType {
  fn from(path: &'a Path) -> Self {
    Self::from_path(path)
  }
}

impl<'a> From<&'a PathBuf> for MediaType {
  fn from(path: &'a PathBuf) -> Self {
    Self::from_path(path)
  }
}

impl<'a> From<&'a String> for MediaType {
  fn from(specifier: &'a String) -> Self {
    Self::from_path(&PathBuf::from(specifier))
  }
}

impl<'a> From<&'a ModuleSpecifier> for MediaType {
  fn from(specifier: &'a ModuleSpecifier) -> Self {
    if specifier.scheme() != "data" {
      let path = if specifier.scheme() == "file" {
        if let Ok(path) = specifier.to_file_path() {
          path
        } else {
          PathBuf::from(specifier.path())
        }
      } else {
        PathBuf::from(specifier.path())
      };
      Self::from_path(&path)
    } else if let Ok(data_url) = DataUrl::process(specifier.as_str()) {
      Self::from_content_type(specifier, data_url.mime_type().to_string())
    } else {
      Self::Unknown
    }
  }
}

impl Default for MediaType {
  fn default() -> Self {
    MediaType::Unknown
  }
}

impl MediaType {
  pub fn from_content_type<S: AsRef<str>>(
    specifier: &ModuleSpecifier,
    content_type: S,
  ) -> Self {
    match content_type.as_ref().trim().to_lowercase().as_ref() {
      "application/typescript"
      | "text/typescript"
      | "video/vnd.dlna.mpeg-tts"
      | "video/mp2t"
      | "application/x-typescript" => {
        map_js_like_extension(specifier, Self::TypeScript)
      }
      "application/javascript"
      | "text/javascript"
      | "application/ecmascript"
      | "text/ecmascript"
      | "application/x-javascript"
      | "application/node" => {
        map_js_like_extension(specifier, Self::JavaScript)
      }
      "text/jsx" => Self::Jsx,
      "text/tsx" => Self::Tsx,
      "application/json" | "text/json" => Self::Json,
      "application/wasm" => Self::Wasm,
      // Handle plain and possibly webassembly
      "text/plain" | "application/octet-stream"
        if specifier.scheme() != "data" =>
      {
        Self::from(specifier)
      }
      _ => Self::Unknown,
    }
  }

  fn from_path(path: &Path) -> Self {
    match path.extension() {
      None => match path.file_name() {
        None => MediaType::Unknown,
        Some(os_str) => match os_str.to_str() {
          Some(".tsbuildinfo") => MediaType::TsBuildInfo,
          _ => MediaType::Unknown,
        },
      },
      Some(os_str) => match os_str.to_str() {
        Some("ts") => {
          if let Some(os_str) = path.file_stem() {
            if let Some(file_name) = os_str.to_str() {
              if file_name.ends_with(".d") {
                return MediaType::Dts;
              }
            }
          }
          MediaType::TypeScript
        }
        Some("tsx") => MediaType::Tsx,
        Some("js") => MediaType::JavaScript,
        Some("jsx") => MediaType::Jsx,
        Some("mjs") => MediaType::JavaScript,
        Some("cjs") => MediaType::JavaScript,
        Some("json") => MediaType::Json,
        Some("wasm") => MediaType::Wasm,
        Some("tsbuildinfo") => MediaType::TsBuildInfo,
        Some("map") => MediaType::SourceMap,
        _ => MediaType::Unknown,
      },
    }
  }

  /// Convert a MediaType to a `ts.Extension`.
  ///
  /// *NOTE* This is defined in TypeScript as a string based enum. Changes to
  /// that enum in TypeScript should be reflected here.
  pub fn as_ts_extension(&self) -> &str {
    match self {
      MediaType::JavaScript => ".js",
      MediaType::Jsx => ".jsx",
      MediaType::TypeScript => ".ts",
      MediaType::Dts => ".d.ts",
      MediaType::Tsx => ".tsx",
      MediaType::Json => ".json",
      // TypeScript doesn't have an "unknown", so we will treat WASM as JS for
      // mapping purposes, though in reality, it is unlikely to ever be passed
      // to the compiler.
      MediaType::Wasm => ".js",
      MediaType::TsBuildInfo => ".tsbuildinfo",
      // TypeScript doesn't have an "source map", so we will treat SourceMap as
      // JS for mapping purposes, though in reality, it is unlikely to ever be
      // passed to the compiler.
      MediaType::SourceMap => ".js",
      // TypeScript doesn't have an "unknown", so we will treat unknowns as JS
      // for mapping purposes, though in reality, it is unlikely to ever be
      // passed to the compiler.
      MediaType::Unknown => ".js",
    }
  }

  /// Map the media type to a `ts.ScriptKind`
  pub fn as_ts_script_kind(&self) -> i32 {
    match self {
      MediaType::JavaScript => 1,
      MediaType::Jsx => 2,
      MediaType::TypeScript => 3,
      MediaType::Dts => 3,
      MediaType::Tsx => 4,
      MediaType::Json => 5,
      _ => 0,
    }
  }
}

impl Serialize for MediaType {
  fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
  where
    S: Serializer,
  {
    let value = match self {
      MediaType::JavaScript => 0_i32,
      MediaType::Jsx => 1_i32,
      MediaType::TypeScript => 2_i32,
      MediaType::Dts => 3_i32,
      MediaType::Tsx => 4_i32,
      MediaType::Json => 5_i32,
      MediaType::Wasm => 6_i32,
      MediaType::TsBuildInfo => 7_i32,
      MediaType::SourceMap => 8_i32,
      MediaType::Unknown => 9_i32,
    };
    Serialize::serialize(&value, serializer)
  }
}

/// Serialize a `MediaType` enum into a human readable string. The default
/// serialization for media types is and integer.
///
/// TODO(@kitsonk) remove this once we stop sending MediaType into tsc.
pub fn serialize_media_type<S>(
  mmt: &Option<MediaType>,
  s: S,
) -> Result<S::Ok, S::Error>
where
  S: Serializer,
{
  match *mmt {
    Some(ref mt) => s.serialize_some(&mt.to_string()),
    None => s.serialize_none(),
  }
}

/// Used to augment media types by using the path part of a module specifier to
/// resolve to a more accurate media type.
fn map_js_like_extension(
  specifier: &ModuleSpecifier,
  default: MediaType,
) -> MediaType {
  let path = if specifier.scheme() == "file" {
    if let Ok(path) = specifier.to_file_path() {
      path
    } else {
      PathBuf::from(specifier.path())
    }
  } else {
    PathBuf::from(specifier.path())
  };
  match path.extension() {
    None => default,
    Some(os_str) => match os_str.to_str() {
      None => default,
      Some("jsx") => MediaType::Jsx,
      Some("tsx") => MediaType::Tsx,
      // Because DTS files do not have a separate media type, or a unique
      // extension, we have to "guess" at those things that we consider that
      // look like TypeScript, and end with `.d.ts` are DTS files.
      Some("ts") => {
        if default == MediaType::TypeScript {
          match path.file_stem() {
            None => default,
            Some(os_str) => {
              if let Some(file_stem) = os_str.to_str() {
                if file_stem.ends_with(".d") {
                  MediaType::Dts
                } else {
                  default
                }
              } else {
                default
              }
            }
          }
        } else {
          default
        }
      }
      Some(_) => default,
    },
  }
}

#[cfg(test)]
mod tests {
  use super::*;
  use deno_core::serde_json::json;

  #[test]
  fn test_map_file_extension() {
    assert_eq!(
      MediaType::from(Path::new("foo/bar.ts")),
      MediaType::TypeScript
    );
    assert_eq!(MediaType::from(Path::new("foo/bar.tsx")), MediaType::Tsx);
    assert_eq!(MediaType::from(Path::new("foo/bar.d.ts")), MediaType::Dts);
    assert_eq!(
      MediaType::from(Path::new("foo/bar.js")),
      MediaType::JavaScript
    );
    assert_eq!(MediaType::from(Path::new("foo/bar.jsx")), MediaType::Jsx);
    assert_eq!(MediaType::from(Path::new("foo/bar.json")), MediaType::Json);
    assert_eq!(MediaType::from(Path::new("foo/bar.wasm")), MediaType::Wasm);
    assert_eq!(
      MediaType::from(Path::new("foo/bar.cjs")),
      MediaType::JavaScript
    );
    assert_eq!(
      MediaType::from(Path::new("foo/.tsbuildinfo")),
      MediaType::TsBuildInfo
    );
    assert_eq!(
      MediaType::from(Path::new("foo/bar.js.map")),
      MediaType::SourceMap
    );
    assert_eq!(
      MediaType::from(Path::new("foo/bar.txt")),
      MediaType::Unknown
    );
    assert_eq!(MediaType::from(Path::new("foo/bar")), MediaType::Unknown);
  }

  #[test]
  fn test_from_specifier() {
    let fixtures = vec![
      ("file:///a/b/c.ts", MediaType::TypeScript),
      ("file:///a/b/c.js", MediaType::JavaScript),
      ("file:///a/b/c.txt", MediaType::Unknown),
      ("https://deno.land/x/mod.ts", MediaType::TypeScript),
      ("https://deno.land/x/mod.js", MediaType::JavaScript),
      ("https://deno.land/x/mod.txt", MediaType::Unknown),
      ("data:application/typescript;base64,ZXhwb3J0IGNvbnN0IGEgPSAiYSI7CgpleHBvcnQgZW51bSBBIHsKICBBLAogIEIsCiAgQywKfQo=", MediaType::TypeScript),
      ("data:application/javascript;base64,ZXhwb3J0IGNvbnN0IGEgPSAiYSI7CgpleHBvcnQgZW51bSBBIHsKICBBLAogIEIsCiAgQywKfQo=", MediaType::JavaScript),
      ("data:text/plain;base64,ZXhwb3J0IGNvbnN0IGEgPSAiYSI7CgpleHBvcnQgZW51bSBBIHsKICBBLAogIEIsCiAgQywKfQo=", MediaType::Unknown),
    ];

    for (specifier, expected) in fixtures {
      let actual = deno_core::resolve_url_or_path(specifier).unwrap();
      assert_eq!(MediaType::from(&actual), expected);
    }
  }

  #[test]
  fn test_from_content_type() {
    let fixtures = vec![
      (
        "https://deno.land/x/mod.ts",
        "application/typescript",
        MediaType::TypeScript,
      ),
      (
        "https://deno.land/x/mod.d.ts",
        "application/typescript",
        MediaType::Dts,
      ),
      ("https://deno.land/x/mod.tsx", "text/tsx", MediaType::Tsx),
      (
        "https://deno.land/x/mod.js",
        "application/javascript",
        MediaType::JavaScript,
      ),
      ("https://deno.land/x/mod.jsx", "text/jsx", MediaType::Jsx),
      (
        "https://deno.land/x/mod.ts",
        "text/plain",
        MediaType::TypeScript,
      ),
      (
        "https://deno.land/x/mod.js",
        "text/plain",
        MediaType::JavaScript,
      ),
      (
        "https://deno.land/x/mod.wasm",
        "text/plain",
        MediaType::Wasm,
      ),
    ];

    for (specifier, content_type, expected) in fixtures {
      let fixture = deno_core::resolve_url_or_path(specifier).unwrap();
      assert_eq!(
        MediaType::from_content_type(&fixture, content_type),
        expected
      );
    }
  }

  #[test]
  fn test_serialization() {
    assert_eq!(json!(MediaType::JavaScript), json!(0));
    assert_eq!(json!(MediaType::Jsx), json!(1));
    assert_eq!(json!(MediaType::TypeScript), json!(2));
    assert_eq!(json!(MediaType::Dts), json!(3));
    assert_eq!(json!(MediaType::Tsx), json!(4));
    assert_eq!(json!(MediaType::Json), json!(5));
    assert_eq!(json!(MediaType::Wasm), json!(6));
    assert_eq!(json!(MediaType::TsBuildInfo), json!(7));
    assert_eq!(json!(MediaType::SourceMap), json!(8));
    assert_eq!(json!(MediaType::Unknown), json!(9));
  }

  #[test]
  fn test_display() {
    assert_eq!(MediaType::JavaScript.to_string(), "JavaScript");
    assert_eq!(MediaType::Jsx.to_string(), "JSX");
    assert_eq!(MediaType::TypeScript.to_string(), "TypeScript");
    assert_eq!(MediaType::Dts.to_string(), "Dts");
    assert_eq!(MediaType::Tsx.to_string(), "TSX");
    assert_eq!(MediaType::Json.to_string(), "Json");
    assert_eq!(MediaType::Wasm.to_string(), "Wasm");
    assert_eq!(MediaType::TsBuildInfo.to_string(), "TsBuildInfo");
    assert_eq!(MediaType::SourceMap.to_string(), "SourceMap");
    assert_eq!(MediaType::Unknown.to_string(), "Unknown");
  }
}
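
With cli/media_type.rs deleted, media type detection now lives in the external deno_ast crate. A minimal sketch of the equivalent lookup, assuming deno_ast keeps the `From<&ModuleSpecifier>` impl and the `Display` strings exercised by the deleted tests above (illustrative only, not part of this commit):

    use deno_ast::MediaType;

    fn main() {
      // resolve_url_or_path and MediaType::from(&specifier) are the calls the
      // rest of this diff switches to; the expected string mirrors the old tests.
      let specifier =
        deno_core::resolve_url_or_path("https://deno.land/x/mod.d.ts").unwrap();
      let media_type = MediaType::from(&specifier);
      assert_eq!(media_type.to_string(), "Dts");
    }
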
@@ -1,10 +1,9 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
use crate::ast;
use crate::ast::parse;
use crate::ast::transpile;
use crate::ast::transpile_module;
use crate::ast::BundleHook;
use crate::ast::Location;
use crate::ast::ParsedModule;
use crate::checksum;
use crate::colors;
use crate::config_file::CompilerOptions;

@@ -16,7 +15,6 @@ use crate::import_map::ImportMap;
use crate::import_map::ImportMapError;
use crate::info;
use crate::lockfile::Lockfile;
use crate::media_type::MediaType;
use crate::specifier_handler::CachedModule;
use crate::specifier_handler::Dependency;
use crate::specifier_handler::DependencyMap;

@@ -25,6 +23,12 @@ use crate::specifier_handler::FetchFuture;
use crate::specifier_handler::SpecifierHandler;
use crate::tsc;
use crate::version;
use deno_ast::swc::common::comments::Comment;
use deno_ast::swc::common::BytePos;
use deno_ast::swc::common::Span;
use deno_ast::MediaType;
use deno_ast::ParsedSource;
use deno_ast::SourceTextInfo;
use deno_core::error::anyhow;
use deno_core::error::custom_error;
use deno_core::error::get_custom_error_class;

@@ -46,6 +50,7 @@ use deno_core::url::Url;
use deno_core::ModuleResolutionError;
use deno_core::ModuleSource;
use deno_core::ModuleSpecifier;
use deno_graph::analyze_dependencies;
use log::debug;
use regex::Regex;
use std::collections::HashMap;

@@ -57,9 +62,6 @@ use std::rc::Rc;
use std::result;
use std::sync::Arc;
use std::time::Instant;
use swc_common::comments::Comment;
use swc_common::BytePos;
use swc_common::Span;

lazy_static::lazy_static! {
  /// Matched the `@deno-types` pragma.
@ -128,9 +130,9 @@ impl Error for GraphError {}
|
|||
/// A structure for handling bundle loading, which is implemented here, to
|
||||
/// avoid a circular dependency with `ast`.
|
||||
struct BundleLoader<'a> {
|
||||
cm: Rc<swc_common::SourceMap>,
|
||||
cm: Rc<deno_ast::swc::common::SourceMap>,
|
||||
emit_options: &'a ast::EmitOptions,
|
||||
globals: &'a swc_common::Globals,
|
||||
globals: &'a deno_ast::swc::common::Globals,
|
||||
graph: &'a Graph,
|
||||
}
|
||||
|
||||
|
@ -138,8 +140,8 @@ impl<'a> BundleLoader<'a> {
|
|||
pub fn new(
|
||||
graph: &'a Graph,
|
||||
emit_options: &'a ast::EmitOptions,
|
||||
globals: &'a swc_common::Globals,
|
||||
cm: Rc<swc_common::SourceMap>,
|
||||
globals: &'a deno_ast::swc::common::Globals,
|
||||
cm: Rc<deno_ast::swc::common::SourceMap>,
|
||||
) -> Self {
|
||||
BundleLoader {
|
||||
cm,
|
||||
|
@ -150,13 +152,13 @@ impl<'a> BundleLoader<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
impl swc_bundler::Load for BundleLoader<'_> {
|
||||
impl deno_ast::swc::bundler::Load for BundleLoader<'_> {
|
||||
fn load(
|
||||
&self,
|
||||
file: &swc_common::FileName,
|
||||
) -> Result<swc_bundler::ModuleData, AnyError> {
|
||||
file: &deno_ast::swc::common::FileName,
|
||||
) -> Result<deno_ast::swc::bundler::ModuleData, AnyError> {
|
||||
match file {
|
||||
swc_common::FileName::Custom(filename) => {
|
||||
deno_ast::swc::common::FileName::Custom(filename) => {
|
||||
let specifier = resolve_url_or_path(filename)
|
||||
.context("Failed to convert swc FileName to ModuleSpecifier.")?;
|
||||
if let Some(src) = self.graph.get_source(&specifier) {
|
||||
|
@ -167,12 +169,12 @@ impl swc_bundler::Load for BundleLoader<'_> {
|
|||
let (source_file, module) = transpile_module(
|
||||
filename,
|
||||
&src,
|
||||
&media_type,
|
||||
media_type,
|
||||
self.emit_options,
|
||||
self.globals,
|
||||
self.cm.clone(),
|
||||
)?;
|
||||
Ok(swc_bundler::ModuleData {
|
||||
Ok(deno_ast::swc::bundler::ModuleData {
|
||||
fm: source_file,
|
||||
module,
|
||||
helpers: Default::default(),
|
||||
|
@ -261,7 +263,7 @@ pub struct Module {
|
|||
maybe_version: Option<String>,
|
||||
media_type: MediaType,
|
||||
specifier: ModuleSpecifier,
|
||||
source: String,
|
||||
text_info: SourceTextInfo,
|
||||
source_path: PathBuf,
|
||||
}
|
||||
|
||||
|
@ -278,7 +280,7 @@ impl Default for Module {
|
|||
maybe_version: None,
|
||||
media_type: MediaType::Unknown,
|
||||
specifier: deno_core::resolve_url("file:///example.js").unwrap(),
|
||||
source: "".to_string(),
|
||||
text_info: SourceTextInfo::from_string("".to_string()),
|
||||
source_path: PathBuf::new(),
|
||||
}
|
||||
}
|
||||
|
@ -305,7 +307,7 @@ impl Module {
|
|||
specifier: cached_module.specifier,
|
||||
maybe_import_map,
|
||||
media_type,
|
||||
source: cached_module.source,
|
||||
text_info: SourceTextInfo::new(BytePos(0), cached_module.source),
|
||||
source_path: cached_module.source_path,
|
||||
maybe_emit: cached_module.maybe_emit,
|
||||
maybe_emit_path: cached_module.maybe_emit_path,
|
||||
|
@ -334,7 +336,8 @@ impl Module {
|
|||
/// version.
|
||||
pub fn is_emit_valid(&self, config: &[u8]) -> bool {
|
||||
if let Some(version) = self.maybe_version.clone() {
|
||||
version == get_version(&self.source, &version::deno(), config)
|
||||
version
|
||||
== get_version(self.text_info.text_str(), &version::deno(), config)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
|
@ -342,14 +345,19 @@ impl Module {
|
|||
|
||||
/// Parse a module, populating the structure with data retrieved from the
|
||||
/// source of the module.
|
||||
pub fn parse(&mut self) -> Result<ParsedModule, AnyError> {
|
||||
let parsed_module =
|
||||
parse(self.specifier.as_str(), &self.source, &self.media_type)?;
|
||||
pub fn parse(&mut self) -> Result<ParsedSource, AnyError> {
|
||||
let parsed_module = deno_ast::parse_module(deno_ast::ParseParams {
|
||||
specifier: self.specifier.as_str().to_string(),
|
||||
source: self.text_info.clone(),
|
||||
media_type: self.media_type,
|
||||
capture_tokens: false,
|
||||
maybe_syntax: None,
|
||||
})?;
|
||||
|
||||
// parse out any triple slash references
|
||||
for comment in parsed_module.get_leading_comments().iter() {
|
||||
if let Some((ts_reference, _)) = parse_ts_reference(comment) {
|
||||
let location = parsed_module.get_location(comment.span.lo);
|
||||
let location = Location::from_pos(&parsed_module, comment.span.lo);
|
||||
match ts_reference {
|
||||
TypeScriptReference::Path(import) => {
|
||||
let specifier =
|
||||
|
@ -382,11 +390,11 @@ impl Module {
|
|||
}
|
||||
|
||||
// Parse out all the syntactical dependencies for a module
|
||||
let dependencies = parsed_module.analyze_dependencies();
|
||||
let dependencies = analyze_dependencies(&parsed_module);
|
||||
for desc in dependencies.iter().filter(|desc| {
|
||||
desc.kind != swc_ecmascript::dep_graph::DependencyKind::Require
|
||||
desc.kind != deno_ast::swc::dep_graph::DependencyKind::Require
|
||||
}) {
|
||||
let location = parsed_module.get_location(desc.span.lo);
|
||||
let location = Location::from_pos(&parsed_module, desc.span.lo);
|
||||
|
||||
// In situations where there is a potential issue with resolving the
|
||||
// import specifier, that ends up being a module resolution error for a
|
||||
|
@ -495,12 +503,15 @@ impl Module {
|
|||
|
||||
/// Calculate the hashed version of the module and update the `maybe_version`.
|
||||
pub fn set_version(&mut self, config: &[u8]) {
|
||||
self.maybe_version =
|
||||
Some(get_version(&self.source, &version::deno(), config))
|
||||
self.maybe_version = Some(get_version(
|
||||
self.text_info.text_str(),
|
||||
&version::deno(),
|
||||
config,
|
||||
))
|
||||
}
|
||||
|
||||
pub fn size(&self) -> usize {
|
||||
self.source.as_bytes().len()
|
||||
self.text_info.text_str().len()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -736,7 +747,7 @@ fn to_module_result(
|
|||
} else {
|
||||
match module.media_type {
|
||||
MediaType::JavaScript | MediaType::Unknown => Ok(ModuleSource {
|
||||
code: module.source.clone(),
|
||||
code: module.text_info.text_str().to_string(),
|
||||
module_url_found: module.specifier.to_string(),
|
||||
module_url_specified: specifier.to_string(),
|
||||
}),
|
||||
|
@ -1128,11 +1139,13 @@ impl Graph {
|
|||
|| module.media_type == MediaType::Tsx
|
||||
|| module.media_type == MediaType::TypeScript)
|
||||
{
|
||||
emitted_files
|
||||
.insert(module.specifier.to_string(), module.source.clone());
|
||||
emitted_files.insert(
|
||||
module.specifier.to_string(),
|
||||
module.text_info.text_str().to_string(),
|
||||
);
|
||||
}
|
||||
let parsed_module = module.parse()?;
|
||||
let (code, maybe_map) = parsed_module.transpile(&emit_options)?;
|
||||
let (code, maybe_map) = transpile(&parsed_module, &emit_options)?;
|
||||
emit_count += 1;
|
||||
emitted_files.insert(format!("{}.js", module.specifier), code);
|
||||
if let Some(map) = maybe_map {
|
||||
|
@ -1170,23 +1183,23 @@ impl Graph {
|
|||
emit_options: &ast::EmitOptions,
|
||||
bundle_type: &BundleType,
|
||||
) -> Result<(String, Option<String>), AnyError> {
|
||||
let cm = Rc::new(swc_common::SourceMap::new(
|
||||
swc_common::FilePathMapping::empty(),
|
||||
let cm = Rc::new(deno_ast::swc::common::SourceMap::new(
|
||||
deno_ast::swc::common::FilePathMapping::empty(),
|
||||
));
|
||||
let globals = swc_common::Globals::new();
|
||||
let globals = deno_ast::swc::common::Globals::new();
|
||||
let loader = BundleLoader::new(self, emit_options, &globals, cm.clone());
|
||||
let hook = Box::new(BundleHook);
|
||||
let module = match bundle_type {
|
||||
BundleType::Module => swc_bundler::ModuleType::Es,
|
||||
BundleType::Classic => swc_bundler::ModuleType::Iife,
|
||||
BundleType::Module => deno_ast::swc::bundler::ModuleType::Es,
|
||||
BundleType::Classic => deno_ast::swc::bundler::ModuleType::Iife,
|
||||
_ => unreachable!("invalid bundle type"),
|
||||
};
|
||||
let bundler = swc_bundler::Bundler::new(
|
||||
let bundler = deno_ast::swc::bundler::Bundler::new(
|
||||
&globals,
|
||||
cm.clone(),
|
||||
loader,
|
||||
self,
|
||||
swc_bundler::Config {
|
||||
deno_ast::swc::bundler::Config {
|
||||
module,
|
||||
..Default::default()
|
||||
},
|
||||
|
@ -1195,7 +1208,7 @@ impl Graph {
|
|||
let mut entries = HashMap::new();
|
||||
entries.insert(
|
||||
"bundle".to_string(),
|
||||
swc_common::FileName::Custom(specifier.to_string()),
|
||||
deno_ast::swc::common::FileName::Custom(specifier.to_string()),
|
||||
);
|
||||
let output = bundler
|
||||
.bundle(entries)
|
||||
|
@ -1203,11 +1216,11 @@ impl Graph {
|
|||
let mut buf = Vec::new();
|
||||
let mut src_map_buf = Vec::new();
|
||||
{
|
||||
let mut emitter = swc_ecmascript::codegen::Emitter {
|
||||
cfg: swc_ecmascript::codegen::Config { minify: false },
|
||||
let mut emitter = deno_ast::swc::codegen::Emitter {
|
||||
cfg: deno_ast::swc::codegen::Config { minify: false },
|
||||
cm: cm.clone(),
|
||||
comments: None,
|
||||
wr: Box::new(swc_ecmascript::codegen::text_writer::JsWriter::new(
|
||||
wr: Box::new(deno_ast::swc::codegen::text_writer::JsWriter::new(
|
||||
cm.clone(),
|
||||
"\n",
|
||||
&mut buf,
|
||||
|
@ -1421,9 +1434,9 @@ impl Graph {
|
|||
|
||||
/// Get the source for a given module specifier. If the module is not part
|
||||
/// of the graph, the result will be `None`.
|
||||
pub fn get_source(&self, specifier: &ModuleSpecifier) -> Option<String> {
|
||||
pub fn get_source(&self, specifier: &ModuleSpecifier) -> Option<Arc<String>> {
|
||||
if let ModuleSlot::Module(module) = self.get_module(specifier) {
|
||||
Some(module.source.clone())
|
||||
Some(module.text_info.text())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -1482,7 +1495,10 @@ impl Graph {
|
|||
size: Some(module.size()),
|
||||
media_type: Some(module.media_type),
|
||||
local: Some(module.source_path.clone()),
|
||||
checksum: Some(checksum::gen(&[module.source.as_bytes()])),
|
||||
checksum: Some(checksum::gen(&[module
|
||||
.text_info
|
||||
.text_str()
|
||||
.as_bytes()])),
|
||||
emit,
|
||||
map,
|
||||
..Default::default()
|
||||
|
@ -1547,7 +1563,8 @@ impl Graph {
|
|||
for (ms, module_slot) in self.modules.iter() {
|
||||
if let ModuleSlot::Module(module) = module_slot {
|
||||
let specifier = module.specifier.to_string();
|
||||
let valid = lockfile.check_or_insert(&specifier, &module.source);
|
||||
let valid =
|
||||
lockfile.check_or_insert(&specifier, module.text_info.text_str());
|
||||
if !valid {
|
||||
eprintln!(
|
||||
"{}",
|
||||
|
@ -1739,7 +1756,7 @@ impl Graph {
|
|||
continue;
|
||||
}
|
||||
let parsed_module = module.parse()?;
|
||||
let emit = parsed_module.transpile(&emit_options)?;
|
||||
let emit = transpile(&parsed_module, &emit_options)?;
|
||||
emit_count += 1;
|
||||
module.maybe_emit = Some(Emit::Cli(emit));
|
||||
module.set_version(&config);
|
||||
|
@ -1810,13 +1827,14 @@ impl Graph {
|
|||
}
|
||||
}
|
||||
|
||||
impl swc_bundler::Resolve for Graph {
|
||||
impl deno_ast::swc::bundler::Resolve for Graph {
|
||||
fn resolve(
|
||||
&self,
|
||||
referrer: &swc_common::FileName,
|
||||
referrer: &deno_ast::swc::common::FileName,
|
||||
specifier: &str,
|
||||
) -> Result<swc_common::FileName, AnyError> {
|
||||
let referrer = if let swc_common::FileName::Custom(referrer) = referrer {
|
||||
) -> Result<deno_ast::swc::common::FileName, AnyError> {
|
||||
let referrer =
|
||||
if let deno_ast::swc::common::FileName::Custom(referrer) = referrer {
|
||||
resolve_url_or_path(referrer)
|
||||
.context("Cannot resolve swc FileName to a module specifier")?
|
||||
} else {
|
||||
|
@ -1827,7 +1845,9 @@ impl swc_bundler::Resolve for Graph {
|
|||
};
|
||||
let specifier = self.resolve(specifier, &referrer, false)?;
|
||||
|
||||
Ok(swc_common::FileName::Custom(specifier.to_string()))
|
||||
Ok(deno_ast::swc::common::FileName::Custom(
|
||||
specifier.to_string(),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2114,8 +2134,10 @@ pub mod tests {
|
|||
.replace("/", "-");
|
||||
let source_path = self.fixtures.join(specifier_text);
|
||||
let media_type = MediaType::from(&source_path);
|
||||
let source = fs::read_to_string(&source_path)
|
||||
.map_err(|err| (specifier.clone(), err.into()))?;
|
||||
let source = Arc::new(
|
||||
fs::read_to_string(&source_path)
|
||||
.map_err(|err| (specifier.clone(), err.into()))?,
|
||||
);
|
||||
let is_remote = specifier.scheme() != "file";
|
||||
|
||||
Ok(CachedModule {
|
||||
|
@ -2211,9 +2233,9 @@ pub mod tests {
|
|||
specifier: ModuleSpecifier,
|
||||
sources: HashMap<&str, &str>,
|
||||
) -> Graph {
|
||||
let sources: HashMap<String, String> = sources
|
||||
let sources: HashMap<String, Arc<String>> = sources
|
||||
.iter()
|
||||
.map(|(k, v)| (k.to_string(), v.to_string()))
|
||||
.map(|(k, v)| (k.to_string(), Arc::new(v.to_string())))
|
||||
.collect();
|
||||
let handler = Arc::new(Mutex::new(MemoryHandler::new(sources)));
|
||||
let mut builder = GraphBuilder::new(handler.clone(), None, None);
|
||||
|
@ -2248,37 +2270,37 @@ pub mod tests {
|
|||
|
||||
#[test]
|
||||
fn test_module_emit_valid() {
|
||||
let source = "console.log(42);".to_string();
|
||||
let maybe_version = Some(get_version(&source, &version::deno(), b""));
|
||||
let source = "console.log(42);";
|
||||
let maybe_version = Some(get_version(source, &version::deno(), b""));
|
||||
let module = Module {
|
||||
maybe_version,
|
||||
source,
|
||||
text_info: SourceTextInfo::from_string(source.to_string()),
|
||||
..Module::default()
|
||||
};
|
||||
assert!(module.is_emit_valid(b""));
|
||||
|
||||
let source = "console.log(42);".to_string();
|
||||
let source = "console.log(42);";
|
||||
let old_source = "console.log(43);";
|
||||
let maybe_version = Some(get_version(old_source, &version::deno(), b""));
|
||||
let module = Module {
|
||||
maybe_version,
|
||||
source,
|
||||
text_info: SourceTextInfo::from_string(source.to_string()),
|
||||
..Module::default()
|
||||
};
|
||||
assert!(!module.is_emit_valid(b""));
|
||||
|
||||
let source = "console.log(42);".to_string();
|
||||
let maybe_version = Some(get_version(&source, "0.0.0", b""));
|
||||
let source = "console.log(42);";
|
||||
let maybe_version = Some(get_version(source, "0.0.0", b""));
|
||||
let module = Module {
|
||||
maybe_version,
|
||||
source,
|
||||
text_info: SourceTextInfo::from_string(source.to_string()),
|
||||
..Module::default()
|
||||
};
|
||||
assert!(!module.is_emit_valid(b""));
|
||||
|
||||
let source = "console.log(42);".to_string();
|
||||
let source = "console.log(42);";
|
||||
let module = Module {
|
||||
source,
|
||||
text_info: SourceTextInfo::from_string(source.to_string()),
|
||||
..Module::default()
|
||||
};
|
||||
assert!(!module.is_emit_valid(b""));
|
||||
|
@ -2286,10 +2308,10 @@ pub mod tests {
|
|||
|
||||
#[test]
|
||||
fn test_module_set_version() {
|
||||
let source = "console.log(42);".to_string();
|
||||
let expected = Some(get_version(&source, &version::deno(), b""));
|
||||
let source = "console.log(42);";
|
||||
let expected = Some(get_version(source, &version::deno(), b""));
|
||||
let mut module = Module {
|
||||
source,
|
||||
text_info: SourceTextInfo::from_string(source.to_string()),
|
||||
..Module::default()
|
||||
};
|
||||
assert!(module.maybe_version.is_none());
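
The module tests above now construct sources through `SourceTextInfo` rather than a plain `String`. A tiny sketch of that type as this diff uses it, assuming the deno_ast 0.1 API (illustrative only):

    use deno_ast::SourceTextInfo;

    fn main() {
      // from_string and text_str are the two accessors the refactored Module
      // relies on in place of its old `source: String` field.
      let info = SourceTextInfo::from_string("console.log(42);".to_string());
      assert_eq!(info.text_str(), "console.log(42);");
    }
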
|
||||
|
|
|
@ -47,7 +47,7 @@ struct EmitArgs {
|
|||
import_map: Option<Value>,
|
||||
import_map_path: Option<String>,
|
||||
root_specifier: String,
|
||||
sources: Option<HashMap<String, String>>,
|
||||
sources: Option<HashMap<String, Arc<String>>>,
|
||||
}
|
||||
|
||||
async fn op_emit(
|
||||
|
|
|
@ -3,10 +3,9 @@
|
|||
use crate::ast::Location;
|
||||
use crate::disk_cache::DiskCache;
|
||||
use crate::file_fetcher::FileFetcher;
|
||||
use crate::media_type::MediaType;
|
||||
use crate::program_state::ProgramState;
|
||||
use deno_runtime::permissions::Permissions;
|
||||
|
||||
use deno_ast::MediaType;
|
||||
use deno_core::error::custom_error;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures::future;
|
||||
|
@ -16,6 +15,7 @@ use deno_core::serde::Deserialize;
|
|||
use deno_core::serde::Serialize;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use deno_runtime::permissions::Permissions;
|
||||
use log::debug;
|
||||
use std::collections::HashMap;
|
||||
use std::fmt;
|
||||
|
@ -62,7 +62,7 @@ pub struct CachedModule {
|
|||
pub maybe_version: Option<String>,
|
||||
pub media_type: MediaType,
|
||||
pub requested_specifier: ModuleSpecifier,
|
||||
pub source: String,
|
||||
pub source: Arc<String>,
|
||||
pub source_path: PathBuf,
|
||||
pub specifier: ModuleSpecifier,
|
||||
}
|
||||
|
@ -79,7 +79,7 @@ impl Default for CachedModule {
|
|||
maybe_version: None,
|
||||
media_type: MediaType::Unknown,
|
||||
requested_specifier: specifier.clone(),
|
||||
source: "".to_string(),
|
||||
source: Arc::new(String::default()),
|
||||
source_path: PathBuf::new(),
|
||||
specifier,
|
||||
}
|
||||
|
@ -467,11 +467,11 @@ impl SpecifierHandler for FetchHandler {
|
|||
}
|
||||
|
||||
pub struct MemoryHandler {
|
||||
sources: HashMap<String, String>,
|
||||
sources: HashMap<String, Arc<String>>,
|
||||
}
|
||||
|
||||
impl MemoryHandler {
|
||||
pub fn new(sources: HashMap<String, String>) -> Self {
|
||||
pub fn new(sources: HashMap<String, Arc<String>>) -> Self {
|
||||
Self { sources }
|
||||
}
|
||||
}
|
||||
|
@ -496,7 +496,7 @@ impl SpecifierHandler for MemoryHandler {
|
|||
let is_remote = specifier.scheme() != "file";
|
||||
|
||||
Ok(CachedModule {
|
||||
source: source.to_string(),
|
||||
source: source.clone(),
|
||||
requested_specifier: specifier.clone(),
|
||||
specifier,
|
||||
media_type,
|
||||
|
@ -626,7 +626,7 @@ pub mod tests {
|
|||
assert!(cached_module.maybe_dependencies.is_none());
|
||||
assert_eq!(cached_module.media_type, MediaType::TypeScript);
|
||||
assert_eq!(
|
||||
cached_module.source,
|
||||
cached_module.source.as_str(),
|
||||
"export { printHello } from \"./print_hello.ts\";\n"
|
||||
);
|
||||
assert_eq!(cached_module.specifier, specifier);
|
||||
|
@ -700,9 +700,9 @@ pub mod tests {
|
|||
"https://deno.land/x/c.js" => c_src,
|
||||
"https://deno.land/x/d.d.ts" => d_src
|
||||
);
|
||||
let sources: HashMap<String, String> = sources
|
||||
let sources: HashMap<String, Arc<String>> = sources
|
||||
.iter()
|
||||
.map(|(k, v)| (k.to_string(), v.to_string()))
|
||||
.map(|(k, v)| (k.to_string(), Arc::new(v.to_string())))
|
||||
.collect();
|
||||
let mut handler = MemoryHandler::new(sources);
|
||||
let specifier = resolve_url_or_path("file:///a.ts").unwrap();
|
||||
|
@ -710,7 +710,7 @@ pub mod tests {
|
|||
.fetch(specifier.clone(), None, false)
|
||||
.await
|
||||
.expect("could not fetch module");
|
||||
assert_eq!(actual.source, a_src.to_string());
|
||||
assert_eq!(actual.source.as_str(), a_src);
|
||||
assert_eq!(actual.requested_specifier, specifier);
|
||||
assert_eq!(actual.specifier, specifier);
|
||||
assert_eq!(actual.media_type, MediaType::TypeScript);
|
||||
|
@ -721,7 +721,7 @@ pub mod tests {
|
|||
.fetch(specifier.clone(), None, false)
|
||||
.await
|
||||
.expect("could not fetch module");
|
||||
assert_eq!(actual.source, b_src.to_string());
|
||||
assert_eq!(actual.source.as_str(), b_src);
|
||||
assert_eq!(actual.requested_specifier, specifier);
|
||||
assert_eq!(actual.specifier, specifier);
|
||||
assert_eq!(actual.media_type, MediaType::TypeScript);
|
||||
|
@ -732,7 +732,7 @@ pub mod tests {
|
|||
.fetch(specifier.clone(), None, false)
|
||||
.await
|
||||
.expect("could not fetch module");
|
||||
assert_eq!(actual.source, c_src.to_string());
|
||||
assert_eq!(actual.source.as_str(), c_src);
|
||||
assert_eq!(actual.requested_specifier, specifier);
|
||||
assert_eq!(actual.specifier, specifier);
|
||||
assert_eq!(actual.media_type, MediaType::JavaScript);
|
||||
|
@ -743,7 +743,7 @@ pub mod tests {
|
|||
.fetch(specifier.clone(), None, false)
|
||||
.await
|
||||
.expect("could not fetch module");
|
||||
assert_eq!(actual.source, d_src.to_string());
|
||||
assert_eq!(actual.source.as_str(), d_src);
|
||||
assert_eq!(actual.requested_specifier, specifier);
|
||||
assert_eq!(actual.specifier, specifier);
|
||||
assert_eq!(actual.media_type, MediaType::Dts);
|
||||
|
@ -761,7 +761,7 @@ pub mod tests {
|
|||
.fetch(specifier.clone(), None, false)
|
||||
.await
|
||||
.expect("could not fetch module");
|
||||
assert_eq!(actual.source, a_src.to_string());
|
||||
assert_eq!(actual.source.as_str(), a_src);
|
||||
assert_eq!(actual.requested_specifier, specifier);
|
||||
assert_eq!(actual.specifier, specifier);
|
||||
assert_eq!(actual.media_type, MediaType::TypeScript);
|
||||
|
@ -772,7 +772,7 @@ pub mod tests {
|
|||
.fetch(specifier.clone(), None, false)
|
||||
.await
|
||||
.expect("could not fetch module");
|
||||
assert_eq!(actual.source, a_src.to_string());
|
||||
assert_eq!(actual.source.as_str(), a_src);
|
||||
assert_eq!(actual.requested_specifier, specifier);
|
||||
assert_eq!(actual.specifier, specifier);
|
||||
assert_eq!(actual.media_type, MediaType::TypeScript);
|
||||
|
|
|
@@ -488,7 +488,7 @@ fn syntax_error() {
    Some(vec![("NO_COLOR".to_owned(), "1".to_owned())]),
    false,
  );
  assert!(out.ends_with("parse error: Expected ';', '}' or <eof> at 1:7\n2\n"));
  assert!(out.ends_with("parse error: Expected ';', '}' or <eof> at 1:8\n2\n"));
  assert!(err.is_empty());
}

cli/tests/testdata/error_syntax.js.out (vendored)
@@ -1 +1 @@
error: Expected ,, got following at [WILDCARD]/error_syntax.js:3:5
error: Expected ,, got following at [WILDCARD]/error_syntax.js:3:6

@@ -1 +1 @@
error: Unexpected eof at [WILDCARD]/error_syntax_empty_trailing_line.mjs:2:21
error: Unexpected eof at [WILDCARD]/error_syntax_empty_trailing_line.mjs:2:22

cli/tests/testdata/swc_syntax_error.ts.out (vendored)
@@ -1 +1 @@
error: Unexpected token `}`. Expected an identifier, void, yield, null, await, break, a string literal, a numeric literal, true, false, `, -, import, this, typeof, {, [, ( at [WILDCARD]syntax_error.ts:4:0
error: Unexpected token `}`. Expected an identifier, void, yield, null, await, break, a string literal, a numeric literal, true, false, `, -, import, this, typeof, {, [, ( at [WILDCARD]syntax_error.ts:4:1
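
Every expected column in the fixtures above moves up by one. The likely reason, inferred from this commit rather than stated in it, is that diagnostics now flow through deno_ast display positions, which are 1-based, while the old swc-based output printed 0-based columns. A trivial sketch of the off-by-one (hypothetical helper, for illustration only):

    fn to_display_column(zero_based: usize) -> usize {
      zero_based + 1
    }

    fn main() {
      assert_eq!(to_display_column(7), 8); // error_syntax.js: 1:7 -> 1:8
      assert_eq!(to_display_column(0), 1); // swc_syntax_error.ts: 4:0 -> 4:1
    }
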
@ -1,14 +1,13 @@
|
|||
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use crate::ast;
|
||||
use crate::ast::TokenOrComment;
|
||||
use crate::colors;
|
||||
use crate::flags::Flags;
|
||||
use crate::fs_util::collect_files;
|
||||
use crate::media_type::MediaType;
|
||||
use crate::module_graph::TypeLib;
|
||||
use crate::program_state::ProgramState;
|
||||
use crate::source_maps::SourceMapGetter;
|
||||
use deno_ast::swc::common::Span;
|
||||
use deno_ast::MediaType;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::url::Url;
|
||||
|
@ -23,7 +22,7 @@ use std::fs::File;
|
|||
use std::io::BufWriter;
|
||||
use std::io::Write;
|
||||
use std::path::PathBuf;
|
||||
use swc_common::Span;
|
||||
use std::sync::Arc;
|
||||
use uuid::Uuid;
|
||||
|
||||
// TODO(caspervonb) all of these structs can and should be made private, possibly moved to
|
||||
|
@ -190,7 +189,7 @@ pub trait CoverageReporter {
|
|||
script_coverage: &ScriptCoverage,
|
||||
script_source: &str,
|
||||
maybe_source_map: Option<Vec<u8>>,
|
||||
maybe_original_source: Option<String>,
|
||||
maybe_original_source: Option<Arc<String>>,
|
||||
);
|
||||
|
||||
fn done(&mut self);
|
||||
|
@ -210,7 +209,7 @@ impl CoverageReporter for LcovCoverageReporter {
|
|||
script_coverage: &ScriptCoverage,
|
||||
script_source: &str,
|
||||
maybe_source_map: Option<Vec<u8>>,
|
||||
_maybe_original_source: Option<String>,
|
||||
_maybe_original_source: Option<Arc<String>>,
|
||||
) {
|
||||
// TODO(caspervonb) cleanup and reduce duplication between reporters, pre-compute line coverage
|
||||
// elsewhere.
|
||||
|
@ -426,14 +425,14 @@ impl CoverageReporter for PrettyCoverageReporter {
|
|||
script_coverage: &ScriptCoverage,
|
||||
script_source: &str,
|
||||
maybe_source_map: Option<Vec<u8>>,
|
||||
maybe_original_source: Option<String>,
|
||||
maybe_original_source: Option<Arc<String>>,
|
||||
) {
|
||||
let maybe_source_map = maybe_source_map
|
||||
.map(|source_map| SourceMap::from_slice(&source_map).unwrap());
|
||||
|
||||
let mut ignored_spans: Vec<Span> = Vec::new();
|
||||
for item in ast::lex(script_source, &MediaType::JavaScript) {
|
||||
if let TokenOrComment::Token(_) = item.inner {
|
||||
for item in deno_ast::lex(script_source, MediaType::JavaScript) {
|
||||
if let deno_ast::TokenOrComment::Token(_) = item.inner {
|
||||
continue;
|
||||
}
|
||||
|
||||
|
|
|
@ -1,15 +1,14 @@
|
|||
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use crate::ast;
|
||||
use crate::colors;
|
||||
use crate::file_fetcher::File;
|
||||
use crate::flags::Flags;
|
||||
use crate::get_types;
|
||||
use crate::import_map::ImportMap;
|
||||
use crate::media_type::MediaType;
|
||||
use crate::program_state::ProgramState;
|
||||
use crate::write_json_to_stdout;
|
||||
use crate::write_to_stdout_ignore_sigpipe;
|
||||
use deno_ast::MediaType;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures::future;
|
||||
use deno_core::futures::future::FutureExt;
|
||||
|
@ -81,7 +80,7 @@ impl Loader for DocLoader {
|
|||
.map(|file| {
|
||||
Some(LoadResponse {
|
||||
specifier: specifier.clone(),
|
||||
content: Arc::new(file.source),
|
||||
content: file.source.clone(),
|
||||
maybe_headers: file.maybe_headers,
|
||||
})
|
||||
});
|
||||
|
@ -100,6 +99,7 @@ pub async fn print_docs(
|
|||
) -> Result<(), AnyError> {
|
||||
let program_state = ProgramState::build(flags.clone()).await?;
|
||||
let source_file = source_file.unwrap_or_else(|| "--builtin".to_string());
|
||||
let source_parser = deno_graph::DefaultSourceParser::new();
|
||||
|
||||
let parse_result = if source_file == "--builtin" {
|
||||
let mut loader = StubDocLoader;
|
||||
|
@ -113,12 +113,11 @@ pub async fn print_docs(
|
|||
None,
|
||||
)
|
||||
.await;
|
||||
let doc_parser = doc::DocParser::new(graph, private);
|
||||
let syntax = ast::get_syntax(&MediaType::Dts);
|
||||
let doc_parser = doc::DocParser::new(graph, private, &source_parser);
|
||||
doc_parser.parse_source(
|
||||
&source_file_specifier,
|
||||
syntax,
|
||||
get_types(flags.unstable).as_str(),
|
||||
MediaType::Dts,
|
||||
Arc::new(get_types(flags.unstable)),
|
||||
)
|
||||
} else {
|
||||
let module_specifier = resolve_url_or_path(&source_file)?;
|
||||
|
@ -130,7 +129,7 @@ pub async fn print_docs(
|
|||
local: PathBuf::from("./$deno$doc.ts"),
|
||||
maybe_types: None,
|
||||
media_type: MediaType::TypeScript,
|
||||
source: format!("export * from \"{}\";", module_specifier),
|
||||
source: Arc::new(format!("export * from \"{}\";", module_specifier)),
|
||||
specifier: root_specifier.clone(),
|
||||
maybe_headers: None,
|
||||
};
|
||||
|
@ -152,7 +151,7 @@ pub async fn print_docs(
|
|||
None,
|
||||
)
|
||||
.await;
|
||||
let doc_parser = doc::DocParser::new(graph, private);
|
||||
let doc_parser = doc::DocParser::new(graph, private, &source_parser);
|
||||
doc_parser.parse_with_reexports(&root_specifier)
|
||||
};
|
||||
|
||||
|
|
|
@ -13,6 +13,7 @@ use crate::file_watcher;
|
|||
use crate::file_watcher::ResolutionResult;
|
||||
use crate::fs_util::{collect_files, get_extension, is_supported_ext_fmt};
|
||||
use crate::text_encoding;
|
||||
use deno_ast::ParsedSource;
|
||||
use deno_core::error::generic_error;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures;
|
||||
|
@ -62,16 +63,13 @@ pub async fn format(
|
|||
}
|
||||
}
|
||||
};
|
||||
let operation = |paths: Vec<PathBuf>| {
|
||||
let config = get_typescript_config();
|
||||
async move {
|
||||
let operation = |paths: Vec<PathBuf>| async move {
|
||||
if check {
|
||||
check_source_files(config, paths).await?;
|
||||
check_source_files(paths).await?;
|
||||
} else {
|
||||
format_source_files(config, paths).await?;
|
||||
format_source_files(paths).await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
};
|
||||
|
||||
if watch {
|
||||
|
@ -91,14 +89,10 @@ pub async fn format(
|
|||
|
||||
/// Formats markdown (using <https://github.com/dprint/dprint-plugin-markdown>) and its code blocks
|
||||
/// (ts/tsx, js/jsx).
|
||||
fn format_markdown(
|
||||
file_text: &str,
|
||||
ts_config: dprint_plugin_typescript::configuration::Configuration,
|
||||
) -> Result<String, String> {
|
||||
let md_config = get_markdown_config();
|
||||
fn format_markdown(file_text: &str) -> Result<String, String> {
|
||||
dprint_plugin_markdown::format_text(
|
||||
file_text,
|
||||
&md_config,
|
||||
&MARKDOWN_CONFIG,
|
||||
move |tag, text, line_width| {
|
||||
let tag = tag.to_lowercase();
|
||||
if matches!(
|
||||
|
@ -121,13 +115,13 @@ fn format_markdown(
|
|||
};
|
||||
|
||||
if matches!(extension, "json" | "jsonc") {
|
||||
let mut json_config = get_json_config();
|
||||
let mut json_config = JSON_CONFIG.clone();
|
||||
json_config.line_width = line_width;
|
||||
dprint_plugin_json::format_text(text, &json_config)
|
||||
} else {
|
||||
let fake_filename =
|
||||
PathBuf::from(format!("deno_fmt_stdin.{}", extension));
|
||||
let mut codeblock_config = ts_config.clone();
|
||||
let mut codeblock_config = TYPESCRIPT_CONFIG.clone();
|
||||
codeblock_config.line_width = line_width;
|
||||
dprint_plugin_typescript::format_text(
|
||||
&fake_filename,
|
||||
|
@ -147,8 +141,7 @@ fn format_markdown(
|
|||
/// of configuration builder of <https://github.com/dprint/dprint-plugin-json>.
|
||||
/// See <https://git.io/Jt4ht> for configuration.
|
||||
fn format_json(file_text: &str) -> Result<String, String> {
|
||||
let json_config = get_json_config();
|
||||
dprint_plugin_json::format_text(file_text, &json_config)
|
||||
dprint_plugin_json::format_text(file_text, &JSON_CONFIG)
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
|
@ -156,23 +149,40 @@ fn format_json(file_text: &str) -> Result<String, String> {
|
|||
pub fn format_file(
|
||||
file_path: &Path,
|
||||
file_text: &str,
|
||||
config: dprint_plugin_typescript::configuration::Configuration,
|
||||
) -> Result<String, String> {
|
||||
let ext = get_extension(file_path).unwrap_or_else(String::new);
|
||||
if ext == "md" {
|
||||
format_markdown(file_text, config)
|
||||
format_markdown(file_text)
|
||||
} else if matches!(ext.as_str(), "json" | "jsonc") {
|
||||
format_json(file_text)
|
||||
} else {
|
||||
dprint_plugin_typescript::format_text(file_path, file_text, &config)
|
||||
dprint_plugin_typescript::format_text(
|
||||
file_path,
|
||||
file_text,
|
||||
&TYPESCRIPT_CONFIG,
|
||||
)
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
async fn check_source_files(
|
||||
config: dprint_plugin_typescript::configuration::Configuration,
|
||||
paths: Vec<PathBuf>,
|
||||
) -> Result<(), AnyError> {
|
||||
pub fn format_parsed_module(parsed_source: &ParsedSource) -> String {
|
||||
dprint_plugin_typescript::format_parsed_file(
|
||||
&dprint_plugin_typescript::SourceFileInfo {
|
||||
is_jsx: matches!(
|
||||
parsed_source.media_type(),
|
||||
deno_ast::MediaType::Jsx | deno_ast::MediaType::Tsx
|
||||
),
|
||||
info: parsed_source.source(),
|
||||
leading_comments: parsed_source.comments().leading_map(),
|
||||
trailing_comments: parsed_source.comments().trailing_map(),
|
||||
module: parsed_source.module(),
|
||||
tokens: parsed_source.tokens(),
|
||||
},
|
||||
&TYPESCRIPT_CONFIG,
|
||||
)
|
||||
}
|
||||
|
||||
async fn check_source_files(paths: Vec<PathBuf>) -> Result<(), AnyError> {
|
||||
let not_formatted_files_count = Arc::new(AtomicUsize::new(0));
|
||||
let checked_files_count = Arc::new(AtomicUsize::new(0));
|
||||
|
||||
|
@ -186,7 +196,7 @@ async fn check_source_files(
|
|||
checked_files_count.fetch_add(1, Ordering::Relaxed);
|
||||
let file_text = read_file_contents(&file_path)?.text;
|
||||
|
||||
match format_file(&file_path, &file_text, config) {
|
||||
match format_file(&file_path, &file_text) {
|
||||
Ok(formatted_text) => {
|
||||
if formatted_text != file_text {
|
||||
not_formatted_files_count.fetch_add(1, Ordering::Relaxed);
|
||||
|
@ -225,10 +235,7 @@ async fn check_source_files(
|
|||
}
|
||||
}
|
||||
|
||||
async fn format_source_files(
|
||||
config: dprint_plugin_typescript::configuration::Configuration,
|
||||
paths: Vec<PathBuf>,
|
||||
) -> Result<(), AnyError> {
|
||||
async fn format_source_files(paths: Vec<PathBuf>) -> Result<(), AnyError> {
|
||||
let formatted_files_count = Arc::new(AtomicUsize::new(0));
|
||||
let checked_files_count = Arc::new(AtomicUsize::new(0));
|
||||
let output_lock = Arc::new(Mutex::new(0)); // prevent threads outputting at the same time
|
||||
|
@ -240,7 +247,7 @@ async fn format_source_files(
|
|||
checked_files_count.fetch_add(1, Ordering::Relaxed);
|
||||
let file_contents = read_file_contents(&file_path)?;
|
||||
|
||||
match format_file(&file_path, &file_contents.text, config) {
|
||||
match format_file(&file_path, &file_contents.text) {
|
||||
Ok(formatted_text) => {
|
||||
if formatted_text != file_contents.text {
|
||||
write_file_contents(
|
||||
|
@ -291,10 +298,9 @@ pub fn format_stdin(check: bool, ext: String) -> Result<(), AnyError> {
|
|||
if stdin().read_to_string(&mut source).is_err() {
|
||||
return Err(generic_error("Failed to read from stdin"));
|
||||
}
|
||||
let config = get_typescript_config();
|
||||
let file_path = PathBuf::from(format!("_stdin.{}", ext));
|
||||
|
||||
match format_file(&file_path, &source, config) {
|
||||
match format_file(&file_path, &source) {
|
||||
Ok(formatted_text) => {
|
||||
if check {
|
||||
if formatted_text != source {
|
||||
|
@ -319,24 +325,18 @@ fn files_str(len: usize) -> &'static str {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn get_typescript_config(
|
||||
) -> dprint_plugin_typescript::configuration::Configuration {
|
||||
dprint_plugin_typescript::configuration::ConfigurationBuilder::new()
|
||||
lazy_static::lazy_static! {
|
||||
static ref TYPESCRIPT_CONFIG: dprint_plugin_typescript::configuration::Configuration = dprint_plugin_typescript::configuration::ConfigurationBuilder::new()
|
||||
.deno()
|
||||
.build()
|
||||
}
|
||||
.build();
|
||||
|
||||
fn get_markdown_config() -> dprint_plugin_markdown::configuration::Configuration
|
||||
{
|
||||
dprint_plugin_markdown::configuration::ConfigurationBuilder::new()
|
||||
static ref MARKDOWN_CONFIG: dprint_plugin_markdown::configuration::Configuration = dprint_plugin_markdown::configuration::ConfigurationBuilder::new()
|
||||
.deno()
|
||||
.build()
|
||||
}
|
||||
.build();
|
||||
|
||||
fn get_json_config() -> dprint_plugin_json::configuration::Configuration {
|
||||
dprint_plugin_json::configuration::ConfigurationBuilder::new()
|
||||
static ref JSON_CONFIG: dprint_plugin_json::configuration::Configuration = dprint_plugin_json::configuration::ConfigurationBuilder::new()
|
||||
.deno()
|
||||
.build()
|
||||
.build();
|
||||
}
|
||||
|
||||
struct FileContents {
|
||||
|
|
|
@ -1,18 +1,18 @@
|
|||
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
//! This module provides file formatting utilities using
|
||||
//! This module provides file linting utilities using
|
||||
//! [`deno_lint`](https://github.com/denoland/deno_lint).
|
||||
//!
|
||||
//! At the moment it is only consumed using CLI but in
|
||||
//! the future it can be easily extended to provide
|
||||
//! the same functions as ops available in JS runtime.
|
||||
use crate::ast;
|
||||
use crate::colors;
|
||||
use crate::config_file::LintConfig;
|
||||
use crate::fmt_errors;
|
||||
use crate::fs_util::{collect_files, is_supported_ext};
|
||||
use crate::media_type::MediaType;
|
||||
use crate::tools::fmt::run_parallelized;
|
||||
use deno_ast::swc::parser::Syntax;
|
||||
use deno_ast::MediaType;
|
||||
use deno_core::error::{anyhow, generic_error, AnyError, JsStackFrame};
|
||||
use deno_core::serde_json;
|
||||
use deno_lint::diagnostic::LintDiagnostic;
|
||||
|
@ -28,7 +28,6 @@ use std::io::{stdin, Read};
|
|||
use std::path::PathBuf;
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use std::sync::{Arc, Mutex};
|
||||
use swc_ecmascript::parser::Syntax;
|
||||
|
||||
pub enum LintReporterKind {
|
||||
Pretty,
|
||||
|
@ -210,7 +209,7 @@ fn lint_file(
|
|||
let file_name = file_path.to_string_lossy().to_string();
|
||||
let source_code = fs::read_to_string(&file_path)?;
|
||||
let media_type = MediaType::from(&file_path);
|
||||
let syntax = ast::get_syntax(&media_type);
|
||||
let syntax = deno_ast::get_syntax(media_type);
|
||||
|
||||
// Obtaining rules from config is infallible at this point.
|
||||
let lint_rules = get_configured_rules(
|
||||
|
@ -254,7 +253,7 @@ fn lint_stdin(
|
|||
rules_include,
|
||||
rules_exclude,
|
||||
)?;
|
||||
let syntax = ast::get_syntax(&MediaType::TypeScript);
|
||||
let syntax = deno_ast::get_syntax(MediaType::TypeScript);
|
||||
let linter = create_linter(syntax, lint_rules);
|
||||
let mut has_error = false;
|
||||
let pseudo_file_name = "_stdin.ts";
|
||||
|
|
|
@ -1,12 +1,11 @@
|
|||
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use crate::ast;
|
||||
use crate::ast::Diagnostic;
|
||||
use crate::ast::transpile;
|
||||
use crate::ast::ImportsNotUsedAsValues;
|
||||
use crate::ast::TokenOrComment;
|
||||
use crate::colors;
|
||||
use crate::media_type::MediaType;
|
||||
use crate::program_state::ProgramState;
|
||||
use deno_ast::swc::parser::error::SyntaxError;
|
||||
use deno_ast::swc::parser::token::{Token, Word};
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures::FutureExt;
|
||||
use deno_core::parking_lot::Mutex;
|
||||
|
@ -29,8 +28,6 @@ use std::borrow::Cow;
|
|||
use std::cell::RefCell;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use swc_ecmascript::parser::error::SyntaxError;
|
||||
use swc_ecmascript::parser::token::{Token, Word};
|
||||
use tokio::sync::mpsc::channel;
|
||||
use tokio::sync::mpsc::unbounded_channel;
|
||||
use tokio::sync::mpsc::Receiver;
|
||||
|
@ -231,8 +228,8 @@ impl Validator for EditorHelper {
|
|||
let mut stack: Vec<Token> = Vec::new();
|
||||
let mut in_template = false;
|
||||
|
||||
for item in ast::lex(ctx.input(), &MediaType::TypeScript) {
|
||||
if let TokenOrComment::Token(token) = item.inner {
|
||||
for item in deno_ast::lex(ctx.input(), deno_ast::MediaType::TypeScript) {
|
||||
if let deno_ast::TokenOrComment::Token(token) = item.inner {
|
||||
match token {
|
||||
Token::BackQuote => in_template = !in_template,
|
||||
Token::LParen
|
||||
|
@@ -306,16 +303,19 @@ impl Highlighter for EditorHelper {
  fn highlight<'l>(&self, line: &'l str, _: usize) -> Cow<'l, str> {
    let mut out_line = String::from(line);

    for item in ast::lex(line, &MediaType::TypeScript) {
    for item in deno_ast::lex(line, deno_ast::MediaType::TypeScript) {
      // Adding color adds more bytes to the string,
      // so an offset is needed to stop spans falling out of sync.
      let offset = out_line.len() - line.len();
      let span = item.span_as_range();
      let span = std::ops::Range {
        start: item.span.lo.0 as usize,
        end: item.span.hi.0 as usize,
      };

      out_line.replace_range(
        span.start + offset..span.end + offset,
        &match item.inner {
          TokenOrComment::Token(token) => match token {
          deno_ast::TokenOrComment::Token(token) => match token {
            Token::Str { .. } | Token::Template { .. } | Token::BackQuote => {
              colors::green(&line[span]).to_string()
            }
@@ -342,7 +342,7 @@ impl Highlighter for EditorHelper {
            },
            _ => line[span].to_string(),
          },
          TokenOrComment::Comment { .. } => {
          deno_ast::TokenOrComment::Comment { .. } => {
            colors::gray(&line[span]).to_string()
          }
        },
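In the highlighter, the replacement range is a plain byte range derived from the lexed item's swc span, i.e. its `lo`/`hi` `BytePos` values, offset by however many color-escape bytes were already inserted into `out_line`. Judging by the hunk's line counts, that conversion is now written out inline; in isolation it is:

    // Sketch of the span-to-byte-range conversion used above; BytePos wraps a u32.
    fn span_to_range(span: deno_ast::swc::common::Span) -> std::ops::Range<usize> {
      span.lo.0 as usize..span.hi.0 as usize
    }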
@@ -536,13 +536,13 @@ impl ReplSession {
        }
        Err(err) => {
          // handle a parsing diagnostic
          match err.downcast_ref::<Diagnostic>() {
          match err.downcast_ref::<deno_ast::Diagnostic>() {
            Some(diagnostic) => Ok(EvaluationOutput::Error(format!(
              "{}: {} at {}:{}",
              colors::red("parse error"),
              diagnostic.message,
              diagnostic.location.line,
              diagnostic.location.col
              diagnostic.display_position.line_number,
              diagnostic.display_position.column_number,
            ))),
            None => Err(err),
          }
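Parse failures are now downcast to `deno_ast::Diagnostic`, whose position lives under `display_position` instead of the old `location`. Using only the fields visible above:

    // Sketch: format a deno_ast parse diagnostic the way the REPL does.
    fn format_parse_error(diagnostic: &deno_ast::Diagnostic) -> String {
      format!(
        "parse error: {} at {}:{}",
        diagnostic.message,
        diagnostic.display_position.line_number,
        diagnostic.display_position.column_number
      )
    }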
@@ -649,11 +649,17 @@ impl ReplSession {
    &mut self,
    expression: &str,
  ) -> Result<Value, AnyError> {
    let parsed_module =
      crate::ast::parse("repl.ts", expression, &crate::MediaType::TypeScript)?;
    let parsed_module = deno_ast::parse_module(deno_ast::ParseParams {
      specifier: "repl.ts".to_string(),
      source: deno_ast::SourceTextInfo::from_string(expression.to_string()),
      media_type: deno_ast::MediaType::TypeScript,
      capture_tokens: false,
      maybe_syntax: None,
    })?;

    let transpiled_src = parsed_module
      .transpile(&crate::ast::EmitOptions {
    let transpiled_src = transpile(
      &parsed_module,
      &crate::ast::EmitOptions {
        emit_metadata: false,
        source_map: false,
        inline_source_map: false,
@@ -663,7 +669,8 @@ impl ReplSession {
        jsx_factory: "React.createElement".into(),
        jsx_fragment_factory: "React.Fragment".into(),
        repl_imports: true,
      })?
      },
    )?
    .0;

    self
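Together, the two hunks above replace the old `crate::ast::parse(...)` / `parsed_module.transpile(...)` pair with `deno_ast::parse_module` plus the free `transpile` function now imported from `crate::ast`. A minimal end-to-end sketch, assuming only what the call site shows (in particular that `transpile` returns a tuple whose first element is the emitted source):

    use deno_core::error::AnyError;

    // Hedged sketch, not part of the diff.
    fn transpile_repl_expression(
      expression: &str,
      emit_options: &crate::ast::EmitOptions,
    ) -> Result<String, AnyError> {
      let parsed_module = deno_ast::parse_module(deno_ast::ParseParams {
        specifier: "repl.ts".to_string(),
        source: deno_ast::SourceTextInfo::from_string(expression.to_string()),
        media_type: deno_ast::MediaType::TypeScript,
        capture_tokens: false,
        maybe_syntax: None,
      })?;
      // `.0` mirrors the diff: the first tuple element is the transpiled source.
      Ok(crate::ast::transpile(&parsed_module, emit_options)?.0)
    }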
@@ -1,6 +1,5 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.

use crate::ast;
use crate::ast::Location;
use crate::colors;
use crate::create_main_worker;
@@ -12,7 +11,6 @@ use crate::fs_util::collect_specifiers;
use crate::fs_util::is_supported_test_ext;
use crate::fs_util::is_supported_test_path;
use crate::located_script_name;
use crate::media_type::MediaType;
use crate::module_graph;
use crate::module_graph::GraphBuilder;
use crate::module_graph::Module;
@@ -22,6 +20,8 @@ use crate::program_state::ProgramState;
use crate::tokio_util;
use crate::tools::coverage::CoverageCollector;
use crate::FetchHandler;
use deno_ast::swc::common::comments::CommentKind;
use deno_ast::MediaType;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::futures::future;
@@ -47,7 +47,6 @@ use std::sync::mpsc::Sender;
use std::sync::Arc;
use std::time::Duration;
use std::time::Instant;
use swc_common::comments::CommentKind;
use uuid::Uuid;

/// The test mode is used to determine how a specifier is to be tested.
@@ -269,7 +268,7 @@ async fn test_specifier(
    local: test_specifier.to_file_path().unwrap(),
    maybe_types: None,
    media_type: MediaType::JavaScript,
    source: test_source.clone(),
    source: Arc::new(test_source),
    specifier: test_specifier.clone(),
    maybe_headers: None,
  };
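The `File`'s `source` field now holds an `Arc<String>` (`Arc::new(test_source)` here, `file.source.clone()` further down), so sharing the same source text with several consumers clones a pointer rather than the whole string:

    use std::sync::Arc;

    fn main() {
      let source: Arc<String> = Arc::new("Deno.test(\"example\", () => {});".to_string());
      let shared = source.clone(); // reference-count bump, not a text copy
      assert!(Arc::ptr_eq(&source, &shared));
    }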
@@ -344,7 +343,7 @@ async fn test_specifier(
fn extract_files_from_regex_blocks(
  location: &Location,
  source: &str,
  media_type: &MediaType,
  media_type: MediaType,
  blocks_regex: &Regex,
  lines_regex: &Regex,
) -> Result<Vec<File>, AnyError> {
@@ -365,11 +364,11 @@ fn extract_files_from_regex_blocks(
      Some(&"jsx") => MediaType::Jsx,
      Some(&"ts") => MediaType::TypeScript,
      Some(&"tsx") => MediaType::Tsx,
      Some(&"") => *media_type,
      Some(&"") => media_type,
      _ => MediaType::Unknown,
    }
  } else {
    *media_type
    media_type
  };

  if file_media_type == MediaType::Unknown {
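The doc-test helpers also stop taking `&MediaType`; `deno_ast::MediaType` is evidently a small `Copy` enum, so it is passed by value and the `*media_type` dereferences above disappear. A simplified restatement of the fallback in that match (hypothetical helper, not in the diff):

    use deno_ast::MediaType;

    // An empty fence label inherits the file's media type; unknown labels map to Unknown.
    fn fence_media_type(label: Option<&str>, media_type: MediaType) -> MediaType {
      match label {
        Some("js") => MediaType::JavaScript,
        Some("ts") => MediaType::TypeScript,
        Some("jsx") => MediaType::Jsx,
        Some("tsx") => MediaType::Tsx,
        Some("") | None => media_type,
        _ => MediaType::Unknown,
      }
    }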
@@ -408,7 +407,7 @@ fn extract_files_from_regex_blocks(
      local: file_specifier.to_file_path().unwrap(),
      maybe_types: None,
      media_type: file_media_type,
      source: file_source,
      source: Arc::new(file_source),
      specifier: file_specifier,
      maybe_headers: None,
    })
@@ -420,11 +419,20 @@ fn extract_files_from_regex_blocks(

fn extract_files_from_source_comments(
  specifier: &ModuleSpecifier,
  source: &str,
  media_type: &MediaType,
  source: Arc<String>,
  media_type: MediaType,
) -> Result<Vec<File>, AnyError> {
  let parsed_module = ast::parse(specifier.as_str(), source, media_type)?;
  let comments = parsed_module.get_comments();
  let parsed_source = deno_ast::parse_module(deno_ast::ParseParams {
    specifier: specifier.as_str().to_string(),
    source: deno_ast::SourceTextInfo::new(
      deno_ast::swc::common::BytePos(0),
      source,
    ),
    media_type,
    capture_tokens: false,
    maybe_syntax: None,
  })?;
  let comments = parsed_source.comments().get_vec();
  let blocks_regex = Regex::new(r"```([^\n]*)\n([\S\s]*?)```")?;
  let lines_regex = Regex::new(r"(?:\* ?)(?:\# ?)?(.*)")?;
@@ -438,7 +446,7 @@ fn extract_files_from_source_comments(
      true
    })
    .flat_map(|comment| {
      let location = parsed_module.get_location(comment.span.lo);
      let location = Location::from_pos(&parsed_source, comment.span.lo);

      extract_files_from_regex_blocks(
        &location,
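Doc-test extraction now parses files through `deno_ast::parse_module` as well: the `SourceTextInfo` is built from a starting `BytePos(0)` plus the shared `Arc<String>` source, comments are read with `comments().get_vec()`, and positions are recovered via `Location::from_pos`. A sketch of that flow on its own, with anything not visible in the hunks treated as an assumption:

    use std::sync::Arc;

    use deno_core::error::AnyError;
    use deno_core::ModuleSpecifier;

    // Hedged sketch: collect the text of every comment in an in-memory source file.
    fn comment_texts(
      specifier: &ModuleSpecifier,
      source: Arc<String>,
      media_type: deno_ast::MediaType,
    ) -> Result<Vec<String>, AnyError> {
      let parsed_source = deno_ast::parse_module(deno_ast::ParseParams {
        specifier: specifier.as_str().to_string(),
        source: deno_ast::SourceTextInfo::new(
          deno_ast::swc::common::BytePos(0),
          source,
        ),
        media_type,
        capture_tokens: false,
        maybe_syntax: None,
      })?;
      Ok(
        parsed_source
          .comments()
          .get_vec()
          .into_iter()
          .map(|comment| comment.text.to_string())
          .collect(),
      )
    }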
@@ -457,7 +465,7 @@ fn extract_files_from_source_comments(
fn extract_files_from_fenced_blocks(
  specifier: &ModuleSpecifier,
  source: &str,
  media_type: &MediaType,
  media_type: MediaType,
) -> Result<Vec<File>, AnyError> {
  let location = Location {
    specifier: specifier.to_string(),
@@ -493,13 +501,13 @@ async fn fetch_inline_files(
      extract_files_from_fenced_blocks(
        &file.specifier,
        &file.source,
        &file.media_type,
        file.media_type,
      )
    } else {
      extract_files_from_source_comments(
        &file.specifier,
        &file.source,
        &file.media_type,
        file.source.clone(),
        file.media_type,
      )
    };
@@ -2,10 +2,10 @@

use crate::config_file::TsConfig;
use crate::diagnostics::Diagnostics;
use crate::media_type::MediaType;
use crate::module_graph::Graph;
use crate::module_graph::Stats;

use deno_ast::MediaType;
use deno_core::error::anyhow;
use deno_core::error::bail;
use deno_core::error::AnyError;
@@ -380,7 +380,7 @@ fn op_load(state: &mut State, args: Value) -> Result<Value, AnyError> {
  } else {
    specifier
  };
  let maybe_source = graph.get_source(&specifier);
  let maybe_source = graph.get_source(&specifier).map(|t| t.to_string());
  media_type = if let Some(media_type) = graph.get_media_type(&specifier) {
    media_type
  } else {
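`graph.get_source` apparently no longer returns something that can be handed back as-is (the graph now stores shared source text), so op_load copies it into an owned `String` with `.map(|t| t.to_string())`. In isolation, assuming the shared type is `Arc<String>`:

    use std::sync::Arc;

    // Sketch: turn an optional shared source into the owned String a response needs.
    fn owned_source(maybe_source: Option<Arc<String>>) -> Option<String> {
      maybe_source.map(|t| t.to_string())
    }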
@@ -131,7 +131,7 @@ fn signal_str_to_int(s: &str) -> Option<libc::c_int> {
}

#[cfg(target_os = "windows")]
fn signal_str_to_int(s: &str) -> Option<libc::c_int> {
fn signal_str_to_int(_s: &str) -> Option<libc::c_int> {
  unimplemented!()
}
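The Windows stub only renames its parameter: the leading underscore marks the argument as intentionally unused, so the `unimplemented!()` body no longer produces an unused-variable warning. The same convention in isolation:

    // `_name` keeps the parameter in the signature but opts out of the unused lint.
    fn not_supported(_signal: &str) -> Option<i32> {
      None
    }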