From 466d3df9d1ff43e16e1a2c20b7792de664547b48 Mon Sep 17 00:00:00 2001
From: David Sherret
Date: Fri, 6 Aug 2021 10:36:16 -0400
Subject: [PATCH] refactor: make `ParsedModule` implement `Sync` (#11581)

---
 Cargo.lock                  |   4 +-
 cli/Cargo.toml              |   2 +-
 cli/ast/bundle_hook.rs      |  53 +++++++++
 cli/ast/comments.rs         | 106 ++++++++++++++++++
 cli/ast/mod.rs              | 212 +++++++++++++-----------------------
 cli/ast/source_file_info.rs | 130 ++++++++++++++++++++++
 cli/lsp/analysis.rs         |  89 +++++++--------
 cli/lsp/code_lens.rs        |  38 +++----
 cli/module_graph.rs         |  11 +-
 cli/specifier_handler.rs    |   4 +-
 cli/tools/coverage.rs       |   2 +-
 cli/tools/repl.rs           |   4 +-
 cli/tools/test_runner.rs    |  11 +-
 13 files changed, 436 insertions(+), 230 deletions(-)
 create mode 100644 cli/ast/bundle_hook.rs
 create mode 100644 cli/ast/comments.rs
 create mode 100644 cli/ast/source_file_info.rs

diff --git a/Cargo.lock b/Cargo.lock
index fb3e850b60..4820b85414 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3426,9 +3426,9 @@ dependencies = [

 [[package]]
 name = "swc_common"
-version = "0.11.0"
+version = "0.11.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "25c6cd455ca59637ff47297c375a7fea1c7d61cc92448421485021ec6c6bf03d"
+checksum = "f303985ebd578a033371a87be28cb54189d3ae5d492622523200f4de08db5275"
 dependencies = [
  "ast_node",
  "cfg-if 0.1.10",
diff --git a/cli/Cargo.toml b/cli/Cargo.toml
index c74b2980e1..5d71f60375 100644
--- a/cli/Cargo.toml
+++ b/cli/Cargo.toml
@@ -82,7 +82,7 @@ serde = { version = "1.0.126", features = ["derive"] }
 shell-escape = "0.1.5"
 sourcemap = "6.0.1"
 swc_bundler = "0.46.0"
-swc_common = { version = "0.11.0", features = ["sourcemap"] }
+swc_common = { version = "0.11.4", features = ["sourcemap"] }
 swc_ecmascript = { version = "0.46.0", features = ["codegen", "dep_graph", "parser", "proposal", "react", "transforms", "typescript", "visit"] }
 tempfile = "3.2.0"
 termcolor = "1.1.2"
diff --git a/cli/ast/bundle_hook.rs b/cli/ast/bundle_hook.rs
new file mode 100644
index 0000000000..ab7eb545ff
--- /dev/null
+++ b/cli/ast/bundle_hook.rs
@@ -0,0 +1,53 @@
+// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
+use deno_core::error::AnyError;
+
+pub struct BundleHook;
+
+impl swc_bundler::Hook for BundleHook {
+  fn get_import_meta_props(
+    &self,
+    span: swc_common::Span,
+    module_record: &swc_bundler::ModuleRecord,
+  ) -> Result<Vec<swc_ecmascript::ast::KeyValueProp>, AnyError> {
+    use swc_ecmascript::ast;
+
+    // we use custom file names, and swc "wraps" these in `<` and `>` so, we
+    // want to strip those back out.
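To illustrate the stripping step above in isolation: swc renders these custom file names as, for example, `<file:///mod.ts>`, and the two calls that follow drop the trailing `>` and the leading `<`. A minimal standalone sketch of the same operation (the `strip_swc_brackets` helper is hypothetical, not part of this patch):

fn strip_swc_brackets(wrapped_file_name: &str) -> String {
  let mut value = wrapped_file_name.to_string();
  value.pop(); // drop the trailing '>'
  value.remove(0); // drop the leading '<'
  value
}

#[test]
fn strips_the_wrapping_brackets() {
  assert_eq!(strip_swc_brackets("<file:///mod.ts>"), "file:///mod.ts");
}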
+ let mut value = module_record.file_name.to_string(); + value.pop(); + value.remove(0); + + Ok(vec![ + ast::KeyValueProp { + key: ast::PropName::Ident(ast::Ident::new("url".into(), span)), + value: Box::new(ast::Expr::Lit(ast::Lit::Str(ast::Str { + span, + value: value.into(), + kind: ast::StrKind::Synthesized, + has_escape: false, + }))), + }, + ast::KeyValueProp { + key: ast::PropName::Ident(ast::Ident::new("main".into(), span)), + value: Box::new(if module_record.is_entry { + ast::Expr::Member(ast::MemberExpr { + span, + obj: ast::ExprOrSuper::Expr(Box::new(ast::Expr::MetaProp( + ast::MetaPropExpr { + meta: ast::Ident::new("import".into(), span), + prop: ast::Ident::new("meta".into(), span), + }, + ))), + prop: Box::new(ast::Expr::Ident(ast::Ident::new( + "main".into(), + span, + ))), + computed: false, + }) + } else { + ast::Expr::Lit(ast::Lit::Bool(ast::Bool { span, value: false })) + }), + }, + ]) + } +} diff --git a/cli/ast/comments.rs b/cli/ast/comments.rs new file mode 100644 index 0000000000..dfe7f99a91 --- /dev/null +++ b/cli/ast/comments.rs @@ -0,0 +1,106 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. + +use std::cell::RefCell; +use std::rc::Rc; +use std::sync::Arc; +use swc_common::comments::Comment; +use swc_common::comments::Comments; +use swc_common::comments::SingleThreadedComments; +use swc_common::comments::SingleThreadedCommentsMapInner; +use swc_common::BytePos; + +/// An implementation of swc's `Comments` that implements `Sync` +/// to support being used in multi-threaded code. This implementation +/// is immutable and should you need mutability you may create a copy +/// by converting it to an swc `SingleThreadedComments`. +#[derive(Clone, Debug)] +pub struct MultiThreadedComments { + leading: Arc, + trailing: Arc, +} + +impl MultiThreadedComments { + pub fn from_single_threaded(comments: SingleThreadedComments) -> Self { + let (leading, trailing) = comments.take_all(); + let leading = Arc::new(Rc::try_unwrap(leading).unwrap().into_inner()); + let trailing = Arc::new(Rc::try_unwrap(trailing).unwrap().into_inner()); + MultiThreadedComments { leading, trailing } + } + + pub fn as_single_threaded(&self) -> SingleThreadedComments { + let leading = Rc::new(RefCell::new((*self.leading).to_owned())); + let trailing = Rc::new(RefCell::new((*self.trailing).to_owned())); + SingleThreadedComments::from_leading_and_trailing(leading, trailing) + } + + /// Gets a vector of all the comments sorted by position. 
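The ordering contract documented above can be pictured with plain spans: comments from the leading and trailing maps are chained together and sorted by the low byte position of their spans. A small standalone sketch of that ordering (not part of the patch), assuming swc_common 0.11's public `Span` API:

use swc_common::{BytePos, Span, SyntaxContext};

fn sort_spans_by_position(mut spans: Vec<Span>) -> Vec<Span> {
  // mirrors the `sort_by_key(|comment| comment.span.lo)` used by `get_vec` below
  spans.sort_by_key(|span| span.lo);
  spans
}

#[test]
fn orders_spans_by_low_byte_position() {
  let span = |lo: u32| Span::new(BytePos(lo), BytePos(lo + 1), SyntaxContext::empty());
  let sorted = sort_spans_by_position(vec![span(7), span(0), span(3)]);
  let lows: Vec<u32> = sorted.iter().map(|s| s.lo.0).collect();
  assert_eq!(lows, vec![0, 3, 7]);
}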
+ pub fn get_vec(&self) -> Vec { + let mut comments = self + .leading + .values() + .chain(self.trailing.values()) + .flatten() + .cloned() + .collect::>(); + comments.sort_by_key(|comment| comment.span.lo); + comments + } +} + +impl Comments for MultiThreadedComments { + fn has_leading(&self, pos: BytePos) -> bool { + self.leading.contains_key(&pos) + } + + fn get_leading(&self, pos: BytePos) -> Option> { + self.leading.get(&pos).cloned() + } + + fn has_trailing(&self, pos: BytePos) -> bool { + self.trailing.contains_key(&pos) + } + + fn get_trailing(&self, pos: BytePos) -> Option> { + self.trailing.get(&pos).cloned() + } + + fn add_leading(&self, _pos: BytePos, _cmt: Comment) { + panic_readonly(); + } + + fn add_leading_comments(&self, _pos: BytePos, _comments: Vec) { + panic_readonly(); + } + + fn move_leading(&self, _from: BytePos, _to: BytePos) { + panic_readonly(); + } + + fn take_leading(&self, _pos: BytePos) -> Option> { + panic_readonly(); + } + + fn add_trailing(&self, _pos: BytePos, _cmt: Comment) { + panic_readonly(); + } + + fn add_trailing_comments(&self, _pos: BytePos, _comments: Vec) { + panic_readonly(); + } + + fn move_trailing(&self, _from: BytePos, _to: BytePos) { + panic_readonly(); + } + + fn take_trailing(&self, _pos: BytePos) -> Option> { + panic_readonly(); + } + + fn add_pure_comment(&self, _pos: BytePos) { + panic_readonly(); + } +} + +fn panic_readonly() -> ! { + panic!("MultiThreadedComments do not support write operations") +} diff --git a/cli/ast/mod.rs b/cli/ast/mod.rs index 428fe126d0..15414ba8ec 100644 --- a/cli/ast/mod.rs +++ b/cli/ast/mod.rs @@ -11,10 +11,13 @@ use std::error::Error; use std::fmt; use std::ops::Range; use std::rc::Rc; +use std::sync::Arc; use swc_common::chain; use swc_common::comments::Comment; use swc_common::comments::CommentKind; +use swc_common::comments::Comments; use swc_common::comments::SingleThreadedComments; +use swc_common::BytePos; use swc_common::FileName; use swc_common::Globals; use swc_common::SourceFile; @@ -43,13 +46,20 @@ use swc_ecmascript::transforms::react; use swc_ecmascript::transforms::typescript; use swc_ecmascript::visit::FoldWith; +mod bundle_hook; +mod comments; +mod source_file_info; mod transforms; +pub use bundle_hook::BundleHook; +use comments::MultiThreadedComments; +use source_file_info::SourceFileInfo; + static TARGET: JscTarget = JscTarget::Es2020; #[derive(Debug, Clone, Eq, PartialEq)] pub struct Location { - pub filename: String, + pub specifier: String, pub line: usize, pub col: usize, } @@ -65,7 +75,7 @@ impl From for Location { }; Location { - filename, + specifier: filename, line: swc_loc.line, col: swc_loc.col_display, } @@ -74,13 +84,13 @@ impl From for Location { impl From for ModuleSpecifier { fn from(loc: Location) -> Self { - resolve_url_or_path(&loc.filename).unwrap() + resolve_url_or_path(&loc.specifier).unwrap() } } impl std::fmt::Display for Location { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - write!(f, "{}:{}:{}", self.filename, self.line, self.col) + write!(f, "{}:{}:{}", self.specifier, self.line, self.col) } } @@ -239,18 +249,15 @@ fn strip_config_from_emit_options( /// processing. 
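The goal of this refactor can be captured by a compile-time assertion: because the struct below only holds `Arc`-backed immutable data, a `MultiThreadedComments`, and the owned AST, it now satisfies `Sync`. A hypothetical test sketch (not part of the patch):

#[cfg(test)]
mod sync_assertion {
  fn assert_sync<T: Sync>() {}

  #[test]
  fn parsed_module_is_sync() {
    // fails to compile if a future change reintroduces an `Rc` or `RefCell`
    assert_sync::<super::ParsedModule>();
  }
}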
#[derive(Clone)] pub struct ParsedModule { - comments: SingleThreadedComments, - leading_comments: Vec, + info: Arc, + comments: MultiThreadedComments, pub module: Module, - pub source_map: Rc, - source_file: Rc, } impl fmt::Debug for ParsedModule { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("ParsedModule") .field("comments", &self.comments) - .field("leading_comments", &self.leading_comments) .field("module", &self.module) .finish() } @@ -265,28 +272,20 @@ impl ParsedModule { /// Get the module's leading comments, where triple slash directives might /// be located. pub fn get_leading_comments(&self) -> Vec { - self.leading_comments.clone() + self + .comments + .get_leading(self.module.span.lo) + .unwrap_or_else(Vec::new) } - /// Get the module's comments. + /// Get the module's comments sorted by position. pub fn get_comments(&self) -> Vec { - let mut comments = Vec::new(); - let (leading_comments, trailing_comments) = self.comments.borrow_all(); - - for value in leading_comments.values() { - comments.append(&mut value.clone()); - } - - for value in trailing_comments.values() { - comments.append(&mut value.clone()); - } - - comments + self.comments.get_vec() } - /// Get a location for a given span within the module. - pub fn get_location(&self, span: &Span) -> Location { - self.source_map.lookup_char_pos(span.lo).into() + /// Get a location for a given position within the module. + pub fn get_location(&self, pos: BytePos) -> Location { + self.info.get_location(pos) } /// Transform a TypeScript file into a JavaScript file, based on the supplied @@ -298,10 +297,14 @@ impl ParsedModule { options: &EmitOptions, ) -> Result<(String, Option), AnyError> { let program = Program::Module(self.module); + let source_map = Rc::new(SourceMap::default()); + let file_name = FileName::Custom(self.info.specifier.clone()); + source_map.new_source_file(file_name, self.info.text.clone()); + let comments = self.comments.as_single_threaded(); // needs to be mutable let jsx_pass = react::react( - self.source_map.clone(), - Some(&self.comments), + source_map.clone(), + Some(&comments), react::Options { pragma: options.jsx_factory.clone(), pragma_frag: options.jsx_fragment_factory.clone(), @@ -324,7 +327,7 @@ impl ParsedModule { typescript::strip::strip_with_config(strip_config_from_emit_options( options )), - fixer(Some(&self.comments)), + fixer(Some(&comments)), hygiene(), ); @@ -338,7 +341,7 @@ impl ParsedModule { let mut buf = vec![]; { let writer = Box::new(JsWriter::new( - self.source_map.clone(), + source_map.clone(), "\n", &mut buf, Some(&mut src_map_buf), @@ -346,8 +349,8 @@ impl ParsedModule { let config = swc_ecmascript::codegen::Config { minify: false }; let mut emitter = swc_ecmascript::codegen::Emitter { cfg: config, - comments: Some(&self.comments), - cm: self.source_map.clone(), + comments: Some(&comments), + cm: source_map.clone(), wr: writer, }; program.emit_with(&mut emitter)?; @@ -356,8 +359,7 @@ impl ParsedModule { let mut map: Option = None; { let mut buf = Vec::new(); - self - .source_map + source_map .build_source_map_from(&mut src_map_buf, None) .to_writer(&mut buf)?; @@ -373,41 +375,7 @@ impl ParsedModule { } } -pub fn parse_with_source_map( - specifier: &str, - source: &str, - media_type: &MediaType, - source_map: Rc, -) -> Result { - let source_file = source_map.new_source_file( - FileName::Custom(specifier.to_string()), - source.to_string(), - ); - let syntax = get_syntax(media_type); - let input = StringInput::from(&*source_file); - let comments = 
SingleThreadedComments::default(); - - let lexer = Lexer::new(syntax, TARGET, input, Some(&comments)); - let mut parser = swc_ecmascript::parser::Parser::new_from(lexer); - - let sm = &source_map; - let module = parser.parse_module().map_err(move |err| Diagnostic { - location: sm.lookup_char_pos(err.span().lo).into(), - message: err.into_kind().msg().to_string(), - })?; - let leading_comments = - comments.with_leading(module.span.lo, |comments| comments.to_vec()); - - Ok(ParsedModule { - comments, - leading_comments, - module, - source_map, - source_file, - }) -} - -/// For a given specifier, source, and media type, parse the source of the +/// For a given specifier, source, and media type, parse the text of the /// module and return a representation which can be further processed. /// /// # Arguments @@ -424,8 +392,16 @@ pub fn parse( source: &str, media_type: &MediaType, ) -> Result { - let source_map = Rc::new(SourceMap::default()); - parse_with_source_map(specifier, source, media_type, source_map) + let info = SourceFileInfo::new(specifier, source); + let input = + StringInput::new(source, BytePos(0), BytePos(source.len() as u32)); + let (comments, module) = parse_string_input(&info, input, media_type)?; + + Ok(ParsedModule { + info: Arc::new(info), + comments: MultiThreadedComments::from_single_threaded(comments), + module, + }) } pub enum TokenOrComment { @@ -453,21 +429,12 @@ fn flatten_comments( comments.into_iter().flat_map(|el| el.1) } -pub fn lex( - specifier: &str, - source: &str, - media_type: &MediaType, -) -> Vec { - let source_map = SourceMap::default(); - let source_file = source_map.new_source_file( - FileName::Custom(specifier.to_string()), - source.to_string(), - ); +pub fn lex(source: &str, media_type: &MediaType) -> Vec { let comments = SingleThreadedComments::default(); let lexer = Lexer::new( get_syntax(media_type), TARGET, - StringInput::from(source_file.as_ref()), + StringInput::new(source, BytePos(0), BytePos(source.len() as u32)), Some(&comments), ); @@ -494,19 +461,24 @@ pub fn lex( /// A low level function which transpiles a source module into an swc /// SourceFile. 
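Before the lower-level `transpile_module` path below, the typical flow after this refactor is to `parse` and then call `transpile` on the resulting `ParsedModule`; locations come from the module's own `SourceFileInfo`, so no shared `SourceMap` is threaded through. A hypothetical usage sketch (not part of the patch), assuming `EmitOptions` implements `Default` and that `MediaType` is the cli media type enum imported at the top of this module:

#[cfg(test)]
mod usage_sketch {
  use super::{parse, EmitOptions};
  use crate::media_type::MediaType;
  use swc_common::BytePos;

  #[test]
  fn parse_locate_and_transpile() {
    let parsed = parse(
      "file:///example.ts",
      "export const value: number = 1;",
      &MediaType::TypeScript,
    )
    .unwrap();
    let location = parsed.get_location(BytePos(0));
    assert_eq!(location.line, 1); // lines stay 1-indexed, matching swc
    let (code, _maybe_map) = parsed.transpile(&EmitOptions::default()).unwrap();
    assert!(code.contains("const value"));
  }
}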
pub fn transpile_module( - filename: &str, - src: &str, + specifier: &str, + source: &str, media_type: &MediaType, emit_options: &EmitOptions, globals: &Globals, cm: Rc, ) -> Result<(Rc, Module), AnyError> { - let parsed_module = - parse_with_source_map(filename, src, media_type, cm.clone())?; + let info = SourceFileInfo::new(specifier, source); + let source_file = cm.new_source_file( + FileName::Custom(specifier.to_string()), + source.to_string(), + ); + let input = StringInput::from(&*source_file); + let (comments, module) = parse_string_input(&info, input, media_type)?; let jsx_pass = react::react( cm, - Some(&parsed_module.comments), + Some(&comments), react::Options { pragma: emit_options.jsx_factory.clone(), pragma_frag: emit_options.jsx_fragment_factory.clone(), @@ -526,12 +498,9 @@ pub fn transpile_module( typescript::strip::strip_with_config(strip_config_from_emit_options( emit_options )), - fixer(Some(&parsed_module.comments)), + fixer(Some(&comments)), ); - let source_file = parsed_module.source_file.clone(); - let module = parsed_module.module; - let module = swc_common::GLOBALS.set(globals, || { helpers::HELPERS.set(&helpers::Helpers::new(false), || { module.fold_with(&mut passes) @@ -541,55 +510,22 @@ pub fn transpile_module( Ok((source_file, module)) } -pub struct BundleHook; +fn parse_string_input( + info: &SourceFileInfo, + input: StringInput, + media_type: &MediaType, +) -> Result<(SingleThreadedComments, Module), AnyError> { + let syntax = get_syntax(media_type); + let comments = SingleThreadedComments::default(); + let lexer = Lexer::new(syntax, TARGET, input, Some(&comments)); + let mut parser = swc_ecmascript::parser::Parser::new_from(lexer); -impl swc_bundler::Hook for BundleHook { - fn get_import_meta_props( - &self, - span: swc_common::Span, - module_record: &swc_bundler::ModuleRecord, - ) -> Result, AnyError> { - use swc_ecmascript::ast; + let module = parser.parse_module().map_err(|err| Diagnostic { + location: info.get_location(err.span().lo), + message: err.into_kind().msg().to_string(), + })?; - // we use custom file names, and swc "wraps" these in `<` and `>` so, we - // want to strip those back out. - let mut value = module_record.file_name.to_string(); - value.pop(); - value.remove(0); - - Ok(vec![ - ast::KeyValueProp { - key: ast::PropName::Ident(ast::Ident::new("url".into(), span)), - value: Box::new(ast::Expr::Lit(ast::Lit::Str(ast::Str { - span, - value: value.into(), - kind: ast::StrKind::Synthesized, - has_escape: false, - }))), - }, - ast::KeyValueProp { - key: ast::PropName::Ident(ast::Ident::new("main".into(), span)), - value: Box::new(if module_record.is_entry { - ast::Expr::Member(ast::MemberExpr { - span, - obj: ast::ExprOrSuper::Expr(Box::new(ast::Expr::MetaProp( - ast::MetaPropExpr { - meta: ast::Ident::new("import".into(), span), - prop: ast::Ident::new("meta".into(), span), - }, - ))), - prop: Box::new(ast::Expr::Ident(ast::Ident::new( - "main".into(), - span, - ))), - computed: false, - }) - } else { - ast::Expr::Lit(ast::Lit::Bool(ast::Bool { span, value: false })) - }), - }, - ]) - } + Ok((comments, module)) } #[cfg(test)] diff --git a/cli/ast/source_file_info.rs b/cli/ast/source_file_info.rs new file mode 100644 index 0000000000..5792fb4192 --- /dev/null +++ b/cli/ast/source_file_info.rs @@ -0,0 +1,130 @@ +// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license. 
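The strategy `SourceFileInfo` uses below can be sketched in isolation: record the byte offset of every line start once, then binary-search that table to map a byte position back to a line. A standalone sketch of the idea (not part of the patch):

fn line_starts(text: &str) -> Vec<usize> {
  std::iter::once(0)
    .chain(
      text
        .char_indices()
        .filter(|&(_, c)| c == '\n')
        .map(|(index, _)| index + 1),
    )
    .collect()
}

fn line_index(starts: &[usize], byte_pos: usize) -> usize {
  match starts.binary_search(&byte_pos) {
    Ok(index) => index,
    Err(insert_index) => insert_index - 1,
  }
}

#[test]
fn finds_the_line_for_a_byte_position() {
  let starts = line_starts("12\n3\r\n4");
  assert_eq!(starts, vec![0, 3, 6]);
  assert_eq!(line_index(&starts, 4), 1); // byte 4 ('\r') sits on the second line
}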
+ +use super::Location; + +use swc_common::BytePos; + +pub struct SourceFileInfo { + pub specifier: String, + pub text: String, + line_start_byte_positions: Vec, +} + +impl SourceFileInfo { + pub fn new(specifier: &str, text: &str) -> SourceFileInfo { + SourceFileInfo { + line_start_byte_positions: get_line_start_positions(text), + specifier: specifier.to_string(), + text: text.to_string(), + } + } + + pub fn get_location(&self, pos: BytePos) -> Location { + let line_index = self.get_line_index_at_pos(pos); + let col = self.get_column_on_line_index_at_pos(line_index, pos); + + Location { + specifier: self.specifier.clone(), + // todo(dsherret): this is temporarily 1-indexed in order to have + // the same behaviour as swc, but we should change this to be 0-indexed + // in order to be the same as the LSP. + line: line_index + 1, + col, + } + } + + fn get_line_index_at_pos(&self, pos: BytePos) -> usize { + match self.line_start_byte_positions.binary_search(&pos) { + Ok(index) => index, + Err(insert_index) => insert_index - 1, + } + } + + fn get_column_on_line_index_at_pos( + &self, + line_index: usize, + pos: BytePos, + ) -> usize { + assert!(line_index < self.line_start_byte_positions.len()); + let pos = pos.0 as usize; + let line_start_pos = self.line_start_byte_positions[line_index].0 as usize; + let line_end_pos = self + .line_start_byte_positions + .get(line_index + 1) + // may include line feed chars at the end, but in that case the pos should be less + .map(|p| p.0 as usize) + .unwrap_or_else(|| self.text.len()); + let line_text = &self.text[line_start_pos..line_end_pos]; + + if pos < line_start_pos { + panic!( + "byte position {} was less than the start line position of {}", + pos, line_start_pos + ); + } else if pos > line_end_pos { + panic!( + "byte position {} exceeded the end line position of {}", + pos, line_end_pos + ); + } else if pos == line_end_pos { + line_text.chars().count() + } else { + line_text + .char_indices() + .position(|(c_pos, _)| line_start_pos + c_pos >= pos) + .unwrap() + } + } +} + +fn get_line_start_positions(text: &str) -> Vec { + let mut result = vec![BytePos(0)]; + for (pos, c) in text.char_indices() { + if c == '\n' { + let line_start_pos = BytePos((pos + 1) as u32); + result.push(line_start_pos); + } + } + result +} + +#[cfg(test)] +mod test { + use super::SourceFileInfo; + use crate::ast::Location; + + use swc_common::BytePos; + + #[test] + fn should_provide_locations() { + let text = "12\n3\r\n4\n5"; + let specifier = "file:///file.ts"; + let info = SourceFileInfo::new(specifier, text); + assert_pos_line_and_col(&info, 0, 1, 0); // 1 + assert_pos_line_and_col(&info, 1, 1, 1); // 2 + assert_pos_line_and_col(&info, 2, 1, 2); // \n + assert_pos_line_and_col(&info, 3, 2, 0); // 3 + assert_pos_line_and_col(&info, 4, 2, 1); // \r + assert_pos_line_and_col(&info, 5, 2, 2); // \n + assert_pos_line_and_col(&info, 6, 3, 0); // 4 + assert_pos_line_and_col(&info, 7, 3, 1); // \n + assert_pos_line_and_col(&info, 8, 4, 0); // 5 + assert_pos_line_and_col(&info, 9, 4, 1); // + } + + fn assert_pos_line_and_col( + info: &SourceFileInfo, + pos: u32, + line: usize, + col: usize, + ) { + assert_eq!( + info.get_location(BytePos(pos)), + Location { + specifier: info.specifier.clone(), + line, + col, + } + ); + } +} diff --git a/cli/lsp/analysis.rs b/cli/lsp/analysis.rs index aae67f87a0..17fdf5d4c1 100644 --- a/cli/lsp/analysis.rs +++ b/cli/lsp/analysis.rs @@ -29,9 +29,6 @@ use regex::Regex; use std::cmp::Ordering; use std::collections::HashMap; use std::fmt; -use 
std::rc::Rc; -use swc_common::Loc; -use swc_common::SourceMap; use swc_common::DUMMY_SP; use swc_ecmascript::ast as swc_ast; use swc_ecmascript::visit::Node; @@ -287,13 +284,7 @@ pub fn parse_module( source: &str, media_type: &MediaType, ) -> Result { - let source_map = Rc::new(swc_common::SourceMap::default()); - ast::parse_with_source_map( - &specifier.to_string(), - source, - media_type, - source_map, - ) + ast::parse(&specifier.to_string(), source, media_type) } // TODO(@kitsonk) a lot of this logic is duplicated in module_graph.rs in @@ -310,7 +301,7 @@ pub fn analyze_dependencies( // Parse leading comments for supported triple slash references. for comment in parsed_module.get_leading_comments().iter() { if let Some((ts_reference, span)) = parse_ts_reference(comment) { - let loc = parsed_module.source_map.lookup_char_pos(span.lo); + let loc = parsed_module.get_location(span.lo); match ts_reference { TypeScriptReference::Path(import) => { let dep = dependencies.entry(import.clone()).or_default(); @@ -320,11 +311,11 @@ pub fn analyze_dependencies( dep.maybe_code_specifier_range = Some(Range { start: Position { line: (loc.line - 1) as u32, - character: loc.col_display as u32, + character: loc.col as u32, }, end: Position { line: (loc.line - 1) as u32, - character: (loc.col_display + import.chars().count() + 2) as u32, + character: (loc.col + import.chars().count() + 2) as u32, }, }); } @@ -341,11 +332,11 @@ pub fn analyze_dependencies( dep.maybe_type_specifier_range = Some(Range { start: Position { line: (loc.line - 1) as u32, - character: loc.col_display as u32, + character: loc.col as u32, }, end: Position { line: (loc.line - 1) as u32, - character: (loc.col_display + import.chars().count() + 2) as u32, + character: (loc.col + import.chars().count() + 2) as u32, }, }); } @@ -368,7 +359,7 @@ pub fn analyze_dependencies( ( resolve_import(deno_types, specifier, maybe_import_map), deno_types.clone(), - parsed_module.source_map.lookup_char_pos(span.lo) + parsed_module.get_location(span.lo) ) }) } else { @@ -377,20 +368,16 @@ pub fn analyze_dependencies( let dep = dependencies.entry(desc.specifier.to_string()).or_default(); dep.is_dynamic = desc.is_dynamic; - let start = parsed_module - .source_map - .lookup_char_pos(desc.specifier_span.lo); - let end = parsed_module - .source_map - .lookup_char_pos(desc.specifier_span.hi); + let start = parsed_module.get_location(desc.specifier_span.lo); + let end = parsed_module.get_location(desc.specifier_span.hi); let range = Range { start: Position { line: (start.line - 1) as u32, - character: start.col_display as u32, + character: start.col as u32, }, end: Position { line: (end.line - 1) as u32, - character: end.col_display as u32, + character: end.col as u32, }, }; dep.maybe_code_specifier_range = Some(range); @@ -402,11 +389,11 @@ pub fn analyze_dependencies( dep.maybe_type_specifier_range = Some(Range { start: Position { line: (loc.line - 1) as u32, - character: (loc.col_display + 1) as u32, + character: (loc.col + 1) as u32, }, end: Position { line: (loc.line - 1) as u32, - character: (loc.col_display + 1 + specifier.chars().count()) as u32, + character: (loc.col + 1 + specifier.chars().count()) as u32, }, }); dep.maybe_type = Some(resolved_dependency); @@ -971,16 +958,19 @@ fn prepend_whitespace(content: String, line_content: Option) -> String { } } -/// Get LSP range from the provided SWC start and end locations. -fn get_range_from_loc(start: &Loc, end: &Loc) -> lsp::Range { +/// Get LSP range from the provided start and end locations. 
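The conversion below is the one recurring subtlety in this file: `ast::Location` keeps swc's 1-indexed lines (and 0-indexed columns), while LSP positions are 0-indexed in both. A minimal sketch of that mapping (not part of the patch, using plain tuples instead of `lsp::Position`):

fn to_lsp_line_and_character(line_1_indexed: usize, col_0_indexed: usize) -> (u32, u32) {
  ((line_1_indexed - 1) as u32, col_0_indexed as u32)
}

#[test]
fn shifts_lines_to_zero_indexed() {
  assert_eq!(to_lsp_line_and_character(1, 0), (0, 0)); // start of the file
  assert_eq!(to_lsp_line_and_character(4, 2), (3, 2));
}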
+fn get_range_from_location( + start: &ast::Location, + end: &ast::Location, +) -> lsp::Range { lsp::Range { start: lsp::Position { line: (start.line - 1) as u32, - character: start.col_display as u32, + character: start.col as u32, }, end: lsp::Position { line: (end.line - 1) as u32, - character: end.col_display as u32, + character: end.col as u32, }, } } @@ -1029,16 +1019,16 @@ impl DependencyRanges { } } -struct DependencyRangeCollector { +struct DependencyRangeCollector<'a> { import_ranges: DependencyRanges, - source_map: Rc, + parsed_module: &'a ast::ParsedModule, } -impl DependencyRangeCollector { - pub fn new(source_map: Rc) -> Self { +impl<'a> DependencyRangeCollector<'a> { + pub fn new(parsed_module: &'a ast::ParsedModule) -> Self { Self { import_ranges: DependencyRanges::default(), - source_map, + parsed_module, } } @@ -1047,16 +1037,16 @@ impl DependencyRangeCollector { } } -impl Visit for DependencyRangeCollector { +impl<'a> Visit for DependencyRangeCollector<'a> { fn visit_import_decl( &mut self, node: &swc_ast::ImportDecl, _parent: &dyn Node, ) { - let start = self.source_map.lookup_char_pos(node.src.span.lo); - let end = self.source_map.lookup_char_pos(node.src.span.hi); + let start = self.parsed_module.get_location(node.src.span.lo); + let end = self.parsed_module.get_location(node.src.span.hi); self.import_ranges.0.push(DependencyRange { - range: narrow_range(get_range_from_loc(&start, &end)), + range: narrow_range(get_range_from_location(&start, &end)), specifier: node.src.value.to_string(), }); } @@ -1067,10 +1057,10 @@ impl Visit for DependencyRangeCollector { _parent: &dyn Node, ) { if let Some(src) = &node.src { - let start = self.source_map.lookup_char_pos(src.span.lo); - let end = self.source_map.lookup_char_pos(src.span.hi); + let start = self.parsed_module.get_location(src.span.lo); + let end = self.parsed_module.get_location(src.span.hi); self.import_ranges.0.push(DependencyRange { - range: narrow_range(get_range_from_loc(&start, &end)), + range: narrow_range(get_range_from_location(&start, &end)), specifier: src.value.to_string(), }); } @@ -1081,10 +1071,10 @@ impl Visit for DependencyRangeCollector { node: &swc_ast::ExportAll, _parent: &dyn Node, ) { - let start = self.source_map.lookup_char_pos(node.src.span.lo); - let end = self.source_map.lookup_char_pos(node.src.span.hi); + let start = self.parsed_module.get_location(node.src.span.lo); + let end = self.parsed_module.get_location(node.src.span.hi); self.import_ranges.0.push(DependencyRange { - range: narrow_range(get_range_from_loc(&start, &end)), + range: narrow_range(get_range_from_location(&start, &end)), specifier: node.src.value.to_string(), }); } @@ -1094,10 +1084,10 @@ impl Visit for DependencyRangeCollector { node: &swc_ast::TsImportType, _parent: &dyn Node, ) { - let start = self.source_map.lookup_char_pos(node.arg.span.lo); - let end = self.source_map.lookup_char_pos(node.arg.span.hi); + let start = self.parsed_module.get_location(node.arg.span.lo); + let end = self.parsed_module.get_location(node.arg.span.hi); self.import_ranges.0.push(DependencyRange { - range: narrow_range(get_range_from_loc(&start, &end)), + range: narrow_range(get_range_from_location(&start, &end)), specifier: node.arg.value.to_string(), }); } @@ -1108,8 +1098,7 @@ impl Visit for DependencyRangeCollector { pub fn analyze_dependency_ranges( parsed_module: &ast::ParsedModule, ) -> Result { - let mut collector = - DependencyRangeCollector::new(parsed_module.source_map.clone()); + let mut collector = 
DependencyRangeCollector::new(parsed_module); parsed_module .module .visit_with(&swc_ast::Invalid { span: DUMMY_SP }, &mut collector); diff --git a/cli/lsp/code_lens.rs b/cli/lsp/code_lens.rs index 1f8d2481b1..0570ac7034 100644 --- a/cli/lsp/code_lens.rs +++ b/cli/lsp/code_lens.rs @@ -3,6 +3,7 @@ use super::analysis; use super::language_server; use super::tsc; +use crate::ast::ParsedModule; use deno_core::error::anyhow; use deno_core::error::AnyError; @@ -17,7 +18,6 @@ use regex::Regex; use std::cell::RefCell; use std::collections::HashSet; use std::rc::Rc; -use swc_common::SourceMap; use swc_common::Span; use swc_ecmascript::ast; use swc_ecmascript::visit::Node; @@ -44,40 +44,43 @@ pub struct CodeLensData { pub specifier: ModuleSpecifier, } -fn span_to_range(span: &Span, source_map: Rc) -> lsp::Range { - let start = source_map.lookup_char_pos(span.lo); - let end = source_map.lookup_char_pos(span.hi); +fn span_to_range(span: &Span, parsed_module: &ParsedModule) -> lsp::Range { + let start = parsed_module.get_location(span.lo); + let end = parsed_module.get_location(span.hi); lsp::Range { start: lsp::Position { line: (start.line - 1) as u32, - character: start.col_display as u32, + character: start.col as u32, }, end: lsp::Position { line: (end.line - 1) as u32, - character: end.col_display as u32, + character: end.col as u32, }, } } -struct DenoTestCollector { +struct DenoTestCollector<'a> { code_lenses: Vec, - source_map: Rc, + parsed_module: &'a ParsedModule, specifier: ModuleSpecifier, test_vars: HashSet, } -impl DenoTestCollector { - pub fn new(specifier: ModuleSpecifier, source_map: Rc) -> Self { +impl<'a> DenoTestCollector<'a> { + pub fn new( + specifier: ModuleSpecifier, + parsed_module: &'a ParsedModule, + ) -> Self { Self { code_lenses: Vec::new(), - source_map, + parsed_module, specifier, test_vars: HashSet::new(), } } fn add_code_lens>(&mut self, name: N, span: &Span) { - let range = span_to_range(span, self.source_map.clone()); + let range = span_to_range(span, self.parsed_module); self.code_lenses.push(lsp::CodeLens { range, command: Some(lsp::Command { @@ -125,7 +128,7 @@ impl DenoTestCollector { } } -impl Visit for DenoTestCollector { +impl<'a> Visit for DenoTestCollector<'a> { fn visit_call_expr(&mut self, node: &ast::CallExpr, _parent: &dyn Node) { if let ast::ExprOrSuper::Expr(callee_expr) = &node.callee { match callee_expr.as_ref() { @@ -392,10 +395,8 @@ fn collect_test( if let Ok(parsed_module) = analysis::parse_module(specifier, &source, &media_type) { - let mut collector = DenoTestCollector::new( - specifier.clone(), - parsed_module.source_map.clone(), - ); + let mut collector = + DenoTestCollector::new(specifier.clone(), &parsed_module); parsed_module.module.visit_with( &ast::Invalid { span: swc_common::DUMMY_SP, @@ -521,8 +522,7 @@ mod tests { let parsed_module = analysis::parse_module(&specifier, source, &MediaType::TypeScript) .unwrap(); - let mut collector = - DenoTestCollector::new(specifier, parsed_module.source_map.clone()); + let mut collector = DenoTestCollector::new(specifier, &parsed_module); parsed_module.module.visit_with( &ast::Invalid { span: swc_common::DUMMY_SP, diff --git a/cli/module_graph.rs b/cli/module_graph.rs index 1a82bce0e3..26c20f21dd 100644 --- a/cli/module_graph.rs +++ b/cli/module_graph.rs @@ -349,7 +349,7 @@ impl Module { // parse out any triple slash references for comment in parsed_module.get_leading_comments().iter() { if let Some((ts_reference, _)) = parse_ts_reference(comment) { - let location = 
parsed_module.get_location(&comment.span); + let location = parsed_module.get_location(comment.span.lo); match ts_reference { TypeScriptReference::Path(import) => { let specifier = @@ -386,12 +386,7 @@ impl Module { for desc in dependencies.iter().filter(|desc| { desc.kind != swc_ecmascript::dep_graph::DependencyKind::Require }) { - let loc = parsed_module.source_map.lookup_char_pos(desc.span.lo); - let location = Location { - filename: self.specifier.to_string(), - col: loc.col_display, - line: loc.line, - }; + let location = parsed_module.get_location(desc.span.lo); // In situations where there is a potential issue with resolving the // import specifier, that ends up being a module resolution error for a @@ -468,7 +463,7 @@ impl Module { let referrer_scheme = self.specifier.scheme(); let specifier_scheme = specifier.scheme(); let location = maybe_location.unwrap_or(Location { - filename: self.specifier.to_string(), + specifier: self.specifier.to_string(), line: 0, col: 0, }); diff --git a/cli/specifier_handler.rs b/cli/specifier_handler.rs index ae47e977a7..5fd9d6b256 100644 --- a/cli/specifier_handler.rs +++ b/cli/specifier_handler.rs @@ -274,7 +274,7 @@ impl SpecifierHandler for FetchHandler { let message = if let Some(location) = &maybe_location { format!( "Cannot resolve module \"{}\" from \"{}\".", - requested_specifier, location.filename + requested_specifier, location.specifier ) } else { format!("Cannot resolve module \"{}\".", requested_specifier) @@ -291,7 +291,7 @@ impl SpecifierHandler for FetchHandler { // they are confusing to the user to print out the location because // they cannot actually get to the source code that is quoted, as // it only exists in the runtime memory of Deno. - if !location.filename.contains("$deno$") { + if !location.specifier.contains("$deno$") { ( requested_specifier.clone(), HandlerError::FetchErrorWithLocation(err.to_string(), location) diff --git a/cli/tools/coverage.rs b/cli/tools/coverage.rs index bb1e9417d6..62f5f5d2e7 100644 --- a/cli/tools/coverage.rs +++ b/cli/tools/coverage.rs @@ -432,7 +432,7 @@ impl CoverageReporter for PrettyCoverageReporter { .map(|source_map| SourceMap::from_slice(&source_map).unwrap()); let mut ignored_spans: Vec = Vec::new(); - for item in ast::lex("", script_source, &MediaType::JavaScript) { + for item in ast::lex(script_source, &MediaType::JavaScript) { if let TokenOrComment::Token(_) = item.inner { continue; } diff --git a/cli/tools/repl.rs b/cli/tools/repl.rs index 5b0d788413..d527ea8681 100644 --- a/cli/tools/repl.rs +++ b/cli/tools/repl.rs @@ -231,7 +231,7 @@ impl Validator for EditorHelper { let mut stack: Vec = Vec::new(); let mut in_template = false; - for item in ast::lex("", ctx.input(), &MediaType::TypeScript) { + for item in ast::lex(ctx.input(), &MediaType::TypeScript) { if let TokenOrComment::Token(token) = item.inner { match token { Token::BackQuote => in_template = !in_template, @@ -302,7 +302,7 @@ impl Highlighter for EditorHelper { fn highlight<'l>(&self, line: &'l str, _: usize) -> Cow<'l, str> { let mut out_line = String::from(line); - for item in ast::lex("", line, &MediaType::TypeScript) { + for item in ast::lex(line, &MediaType::TypeScript) { // Adding color adds more bytes to the string, // so an offset is needed to stop spans falling out of sync. 
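The offset bookkeeping described in the comment above can be shown on its own: every escape sequence already spliced into the copy shifts the byte positions of later spans by the difference in length between the colored copy and the original line. A standalone sketch (not part of the patch) using bracket markers instead of ANSI colors:

fn wrap_span(line: &str, lo: usize, hi: usize, out: &mut String) {
  // everything inserted so far has grown `out` relative to `line`
  let offset = out.len() - line.len();
  out.replace_range(lo + offset..hi + offset, &format!("[{}]", &line[lo..hi]));
}

#[test]
fn later_spans_stay_aligned() {
  let line = "let a = 1;";
  let mut out = String::from(line);
  wrap_span(line, 0, 3, &mut out); // "let"
  wrap_span(line, 4, 5, &mut out); // "a"
  assert_eq!(out, "[let] [a] = 1;");
}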
       let offset = out_line.len() - line.len();
diff --git a/cli/tools/test_runner.rs b/cli/tools/test_runner.rs
index 656db50f43..106555f7d9 100644
--- a/cli/tools/test_runner.rs
+++ b/cli/tools/test_runner.rs
@@ -399,7 +399,7 @@ fn extract_files_from_regex_blocks(
     let file_specifier = deno_core::resolve_url_or_path(&format!(
       "{}${}-{}{}",
-      location.filename,
+      location.specifier,
       location.line + line_offset,
       location.line + line_offset + line_count,
       file_media_type.as_ts_extension(),
@@ -425,10 +425,7 @@ fn extract_files_from_source_comments(
   media_type: &MediaType,
 ) -> Result<Vec<File>, AnyError> {
   let parsed_module = ast::parse(specifier.as_str(), source, media_type)?;
-  let mut comments = parsed_module.get_comments();
-  comments
-    .sort_by_key(|comment| parsed_module.get_location(&comment.span).line);
-
+  let comments = parsed_module.get_comments();
   let blocks_regex = Regex::new(r"```([^\n]*)\n([\S\s]*?)```")?;
   let lines_regex = Regex::new(r"(?:\* ?)(?:\# ?)?(.*)")?;
 
@@ -442,7 +439,7 @@ fn extract_files_from_source_comments(
       true
     })
     .flat_map(|comment| {
-      let location = parsed_module.get_location(&comment.span);
+      let location = parsed_module.get_location(comment.span.lo);
 
       extract_files_from_regex_blocks(
         &location,
@@ -464,7 +461,7 @@ fn extract_files_from_fenced_blocks(
   media_type: &MediaType,
 ) -> Result<Vec<File>, AnyError> {
   let location = Location {
-    filename: specifier.to_string(),
+    specifier: specifier.to_string(),
     line: 1,
     col: 0,
   };
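For readers unfamiliar with the pseudo-specifier built in `extract_files_from_regex_blocks` above: files extracted from documentation blocks are named after the originating specifier plus a `$start-end` line range and the inferred extension, which keeps each extracted test uniquely addressable. A hypothetical sketch of the same naming scheme (assuming `MediaType::as_ts_extension` yields an extension such as `.ts`):

fn doc_test_specifier(
  specifier: &str,
  start_line: usize,
  end_line: usize,
  extension: &str,
) -> String {
  format!("{}${}-{}{}", specifier, start_line, end_line, extension)
}

#[test]
fn builds_a_doc_test_specifier() {
  assert_eq!(
    doc_test_specifier("file:///mod.ts", 10, 15, ".ts"),
    "file:///mod.ts$10-15.ts"
  );
}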