1
0
Fork 0
mirror of https://github.com/denoland/deno.git synced 2024-12-25 00:29:09 -05:00

refactor: make ParsedModule implement Sync (#11581)

This commit is contained in:
David Sherret 2021-08-06 10:36:16 -04:00 committed by GitHub
parent e9ddc7a41a
commit 466d3df9d1
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
13 changed files with 436 additions and 230 deletions

4
Cargo.lock generated
View file

@ -3426,9 +3426,9 @@ dependencies = [
[[package]] [[package]]
name = "swc_common" name = "swc_common"
version = "0.11.0" version = "0.11.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25c6cd455ca59637ff47297c375a7fea1c7d61cc92448421485021ec6c6bf03d" checksum = "f303985ebd578a033371a87be28cb54189d3ae5d492622523200f4de08db5275"
dependencies = [ dependencies = [
"ast_node", "ast_node",
"cfg-if 0.1.10", "cfg-if 0.1.10",

View file

@ -82,7 +82,7 @@ serde = { version = "1.0.126", features = ["derive"] }
shell-escape = "0.1.5" shell-escape = "0.1.5"
sourcemap = "6.0.1" sourcemap = "6.0.1"
swc_bundler = "0.46.0" swc_bundler = "0.46.0"
swc_common = { version = "0.11.0", features = ["sourcemap"] } swc_common = { version = "0.11.4", features = ["sourcemap"] }
swc_ecmascript = { version = "0.46.0", features = ["codegen", "dep_graph", "parser", "proposal", "react", "transforms", "typescript", "visit"] } swc_ecmascript = { version = "0.46.0", features = ["codegen", "dep_graph", "parser", "proposal", "react", "transforms", "typescript", "visit"] }
tempfile = "3.2.0" tempfile = "3.2.0"
termcolor = "1.1.2" termcolor = "1.1.2"

53
cli/ast/bundle_hook.rs Normal file
View file

@ -0,0 +1,53 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
use deno_core::error::AnyError;
/// An swc_bundler hook that supplies the `import.meta` properties (`url`
/// and `main`) for modules being bundled.
pub struct BundleHook;

impl swc_bundler::Hook for BundleHook {
  fn get_import_meta_props(
    &self,
    span: swc_common::Span,
    module_record: &swc_bundler::ModuleRecord,
  ) -> Result<Vec<swc_ecmascript::ast::KeyValueProp>, AnyError> {
    use swc_ecmascript::ast;

    // we use custom file names, and swc "wraps" these in `<` and `>` so, we
    // want to strip those back out.
    let mut url = module_record.file_name.to_string();
    url.pop(); // strip trailing `>`
    url.remove(0); // strip leading `<`

    // `import.meta.url` — the stripped file name as a synthesized string
    let url_prop = ast::KeyValueProp {
      key: ast::PropName::Ident(ast::Ident::new("url".into(), span)),
      value: Box::new(ast::Expr::Lit(ast::Lit::Str(ast::Str {
        span,
        value: url.into(),
        kind: ast::StrKind::Synthesized,
        has_escape: false,
      }))),
    };

    // `import.meta.main` — kept as the `import.meta.main` member expression
    // for the entry module, and a literal `false` for every other module
    let main_value = if module_record.is_entry {
      ast::Expr::Member(ast::MemberExpr {
        span,
        obj: ast::ExprOrSuper::Expr(Box::new(ast::Expr::MetaProp(
          ast::MetaPropExpr {
            meta: ast::Ident::new("import".into(), span),
            prop: ast::Ident::new("meta".into(), span),
          },
        ))),
        prop: Box::new(ast::Expr::Ident(ast::Ident::new("main".into(), span))),
        computed: false,
      })
    } else {
      ast::Expr::Lit(ast::Lit::Bool(ast::Bool { span, value: false }))
    };
    let main_prop = ast::KeyValueProp {
      key: ast::PropName::Ident(ast::Ident::new("main".into(), span)),
      value: Box::new(main_value),
    };

    Ok(vec![url_prop, main_prop])
  }
}

106
cli/ast/comments.rs Normal file
View file

@ -0,0 +1,106 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
use std::cell::RefCell;
use std::rc::Rc;
use std::sync::Arc;
use swc_common::comments::Comment;
use swc_common::comments::Comments;
use swc_common::comments::SingleThreadedComments;
use swc_common::comments::SingleThreadedCommentsMapInner;
use swc_common::BytePos;
/// An implementation of swc's `Comments` that implements `Sync`
/// to support being used in multi-threaded code. This implementation
/// is immutable and should you need mutability you may create a copy
/// by converting it to an swc `SingleThreadedComments`.
#[derive(Clone, Debug)]
pub struct MultiThreadedComments {
  // comments preceding a position, keyed by that `BytePos`
  leading: Arc<SingleThreadedCommentsMapInner>,
  // comments following a position, keyed by that `BytePos`
  trailing: Arc<SingleThreadedCommentsMapInner>,
}
impl MultiThreadedComments {
  /// Consumes a `SingleThreadedComments`, moving its comment maps into
  /// `Arc`s so they can be shared across threads.
  pub fn from_single_threaded(comments: SingleThreadedComments) -> Self {
    let (leading_map, trailing_map) = comments.take_all();
    // `take_all` hands back the sole owners of these `Rc`s, so unwrapping
    // them (and their `RefCell`s) cannot fail here
    MultiThreadedComments {
      leading: Arc::new(Rc::try_unwrap(leading_map).unwrap().into_inner()),
      trailing: Arc::new(Rc::try_unwrap(trailing_map).unwrap().into_inner()),
    }
  }

  /// Creates a mutable swc `SingleThreadedComments` holding a copy of
  /// these comments.
  pub fn as_single_threaded(&self) -> SingleThreadedComments {
    SingleThreadedComments::from_leading_and_trailing(
      Rc::new(RefCell::new((*self.leading).to_owned())),
      Rc::new(RefCell::new((*self.trailing).to_owned())),
    )
  }

  /// Gets a vector of all the comments sorted by position.
  pub fn get_vec(&self) -> Vec<Comment> {
    let mut result: Vec<Comment> = Vec::new();
    for comment_group in self.leading.values().chain(self.trailing.values()) {
      result.extend(comment_group.iter().cloned());
    }
    result.sort_by_key(|comment| comment.span.lo);
    result
  }
}
impl Comments for MultiThreadedComments {
  // read accessors delegate straight to the underlying maps

  fn get_leading(&self, pos: BytePos) -> Option<Vec<Comment>> {
    self.leading.get(&pos).cloned()
  }

  fn has_leading(&self, pos: BytePos) -> bool {
    self.leading.contains_key(&pos)
  }

  fn get_trailing(&self, pos: BytePos) -> Option<Vec<Comment>> {
    self.trailing.get(&pos).cloned()
  }

  fn has_trailing(&self, pos: BytePos) -> bool {
    self.trailing.contains_key(&pos)
  }

  // this collection is immutable, so every trait method below that would
  // mutate the comments panics instead of writing

  fn add_leading(&self, _pos: BytePos, _comment: Comment) {
    panic_readonly();
  }

  fn add_leading_comments(&self, _pos: BytePos, _comments: Vec<Comment>) {
    panic_readonly();
  }

  fn move_leading(&self, _from: BytePos, _to: BytePos) {
    panic_readonly();
  }

  fn take_leading(&self, _pos: BytePos) -> Option<Vec<Comment>> {
    panic_readonly();
  }

  fn add_trailing(&self, _pos: BytePos, _comment: Comment) {
    panic_readonly();
  }

  fn add_trailing_comments(&self, _pos: BytePos, _comments: Vec<Comment>) {
    panic_readonly();
  }

  fn move_trailing(&self, _from: BytePos, _to: BytePos) {
    panic_readonly();
  }

  fn take_trailing(&self, _pos: BytePos) -> Option<Vec<Comment>> {
    panic_readonly();
  }

  fn add_pure_comment(&self, _pos: BytePos) {
    panic_readonly();
  }
}
/// Always panics; called by the `Comments` trait's mutation methods, which
/// this read-only implementation does not support.
fn panic_readonly() -> ! {
  panic!("MultiThreadedComments do not support write operations")
}

View file

@ -11,10 +11,13 @@ use std::error::Error;
use std::fmt; use std::fmt;
use std::ops::Range; use std::ops::Range;
use std::rc::Rc; use std::rc::Rc;
use std::sync::Arc;
use swc_common::chain; use swc_common::chain;
use swc_common::comments::Comment; use swc_common::comments::Comment;
use swc_common::comments::CommentKind; use swc_common::comments::CommentKind;
use swc_common::comments::Comments;
use swc_common::comments::SingleThreadedComments; use swc_common::comments::SingleThreadedComments;
use swc_common::BytePos;
use swc_common::FileName; use swc_common::FileName;
use swc_common::Globals; use swc_common::Globals;
use swc_common::SourceFile; use swc_common::SourceFile;
@ -43,13 +46,20 @@ use swc_ecmascript::transforms::react;
use swc_ecmascript::transforms::typescript; use swc_ecmascript::transforms::typescript;
use swc_ecmascript::visit::FoldWith; use swc_ecmascript::visit::FoldWith;
mod bundle_hook;
mod comments;
mod source_file_info;
mod transforms; mod transforms;
pub use bundle_hook::BundleHook;
use comments::MultiThreadedComments;
use source_file_info::SourceFileInfo;
static TARGET: JscTarget = JscTarget::Es2020; static TARGET: JscTarget = JscTarget::Es2020;
#[derive(Debug, Clone, Eq, PartialEq)] #[derive(Debug, Clone, Eq, PartialEq)]
pub struct Location { pub struct Location {
pub filename: String, pub specifier: String,
pub line: usize, pub line: usize,
pub col: usize, pub col: usize,
} }
@ -65,7 +75,7 @@ impl From<swc_common::Loc> for Location {
}; };
Location { Location {
filename, specifier: filename,
line: swc_loc.line, line: swc_loc.line,
col: swc_loc.col_display, col: swc_loc.col_display,
} }
@ -74,13 +84,13 @@ impl From<swc_common::Loc> for Location {
impl From<Location> for ModuleSpecifier { impl From<Location> for ModuleSpecifier {
fn from(loc: Location) -> Self { fn from(loc: Location) -> Self {
resolve_url_or_path(&loc.filename).unwrap() resolve_url_or_path(&loc.specifier).unwrap()
} }
} }
impl std::fmt::Display for Location { impl std::fmt::Display for Location {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}:{}:{}", self.filename, self.line, self.col) write!(f, "{}:{}:{}", self.specifier, self.line, self.col)
} }
} }
@ -239,18 +249,15 @@ fn strip_config_from_emit_options(
/// processing. /// processing.
#[derive(Clone)] #[derive(Clone)]
pub struct ParsedModule { pub struct ParsedModule {
comments: SingleThreadedComments, info: Arc<SourceFileInfo>,
leading_comments: Vec<Comment>, comments: MultiThreadedComments,
pub module: Module, pub module: Module,
pub source_map: Rc<SourceMap>,
source_file: Rc<SourceFile>,
} }
impl fmt::Debug for ParsedModule { impl fmt::Debug for ParsedModule {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("ParsedModule") f.debug_struct("ParsedModule")
.field("comments", &self.comments) .field("comments", &self.comments)
.field("leading_comments", &self.leading_comments)
.field("module", &self.module) .field("module", &self.module)
.finish() .finish()
} }
@ -265,28 +272,20 @@ impl ParsedModule {
/// Get the module's leading comments, where triple slash directives might /// Get the module's leading comments, where triple slash directives might
/// be located. /// be located.
pub fn get_leading_comments(&self) -> Vec<Comment> { pub fn get_leading_comments(&self) -> Vec<Comment> {
self.leading_comments.clone() self
.comments
.get_leading(self.module.span.lo)
.unwrap_or_else(Vec::new)
} }
/// Get the module's comments. /// Get the module's comments sorted by position.
pub fn get_comments(&self) -> Vec<Comment> { pub fn get_comments(&self) -> Vec<Comment> {
let mut comments = Vec::new(); self.comments.get_vec()
let (leading_comments, trailing_comments) = self.comments.borrow_all();
for value in leading_comments.values() {
comments.append(&mut value.clone());
}
for value in trailing_comments.values() {
comments.append(&mut value.clone());
}
comments
} }
/// Get a location for a given span within the module. /// Get a location for a given position within the module.
pub fn get_location(&self, span: &Span) -> Location { pub fn get_location(&self, pos: BytePos) -> Location {
self.source_map.lookup_char_pos(span.lo).into() self.info.get_location(pos)
} }
/// Transform a TypeScript file into a JavaScript file, based on the supplied /// Transform a TypeScript file into a JavaScript file, based on the supplied
@ -298,10 +297,14 @@ impl ParsedModule {
options: &EmitOptions, options: &EmitOptions,
) -> Result<(String, Option<String>), AnyError> { ) -> Result<(String, Option<String>), AnyError> {
let program = Program::Module(self.module); let program = Program::Module(self.module);
let source_map = Rc::new(SourceMap::default());
let file_name = FileName::Custom(self.info.specifier.clone());
source_map.new_source_file(file_name, self.info.text.clone());
let comments = self.comments.as_single_threaded(); // needs to be mutable
let jsx_pass = react::react( let jsx_pass = react::react(
self.source_map.clone(), source_map.clone(),
Some(&self.comments), Some(&comments),
react::Options { react::Options {
pragma: options.jsx_factory.clone(), pragma: options.jsx_factory.clone(),
pragma_frag: options.jsx_fragment_factory.clone(), pragma_frag: options.jsx_fragment_factory.clone(),
@ -324,7 +327,7 @@ impl ParsedModule {
typescript::strip::strip_with_config(strip_config_from_emit_options( typescript::strip::strip_with_config(strip_config_from_emit_options(
options options
)), )),
fixer(Some(&self.comments)), fixer(Some(&comments)),
hygiene(), hygiene(),
); );
@ -338,7 +341,7 @@ impl ParsedModule {
let mut buf = vec![]; let mut buf = vec![];
{ {
let writer = Box::new(JsWriter::new( let writer = Box::new(JsWriter::new(
self.source_map.clone(), source_map.clone(),
"\n", "\n",
&mut buf, &mut buf,
Some(&mut src_map_buf), Some(&mut src_map_buf),
@ -346,8 +349,8 @@ impl ParsedModule {
let config = swc_ecmascript::codegen::Config { minify: false }; let config = swc_ecmascript::codegen::Config { minify: false };
let mut emitter = swc_ecmascript::codegen::Emitter { let mut emitter = swc_ecmascript::codegen::Emitter {
cfg: config, cfg: config,
comments: Some(&self.comments), comments: Some(&comments),
cm: self.source_map.clone(), cm: source_map.clone(),
wr: writer, wr: writer,
}; };
program.emit_with(&mut emitter)?; program.emit_with(&mut emitter)?;
@ -356,8 +359,7 @@ impl ParsedModule {
let mut map: Option<String> = None; let mut map: Option<String> = None;
{ {
let mut buf = Vec::new(); let mut buf = Vec::new();
self source_map
.source_map
.build_source_map_from(&mut src_map_buf, None) .build_source_map_from(&mut src_map_buf, None)
.to_writer(&mut buf)?; .to_writer(&mut buf)?;
@ -373,41 +375,7 @@ impl ParsedModule {
} }
} }
pub fn parse_with_source_map( /// For a given specifier, source, and media type, parse the text of the
specifier: &str,
source: &str,
media_type: &MediaType,
source_map: Rc<SourceMap>,
) -> Result<ParsedModule, AnyError> {
let source_file = source_map.new_source_file(
FileName::Custom(specifier.to_string()),
source.to_string(),
);
let syntax = get_syntax(media_type);
let input = StringInput::from(&*source_file);
let comments = SingleThreadedComments::default();
let lexer = Lexer::new(syntax, TARGET, input, Some(&comments));
let mut parser = swc_ecmascript::parser::Parser::new_from(lexer);
let sm = &source_map;
let module = parser.parse_module().map_err(move |err| Diagnostic {
location: sm.lookup_char_pos(err.span().lo).into(),
message: err.into_kind().msg().to_string(),
})?;
let leading_comments =
comments.with_leading(module.span.lo, |comments| comments.to_vec());
Ok(ParsedModule {
comments,
leading_comments,
module,
source_map,
source_file,
})
}
/// For a given specifier, source, and media type, parse the source of the
/// module and return a representation which can be further processed. /// module and return a representation which can be further processed.
/// ///
/// # Arguments /// # Arguments
@ -424,8 +392,16 @@ pub fn parse(
source: &str, source: &str,
media_type: &MediaType, media_type: &MediaType,
) -> Result<ParsedModule, AnyError> { ) -> Result<ParsedModule, AnyError> {
let source_map = Rc::new(SourceMap::default()); let info = SourceFileInfo::new(specifier, source);
parse_with_source_map(specifier, source, media_type, source_map) let input =
StringInput::new(source, BytePos(0), BytePos(source.len() as u32));
let (comments, module) = parse_string_input(&info, input, media_type)?;
Ok(ParsedModule {
info: Arc::new(info),
comments: MultiThreadedComments::from_single_threaded(comments),
module,
})
} }
pub enum TokenOrComment { pub enum TokenOrComment {
@ -453,21 +429,12 @@ fn flatten_comments(
comments.into_iter().flat_map(|el| el.1) comments.into_iter().flat_map(|el| el.1)
} }
pub fn lex( pub fn lex(source: &str, media_type: &MediaType) -> Vec<LexedItem> {
specifier: &str,
source: &str,
media_type: &MediaType,
) -> Vec<LexedItem> {
let source_map = SourceMap::default();
let source_file = source_map.new_source_file(
FileName::Custom(specifier.to_string()),
source.to_string(),
);
let comments = SingleThreadedComments::default(); let comments = SingleThreadedComments::default();
let lexer = Lexer::new( let lexer = Lexer::new(
get_syntax(media_type), get_syntax(media_type),
TARGET, TARGET,
StringInput::from(source_file.as_ref()), StringInput::new(source, BytePos(0), BytePos(source.len() as u32)),
Some(&comments), Some(&comments),
); );
@ -494,19 +461,24 @@ pub fn lex(
/// A low level function which transpiles a source module into an swc /// A low level function which transpiles a source module into an swc
/// SourceFile. /// SourceFile.
pub fn transpile_module( pub fn transpile_module(
filename: &str, specifier: &str,
src: &str, source: &str,
media_type: &MediaType, media_type: &MediaType,
emit_options: &EmitOptions, emit_options: &EmitOptions,
globals: &Globals, globals: &Globals,
cm: Rc<SourceMap>, cm: Rc<SourceMap>,
) -> Result<(Rc<SourceFile>, Module), AnyError> { ) -> Result<(Rc<SourceFile>, Module), AnyError> {
let parsed_module = let info = SourceFileInfo::new(specifier, source);
parse_with_source_map(filename, src, media_type, cm.clone())?; let source_file = cm.new_source_file(
FileName::Custom(specifier.to_string()),
source.to_string(),
);
let input = StringInput::from(&*source_file);
let (comments, module) = parse_string_input(&info, input, media_type)?;
let jsx_pass = react::react( let jsx_pass = react::react(
cm, cm,
Some(&parsed_module.comments), Some(&comments),
react::Options { react::Options {
pragma: emit_options.jsx_factory.clone(), pragma: emit_options.jsx_factory.clone(),
pragma_frag: emit_options.jsx_fragment_factory.clone(), pragma_frag: emit_options.jsx_fragment_factory.clone(),
@ -526,12 +498,9 @@ pub fn transpile_module(
typescript::strip::strip_with_config(strip_config_from_emit_options( typescript::strip::strip_with_config(strip_config_from_emit_options(
emit_options emit_options
)), )),
fixer(Some(&parsed_module.comments)), fixer(Some(&comments)),
); );
let source_file = parsed_module.source_file.clone();
let module = parsed_module.module;
let module = swc_common::GLOBALS.set(globals, || { let module = swc_common::GLOBALS.set(globals, || {
helpers::HELPERS.set(&helpers::Helpers::new(false), || { helpers::HELPERS.set(&helpers::Helpers::new(false), || {
module.fold_with(&mut passes) module.fold_with(&mut passes)
@ -541,55 +510,22 @@ pub fn transpile_module(
Ok((source_file, module)) Ok((source_file, module))
} }
pub struct BundleHook; fn parse_string_input(
info: &SourceFileInfo,
input: StringInput,
media_type: &MediaType,
) -> Result<(SingleThreadedComments, Module), AnyError> {
let syntax = get_syntax(media_type);
let comments = SingleThreadedComments::default();
let lexer = Lexer::new(syntax, TARGET, input, Some(&comments));
let mut parser = swc_ecmascript::parser::Parser::new_from(lexer);
impl swc_bundler::Hook for BundleHook { let module = parser.parse_module().map_err(|err| Diagnostic {
fn get_import_meta_props( location: info.get_location(err.span().lo),
&self, message: err.into_kind().msg().to_string(),
span: swc_common::Span, })?;
module_record: &swc_bundler::ModuleRecord,
) -> Result<Vec<swc_ecmascript::ast::KeyValueProp>, AnyError> {
use swc_ecmascript::ast;
// we use custom file names, and swc "wraps" these in `<` and `>` so, we Ok((comments, module))
// want to strip those back out.
let mut value = module_record.file_name.to_string();
value.pop();
value.remove(0);
Ok(vec![
ast::KeyValueProp {
key: ast::PropName::Ident(ast::Ident::new("url".into(), span)),
value: Box::new(ast::Expr::Lit(ast::Lit::Str(ast::Str {
span,
value: value.into(),
kind: ast::StrKind::Synthesized,
has_escape: false,
}))),
},
ast::KeyValueProp {
key: ast::PropName::Ident(ast::Ident::new("main".into(), span)),
value: Box::new(if module_record.is_entry {
ast::Expr::Member(ast::MemberExpr {
span,
obj: ast::ExprOrSuper::Expr(Box::new(ast::Expr::MetaProp(
ast::MetaPropExpr {
meta: ast::Ident::new("import".into(), span),
prop: ast::Ident::new("meta".into(), span),
},
))),
prop: Box::new(ast::Expr::Ident(ast::Ident::new(
"main".into(),
span,
))),
computed: false,
})
} else {
ast::Expr::Lit(ast::Lit::Bool(ast::Bool { span, value: false }))
}),
},
])
}
} }
#[cfg(test)] #[cfg(test)]

130
cli/ast/source_file_info.rs Normal file
View file

@ -0,0 +1,130 @@
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
use super::Location;
use swc_common::BytePos;
/// Holds a module's specifier and source text along with the precomputed
/// byte position of each line start, so a `BytePos` can be resolved to a
/// line/column `Location`.
pub struct SourceFileInfo {
  pub specifier: String,
  pub text: String,
  // byte position of the start of every line; index 0 is always BytePos(0)
  line_start_byte_positions: Vec<BytePos>,
}
impl SourceFileInfo {
  /// Builds the info for a specifier and its source text, precomputing the
  /// byte position of every line start.
  pub fn new(specifier: &str, text: &str) -> SourceFileInfo {
    SourceFileInfo {
      specifier: specifier.to_string(),
      text: text.to_string(),
      line_start_byte_positions: get_line_start_positions(text),
    }
  }

  /// Resolves a byte position within the text to a line/column `Location`.
  pub fn get_location(&self, pos: BytePos) -> Location {
    let line_index = self.get_line_index_at_pos(pos);
    Location {
      specifier: self.specifier.clone(),
      // todo(dsherret): this is temporarily 1-indexed in order to have
      // the same behaviour as swc, but we should change this to be 0-indexed
      // in order to be the same as the LSP.
      line: line_index + 1,
      col: self.get_column_on_line_index_at_pos(line_index, pos),
    }
  }

  /// Finds the index of the line containing `pos` by binary searching the
  /// line start positions.
  fn get_line_index_at_pos(&self, pos: BytePos) -> usize {
    // when `pos` is not exactly a line start, the search reports where it
    // would be inserted, so the containing line is the one before that
    self
      .line_start_byte_positions
      .binary_search(&pos)
      .unwrap_or_else(|insert_index| insert_index - 1)
  }

  /// Gets the character-based column of `pos` on the line at `line_index`.
  fn get_column_on_line_index_at_pos(
    &self,
    line_index: usize,
    pos: BytePos,
  ) -> usize {
    assert!(line_index < self.line_start_byte_positions.len());
    let byte_pos = pos.0 as usize;
    let start = self.line_start_byte_positions[line_index].0 as usize;
    // may include line feed chars at the end, but in that case the pos should be less
    let end = match self.line_start_byte_positions.get(line_index + 1) {
      Some(next_start) => next_start.0 as usize,
      None => self.text.len(),
    };

    if byte_pos < start {
      panic!(
        "byte position {} was less than the start line position of {}",
        byte_pos, start
      );
    }
    if byte_pos > end {
      panic!(
        "byte position {} exceeded the end line position of {}",
        byte_pos, end
      );
    }

    let line_text = &self.text[start..end];
    if byte_pos == end {
      // at the very end of the line text, so the column is the full
      // character length of the line
      line_text.chars().count()
    } else {
      // count characters up to the first one at or beyond the byte position
      line_text
        .char_indices()
        .position(|(char_start, _)| start + char_start >= byte_pos)
        .unwrap()
    }
  }
}
/// Returns the byte position of the start of every line in `text`. The
/// first entry is always `BytePos(0)`; each subsequent entry is the byte
/// directly after a `'\n'`.
fn get_line_start_positions(text: &str) -> Vec<BytePos> {
  // `'\n'` is ASCII, so `match_indices` yields the same byte offsets the
  // original char-by-char scan would
  std::iter::once(BytePos(0))
    .chain(
      text
        .match_indices('\n')
        .map(|(pos, _)| BytePos((pos + 1) as u32)),
    )
    .collect()
}
#[cfg(test)]
mod test {
  use super::SourceFileInfo;
  use crate::ast::Location;
  use swc_common::BytePos;

  #[test]
  fn should_provide_locations() {
    // mixed line endings on purpose: "12\n" (LF), "3\r\n" (CRLF), "4\n", "5"
    let text = "12\n3\r\n4\n5";
    let specifier = "file:///file.ts";
    let info = SourceFileInfo::new(specifier, text);
    // lines are 1-indexed, columns 0-indexed; positions inside the line
    // terminator still report the terminator's line
    assert_pos_line_and_col(&info, 0, 1, 0); // 1
    assert_pos_line_and_col(&info, 1, 1, 1); // 2
    assert_pos_line_and_col(&info, 2, 1, 2); // \n
    assert_pos_line_and_col(&info, 3, 2, 0); // 3
    assert_pos_line_and_col(&info, 4, 2, 1); // \r
    assert_pos_line_and_col(&info, 5, 2, 2); // \n
    assert_pos_line_and_col(&info, 6, 3, 0); // 4
    assert_pos_line_and_col(&info, 7, 3, 1); // \n
    assert_pos_line_and_col(&info, 8, 4, 0); // 5
    assert_pos_line_and_col(&info, 9, 4, 1); // <EOF>
  }

  // asserts that `pos` resolves to the given 1-indexed line and 0-indexed column
  fn assert_pos_line_and_col(
    info: &SourceFileInfo,
    pos: u32,
    line: usize,
    col: usize,
  ) {
    assert_eq!(
      info.get_location(BytePos(pos)),
      Location {
        specifier: info.specifier.clone(),
        line,
        col,
      }
    );
  }
}

View file

@ -29,9 +29,6 @@ use regex::Regex;
use std::cmp::Ordering; use std::cmp::Ordering;
use std::collections::HashMap; use std::collections::HashMap;
use std::fmt; use std::fmt;
use std::rc::Rc;
use swc_common::Loc;
use swc_common::SourceMap;
use swc_common::DUMMY_SP; use swc_common::DUMMY_SP;
use swc_ecmascript::ast as swc_ast; use swc_ecmascript::ast as swc_ast;
use swc_ecmascript::visit::Node; use swc_ecmascript::visit::Node;
@ -287,13 +284,7 @@ pub fn parse_module(
source: &str, source: &str,
media_type: &MediaType, media_type: &MediaType,
) -> Result<ast::ParsedModule, AnyError> { ) -> Result<ast::ParsedModule, AnyError> {
let source_map = Rc::new(swc_common::SourceMap::default()); ast::parse(&specifier.to_string(), source, media_type)
ast::parse_with_source_map(
&specifier.to_string(),
source,
media_type,
source_map,
)
} }
// TODO(@kitsonk) a lot of this logic is duplicated in module_graph.rs in // TODO(@kitsonk) a lot of this logic is duplicated in module_graph.rs in
@ -310,7 +301,7 @@ pub fn analyze_dependencies(
// Parse leading comments for supported triple slash references. // Parse leading comments for supported triple slash references.
for comment in parsed_module.get_leading_comments().iter() { for comment in parsed_module.get_leading_comments().iter() {
if let Some((ts_reference, span)) = parse_ts_reference(comment) { if let Some((ts_reference, span)) = parse_ts_reference(comment) {
let loc = parsed_module.source_map.lookup_char_pos(span.lo); let loc = parsed_module.get_location(span.lo);
match ts_reference { match ts_reference {
TypeScriptReference::Path(import) => { TypeScriptReference::Path(import) => {
let dep = dependencies.entry(import.clone()).or_default(); let dep = dependencies.entry(import.clone()).or_default();
@ -320,11 +311,11 @@ pub fn analyze_dependencies(
dep.maybe_code_specifier_range = Some(Range { dep.maybe_code_specifier_range = Some(Range {
start: Position { start: Position {
line: (loc.line - 1) as u32, line: (loc.line - 1) as u32,
character: loc.col_display as u32, character: loc.col as u32,
}, },
end: Position { end: Position {
line: (loc.line - 1) as u32, line: (loc.line - 1) as u32,
character: (loc.col_display + import.chars().count() + 2) as u32, character: (loc.col + import.chars().count() + 2) as u32,
}, },
}); });
} }
@ -341,11 +332,11 @@ pub fn analyze_dependencies(
dep.maybe_type_specifier_range = Some(Range { dep.maybe_type_specifier_range = Some(Range {
start: Position { start: Position {
line: (loc.line - 1) as u32, line: (loc.line - 1) as u32,
character: loc.col_display as u32, character: loc.col as u32,
}, },
end: Position { end: Position {
line: (loc.line - 1) as u32, line: (loc.line - 1) as u32,
character: (loc.col_display + import.chars().count() + 2) as u32, character: (loc.col + import.chars().count() + 2) as u32,
}, },
}); });
} }
@ -368,7 +359,7 @@ pub fn analyze_dependencies(
( (
resolve_import(deno_types, specifier, maybe_import_map), resolve_import(deno_types, specifier, maybe_import_map),
deno_types.clone(), deno_types.clone(),
parsed_module.source_map.lookup_char_pos(span.lo) parsed_module.get_location(span.lo)
) )
}) })
} else { } else {
@ -377,20 +368,16 @@ pub fn analyze_dependencies(
let dep = dependencies.entry(desc.specifier.to_string()).or_default(); let dep = dependencies.entry(desc.specifier.to_string()).or_default();
dep.is_dynamic = desc.is_dynamic; dep.is_dynamic = desc.is_dynamic;
let start = parsed_module let start = parsed_module.get_location(desc.specifier_span.lo);
.source_map let end = parsed_module.get_location(desc.specifier_span.hi);
.lookup_char_pos(desc.specifier_span.lo);
let end = parsed_module
.source_map
.lookup_char_pos(desc.specifier_span.hi);
let range = Range { let range = Range {
start: Position { start: Position {
line: (start.line - 1) as u32, line: (start.line - 1) as u32,
character: start.col_display as u32, character: start.col as u32,
}, },
end: Position { end: Position {
line: (end.line - 1) as u32, line: (end.line - 1) as u32,
character: end.col_display as u32, character: end.col as u32,
}, },
}; };
dep.maybe_code_specifier_range = Some(range); dep.maybe_code_specifier_range = Some(range);
@ -402,11 +389,11 @@ pub fn analyze_dependencies(
dep.maybe_type_specifier_range = Some(Range { dep.maybe_type_specifier_range = Some(Range {
start: Position { start: Position {
line: (loc.line - 1) as u32, line: (loc.line - 1) as u32,
character: (loc.col_display + 1) as u32, character: (loc.col + 1) as u32,
}, },
end: Position { end: Position {
line: (loc.line - 1) as u32, line: (loc.line - 1) as u32,
character: (loc.col_display + 1 + specifier.chars().count()) as u32, character: (loc.col + 1 + specifier.chars().count()) as u32,
}, },
}); });
dep.maybe_type = Some(resolved_dependency); dep.maybe_type = Some(resolved_dependency);
@ -971,16 +958,19 @@ fn prepend_whitespace(content: String, line_content: Option<String>) -> String {
} }
} }
/// Get LSP range from the provided SWC start and end locations. /// Get LSP range from the provided start and end locations.
fn get_range_from_loc(start: &Loc, end: &Loc) -> lsp::Range { fn get_range_from_location(
start: &ast::Location,
end: &ast::Location,
) -> lsp::Range {
lsp::Range { lsp::Range {
start: lsp::Position { start: lsp::Position {
line: (start.line - 1) as u32, line: (start.line - 1) as u32,
character: start.col_display as u32, character: start.col as u32,
}, },
end: lsp::Position { end: lsp::Position {
line: (end.line - 1) as u32, line: (end.line - 1) as u32,
character: end.col_display as u32, character: end.col as u32,
}, },
} }
} }
@ -1029,16 +1019,16 @@ impl DependencyRanges {
} }
} }
struct DependencyRangeCollector { struct DependencyRangeCollector<'a> {
import_ranges: DependencyRanges, import_ranges: DependencyRanges,
source_map: Rc<SourceMap>, parsed_module: &'a ast::ParsedModule,
} }
impl DependencyRangeCollector { impl<'a> DependencyRangeCollector<'a> {
pub fn new(source_map: Rc<SourceMap>) -> Self { pub fn new(parsed_module: &'a ast::ParsedModule) -> Self {
Self { Self {
import_ranges: DependencyRanges::default(), import_ranges: DependencyRanges::default(),
source_map, parsed_module,
} }
} }
@ -1047,16 +1037,16 @@ impl DependencyRangeCollector {
} }
} }
impl Visit for DependencyRangeCollector { impl<'a> Visit for DependencyRangeCollector<'a> {
fn visit_import_decl( fn visit_import_decl(
&mut self, &mut self,
node: &swc_ast::ImportDecl, node: &swc_ast::ImportDecl,
_parent: &dyn Node, _parent: &dyn Node,
) { ) {
let start = self.source_map.lookup_char_pos(node.src.span.lo); let start = self.parsed_module.get_location(node.src.span.lo);
let end = self.source_map.lookup_char_pos(node.src.span.hi); let end = self.parsed_module.get_location(node.src.span.hi);
self.import_ranges.0.push(DependencyRange { self.import_ranges.0.push(DependencyRange {
range: narrow_range(get_range_from_loc(&start, &end)), range: narrow_range(get_range_from_location(&start, &end)),
specifier: node.src.value.to_string(), specifier: node.src.value.to_string(),
}); });
} }
@ -1067,10 +1057,10 @@ impl Visit for DependencyRangeCollector {
_parent: &dyn Node, _parent: &dyn Node,
) { ) {
if let Some(src) = &node.src { if let Some(src) = &node.src {
let start = self.source_map.lookup_char_pos(src.span.lo); let start = self.parsed_module.get_location(src.span.lo);
let end = self.source_map.lookup_char_pos(src.span.hi); let end = self.parsed_module.get_location(src.span.hi);
self.import_ranges.0.push(DependencyRange { self.import_ranges.0.push(DependencyRange {
range: narrow_range(get_range_from_loc(&start, &end)), range: narrow_range(get_range_from_location(&start, &end)),
specifier: src.value.to_string(), specifier: src.value.to_string(),
}); });
} }
@ -1081,10 +1071,10 @@ impl Visit for DependencyRangeCollector {
node: &swc_ast::ExportAll, node: &swc_ast::ExportAll,
_parent: &dyn Node, _parent: &dyn Node,
) { ) {
let start = self.source_map.lookup_char_pos(node.src.span.lo); let start = self.parsed_module.get_location(node.src.span.lo);
let end = self.source_map.lookup_char_pos(node.src.span.hi); let end = self.parsed_module.get_location(node.src.span.hi);
self.import_ranges.0.push(DependencyRange { self.import_ranges.0.push(DependencyRange {
range: narrow_range(get_range_from_loc(&start, &end)), range: narrow_range(get_range_from_location(&start, &end)),
specifier: node.src.value.to_string(), specifier: node.src.value.to_string(),
}); });
} }
@ -1094,10 +1084,10 @@ impl Visit for DependencyRangeCollector {
node: &swc_ast::TsImportType, node: &swc_ast::TsImportType,
_parent: &dyn Node, _parent: &dyn Node,
) { ) {
let start = self.source_map.lookup_char_pos(node.arg.span.lo); let start = self.parsed_module.get_location(node.arg.span.lo);
let end = self.source_map.lookup_char_pos(node.arg.span.hi); let end = self.parsed_module.get_location(node.arg.span.hi);
self.import_ranges.0.push(DependencyRange { self.import_ranges.0.push(DependencyRange {
range: narrow_range(get_range_from_loc(&start, &end)), range: narrow_range(get_range_from_location(&start, &end)),
specifier: node.arg.value.to_string(), specifier: node.arg.value.to_string(),
}); });
} }
@ -1108,8 +1098,7 @@ impl Visit for DependencyRangeCollector {
pub fn analyze_dependency_ranges( pub fn analyze_dependency_ranges(
parsed_module: &ast::ParsedModule, parsed_module: &ast::ParsedModule,
) -> Result<DependencyRanges, AnyError> { ) -> Result<DependencyRanges, AnyError> {
let mut collector = let mut collector = DependencyRangeCollector::new(parsed_module);
DependencyRangeCollector::new(parsed_module.source_map.clone());
parsed_module parsed_module
.module .module
.visit_with(&swc_ast::Invalid { span: DUMMY_SP }, &mut collector); .visit_with(&swc_ast::Invalid { span: DUMMY_SP }, &mut collector);

View file

@ -3,6 +3,7 @@
use super::analysis; use super::analysis;
use super::language_server; use super::language_server;
use super::tsc; use super::tsc;
use crate::ast::ParsedModule;
use deno_core::error::anyhow; use deno_core::error::anyhow;
use deno_core::error::AnyError; use deno_core::error::AnyError;
@ -17,7 +18,6 @@ use regex::Regex;
use std::cell::RefCell; use std::cell::RefCell;
use std::collections::HashSet; use std::collections::HashSet;
use std::rc::Rc; use std::rc::Rc;
use swc_common::SourceMap;
use swc_common::Span; use swc_common::Span;
use swc_ecmascript::ast; use swc_ecmascript::ast;
use swc_ecmascript::visit::Node; use swc_ecmascript::visit::Node;
@ -44,40 +44,43 @@ pub struct CodeLensData {
pub specifier: ModuleSpecifier, pub specifier: ModuleSpecifier,
} }
fn span_to_range(span: &Span, source_map: Rc<SourceMap>) -> lsp::Range { fn span_to_range(span: &Span, parsed_module: &ParsedModule) -> lsp::Range {
let start = source_map.lookup_char_pos(span.lo); let start = parsed_module.get_location(span.lo);
let end = source_map.lookup_char_pos(span.hi); let end = parsed_module.get_location(span.hi);
lsp::Range { lsp::Range {
start: lsp::Position { start: lsp::Position {
line: (start.line - 1) as u32, line: (start.line - 1) as u32,
character: start.col_display as u32, character: start.col as u32,
}, },
end: lsp::Position { end: lsp::Position {
line: (end.line - 1) as u32, line: (end.line - 1) as u32,
character: end.col_display as u32, character: end.col as u32,
}, },
} }
} }
struct DenoTestCollector { struct DenoTestCollector<'a> {
code_lenses: Vec<lsp::CodeLens>, code_lenses: Vec<lsp::CodeLens>,
source_map: Rc<SourceMap>, parsed_module: &'a ParsedModule,
specifier: ModuleSpecifier, specifier: ModuleSpecifier,
test_vars: HashSet<String>, test_vars: HashSet<String>,
} }
impl DenoTestCollector { impl<'a> DenoTestCollector<'a> {
pub fn new(specifier: ModuleSpecifier, source_map: Rc<SourceMap>) -> Self { pub fn new(
specifier: ModuleSpecifier,
parsed_module: &'a ParsedModule,
) -> Self {
Self { Self {
code_lenses: Vec::new(), code_lenses: Vec::new(),
source_map, parsed_module,
specifier, specifier,
test_vars: HashSet::new(), test_vars: HashSet::new(),
} }
} }
fn add_code_lens<N: AsRef<str>>(&mut self, name: N, span: &Span) { fn add_code_lens<N: AsRef<str>>(&mut self, name: N, span: &Span) {
let range = span_to_range(span, self.source_map.clone()); let range = span_to_range(span, self.parsed_module);
self.code_lenses.push(lsp::CodeLens { self.code_lenses.push(lsp::CodeLens {
range, range,
command: Some(lsp::Command { command: Some(lsp::Command {
@ -125,7 +128,7 @@ impl DenoTestCollector {
} }
} }
impl Visit for DenoTestCollector { impl<'a> Visit for DenoTestCollector<'a> {
fn visit_call_expr(&mut self, node: &ast::CallExpr, _parent: &dyn Node) { fn visit_call_expr(&mut self, node: &ast::CallExpr, _parent: &dyn Node) {
if let ast::ExprOrSuper::Expr(callee_expr) = &node.callee { if let ast::ExprOrSuper::Expr(callee_expr) = &node.callee {
match callee_expr.as_ref() { match callee_expr.as_ref() {
@ -392,10 +395,8 @@ fn collect_test(
if let Ok(parsed_module) = if let Ok(parsed_module) =
analysis::parse_module(specifier, &source, &media_type) analysis::parse_module(specifier, &source, &media_type)
{ {
let mut collector = DenoTestCollector::new( let mut collector =
specifier.clone(), DenoTestCollector::new(specifier.clone(), &parsed_module);
parsed_module.source_map.clone(),
);
parsed_module.module.visit_with( parsed_module.module.visit_with(
&ast::Invalid { &ast::Invalid {
span: swc_common::DUMMY_SP, span: swc_common::DUMMY_SP,
@ -521,8 +522,7 @@ mod tests {
let parsed_module = let parsed_module =
analysis::parse_module(&specifier, source, &MediaType::TypeScript) analysis::parse_module(&specifier, source, &MediaType::TypeScript)
.unwrap(); .unwrap();
let mut collector = let mut collector = DenoTestCollector::new(specifier, &parsed_module);
DenoTestCollector::new(specifier, parsed_module.source_map.clone());
parsed_module.module.visit_with( parsed_module.module.visit_with(
&ast::Invalid { &ast::Invalid {
span: swc_common::DUMMY_SP, span: swc_common::DUMMY_SP,

View file

@ -349,7 +349,7 @@ impl Module {
// parse out any triple slash references // parse out any triple slash references
for comment in parsed_module.get_leading_comments().iter() { for comment in parsed_module.get_leading_comments().iter() {
if let Some((ts_reference, _)) = parse_ts_reference(comment) { if let Some((ts_reference, _)) = parse_ts_reference(comment) {
let location = parsed_module.get_location(&comment.span); let location = parsed_module.get_location(comment.span.lo);
match ts_reference { match ts_reference {
TypeScriptReference::Path(import) => { TypeScriptReference::Path(import) => {
let specifier = let specifier =
@ -386,12 +386,7 @@ impl Module {
for desc in dependencies.iter().filter(|desc| { for desc in dependencies.iter().filter(|desc| {
desc.kind != swc_ecmascript::dep_graph::DependencyKind::Require desc.kind != swc_ecmascript::dep_graph::DependencyKind::Require
}) { }) {
let loc = parsed_module.source_map.lookup_char_pos(desc.span.lo); let location = parsed_module.get_location(desc.span.lo);
let location = Location {
filename: self.specifier.to_string(),
col: loc.col_display,
line: loc.line,
};
// In situations where there is a potential issue with resolving the // In situations where there is a potential issue with resolving the
// import specifier, that ends up being a module resolution error for a // import specifier, that ends up being a module resolution error for a
@ -468,7 +463,7 @@ impl Module {
let referrer_scheme = self.specifier.scheme(); let referrer_scheme = self.specifier.scheme();
let specifier_scheme = specifier.scheme(); let specifier_scheme = specifier.scheme();
let location = maybe_location.unwrap_or(Location { let location = maybe_location.unwrap_or(Location {
filename: self.specifier.to_string(), specifier: self.specifier.to_string(),
line: 0, line: 0,
col: 0, col: 0,
}); });

View file

@ -274,7 +274,7 @@ impl SpecifierHandler for FetchHandler {
let message = if let Some(location) = &maybe_location { let message = if let Some(location) = &maybe_location {
format!( format!(
"Cannot resolve module \"{}\" from \"{}\".", "Cannot resolve module \"{}\" from \"{}\".",
requested_specifier, location.filename requested_specifier, location.specifier
) )
} else { } else {
format!("Cannot resolve module \"{}\".", requested_specifier) format!("Cannot resolve module \"{}\".", requested_specifier)
@ -291,7 +291,7 @@ impl SpecifierHandler for FetchHandler {
// they are confusing to the user to print out the location because // they are confusing to the user to print out the location because
// they cannot actually get to the source code that is quoted, as // they cannot actually get to the source code that is quoted, as
// it only exists in the runtime memory of Deno. // it only exists in the runtime memory of Deno.
if !location.filename.contains("$deno$") { if !location.specifier.contains("$deno$") {
( (
requested_specifier.clone(), requested_specifier.clone(),
HandlerError::FetchErrorWithLocation(err.to_string(), location) HandlerError::FetchErrorWithLocation(err.to_string(), location)

View file

@ -432,7 +432,7 @@ impl CoverageReporter for PrettyCoverageReporter {
.map(|source_map| SourceMap::from_slice(&source_map).unwrap()); .map(|source_map| SourceMap::from_slice(&source_map).unwrap());
let mut ignored_spans: Vec<Span> = Vec::new(); let mut ignored_spans: Vec<Span> = Vec::new();
for item in ast::lex("", script_source, &MediaType::JavaScript) { for item in ast::lex(script_source, &MediaType::JavaScript) {
if let TokenOrComment::Token(_) = item.inner { if let TokenOrComment::Token(_) = item.inner {
continue; continue;
} }

View file

@ -231,7 +231,7 @@ impl Validator for EditorHelper {
let mut stack: Vec<Token> = Vec::new(); let mut stack: Vec<Token> = Vec::new();
let mut in_template = false; let mut in_template = false;
for item in ast::lex("", ctx.input(), &MediaType::TypeScript) { for item in ast::lex(ctx.input(), &MediaType::TypeScript) {
if let TokenOrComment::Token(token) = item.inner { if let TokenOrComment::Token(token) = item.inner {
match token { match token {
Token::BackQuote => in_template = !in_template, Token::BackQuote => in_template = !in_template,
@ -302,7 +302,7 @@ impl Highlighter for EditorHelper {
fn highlight<'l>(&self, line: &'l str, _: usize) -> Cow<'l, str> { fn highlight<'l>(&self, line: &'l str, _: usize) -> Cow<'l, str> {
let mut out_line = String::from(line); let mut out_line = String::from(line);
for item in ast::lex("", line, &MediaType::TypeScript) { for item in ast::lex(line, &MediaType::TypeScript) {
// Adding color adds more bytes to the string, // Adding color adds more bytes to the string,
// so an offset is needed to stop spans falling out of sync. // so an offset is needed to stop spans falling out of sync.
let offset = out_line.len() - line.len(); let offset = out_line.len() - line.len();

View file

@ -399,7 +399,7 @@ fn extract_files_from_regex_blocks(
let file_specifier = deno_core::resolve_url_or_path(&format!( let file_specifier = deno_core::resolve_url_or_path(&format!(
"{}${}-{}{}", "{}${}-{}{}",
location.filename, location.specifier,
location.line + line_offset, location.line + line_offset,
location.line + line_offset + line_count, location.line + line_offset + line_count,
file_media_type.as_ts_extension(), file_media_type.as_ts_extension(),
@ -425,10 +425,7 @@ fn extract_files_from_source_comments(
media_type: &MediaType, media_type: &MediaType,
) -> Result<Vec<File>, AnyError> { ) -> Result<Vec<File>, AnyError> {
let parsed_module = ast::parse(specifier.as_str(), source, media_type)?; let parsed_module = ast::parse(specifier.as_str(), source, media_type)?;
let mut comments = parsed_module.get_comments(); let comments = parsed_module.get_comments();
comments
.sort_by_key(|comment| parsed_module.get_location(&comment.span).line);
let blocks_regex = Regex::new(r"```([^\n]*)\n([\S\s]*?)```")?; let blocks_regex = Regex::new(r"```([^\n]*)\n([\S\s]*?)```")?;
let lines_regex = Regex::new(r"(?:\* ?)(?:\# ?)?(.*)")?; let lines_regex = Regex::new(r"(?:\* ?)(?:\# ?)?(.*)")?;
@ -442,7 +439,7 @@ fn extract_files_from_source_comments(
true true
}) })
.flat_map(|comment| { .flat_map(|comment| {
let location = parsed_module.get_location(&comment.span); let location = parsed_module.get_location(comment.span.lo);
extract_files_from_regex_blocks( extract_files_from_regex_blocks(
&location, &location,
@ -464,7 +461,7 @@ fn extract_files_from_fenced_blocks(
media_type: &MediaType, media_type: &MediaType,
) -> Result<Vec<File>, AnyError> { ) -> Result<Vec<File>, AnyError> {
let location = Location { let location = Location {
filename: specifier.to_string(), specifier: specifier.to_string(),
line: 1, line: 1,
col: 0, col: 0,
}; };