Mirror of https://github.com/denoland/deno.git
refactor(cli): remove TextDocument (#7850)
This commit is contained in:
parent cb3a3a1e95
commit 99aa23b8dd
8 changed files with 126 additions and 227 deletions
cli/ast.rs (31 changed lines)

@@ -1,6 +1,5 @@
 // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
-use crate::file_fetcher::TextDocument;
 use crate::media_type::MediaType;
 use deno_core::error::AnyError;
@@ -247,7 +246,7 @@ impl ParsedModule {
   pub fn transpile(
     self,
     options: &TranspileOptions,
-  ) -> Result<(TextDocument, Option<TextDocument>)> {
+  ) -> Result<(String, Option<String>)> {
     let program = Program::Module(self.module);

     let jsx_pass = react::react(
@@ -297,7 +296,7 @@ impl ParsedModule {
       program.emit_with(&mut emitter)?;
     }
     let mut src = String::from_utf8(buf)?;
-    let mut map: Option<TextDocument> = None;
+    let mut map: Option<String> = None;
     {
       let mut buf = Vec::new();
       self
@@ -310,10 +309,10 @@ impl ParsedModule {
         let encoded_map = base64::encode(buf);
         src.push_str(&encoded_map);
       } else {
-        map = Some(TextDocument::from(buf));
+        map = Some(String::from_utf8(buf)?);
       }
     }
-    Ok((src.into(), map))
+    Ok((src, map))
   }
 }

@@ -439,14 +438,10 @@ mod tests {
     let (code, maybe_map) = module
       .transpile(&TranspileOptions::default())
       .expect("could not strip types");
-    assert!(code
-      .to_string()
-      .unwrap()
-      .starts_with("var D;\n(function(D) {\n"));
-    assert!(code
-      .to_string()
-      .unwrap()
-      .contains("\n//# sourceMappingURL=data:application/json;base64,"));
+    assert!(code.starts_with("var D;\n(function(D) {\n"));
+    assert!(
+      code.contains("\n//# sourceMappingURL=data:application/json;base64,")
+    );
     assert!(maybe_map.is_none());
   }

@@ -467,10 +462,7 @@ mod tests {
     let (code, _) = module
       .transpile(&TranspileOptions::default())
       .expect("could not strip types");
-    assert!(code
-      .to_string()
-      .unwrap()
-      .contains("React.createElement(\"div\", null"));
+    assert!(code.contains("React.createElement(\"div\", null"));
   }

   #[test]
@@ -501,9 +493,6 @@ mod tests {
     let (code, _) = module
       .transpile(&TranspileOptions::default())
       .expect("could not strip types");
-    assert!(code
-      .to_string()
-      .unwrap()
-      .contains("_applyDecoratedDescriptor("));
+    assert!(code.contains("_applyDecoratedDescriptor("));
   }
 }
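With TextDocument gone, `ParsedModule::transpile` hands the emitted code and the optional source map back as plain `String`s. A minimal caller sketch under that signature (not taken from the commit; the `AnyError` result alias and the inline-source-map default are read off the surrounding hunks):

```rust
// Sketch only: transpile consumes the parsed module and returns plain text.
fn emit_code(module: ParsedModule) -> Result<String, AnyError> {
  let (code, maybe_map) = module.transpile(&TranspileOptions::default())?;
  // With the default options the map is inlined as a data: URL, so no
  // separate map string comes back (see the test assertions above).
  debug_assert!(maybe_map.is_none());
  Ok(code)
}
```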
@@ -19,7 +19,6 @@ use deno_core::url::Url;
 use deno_core::ModuleSpecifier;
 use deno_fetch::reqwest;
 use log::info;
-use std::borrow::Cow;
 use std::collections::HashMap;
 use std::fs;
 use std::future::Future;
@@ -32,59 +31,6 @@ use std::str;
 use std::sync::Arc;
 use std::sync::Mutex;

-/// Structure representing a text document.
-#[derive(Debug, Clone, Eq, PartialEq)]
-pub struct TextDocument {
-  bytes: Vec<u8>,
-  charset: Cow<'static, str>,
-}
-
-impl TextDocument {
-  pub fn new(
-    bytes: Vec<u8>,
-    charset: Option<impl Into<Cow<'static, str>>>,
-  ) -> TextDocument {
-    let charset = charset
-      .map(|cs| cs.into())
-      .unwrap_or_else(|| text_encoding::detect_charset(&bytes).into());
-    TextDocument { bytes, charset }
-  }
-
-  pub fn as_bytes(&self) -> &Vec<u8> {
-    &self.bytes
-  }
-
-  pub fn into_bytes(self) -> Vec<u8> {
-    self.bytes
-  }
-
-  pub fn to_str(&self) -> Result<Cow<str>, std::io::Error> {
-    text_encoding::convert_to_utf8(&self.bytes, &self.charset)
-  }
-
-  pub fn to_string(&self) -> Result<String, std::io::Error> {
-    self.to_str().map(String::from)
-  }
-}
-
-impl From<Vec<u8>> for TextDocument {
-  fn from(bytes: Vec<u8>) -> Self {
-    TextDocument::new(bytes, Option::<&str>::None)
-  }
-}
-
-impl From<String> for TextDocument {
-  fn from(s: String) -> Self {
-    TextDocument::new(s.as_bytes().to_vec(), Option::<&str>::None)
-  }
-}
-
-impl From<&str> for TextDocument {
-  fn from(s: &str) -> Self {
-    TextDocument::new(s.as_bytes().to_vec(), Option::<&str>::None)
-  }
-}
-
 /// Structure representing local or remote file.
 ///
 /// In case of remote file `url` might be different than originally requested URL, if so
@@ -95,7 +41,7 @@ pub struct SourceFile {
   pub filename: PathBuf,
   pub types_header: Option<String>,
   pub media_type: MediaType,
-  pub source_code: TextDocument,
+  pub source_code: String,
 }

 /// Simple struct implementing in-process caching to prevent multiple
@@ -242,9 +188,8 @@ impl SourceFileFetcher {
     match result {
       Ok(mut file) => {
         // TODO: move somewhere?
-        if file.source_code.bytes.starts_with(b"#!") {
-          file.source_code =
-            filter_shebang(&file.source_code.to_str().unwrap()[..]).into();
+        if file.source_code.starts_with("#!") {
+          file.source_code = filter_shebang(&file.source_code);
         }

         // Cache in-process for subsequent access.
@@ -369,17 +314,18 @@ impl SourceFileFetcher {
       .map_err(|()| uri_error("File URL contains invalid path"))?;

     permissions.check_read(&filepath)?;
-    let source_code = match fs::read(filepath.clone()) {
-      Ok(c) => c,
-      Err(e) => return Err(e.into()),
-    };
-
-    let (media_type, charset) = map_content_type(&filepath, None);
+    let bytes = fs::read(filepath.clone())?;
+    let source_code = text_encoding::convert_to_utf8(
+      &bytes,
+      text_encoding::detect_charset(&bytes),
+    )?
+    .to_string();
+    let (media_type, _) = map_content_type(&filepath, None);
     Ok(SourceFile {
       url: module_url.clone(),
       filename: filepath,
       media_type,
-      source_code: TextDocument::new(source_code, charset),
+      source_code,
       types_header: None,
     })
   }
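This hunk is the heart of the refactor for local files: instead of storing raw bytes plus a charset in a lazy TextDocument, the fetcher decodes to UTF-8 once, at read time. A hedged sketch of that step in isolation (the helper name is mine; it assumes the crate-local `text_encoding` module is in scope, as in the file above):

```rust
use deno_core::error::AnyError;
use std::path::Path;

// Sketch only: decode a local file to a UTF-8 String up front, mirroring the
// added lines above, so every later consumer can treat the source as &str.
fn read_source(path: &Path) -> Result<String, AnyError> {
  let bytes = std::fs::read(path)?;
  let charset = text_encoding::detect_charset(&bytes);
  let source = text_encoding::convert_to_utf8(&bytes, charset)?.to_string();
  Ok(source)
}
```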
@@ -444,12 +390,17 @@ impl SourceFileFetcher {
       &fake_filepath,
       headers.get("content-type").map(|e| e.as_str()),
     );
+    let source_code = if let Some(charset) = charset {
+      text_encoding::convert_to_utf8(&source_code, &charset)?.to_string()
+    } else {
+      String::from_utf8(source_code)?
+    };
     let types_header = headers.get("x-typescript-types").map(|e| e.to_string());
     Ok(Some(SourceFile {
       url: module_url.clone(),
       filename: cache_filename,
       media_type,
-      source_code: TextDocument::new(source_code, charset),
+      source_code,
       types_header,
     }))
   }
@@ -549,6 +500,11 @@ impl SourceFileFetcher {
       &fake_filepath,
       headers.get("content-type").map(String::as_str),
     );
+    let source_code = if let Some(charset) = charset {
+      text_encoding::convert_to_utf8(&source, &charset)?.to_string()
+    } else {
+      String::from_utf8(source)?
+    };

     let types_header =
       headers.get("x-typescript-types").map(String::to_string);
@@ -557,7 +513,7 @@ impl SourceFileFetcher {
       url: module_url.clone(),
       filename: cache_filepath,
       media_type,
-      source_code: TextDocument::new(source, charset),
+      source_code,
       types_header,
     };

@@ -631,12 +587,12 @@ fn map_js_like_extension(path: &Path, default: MediaType) -> MediaType {
   }
 }

-fn filter_shebang(string: &str) -> Vec<u8> {
+fn filter_shebang(string: &str) -> String {
   if let Some(i) = string.find('\n') {
     let (_, rest) = string.split_at(i);
-    rest.as_bytes().to_owned()
+    rest.to_string()
   } else {
-    Vec::new()
+    "".to_string()
   }
 }

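Remote bodies follow the same eager-decoding idea, but the charset comes from the `Content-Type` header when one was sent; otherwise strict UTF-8 is required. A hedged sketch of that branch as a standalone helper (the name is mine; the types mirror the calls in the hunks above):

```rust
use deno_core::error::AnyError;

// Sketch only: decode a fetched body using the header-provided charset when
// present, otherwise insist on valid UTF-8, as the added branches above do.
fn decode_body(
  body: Vec<u8>,
  maybe_charset: Option<String>,
) -> Result<String, AnyError> {
  Ok(if let Some(charset) = maybe_charset {
    text_encoding::convert_to_utf8(&body, &charset)?.to_string()
  } else {
    String::from_utf8(body)?
  })
}
```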
@@ -811,8 +767,8 @@ mod tests {
     assert!(result.is_ok());
     let r = result.unwrap();
     assert_eq!(
-      r.source_code.bytes,
-      &b"export { printHello } from \"./print_hello.ts\";\n"[..]
+      r.source_code,
+      "export { printHello } from \"./print_hello.ts\";\n"
     );
     assert_eq!(&(r.media_type), &MediaType::TypeScript);

@@ -838,8 +794,8 @@ mod tests {
     assert!(result2.is_ok());
     let r2 = result2.unwrap();
     assert_eq!(
-      r2.source_code.bytes,
-      &b"export { printHello } from \"./print_hello.ts\";\n"[..]
+      r2.source_code,
+      "export { printHello } from \"./print_hello.ts\";\n"
     );
     // If get_source_file does not call remote, this should be JavaScript
     // as we modified before! (we do not overwrite .headers.json due to no http fetch)
@@ -867,8 +823,8 @@ mod tests {
     assert!(result3.is_ok());
     let r3 = result3.unwrap();
     assert_eq!(
-      r3.source_code.bytes,
-      &b"export { printHello } from \"./print_hello.ts\";\n"[..]
+      r3.source_code,
+      "export { printHello } from \"./print_hello.ts\";\n"
     );
     // If get_source_file does not call remote, this should be JavaScript
     // as we modified before! (we do not overwrite .headers.json due to no http fetch)
@@ -893,8 +849,8 @@ mod tests {
       .await;
     assert!(result4.is_ok());
     let r4 = result4.unwrap();
-    let expected4 = &b"export { printHello } from \"./print_hello.ts\";\n"[..];
-    assert_eq!(r4.source_code.bytes, expected4);
+    let expected4 = "export { printHello } from \"./print_hello.ts\";\n";
+    assert_eq!(r4.source_code, expected4);
     // Resolved back to TypeScript
     assert_eq!(&(r4.media_type), &MediaType::TypeScript);
   }
@@ -921,8 +877,8 @@ mod tests {
       .await;
     assert!(result.is_ok());
     let r = result.unwrap();
-    let expected = b"export const loaded = true;\n";
-    assert_eq!(r.source_code.bytes, expected);
+    let expected = "export const loaded = true;\n";
+    assert_eq!(r.source_code, expected);
     assert_eq!(&(r.media_type), &MediaType::JavaScript);
     let (_, headers) = fetcher.http_cache.get(&module_url).unwrap();
     assert_eq!(headers.get("content-type").unwrap(), "text/javascript");
@@ -947,8 +903,8 @@ mod tests {
       .await;
     assert!(result2.is_ok());
     let r2 = result2.unwrap();
-    let expected2 = b"export const loaded = true;\n";
-    assert_eq!(r2.source_code.bytes, expected2);
+    let expected2 = "export const loaded = true;\n";
+    assert_eq!(r2.source_code, expected2);
     // If get_source_file does not call remote, this should be TypeScript
     // as we modified before! (we do not overwrite .headers.json due to no http
     // fetch)
@@ -973,8 +929,8 @@ mod tests {
       .await;
     assert!(result3.is_ok());
     let r3 = result3.unwrap();
-    let expected3 = b"export const loaded = true;\n";
-    assert_eq!(r3.source_code.bytes, expected3);
+    let expected3 = "export const loaded = true;\n";
+    assert_eq!(r3.source_code, expected3);
     // Now the old .headers.json file should be overwritten back to JavaScript!
     // (due to http fetch)
     assert_eq!(&(r3.media_type), &MediaType::JavaScript);
@@ -1378,7 +1334,7 @@ mod tests {
       .await;
     assert!(result.is_ok());
     let r = result.unwrap();
-    assert_eq!(r.source_code.bytes, b"export const loaded = true;\n");
+    assert_eq!(r.source_code, "export const loaded = true;\n");
     assert_eq!(&(r.media_type), &MediaType::TypeScript);

     // Modify .metadata.json, make sure read from local
@@ -1394,7 +1350,7 @@ mod tests {
     let result2 = fetcher.fetch_cached_remote_source(&module_url, 1);
     assert!(result2.is_ok());
     let r2 = result2.unwrap().unwrap();
-    assert_eq!(r2.source_code.bytes, b"export const loaded = true;\n");
+    assert_eq!(r2.source_code, "export const loaded = true;\n");
     // Not MediaType::TypeScript due to .headers.json modification
     assert_eq!(&(r2.media_type), &MediaType::JavaScript);
   }
@@ -1416,7 +1372,7 @@ mod tests {
       .await;
     assert!(result.is_ok());
     let r = result.unwrap();
-    assert_eq!(r.source_code.bytes, b"export const loaded = true;\n");
+    assert_eq!(r.source_code, "export const loaded = true;\n");
     assert_eq!(&(r.media_type), &MediaType::TypeScript);
     let (_, headers) = fetcher.http_cache.get(module_url).unwrap();
     assert_eq!(headers.get("content-type").unwrap(), "text/typescript");
@@ -1440,7 +1396,7 @@ mod tests {
       .await;
     assert!(result.is_ok());
     let r2 = result.unwrap();
-    assert_eq!(r2.source_code.bytes, b"export const loaded = true;\n");
+    assert_eq!(r2.source_code, "export const loaded = true;\n");
     assert_eq!(&(r2.media_type), &MediaType::JavaScript);
     let (_, headers) = fetcher.http_cache.get(module_url).unwrap();
     assert_eq!(headers.get("content-type").unwrap(), "text/javascript");
@@ -1464,7 +1420,7 @@ mod tests {
      .await;
     assert!(result.is_ok());
     let r3 = result.unwrap();
-    assert_eq!(r3.source_code.bytes, b"export const loaded = true;\n");
+    assert_eq!(r3.source_code, "export const loaded = true;\n");
     assert_eq!(&(r3.media_type), &MediaType::TypeScript);
     let (_, headers) = fetcher.http_cache.get(module_url).unwrap();
     assert_eq!(headers.get("content-type").unwrap(), "text/typescript");
@@ -1559,10 +1515,7 @@ mod tests {
       .await;
     assert!(r.is_ok());
     let fetched_file = r.unwrap();
-    let source_code = fetched_file.source_code.to_str();
-    assert!(source_code.is_ok());
-    let actual = source_code.unwrap();
-    assert_eq!(expected_content, actual);
+    assert_eq!(expected_content, fetched_file.source_code);
   }

   #[tokio::test]
@@ -1795,10 +1748,10 @@ mod tests {

   #[test]
   fn test_filter_shebang() {
-    assert_eq!(filter_shebang("#!"), b"");
-    assert_eq!(filter_shebang("#!\n\n"), b"\n\n");
+    assert_eq!(filter_shebang("#!"), "");
+    assert_eq!(filter_shebang("#!\n\n"), "\n\n");
     let code = "#!/usr/bin/env deno\nconsole.log('hello');\n";
-    assert_eq!(filter_shebang(code), b"\nconsole.log('hello');\n");
+    assert_eq!(filter_shebang(code), "\nconsole.log('hello');\n");
   }

   #[tokio::test]
@@ -1819,7 +1772,7 @@ mod tests {
       .await;
     assert!(source.is_ok());
     let source = source.unwrap();
-    assert_eq!(source.source_code.bytes, b"console.log('etag')");
+    assert_eq!(source.source_code, "console.log('etag')");
     assert_eq!(&(source.media_type), &MediaType::TypeScript);

     let (_, headers) = fetcher.http_cache.get(&module_url).unwrap();
@@ -1846,7 +1799,7 @@ mod tests {
     )
     .await
     .unwrap();
-    assert_eq!(cached_source.source_code.bytes, b"changed content");
+    assert_eq!(cached_source.source_code, "changed content");

     let modified2 = metadata_path.metadata().unwrap().modified().unwrap();

@@ -1871,7 +1824,7 @@ mod tests {
       .await;
     assert!(source.is_ok());
     let source = source.unwrap();
-    assert_eq!(source.source_code.bytes, b"export const foo = 'foo';");
+    assert_eq!(source.source_code, "export const foo = 'foo';");
     assert_eq!(&(source.media_type), &MediaType::JavaScript);
     assert_eq!(
       source.types_header,
@@ -1941,9 +1894,7 @@ mod tests {
       .await;
     assert!(source.is_ok());
     let source = source.unwrap();
-    assert_eq!(&source.source_code.charset.to_lowercase()[..], charset);
-    let text = &source.source_code.to_str().unwrap();
-    assert_eq!(text, expected_content);
+    assert_eq!(source.source_code, expected_content);
     assert_eq!(&(source.media_type), &MediaType::TypeScript);

     let (_, headers) = fetcher.http_cache.get(&module_url).unwrap();

@@ -247,7 +247,7 @@ impl GlobalState {
       }
     } else {
       CompiledModule {
-        code: out.source_code.to_string()?,
+        code: out.source_code,
         name: out.url.to_string(),
       }
     };
cli/main.rs (17 changed lines)

@@ -60,7 +60,6 @@ use crate::coverage::CoverageCollector;
 use crate::coverage::PrettyCoverageReporter;
 use crate::file_fetcher::SourceFile;
 use crate::file_fetcher::SourceFileFetcher;
-use crate::file_fetcher::TextDocument;
 use crate::fs as deno_fs;
 use crate::global_state::GlobalState;
 use crate::media_type::MediaType;
@@ -266,7 +265,7 @@ async fn eval_command(
     } else {
       MediaType::JavaScript
     },
-    source_code: TextDocument::new(source_code, Some("utf-8")),
+    source_code: String::from_utf8(source_code)?,
   };
   // Save our fake file into file fetcher cache
   // to allow module access by TS compiler.
@@ -358,12 +357,7 @@ async fn doc_command(
         e.to_string(),
       ))
     })?;
-    source_file.source_code.to_string().map_err(|e| {
-      doc::DocError::Io(std::io::Error::new(
-        std::io::ErrorKind::Other,
-        e.to_string(),
-      ))
-    })
+    Ok(source_file.source_code)
   }
   .boxed_local()
 }
@@ -449,7 +443,7 @@ async fn run_from_stdin(flags: Flags) -> Result<(), AnyError> {
     url: main_module_url,
     types_header: None,
     media_type: MediaType::TypeScript,
-    source_code: source.into(),
+    source_code: String::from_utf8(source)?,
   };
   // Save our fake file into file fetcher cache
   // to allow module access by TS compiler
@@ -575,10 +569,7 @@ async fn test_command(
     url: test_file_url.clone(),
     types_header: None,
     media_type: MediaType::TypeScript,
-    source_code: TextDocument::new(
-      test_file.clone().into_bytes(),
-      Some("utf-8"),
-    ),
+    source_code: test_file.clone(),
   };
   // Save our fake file into file fetcher cache
   // to allow module access by TS compiler
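All three call sites above build a "fake" SourceFile by hand so the TS compiler can resolve it from the in-process cache. A hedged sketch of that pattern after the change (not verbatim from the commit: the helper name and the `filename` placeholder are illustrative; the field set comes from the SourceFile hunk earlier in the diff):

```rust
use deno_core::error::AnyError;
use deno_core::url::Url;
use std::path::PathBuf;

// Sketch only: source text is now a plain String field on SourceFile.
fn fake_source_file(url: Url, source: Vec<u8>) -> Result<SourceFile, AnyError> {
  Ok(SourceFile {
    url,
    filename: PathBuf::from("<eval>"), // illustrative placeholder
    types_header: None,
    media_type: MediaType::TypeScript,
    source_code: String::from_utf8(source)?,
  })
}
```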
@@ -489,7 +489,7 @@ impl ModuleGraphLoader {
       &source_file.source_code.as_bytes(),
       version::DENO.as_bytes(),
     ]);
-    let source_code = source_file.source_code.to_string()?;
+    let source_code = source_file.source_code.clone();

     if SUPPORTED_MEDIA_TYPES.contains(&source_file.media_type) {
       if let Some(types_specifier) = source_file.types_header {

@@ -4,7 +4,6 @@ use crate::ast;
 use crate::ast::parse;
 use crate::ast::Location;
 use crate::ast::ParsedModule;
-use crate::file_fetcher::TextDocument;
 use crate::import_map::ImportMap;
 use crate::lockfile::Lockfile;
 use crate::media_type::MediaType;
@@ -37,7 +36,7 @@ use std::sync::Mutex;
 use std::time::Instant;
 use swc_ecmascript::dep_graph::DependencyKind;

-pub type BuildInfoMap = HashMap<EmitType, TextDocument>;
+pub type BuildInfoMap = HashMap<EmitType, String>;

 lazy_static! {
   /// Matched the `@deno-types` pragma.
@@ -151,12 +150,8 @@ fn parse_deno_types(comment: &str) -> Option<String> {
 /// A hashing function that takes the source code, version and optionally a
 /// user provided config and generates a string hash which can be stored to
 /// determine if the cached emit is valid or not.
-fn get_version(source: &TextDocument, version: &str, config: &[u8]) -> String {
-  crate::checksum::gen(&[
-    source.to_str().unwrap().as_bytes(),
-    version.as_bytes(),
-    config,
-  ])
+fn get_version(source: &str, version: &str, config: &[u8]) -> String {
+  crate::checksum::gen(&[source.as_bytes(), version.as_bytes(), config])
 }

 /// A logical representation of a module within a graph.
@@ -173,7 +168,7 @@ struct Module {
   maybe_version: Option<String>,
   media_type: MediaType,
   specifier: ModuleSpecifier,
-  source: TextDocument,
+  source: String,
 }

 impl Default for Module {
@@ -190,7 +185,7 @@ impl Default for Module {
       maybe_version: None,
       media_type: MediaType::Unknown,
       specifier: ModuleSpecifier::resolve_url("https://deno.land/x/").unwrap(),
-      source: TextDocument::new(Vec::new(), Option::<&str>::None),
+      source: "".to_string(),
     }
   }
 }
@@ -243,8 +238,7 @@ impl Module {
   }

   pub fn parse(&mut self) -> Result<(), AnyError> {
-    let parsed_module =
-      parse(&self.specifier, &self.source.to_str()?, &self.media_type)?;
+    let parsed_module = parse(&self.specifier, &self.source, &self.media_type)?;

     // parse out any triple slash references
     for comment in parsed_module.get_leading_comments().iter() {
@@ -470,8 +464,7 @@ impl Graph2 {
       let mut lockfile = lf.lock().unwrap();
       for (ms, module) in self.modules.iter() {
         let specifier = module.specifier.to_string();
-        let code = module.source.to_string()?;
-        let valid = lockfile.check_or_insert(&specifier, &code);
+        let valid = lockfile.check_or_insert(&specifier, &module.source);
         if !valid {
           return Err(
             InvalidSource(ms.clone(), lockfile.filename.clone()).into(),
@@ -702,14 +695,9 @@ mod tests {
   #[derive(Debug, Default)]
   pub struct MockSpecifierHandler {
     pub fixtures: PathBuf,
-    pub build_info: HashMap<ModuleSpecifier, TextDocument>,
-    pub build_info_calls: Vec<(ModuleSpecifier, EmitType, TextDocument)>,
-    pub cache_calls: Vec<(
-      ModuleSpecifier,
-      EmitType,
-      TextDocument,
-      Option<TextDocument>,
-    )>,
+    pub build_info: HashMap<ModuleSpecifier, String>,
+    pub build_info_calls: Vec<(ModuleSpecifier, EmitType, String)>,
+    pub cache_calls: Vec<(ModuleSpecifier, EmitType, String, Option<String>)>,
     pub deps_calls: Vec<(ModuleSpecifier, DependencyMap)>,
     pub types_calls: Vec<(ModuleSpecifier, String)>,
     pub version_calls: Vec<(ModuleSpecifier, String)>,
@@ -740,8 +728,7 @@ mod tests {
       "jsx" => MediaType::JSX,
       _ => MediaType::Unknown,
     };
-    let source =
-      TextDocument::new(fs::read(specifier_path)?, Option::<&str>::None);
+    let source = fs::read_to_string(specifier_path)?;

     Ok(CachedModule {
       source,
@@ -760,15 +747,15 @@ mod tests {
       &self,
       specifier: &ModuleSpecifier,
      _cache_type: &EmitType,
-    ) -> Result<Option<TextDocument>, AnyError> {
+    ) -> Result<Option<String>, AnyError> {
       Ok(self.build_info.get(specifier).cloned())
     }
     fn set_cache(
       &mut self,
       specifier: &ModuleSpecifier,
       cache_type: &EmitType,
-      code: TextDocument,
-      maybe_map: Option<TextDocument>,
+      code: String,
+      maybe_map: Option<String>,
     ) -> Result<(), AnyError> {
       self.cache_calls.push((
         specifier.clone(),
@@ -790,7 +777,7 @@ mod tests {
       &mut self,
       specifier: &ModuleSpecifier,
       cache_type: &EmitType,
-      build_info: TextDocument,
+      build_info: String,
     ) -> Result<(), AnyError> {
       self
         .build_info
@@ -822,11 +809,9 @@ mod tests {

   #[test]
   fn test_get_version() {
-    let doc_a =
-      TextDocument::new(b"console.log(42);".to_vec(), Option::<&str>::None);
+    let doc_a = "console.log(42);";
     let version_a = get_version(&doc_a, "1.2.3", b"");
-    let doc_b =
-      TextDocument::new(b"console.log(42);".to_vec(), Option::<&str>::None);
+    let doc_b = "console.log(42);";
     let version_b = get_version(&doc_b, "1.2.3", b"");
     assert_eq!(version_a, version_b);

@@ -845,8 +830,7 @@ mod tests {

   #[test]
   fn test_module_emit_valid() {
-    let source =
-      TextDocument::new(b"console.log(42);".to_vec(), Option::<&str>::None);
+    let source = "console.log(42);".to_string();
     let maybe_version = Some(get_version(&source, version::DENO, b""));
     let module = Module {
       source,
@@ -855,11 +839,9 @@ mod tests {
     };
     assert!(module.emit_valid(b""));

-    let source =
-      TextDocument::new(b"console.log(42);".to_vec(), Option::<&str>::None);
-    let old_source =
-      TextDocument::new(b"console.log(43);".to_vec(), Option::<&str>::None);
-    let maybe_version = Some(get_version(&old_source, version::DENO, b""));
+    let source = "console.log(42);".to_string();
+    let old_source = "console.log(43);";
+    let maybe_version = Some(get_version(old_source, version::DENO, b""));
     let module = Module {
       source,
       maybe_version,
@@ -867,8 +849,7 @@ mod tests {
     };
     assert!(!module.emit_valid(b""));

-    let source =
-      TextDocument::new(b"console.log(42);".to_vec(), Option::<&str>::None);
+    let source = "console.log(42);".to_string();
     let maybe_version = Some(get_version(&source, "0.0.0", b""));
     let module = Module {
       source,
@@ -877,8 +858,7 @@ mod tests {
     };
     assert!(!module.emit_valid(b""));

-    let source =
-      TextDocument::new(b"console.log(42);".to_vec(), Option::<&str>::None);
+    let source = "console.log(42);".to_string();
     let module = Module {
       source,
       ..Module::default()
@@ -888,8 +868,7 @@ mod tests {

   #[test]
   fn test_module_set_version() {
-    let source =
-      TextDocument::new(b"console.log(42);".to_vec(), Option::<&str>::None);
+    let source = "console.log(42);".to_string();
     let expected = Some(get_version(&source, version::DENO, b""));
     let mut module = Module {
       source,
@@ -933,15 +912,11 @@ mod tests {
     assert_eq!(h.cache_calls[0].1, EmitType::Cli);
     assert!(h.cache_calls[0]
       .2
-      .to_string()
-      .unwrap()
       .contains("# sourceMappingURL=data:application/json;base64,"));
     assert_eq!(h.cache_calls[0].3, None);
     assert_eq!(h.cache_calls[1].1, EmitType::Cli);
     assert!(h.cache_calls[1]
       .2
-      .to_string()
-      .unwrap()
       .contains("# sourceMappingURL=data:application/json;base64,"));
     assert_eq!(h.cache_calls[0].3, None);
     assert_eq!(h.deps_calls.len(), 7);
@@ -1002,11 +977,7 @@ mod tests {
     assert_eq!(h.cache_calls.len(), 1, "only one file should be emitted");
     // FIXME(bartlomieju): had to add space in `<div>`, probably a quirk in swc_ecma_codegen
     assert!(
-      h.cache_calls[0]
-        .2
-        .to_string()
-        .unwrap()
-        .contains("<div >Hello world!</div>"),
+      h.cache_calls[0].2.contains("<div >Hello world!</div>"),
       "jsx should have been preserved"
     );
   }
@@ -3,7 +3,6 @@
 use crate::deno_dir::DenoDir;
 use crate::disk_cache::DiskCache;
 use crate::file_fetcher::SourceFileFetcher;
-use crate::file_fetcher::TextDocument;
 use crate::global_state::GlobalState;
 use crate::media_type::MediaType;
 use crate::permissions::Permissions;
@@ -23,7 +22,7 @@ use std::pin::Pin;
 use std::sync::Arc;

 pub type DependencyMap = HashMap<String, Dependency>;
-pub type EmitMap = HashMap<EmitType, (TextDocument, Option<TextDocument>)>;
+pub type EmitMap = HashMap<EmitType, (String, Option<String>)>;
 pub type FetchFuture =
   Pin<Box<(dyn Future<Output = Result<CachedModule, AnyError>> + 'static)>>;

@@ -34,7 +33,7 @@ pub struct CachedModule {
   pub maybe_types: Option<String>,
   pub maybe_version: Option<String>,
   pub media_type: MediaType,
-  pub source: TextDocument,
+  pub source: String,
   pub specifier: ModuleSpecifier,
 }

@@ -47,7 +46,7 @@ impl Default for CachedModule {
       maybe_types: None,
       maybe_version: None,
       media_type: MediaType::Unknown,
-      source: TextDocument::new(Vec::new(), Option::<&str>::None),
+      source: "".to_string(),
       specifier: ModuleSpecifier::resolve_url("https://deno.land/x/mod.ts")
         .unwrap(),
     }
@@ -98,7 +97,7 @@ pub trait SpecifierHandler {
     &self,
     specifier: &ModuleSpecifier,
     emit_type: &EmitType,
-  ) -> Result<Option<TextDocument>, AnyError>;
+  ) -> Result<Option<String>, AnyError>;

   /// Set the emitted code (and maybe map) for a given module specifier. The
   /// cache type indicates what form the emit is related to.
@@ -106,8 +105,8 @@ pub trait SpecifierHandler {
     &mut self,
     specifier: &ModuleSpecifier,
     emit_type: &EmitType,
-    code: TextDocument,
-    maybe_map: Option<TextDocument>,
+    code: String,
+    maybe_map: Option<String>,
   ) -> Result<(), AnyError>;

   /// When parsed out of a JavaScript module source, the triple slash reference
@@ -123,7 +122,7 @@ pub trait SpecifierHandler {
     &mut self,
     specifier: &ModuleSpecifier,
     emit_type: &EmitType,
-    build_info: TextDocument,
+    build_info: String,
   ) -> Result<(), AnyError>;

   /// Set the graph dependencies for a given module specifier.
@@ -245,16 +244,16 @@ impl SpecifierHandler for FetchHandler {

       let filename =
         disk_cache.get_cache_filename_with_extension(&url, "js.map");
-      let maybe_map: Option<TextDocument> =
-        if let Ok(map) = disk_cache.get(&filename) {
-          Some(map.into())
-        } else {
-          None
-        };
+      let maybe_map: Option<String> = if let Ok(map) = disk_cache.get(&filename)
+      {
+        Some(String::from_utf8(map)?)
+      } else {
+        None
+      };
       let mut emits = HashMap::new();
       let filename = disk_cache.get_cache_filename_with_extension(&url, "js");
       if let Ok(code) = disk_cache.get(&filename) {
-        emits.insert(EmitType::Cli, (code.into(), maybe_map));
+        emits.insert(EmitType::Cli, (String::from_utf8(code)?, maybe_map));
       };

       Ok(CachedModule {
@@ -274,7 +273,7 @@ impl SpecifierHandler for FetchHandler {
     &self,
     specifier: &ModuleSpecifier,
     emit_type: &EmitType,
-  ) -> Result<Option<TextDocument>, AnyError> {
+  ) -> Result<Option<String>, AnyError> {
     if emit_type != &EmitType::Cli {
       return Err(UnsupportedEmitType(emit_type.clone()).into());
     }
@@ -282,7 +281,7 @@ impl SpecifierHandler for FetchHandler {
       .disk_cache
       .get_cache_filename_with_extension(specifier.as_url(), "buildinfo");
     if let Ok(build_info) = self.disk_cache.get(&filename) {
-      return Ok(Some(build_info.into()));
+      return Ok(Some(String::from_utf8(build_info)?));
     }

     Ok(None)
@@ -292,7 +291,7 @@ impl SpecifierHandler for FetchHandler {
     &mut self,
     specifier: &ModuleSpecifier,
     emit_type: &EmitType,
-    build_info: TextDocument,
+    build_info: String,
   ) -> Result<(), AnyError> {
     if emit_type != &EmitType::Cli {
       return Err(UnsupportedEmitType(emit_type.clone()).into());
@@ -310,8 +309,8 @@ impl SpecifierHandler for FetchHandler {
     &mut self,
     specifier: &ModuleSpecifier,
     emit_type: &EmitType,
-    code: TextDocument,
-    maybe_map: Option<TextDocument>,
+    code: String,
+    maybe_map: Option<String>,
   ) -> Result<(), AnyError> {
     if emit_type != &EmitType::Cli {
       return Err(UnsupportedEmitType(emit_type.clone()).into());
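FetchHandler now converts whatever the disk cache hands back into a String right at the boundary. A hedged sketch of that pattern in isolation (the helper name is mine; `DiskCache::get` returning raw bytes is inferred from the `String::from_utf8` calls above):

```rust
use deno_core::error::AnyError;
use std::path::Path;

// Sketch only: surface a cached artifact as UTF-8 text, treating a cache
// miss as None, the way the hunks above do.
fn read_cached_text(
  disk_cache: &DiskCache,
  filename: &Path,
) -> Result<Option<String>, AnyError> {
  match disk_cache.get(filename) {
    Ok(bytes) => Ok(Some(String::from_utf8(bytes)?)),
    Err(_) => Ok(None),
  }
}
```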
|
@ -414,7 +413,7 @@ pub mod tests {
|
|||
assert!(cached_module.maybe_dependencies.is_none());
|
||||
assert_eq!(cached_module.media_type, MediaType::TypeScript);
|
||||
assert_eq!(
|
||||
cached_module.source.to_str().unwrap(),
|
||||
cached_module.source,
|
||||
"export { printHello } from \"./print_hello.ts\";\n"
|
||||
);
|
||||
assert_eq!(cached_module.specifier, specifier);
|
||||
|
@ -431,7 +430,7 @@ pub mod tests {
|
|||
let cached_module: CachedModule =
|
||||
file_fetcher.fetch(specifier.clone()).await.unwrap();
|
||||
assert_eq!(cached_module.emits.len(), 0);
|
||||
let code = TextDocument::from("some code");
|
||||
let code = String::from("some code");
|
||||
file_fetcher
|
||||
.set_cache(&specifier, &EmitType::Cli, code, None)
|
||||
.expect("could not set cache");
|
||||
|
@ -439,7 +438,7 @@ pub mod tests {
|
|||
file_fetcher.fetch(specifier.clone()).await.unwrap();
|
||||
assert_eq!(cached_module.emits.len(), 1);
|
||||
let actual_emit = cached_module.emits.get(&EmitType::Cli).unwrap();
|
||||
assert_eq!(actual_emit.0.to_str().unwrap(), "some code");
|
||||
assert_eq!(actual_emit.0, "some code");
|
||||
assert_eq!(actual_emit.1, None);
|
||||
}
|
||||
}
|
||||
|
|
cli/tsc.rs (22 changed lines)

@@ -735,7 +735,7 @@ impl TsCompiler {
     let compiled_source_file = self.get_compiled_source_file(module_url)?;

     let compiled_module = CompiledModule {
-      code: compiled_source_file.source_code.to_string()?,
+      code: compiled_source_file.source_code,
      name: module_url.to_string(),
     };

@@ -760,7 +760,7 @@ impl TsCompiler {
       url: module_url.clone(),
       filename: compiled_code_filename,
       media_type: MediaType::JavaScript,
-      source_code: compiled_code.into(),
+      source_code: String::from_utf8(compiled_code)?,
       types_header: None,
     };

@@ -817,7 +817,7 @@ impl TsCompiler {
       url: module_specifier.as_url().to_owned(),
       filename: source_map_filename,
       media_type: MediaType::JavaScript,
-      source_code: source_code.into(),
+      source_code: String::from_utf8(source_code)?,
       types_header: None,
     };

@@ -862,14 +862,12 @@ impl SourceMapGetter for TsCompiler {
   fn get_source_line(&self, script_name: &str, line: usize) -> Option<String> {
     self
       .try_resolve_and_get_source_file(script_name)
-      .and_then(|out| {
-        out.source_code.to_str().ok().map(|v| {
-          // Do NOT use .lines(): it skips the terminating empty line.
-          // (due to internally using .split_terminator() instead of .split())
-          let lines: Vec<&str> = v.split('\n').collect();
-          assert!(lines.len() > line);
-          lines[line].to_string()
-        })
-      })
+      .map(|out| {
+        // Do NOT use .lines(): it skips the terminating empty line.
+        // (due to internally using .split_terminator() instead of .split())
+        let lines: Vec<&str> = out.source_code.split('\n').collect();
+        assert!(lines.len() > line);
+        lines[line].to_string()
+      })
   }
 }
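The comment preserved in this hunk is doing real work, so here is a tiny self-contained illustration (not from the commit) of why the code indexes into `split('\n')` rather than `lines()`:

```rust
#[test]
fn split_keeps_trailing_empty_line() {
  let text = "line 0\nline 1\n";
  // split('\n') yields a final empty segment for a trailing newline...
  assert_eq!(text.split('\n').collect::<Vec<_>>(), ["line 0", "line 1", ""]);
  // ...while lines() drops it, so the last (empty) line would be unaddressable.
  assert_eq!(text.lines().collect::<Vec<_>>(), ["line 0", "line 1"]);
}
```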
@@ -1528,7 +1526,7 @@ mod tests {
       url: specifier.as_url().clone(),
       filename: PathBuf::from(p.to_str().unwrap().to_string()),
       media_type: MediaType::TypeScript,
-      source_code: include_bytes!("./tests/002_hello.ts").to_vec().into(),
+      source_code: include_str!("./tests/002_hello.ts").to_string(),
       types_header: None,
     };
     let dir =