Mirror of https://github.com/denoland/deno.git (synced 2024-11-25 15:29:32 -05:00)
refactor: cleanup compiler pipeline (#2686)
* remove fetch_source_file_and_maybe_compile_async and replace it with State.fetch_compiled_module
* remove SourceFile.js_source()
* introduce CompiledModule, which is basically the same as deno::SourceInfo and represents an arbitrary file that has been compiled to a JS module
* introduce //cli/compilers module containing all compilers
* introduce JsCompiler, a no-op compiler - output is the same as input, no compilation takes place - used for MediaType::JavaScript and MediaType::Unknown
* introduce JsonCompiler, which wraps JSON in a default export
* support JS-to-JS compilation using checkJs
This commit is contained in:
parent e7cee29c84
commit 2e1ab82321
14 changed files with 353 additions and 199 deletions
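To make the new pipeline easier to follow, here is a condensed, standalone sketch of the media-type routing the description outlines. The enum and function below are illustrative stand-ins (not the cli crate's msg::MediaType or the compiler structs themselves); the real dispatch lives in State::fetch_compiled_module in the cli/state.rs hunk further down.

// Standalone sketch: which compiler handles which media type.
enum MediaType {
  JavaScript,
  TypeScript,
  Json,
  Unknown,
}

fn pick_compiler(media_type: MediaType, check_js: bool) -> &'static str {
  match media_type {
    MediaType::Json => "JsonCompiler",
    MediaType::TypeScript => "TsCompiler",
    // With checkJs enabled, plain JavaScript is routed through the TS compiler.
    MediaType::JavaScript if check_js => "TsCompiler",
    // Otherwise JS, and anything with an unknown extension, passes through the no-op compiler.
    MediaType::JavaScript | MediaType::Unknown => "JsCompiler",
  }
}

fn main() {
  assert_eq!(pick_compiler(MediaType::TypeScript, false), "TsCompiler");
  assert_eq!(pick_compiler(MediaType::JavaScript, true), "TsCompiler");
  assert_eq!(pick_compiler(MediaType::JavaScript, false), "JsCompiler");
  assert_eq!(pick_compiler(MediaType::Json, false), "JsonCompiler");
}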
cli/compilers/js.rs (new file, 25 lines)

// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
use crate::compilers::CompiledModule;
use crate::compilers::CompiledModuleFuture;
use crate::file_fetcher::SourceFile;
use crate::state::ThreadSafeState;
use std::str;

pub struct JsCompiler {}

impl JsCompiler {
  pub fn compile_async(
    self: &Self,
    _state: ThreadSafeState,
    source_file: &SourceFile,
  ) -> Box<CompiledModuleFuture> {
    let module = CompiledModule {
      code: str::from_utf8(&source_file.source_code)
        .unwrap()
        .to_string(),
      name: source_file.url.to_string(),
    };

    Box::new(futures::future::ok(module))
  }
}
cli/compilers/json.rs (new file, 26 lines)

// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
use crate::compilers::CompiledModule;
use crate::compilers::CompiledModuleFuture;
use crate::file_fetcher::SourceFile;
use crate::state::ThreadSafeState;
use std::str;

pub struct JsonCompiler {}

impl JsonCompiler {
  pub fn compile_async(
    self: &Self,
    _state: ThreadSafeState,
    source_file: &SourceFile,
  ) -> Box<CompiledModuleFuture> {
    let module = CompiledModule {
      code: format!(
        "export default {};",
        str::from_utf8(&source_file.source_code).unwrap()
      ),
      name: source_file.url.to_string(),
    };

    Box::new(futures::future::ok(module))
  }
}
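For illustration, a minimal standalone sketch of the wrapping JsonCompiler performs above: the raw JSON bytes become the default export of an ES module, so user code can default-import the .json file and receive the value. The helper name and JSON payload here are made up for this example.

// Standalone sketch of the "export default" wrapping shown above.
fn wrap_json_as_module(source: &[u8]) -> String {
  format!("export default {};", std::str::from_utf8(source).unwrap())
}

fn main() {
  let json = br#"{ "port": 8000 }"#;
  // Prints: export default { "port": 8000 };
  println!("{}", wrap_json_as_module(json));
}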
cli/compilers/mod.rs (new file, 20 lines)

// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
use deno::ErrBox;
use futures::Future;

mod js;
mod json;
mod ts;

pub use js::JsCompiler;
pub use json::JsonCompiler;
pub use ts::TsCompiler;

#[derive(Debug, Clone)]
pub struct CompiledModule {
  pub code: String,
  pub name: String,
}

pub type CompiledModuleFuture =
  dyn Future<Item = CompiledModule, Error = ErrBox> + Send;
@@ -1,9 +1,13 @@
 // Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use crate::compilers::CompiledModule;
+use crate::compilers::CompiledModuleFuture;
+use crate::deno_error::DenoError;
 use crate::diagnostics::Diagnostic;
 use crate::disk_cache::DiskCache;
 use crate::file_fetcher::SourceFile;
 use crate::file_fetcher::SourceFileFetcher;
 use crate::msg;
+use crate::msg::ErrorKind;
 use crate::resources;
 use crate::source_maps::SourceMapGetter;
 use crate::startup_data;
@@ -13,7 +17,6 @@ use crate::worker::Worker;
 use deno::Buf;
 use deno::ErrBox;
 use deno::ModuleSpecifier;
-use futures::future::Either;
 use futures::Future;
 use futures::Stream;
 use ring;
@@ -26,10 +29,78 @@ use std::sync::atomic::Ordering;
 use std::sync::Mutex;
 use url::Url;

-/// Optional tuple which represents the state of the compiler
-/// configuration where the first is canonical name for the configuration file
-/// and a vector of the bytes of the contents of the configuration file.
-type CompilerConfig = Option<(PathBuf, Vec<u8>)>;
+/// Struct which represents the state of the compiler
+/// configuration where the first is canonical name for the configuration file,
+/// second is a vector of the bytes of the contents of the configuration file,
+/// third is bytes of the hash of contents.
+#[derive(Clone)]
+pub struct CompilerConfig {
+  pub path: Option<PathBuf>,
+  pub content: Option<Vec<u8>>,
+  pub hash: Vec<u8>,
+}
+
+impl CompilerConfig {
+  /// Take the passed flag and resolve the file name relative to the cwd.
+  pub fn load(config_path: Option<String>) -> Result<Self, ErrBox> {
+    let config_file = match &config_path {
+      Some(config_file_name) => {
+        debug!("Compiler config file: {}", config_file_name);
+        let cwd = std::env::current_dir().unwrap();
+        Some(cwd.join(config_file_name))
+      }
+      _ => None,
+    };
+
+    // Convert the PathBuf to a canonicalized string. This is needed by the
+    // compiler to properly deal with the configuration.
+    let config_path = match &config_file {
+      Some(config_file) => Some(config_file.canonicalize().unwrap().to_owned()),
+      _ => None,
+    };
+
+    // Load the contents of the configuration file
+    let config = match &config_file {
+      Some(config_file) => {
+        debug!("Attempt to load config: {}", config_file.to_str().unwrap());
+        let config = fs::read(&config_file)?;
+        Some(config)
+      }
+      _ => None,
+    };
+
+    let config_hash = match &config {
+      Some(bytes) => bytes.clone(),
+      _ => b"".to_vec(),
+    };
+
+    let ts_config = Self {
+      path: config_path,
+      content: config,
+      hash: config_hash,
+    };
+
+    Ok(ts_config)
+  }
+
+  pub fn json(self: &Self) -> Result<serde_json::Value, ErrBox> {
+    if self.content.is_none() {
+      return Ok(serde_json::Value::Null);
+    }
+
+    let bytes = self.content.clone().unwrap();
+    let json_string = std::str::from_utf8(&bytes)?;
+    match serde_json::from_str(&json_string) {
+      Ok(json_map) => Ok(json_map),
+      Err(_) => Err(
+        DenoError::new(
+          ErrorKind::InvalidInput,
+          "Compiler config is not a valid JSON".to_string(),
+        ).into(),
+      ),
+    }
+  }
+}

 /// Information associated with compiled file in cache.
 /// Includes source code path and state hash.
@@ -80,19 +151,19 @@ fn req(
   compiler_config: CompilerConfig,
   bundle: Option<String>,
 ) -> Buf {
-  let j = if let Some((config_path, config_data)) = compiler_config {
-    json!({
+  let j = match (compiler_config.path, compiler_config.content) {
+    (Some(config_path), Some(config_data)) => json!({
       "rootNames": root_names,
       "bundle": bundle,
       "configPath": config_path,
       "config": str::from_utf8(&config_data).unwrap(),
-    })
-  } else {
-    json!({
+    }),
+    _ => json!({
       "rootNames": root_names,
       "bundle": bundle,
-    })
+    }),
   };

   j.to_string().into_boxed_str().into_boxed_bytes()
 }
@@ -120,48 +191,9 @@ pub fn source_code_version_hash(
   gen_hash(vec![source_code, version.as_bytes(), config_hash])
 }

-fn load_config_file(
-  config_path: Option<String>,
-) -> (Option<PathBuf>, Option<Vec<u8>>) {
-  // take the passed flag and resolve the file name relative to the cwd
-  let config_file = match &config_path {
-    Some(config_file_name) => {
-      debug!("Compiler config file: {}", config_file_name);
-      let cwd = std::env::current_dir().unwrap();
-      Some(cwd.join(config_file_name))
-    }
-    _ => None,
-  };
-
-  // Convert the PathBuf to a canonicalized string. This is needed by the
-  // compiler to properly deal with the configuration.
-  let config_path = match &config_file {
-    Some(config_file) => Some(config_file.canonicalize().unwrap().to_owned()),
-    _ => None,
-  };
-
-  // Load the contents of the configuration file
-  let config = match &config_file {
-    Some(config_file) => {
-      debug!("Attempt to load config: {}", config_file.to_str().unwrap());
-      match fs::read(&config_file) {
-        Ok(config_data) => Some(config_data.to_owned()),
-        _ => panic!(
-          "Error retrieving compiler config file at \"{}\"",
-          config_file.to_str().unwrap()
-        ),
-      }
-    }
-    _ => None,
-  };
-
-  (config_path, config)
-}
-
 pub struct TsCompiler {
   pub file_fetcher: SourceFileFetcher,
   pub config: CompilerConfig,
-  pub config_hash: Vec<u8>,
   pub disk_cache: DiskCache,
   /// Set of all URLs that have been compiled. This prevents double
   /// compilation of module.
@@ -169,6 +201,8 @@ pub struct TsCompiler {
   /// This setting is controlled by `--reload` flag. Unless the flag
   /// is provided disk cache is used.
   pub use_disk_cache: bool,
+  /// This setting is controlled by `compilerOptions.checkJs`
+  pub compile_js: bool,
 }

 impl TsCompiler {
@@ -177,25 +211,30 @@ impl TsCompiler {
     disk_cache: DiskCache,
     use_disk_cache: bool,
     config_path: Option<String>,
-  ) -> Self {
-    let compiler_config = match load_config_file(config_path) {
-      (Some(config_path), Some(config)) => Some((config_path, config.to_vec())),
-      _ => None,
+  ) -> Result<Self, ErrBox> {
+    let config = CompilerConfig::load(config_path)?;
+
+    // If `checkJs` is set to true in `compilerOptions` then we're gonna be compiling
+    // JavaScript files as well
+    let config_json = config.json()?;
+    let compile_js = match &config_json.get("compilerOptions") {
+      Some(serde_json::Value::Object(m)) => match m.get("checkJs") {
+        Some(serde_json::Value::Bool(bool_)) => *bool_,
+        _ => false,
+      },
+      _ => false,
     };

-    let config_bytes = match &compiler_config {
-      Some((_, config)) => config.clone(),
-      _ => b"".to_vec(),
-    };
-
-    Self {
+    let compiler = Self {
       file_fetcher,
       disk_cache,
-      config: compiler_config,
-      config_hash: config_bytes,
+      config,
       compiled: Mutex::new(HashSet::new()),
       use_disk_cache,
-    }
+      compile_js,
+    };
+
+    Ok(compiler)
   }

   /// Create a new V8 worker with snapshot of TS compiler and setup compiler's runtime.
@@ -290,22 +329,12 @@ impl TsCompiler {
     self: &Self,
     state: ThreadSafeState,
     source_file: &SourceFile,
-  ) -> impl Future<Item = SourceFile, Error = ErrBox> {
-    // TODO: maybe fetching of original SourceFile should be done here?
-
-    if source_file.media_type != msg::MediaType::TypeScript {
-      return Either::A(futures::future::ok(source_file.clone()));
-    }
-
+  ) -> Box<CompiledModuleFuture> {
     if self.has_compiled(&source_file.url) {
-      match self.get_compiled_source_file(&source_file) {
-        Ok(compiled_module) => {
-          return Either::A(futures::future::ok(compiled_module));
-        }
-        Err(err) => {
-          return Either::A(futures::future::err(err));
-        }
-      }
+      return match self.get_compiled_module(&source_file.url) {
+        Ok(compiled) => Box::new(futures::future::ok(compiled)),
+        Err(err) => Box::new(futures::future::err(err)),
+      };
     }

     if self.use_disk_cache {
@@ -317,20 +346,16 @@ impl TsCompiler {
       let version_hash_to_validate = source_code_version_hash(
         &source_file.source_code,
         version::DENO,
-        &self.config_hash,
+        &self.config.hash,
       );

       if metadata.version_hash == version_hash_to_validate {
         debug!("load_cache metadata version hash match");
         if let Ok(compiled_module) =
-          self.get_compiled_source_file(&source_file)
+          self.get_compiled_module(&source_file.url)
         {
-          debug!(
-            "found cached compiled module: {:?}",
-            compiled_module.clone().filename
-          );
-          // TODO: store in in-process cache for subsequent access
-          return Either::A(futures::future::ok(compiled_module));
+          self.mark_compiled(&source_file.url);
+          return Box::new(futures::future::ok(compiled_module));
         }
       }
     }
@@ -388,19 +413,18 @@ impl TsCompiler {
       }).and_then(move |_| {
         // if we are this far it means compilation was successful and we can
         // load compiled filed from disk
-        // TODO: can this be somehow called using `self.`?
         state_
           .ts_compiler
-          .get_compiled_source_file(&source_file_)
+          .get_compiled_module(&source_file_.url)
           .map_err(|e| {
             // TODO: this situation shouldn't happen
             panic!("Expected to find compiled file: {}", e)
           })
-      }).and_then(move |source_file_after_compile| {
+      }).and_then(move |compiled_module| {
         // Explicit drop to keep reference alive until future completes.
         drop(compiling_job);

-        Ok(source_file_after_compile)
+        Ok(compiled_module)
       }).then(move |r| {
         debug!(">>>>> compile_sync END");
         // TODO(ry) do this in worker's destructor.
@@ -408,7 +432,7 @@ impl TsCompiler {
         r
       });

-    Either::B(fut)
+    Box::new(fut)
   }

   /// Get associated `CompiledFileMetadata` for given module if it exists.
@@ -431,22 +455,38 @@ impl TsCompiler {
     None
   }

+  pub fn get_compiled_module(
+    self: &Self,
+    module_url: &Url,
+  ) -> Result<CompiledModule, ErrBox> {
+    let compiled_source_file = self.get_compiled_source_file(module_url)?;
+
+    let compiled_module = CompiledModule {
+      code: str::from_utf8(&compiled_source_file.source_code)
+        .unwrap()
+        .to_string(),
+      name: module_url.to_string(),
+    };
+
+    Ok(compiled_module)
+  }
+
   /// Return compiled JS file for given TS module.
   // TODO: ideally we shouldn't construct SourceFile by hand, but it should be delegated to
   // SourceFileFetcher
   pub fn get_compiled_source_file(
     self: &Self,
-    source_file: &SourceFile,
+    module_url: &Url,
   ) -> Result<SourceFile, ErrBox> {
     let cache_key = self
       .disk_cache
-      .get_cache_filename_with_extension(&source_file.url, "js");
+      .get_cache_filename_with_extension(&module_url, "js");
     let compiled_code = self.disk_cache.get(&cache_key)?;
     let compiled_code_filename = self.disk_cache.location.join(cache_key);
     debug!("compiled filename: {:?}", compiled_code_filename);

     let compiled_module = SourceFile {
-      url: source_file.url.clone(),
+      url: module_url.clone(),
       filename: compiled_code_filename,
       media_type: msg::MediaType::JavaScript,
       source_code: compiled_code,
@@ -481,7 +521,7 @@ impl TsCompiler {
     let version_hash = source_code_version_hash(
       &source_file.source_code,
       version::DENO,
-      &self.config_hash,
+      &self.config.hash,
     );

     let compiled_file_metadata = CompiledFileMetadata {
@@ -619,7 +659,7 @@ mod tests {
       self: &Self,
       state: ThreadSafeState,
       source_file: &SourceFile,
-    ) -> Result<SourceFile, ErrBox> {
+    ) -> Result<CompiledModule, ErrBox> {
       tokio_util::block_on(self.compile_async(state, source_file))
     }
   }
@@ -630,24 +670,25 @@
     let specifier =
       ModuleSpecifier::resolve_url_or_path("./tests/002_hello.ts").unwrap();

-    let mut out = SourceFile {
+    let out = SourceFile {
       url: specifier.as_url().clone(),
       filename: PathBuf::from("/tests/002_hello.ts"),
       media_type: msg::MediaType::TypeScript,
-      source_code: include_bytes!("../tests/002_hello.ts").to_vec(),
+      source_code: include_bytes!("../../tests/002_hello.ts").to_vec(),
     };

     let mock_state = ThreadSafeState::mock(vec![
       String::from("./deno"),
       String::from("hello.js"),
     ]);
-    out = mock_state
+    let compiled = mock_state
      .ts_compiler
      .compile_sync(mock_state.clone(), &out)
      .unwrap();
     assert!(
-      out
-        .source_code
+      compiled
+        .code
+        .as_bytes()
         .starts_with("console.log(\"Hello World\");".as_bytes())
     );
   })
@@ -17,8 +17,6 @@ pub struct DenoDir {
 }

 impl DenoDir {
-  // Must be called before using any function from this module.
-  // https://github.com/denoland/deno/blob/golang/deno_dir.go#L99-L111
   pub fn new(custom_root: Option<PathBuf>) -> std::io::Result<Self> {
     // Only setup once.
     let home_dir = dirs::home_dir().expect("Could not get home directory.");
@@ -18,6 +18,10 @@ impl DiskCache {
     }
   }

+  // TODO(bartlomieju) this method is not working properly for Windows paths,
+  // Example: file:///C:/deno/js/unit_test_runner.ts
+  // would produce: C:deno\\js\\unit_test_runner.ts
+  // it should produce: file\deno\js\unit_test_runner.ts
   pub fn get_cache_filename(self: &Self, url: &Url) -> PathBuf {
     let mut out = PathBuf::new();
@@ -39,27 +39,6 @@ pub struct SourceFile {
   pub source_code: Vec<u8>,
 }

-impl SourceFile {
-  // TODO(bartlomieju): this method should be implemented on new `CompiledSourceFile`
-  // trait and should be handled by "compiler pipeline"
-  pub fn js_source(&self) -> String {
-    if self.media_type == msg::MediaType::TypeScript {
-      panic!("TypeScript module has no JS source, did you forget to run it through compiler?");
-    }
-
-    // TODO: this should be done by compiler and JS module should be returned
-    if self.media_type == msg::MediaType::Json {
-      return format!(
-        "export default {};",
-        str::from_utf8(&self.source_code).unwrap()
-      );
-    }
-
-    // it's either JS or Unknown media type
-    str::from_utf8(&self.source_code).unwrap().to_string()
-  }
-}
-
 pub type SourceFileFuture =
   dyn Future<Item = SourceFile, Error = ErrBox> + Send;
cli/main.rs (28 lines changed)

@@ -16,7 +16,7 @@ extern crate rand;
 extern crate url;

 mod ansi;
-pub mod compiler;
+pub mod compilers;
 pub mod deno_dir;
 pub mod deno_error;
 pub mod diagnostics;
@@ -99,6 +99,7 @@ fn js_check(r: Result<(), ErrBox>) {
   }
 }

+// TODO: we might want to rethink how this method works
 pub fn print_file_info(
   worker: Worker,
   module_specifier: &ModuleSpecifier,
@@ -110,7 +111,7 @@ pub fn print_file_info(
     .file_fetcher
     .fetch_source_file_async(&module_specifier)
     .map_err(|err| println!("{}", err))
-    .and_then(move |out| {
+    .and_then(|out| {
       println!(
         "{} {}",
         ansi::bold("local:".to_string()),
@@ -125,18 +126,25 @@ pub fn print_file_info(

       state_
         .clone()
-        .ts_compiler
-        .compile_async(state_.clone(), &out)
+        .fetch_compiled_module(&module_specifier_)
         .map_err(|e| {
           debug!("compiler error exiting!");
           eprintln!("\n{}", e.to_string());
           std::process::exit(1);
         }).and_then(move |compiled| {
-          if out.media_type == msg::MediaType::TypeScript {
+          if out.media_type == msg::MediaType::TypeScript
+            || (out.media_type == msg::MediaType::JavaScript
+              && state_.ts_compiler.compile_js)
+          {
+            let compiled_source_file = state_
+              .ts_compiler
+              .get_compiled_source_file(&out.url)
+              .unwrap();
+
             println!(
               "{} {}",
               ansi::bold("compiled:".to_string()),
-              compiled.filename.to_str().unwrap(),
+              compiled_source_file.filename.to_str().unwrap(),
             );
           }

@@ -152,12 +160,8 @@ pub fn print_file_info(
           );
         }

-        if let Some(deps) = worker
-          .state
-          .modules
-          .lock()
-          .unwrap()
-          .deps(&compiled.url.to_string())
+        if let Some(deps) =
+          worker.state.modules.lock().unwrap().deps(&compiled.name)
         {
           println!("{}{}", ansi::bold("deps:\n".to_string()), deps.name);
           if let Some(ref depsdeps) = deps.deps {
cli/state.rs (101 lines changed)

@@ -1,11 +1,14 @@
 // Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
-use crate::compiler::TsCompiler;
+use crate::compilers::CompiledModule;
+use crate::compilers::JsCompiler;
+use crate::compilers::JsonCompiler;
+use crate::compilers::TsCompiler;
 use crate::deno_dir;
-use crate::file_fetcher::SourceFile;
 use crate::file_fetcher::SourceFileFetcher;
 use crate::flags;
 use crate::global_timer::GlobalTimer;
 use crate::import_map::ImportMap;
+use crate::msg;
 use crate::ops;
 use crate::permissions::DenoPermissions;
 use crate::progress::Progress;
@@ -26,6 +29,7 @@ use std;
 use std::collections::HashMap;
 use std::env;
 use std::ops::Deref;
+use std::str;
 use std::sync::atomic::{AtomicUsize, Ordering};
 use std::sync::Arc;
 use std::sync::Mutex;
@@ -77,6 +81,8 @@ pub struct State {
   pub seeded_rng: Option<Mutex<StdRng>>,

   pub file_fetcher: SourceFileFetcher,
+  pub js_compiler: JsCompiler,
+  pub json_compiler: JsonCompiler,
   pub ts_compiler: TsCompiler,
 }

@@ -103,28 +109,6 @@ impl ThreadSafeState {
     }
   }

-pub fn fetch_source_file_and_maybe_compile_async(
-  state: &ThreadSafeState,
-  module_specifier: &ModuleSpecifier,
-) -> impl Future<Item = SourceFile, Error = ErrBox> {
-  let state_ = state.clone();
-
-  state_
-    .file_fetcher
-    .fetch_source_file_async(&module_specifier)
-    .and_then(move |out| {
-      state_
-        .clone()
-        .ts_compiler
-        .compile_async(state_.clone(), &out)
-        .map_err(|e| {
-          debug!("compiler error exiting!");
-          eprintln!("\n{}", e.to_string());
-          std::process::exit(1);
-        })
-    })
-}
-
 impl Loader for ThreadSafeState {
   fn resolve(
     &self,
@@ -150,16 +134,14 @@ impl Loader for ThreadSafeState {
     module_specifier: &ModuleSpecifier,
   ) -> Box<deno::SourceCodeInfoFuture> {
     self.metrics.resolve_count.fetch_add(1, Ordering::SeqCst);
-    Box::new(
-      fetch_source_file_and_maybe_compile_async(self, module_specifier).map(
-        |source_file| deno::SourceCodeInfo {
+    Box::new(self.fetch_compiled_module(module_specifier).map(
+      |compiled_module| deno::SourceCodeInfo {
         // Real module name, might be different from initial specifier
         // due to redirections.
-        code: source_file.js_source(),
-        module_name: source_file.url.to_string(),
+        code: compiled_module.code,
+        module_name: compiled_module.name,
       },
-      ),
-    )
+    ))
   }
 }

@@ -192,36 +174,26 @@ impl ThreadSafeState {
       dir.gen_cache.clone(),
       !flags.reload,
       flags.config_path.clone(),
-    );
+    )?;

     let main_module: Option<ModuleSpecifier> = if argv_rest.len() <= 1 {
       None
     } else {
       let root_specifier = argv_rest[1].clone();
-      match ModuleSpecifier::resolve_url_or_path(&root_specifier) {
-        Ok(specifier) => Some(specifier),
-        Err(e) => {
-          // TODO: handle unresolvable specifier
-          panic!("Unable to resolve root specifier: {:?}", e);
-        }
-      }
+      Some(ModuleSpecifier::resolve_url_or_path(&root_specifier)?)
     };

-    let mut import_map = None;
-    if let Some(file_name) = &flags.import_map_path {
-      let base_url = match &main_module {
-        Some(module_specifier) => module_specifier.clone(),
-        None => unreachable!(),
-      };
-
-      match ImportMap::load(&base_url.to_string(), file_name) {
-        Ok(map) => import_map = Some(map),
-        Err(err) => {
-          println!("{:?}", err);
-          panic!("Error parsing import map");
-        }
-      }
-    }
+    let import_map: Option<ImportMap> = match &flags.import_map_path {
+      None => None,
+      Some(file_name) => {
+        let base_url = match &main_module {
+          Some(module_specifier) => module_specifier.clone(),
+          None => unreachable!(),
+        };
+        let import_map = ImportMap::load(&base_url.to_string(), file_name)?;
+        Some(import_map)
+      }
+    };

     let mut seeded_rng = None;
     if let Some(seed) = flags.seed {
@@ -249,11 +221,42 @@ impl ThreadSafeState {
       seeded_rng,
       file_fetcher,
       ts_compiler,
+      js_compiler: JsCompiler {},
+      json_compiler: JsonCompiler {},
     };

     Ok(ThreadSafeState(Arc::new(state)))
   }

+  pub fn fetch_compiled_module(
+    self: &Self,
+    module_specifier: &ModuleSpecifier,
+  ) -> impl Future<Item = CompiledModule, Error = ErrBox> {
+    let state_ = self.clone();
+
+    self
+      .file_fetcher
+      .fetch_source_file_async(&module_specifier)
+      .and_then(move |out| match out.media_type {
+        msg::MediaType::Unknown => {
+          state_.js_compiler.compile_async(state_.clone(), &out)
+        }
+        msg::MediaType::Json => {
+          state_.json_compiler.compile_async(state_.clone(), &out)
+        }
+        msg::MediaType::TypeScript => {
+          state_.ts_compiler.compile_async(state_.clone(), &out)
+        }
+        msg::MediaType::JavaScript => {
+          if state_.ts_compiler.compile_js {
+            state_.ts_compiler.compile_async(state_.clone(), &out)
+          } else {
+            state_.js_compiler.compile_async(state_.clone(), &out)
+          }
+        }
+      })
+  }
+
   /// Read main module from argv
   pub fn main_module(&self) -> Option<ModuleSpecifier> {
     match &self.main_module {
@@ -219,6 +219,8 @@ function getExtension(
 }

 class Host implements ts.CompilerHost {
+  extensionCache: Record<string, ts.Extension> = {};
+
   private readonly _options: ts.CompilerOptions = {
     allowJs: true,
     allowNonTsExtensions: true,
@@ -370,10 +372,16 @@ class Host implements ts.CompilerHost {
       // This flags to the compiler to not go looking to transpile functional
       // code, anything that is in `/$asset$/` is just library code
       const isExternalLibraryImport = moduleName.startsWith(ASSETS);
+      const extension = getExtension(
+        resolvedFileName,
+        SourceFile.mediaType
+      );
+      this.extensionCache[resolvedFileName] = extension;
+
       const r = {
         resolvedFileName,
         isExternalLibraryImport,
-        extension: getExtension(resolvedFileName, SourceFile.mediaType)
+        extension
       };
       return r;
     } else {
@@ -401,6 +409,21 @@ class Host implements ts.CompilerHost {
     } else {
       assert(sourceFiles != null && sourceFiles.length == 1);
       const sourceFileName = sourceFiles![0].fileName;
+      const maybeExtension = this.extensionCache[sourceFileName];
+
+      if (maybeExtension) {
+        // NOTE: If it's a `.json` file we don't want to write it to disk.
+        // JSON files are loaded and used by TS compiler to check types, but we don't want
+        // to emit them to disk because output file is the same as input file.
+        if (maybeExtension === ts.Extension.Json) {
+          return;
+        }
+
+        // NOTE: JavaScript files are only emitted to disk if `checkJs` option in on
+        if (maybeExtension === ts.Extension.Js && !this._options.checkJs) {
+          return;
+        }
+      }
+
       if (fileName.endsWith(".map")) {
         // Source Map
tests/038_checkjs.js (new file, 6 lines)

// console.log intentionally misspelled to trigger a type error
consol.log("hello world!");

// the following error should be ignored and not output to the console
// eslint-disable-next-line
const foo = new Foo();
tests/038_checkjs.js.out (new file, 15 lines)

[WILDCARD]
error TS2552: Cannot find name 'consol'. Did you mean 'console'?

[WILDCARD]tests/038_checkjs.js:2:1

2 consol.log("hello world!");
[WILDCARD]
error TS2552: Cannot find name 'Foo'. Did you mean 'foo'?

[WILDCARD]tests/038_checkjs.js:6:17

6 const foo = new Foo();
[WILDCARD]
Found 2 errors.
[WILDCARD]
tests/038_checkjs.test (new file, 5 lines)

# checking if JS file is run through TS compiler
args: run --reload --config tests/038_checkjs.tsconfig.json tests/038_checkjs.js
check_stderr: true
exit_code: 1
output: tests/038_checkjs.js.out
tests/038_checkjs.tsconfig.json (new file, 5 lines)

{
  "compilerOptions": {
    "checkJs": true
  }
}