Mirror of https://github.com/denoland/deno.git, synced 2024-11-24 15:19:26 -05:00
parent 48c6f71787
commit 84c793275b
17 changed files with 917 additions and 1244 deletions
@@ -6,8 +6,9 @@ rustflags = [
"-C",
"target-feature=+crt-static",
"-C",
# increase the stack size to prevent swc overflowing the stack in debug
"link-arg=/STACK:3145728",
# increase the stack size to prevent overflowing the
# stack in debug when launching sub commands
"link-arg=/STACK:4194304",
]

[target.aarch64-apple-darwin]
@@ -117,6 +117,13 @@ pub struct FmtFlags {
pub no_semicolons: Option<bool>,
}

impl FmtFlags {
pub fn is_stdin(&self) -> bool {
let args = &self.files.include;
args.len() == 1 && args[0].to_string_lossy() == "-"
}
}

#[derive(Clone, Debug, Eq, PartialEq)]
pub struct InitFlags {
pub dir: Option<String>,

@@ -154,6 +161,13 @@ pub struct LintFlags {
pub compact: bool,
}

impl LintFlags {
pub fn is_stdin(&self) -> bool {
let args = &self.files.include;
args.len() == 1 && args[0].to_string_lossy() == "-"
}
}
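The two is_stdin() helpers added above both treat a single "-" include argument as a request to read from stdin. A minimal sketch of a call site, assuming hypothetical format_stdin / format_files helpers (not code from this commit):

fn run_fmt(fmt_flags: &FmtFlags) -> Result<(), AnyError> {
  if fmt_flags.is_stdin() {
    // the lone "-" argument means: read the source text from stdin
    format_stdin(fmt_flags)
  } else {
    // otherwise walk the configured include paths on disk
    format_files(&fmt_flags.files)
  }
}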
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct ReplFlags {
pub eval_files: Option<Vec<String>>,
@@ -148,7 +148,6 @@ impl BenchOptions {

#[derive(Clone, Debug, Default)]
pub struct FmtOptions {
pub is_stdin: bool,
pub check: bool,
pub options: FmtOptionsConfig,
pub files: FilesConfig,

@@ -157,24 +156,12 @@ pub struct FmtOptions {
impl FmtOptions {
pub fn resolve(
maybe_fmt_config: Option<FmtConfig>,
mut maybe_fmt_flags: Option<FmtFlags>,
maybe_fmt_flags: Option<FmtFlags>,
) -> Result<Self, AnyError> {
let is_stdin = if let Some(fmt_flags) = maybe_fmt_flags.as_mut() {
let args = &mut fmt_flags.files.include;
if args.len() == 1 && args[0].to_string_lossy() == "-" {
args.pop(); // remove the "-" arg
true
} else {
false
}
} else {
false
};
let (maybe_config_options, maybe_config_files) =
maybe_fmt_config.map(|c| (c.options, c.files)).unzip();

Ok(Self {
is_stdin,
check: maybe_fmt_flags.as_ref().map(|f| f.check).unwrap_or(false),
options: resolve_fmt_options(
maybe_fmt_flags.as_ref(),

@@ -280,27 +267,14 @@ pub enum LintReporterKind {
pub struct LintOptions {
pub rules: LintRulesConfig,
pub files: FilesConfig,
pub is_stdin: bool,
pub reporter_kind: LintReporterKind,
}

impl LintOptions {
pub fn resolve(
maybe_lint_config: Option<LintConfig>,
mut maybe_lint_flags: Option<LintFlags>,
maybe_lint_flags: Option<LintFlags>,
) -> Result<Self, AnyError> {
let is_stdin = if let Some(lint_flags) = maybe_lint_flags.as_mut() {
let args = &mut lint_flags.files.include;
if args.len() == 1 && args[0].to_string_lossy() == "-" {
args.pop(); // remove the "-" arg
true
} else {
false
}
} else {
false
};

let mut maybe_reporter_kind =
maybe_lint_flags.as_ref().and_then(|lint_flags| {
if lint_flags.json {

@@ -347,7 +321,6 @@ impl LintOptions {
maybe_lint_config.map(|c| (c.files, c.rules)).unzip();
Ok(Self {
reporter_kind: maybe_reporter_kind.unwrap_or_default(),
is_stdin,
files: resolve_files(maybe_config_files, Some(maybe_file_flags))?,
rules: resolve_lint_rules_options(
maybe_config_rules,

@@ -1112,10 +1085,6 @@ impl CliOptions {
&self.flags.cache_path
}

pub fn no_clear_screen(&self) -> bool {
self.flags.no_clear_screen
}

pub fn no_prompt(&self) -> bool {
resolve_no_prompt(&self.flags)
}

@@ -1170,8 +1139,25 @@ impl CliOptions {
&self.flags.v8_flags
}

pub fn watch_paths(&self) -> &Option<Vec<PathBuf>> {
&self.flags.watch
pub fn watch_paths(&self) -> Option<Vec<PathBuf>> {
if let Some(mut paths) = self.flags.watch.clone() {
if let Ok(Some(import_map_path)) = self
.resolve_import_map_specifier()
.map(|ms| ms.and_then(|ref s| s.to_file_path().ok()))
{
paths.push(import_map_path);
}
if let Some(specifier) = self.maybe_config_file_specifier() {
if specifier.scheme() == "file" {
if let Ok(path) = specifier.to_file_path() {
paths.push(path);
}
}
}
Some(paths)
} else {
None
}
}
}
cli/cache/parsed_source.rs (vendored, 4 changed lines)
@@ -94,10 +94,6 @@ impl ParsedSourceCache {
}
}

pub fn clear(&self) {
self.sources.0.lock().clear();
}

pub fn get_parsed_source_from_esm_module(
&self,
module: &deno_graph::EsmModule,
@@ -17,6 +17,7 @@ use crate::cache::NodeAnalysisCache;
use crate::cache::ParsedSourceCache;
use crate::emit::Emitter;
use crate::file_fetcher::FileFetcher;
use crate::graph_util::FileWatcherReporter;
use crate::graph_util::ModuleGraphBuilder;
use crate::graph_util::ModuleGraphContainer;
use crate::http_util::HttpClient;

@@ -39,8 +40,6 @@ use crate::standalone::DenoCompileBinaryWriter;
use crate::tools::check::TypeChecker;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
use crate::watcher::FileWatcher;
use crate::watcher::FileWatcherReporter;
use crate::worker::CliMainWorkerFactory;
use crate::worker::CliMainWorkerOptions;
use crate::worker::HasNodeSpecifierChecker;

@@ -148,7 +147,6 @@ struct CliFactoryServices {
blob_store: Deferred<BlobStore>,
parsed_source_cache: Deferred<Arc<ParsedSourceCache>>,
resolver: Deferred<Arc<CliGraphResolver>>,
file_watcher: Deferred<Arc<FileWatcher>>,
maybe_file_watcher_reporter: Deferred<Option<FileWatcherReporter>>,
module_graph_builder: Deferred<Arc<ModuleGraphBuilder>>,
module_load_preparer: Deferred<Arc<ModuleLoadPreparer>>,

@@ -412,20 +410,6 @@ impl CliFactory {
.await
}

pub fn file_watcher(&self) -> Result<&Arc<FileWatcher>, AnyError> {
self.services.file_watcher.get_or_try_init(|| {
let watcher = FileWatcher::new(
self.options.clone(),
self.cjs_resolutions().clone(),
self.graph_container().clone(),
self.maybe_file_watcher_reporter().clone(),
self.parsed_source_cache()?.clone(),
);
watcher.init_watcher();
Ok(Arc::new(watcher))
})
}

pub fn maybe_file_watcher_reporter(&self) -> &Option<FileWatcherReporter> {
let maybe_sender = self.maybe_sender.borrow_mut().take();
self

@@ -531,6 +515,7 @@ impl CliFactory {
self.npm_resolver().await?.clone(),
self.parsed_source_cache()?.clone(),
self.maybe_lockfile().clone(),
self.maybe_file_watcher_reporter().clone(),
self.emit_cache()?.clone(),
self.file_fetcher()?.clone(),
self.type_checker().await?.clone(),

@@ -600,57 +585,6 @@ impl CliFactory {
))
}

/// Gets a function that can be used to create a CliMainWorkerFactory
/// for a file watcher.
pub async fn create_cli_main_worker_factory_func(
&self,
) -> Result<Arc<dyn Fn() -> CliMainWorkerFactory>, AnyError> {
let emitter = self.emitter()?.clone();
let graph_container = self.graph_container().clone();
let module_load_preparer = self.module_load_preparer().await?.clone();
let parsed_source_cache = self.parsed_source_cache()?.clone();
let resolver = self.resolver().await?.clone();
let blob_store = self.blob_store().clone();
let cjs_resolutions = self.cjs_resolutions().clone();
let node_code_translator = self.node_code_translator().await?.clone();
let options = self.cli_options().clone();
let main_worker_options = self.create_cli_main_worker_options()?;
let fs = self.fs().clone();
let root_cert_store_provider = self.root_cert_store_provider().clone();
let node_resolver = self.node_resolver().await?.clone();
let npm_resolver = self.npm_resolver().await?.clone();
let maybe_inspector_server = self.maybe_inspector_server().clone();
let maybe_lockfile = self.maybe_lockfile().clone();
Ok(Arc::new(move || {
CliMainWorkerFactory::new(
StorageKeyResolver::from_options(&options),
npm_resolver.clone(),
node_resolver.clone(),
Box::new(CliHasNodeSpecifierChecker(graph_container.clone())),
blob_store.clone(),
Box::new(CliModuleLoaderFactory::new(
&options,
emitter.clone(),
graph_container.clone(),
module_load_preparer.clone(),
parsed_source_cache.clone(),
resolver.clone(),
NpmModuleLoader::new(
cjs_resolutions.clone(),
node_code_translator.clone(),
fs.clone(),
node_resolver.clone(),
),
)),
root_cert_store_provider.clone(),
fs.clone(),
maybe_inspector_server.clone(),
maybe_lockfile.clone(),
main_worker_options.clone(),
)
}))
}

pub async fn create_cli_main_worker_factory(
&self,
) -> Result<CliMainWorkerFactory, AnyError> {
@@ -34,6 +34,7 @@ use deno_runtime::permissions::PermissionsContainer;
use import_map::ImportMapError;
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::PathBuf;
use std::sync::Arc;

#[derive(Clone, Copy)]

@@ -169,6 +170,7 @@ pub struct ModuleGraphBuilder {
npm_resolver: Arc<CliNpmResolver>,
parsed_source_cache: Arc<ParsedSourceCache>,
lockfile: Option<Arc<Mutex<Lockfile>>>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
emit_cache: cache::EmitCache,
file_fetcher: Arc<FileFetcher>,
type_checker: Arc<TypeChecker>,

@@ -182,6 +184,7 @@ impl ModuleGraphBuilder {
npm_resolver: Arc<CliNpmResolver>,
parsed_source_cache: Arc<ParsedSourceCache>,
lockfile: Option<Arc<Mutex<Lockfile>>>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
emit_cache: cache::EmitCache,
file_fetcher: Arc<FileFetcher>,
type_checker: Arc<TypeChecker>,

@@ -192,6 +195,7 @@ impl ModuleGraphBuilder {
npm_resolver,
parsed_source_cache,
lockfile,
maybe_file_watcher_reporter,
emit_cache,
file_fetcher,
type_checker,

@@ -210,6 +214,10 @@ impl ModuleGraphBuilder {
let graph_resolver = cli_resolver.as_graph_resolver();
let graph_npm_resolver = cli_resolver.as_graph_npm_resolver();
let analyzer = self.parsed_source_cache.as_analyzer();
let maybe_file_watcher_reporter = self
.maybe_file_watcher_reporter
.as_ref()
.map(|r| r.as_reporter());

let mut graph = ModuleGraph::new(graph_kind);
self

@@ -223,7 +231,7 @@ impl ModuleGraphBuilder {
resolver: Some(graph_resolver),
npm_resolver: Some(graph_npm_resolver),
module_analyzer: Some(&*analyzer),
reporter: None,
reporter: maybe_file_watcher_reporter,
},
)
.await?;

@@ -250,6 +258,11 @@ impl ModuleGraphBuilder {
let analyzer = self.parsed_source_cache.as_analyzer();
let graph_kind = self.options.type_check_mode().as_graph_kind();
let mut graph = ModuleGraph::new(graph_kind);
let maybe_file_watcher_reporter = self
.maybe_file_watcher_reporter
.as_ref()
.map(|r| r.as_reporter());

self
.build_graph_with_npm_resolution(
&mut graph,

@@ -261,7 +274,7 @@ impl ModuleGraphBuilder {
resolver: Some(graph_resolver),
npm_resolver: Some(graph_npm_resolver),
module_analyzer: Some(&*analyzer),
reporter: None,
reporter: maybe_file_watcher_reporter,
},
)
.await?;

@@ -415,7 +428,6 @@ struct GraphData {

/// Holds the `ModuleGraph` and what parts of it are type checked.
pub struct ModuleGraphContainer {
graph_kind: GraphKind,
// Allow only one request to update the graph data at a time,
// but allow other requests to read from it at any time even
// while another request is updating the data.

@@ -426,7 +438,6 @@ pub struct ModuleGraphContainer {
impl ModuleGraphContainer {
pub fn new(graph_kind: GraphKind) -> Self {
Self {
graph_kind,
update_queue: Default::default(),
graph_data: Arc::new(RwLock::new(GraphData {
graph: Arc::new(ModuleGraph::new(graph_kind)),

@@ -435,10 +446,6 @@ impl ModuleGraphContainer {
}
}

pub fn clear(&self) {
self.graph_data.write().graph = Arc::new(ModuleGraph::new(self.graph_kind));
}

/// Acquires a permit to modify the module graph without other code
/// having the chance to modify it. In the meantime, other code may
/// still read from the existing module graph.

@@ -496,6 +503,33 @@ impl ModuleGraphContainer {
}
}

/// Gets if any of the specified root's "file:" dependents are in the
/// provided changed set.
pub fn has_graph_root_local_dependent_changed(
graph: &ModuleGraph,
root: &ModuleSpecifier,
changed_specifiers: &HashSet<ModuleSpecifier>,
) -> bool {
let roots = vec![root.clone()];
let mut dependent_specifiers = graph.walk(
&roots,
deno_graph::WalkOptions {
follow_dynamic: true,
follow_type_only: true,
check_js: true,
},
);
while let Some((s, _)) = dependent_specifiers.next() {
if s.scheme() != "file" {
// skip walking this remote module's dependencies
dependent_specifiers.skip_previous_dependencies();
} else if changed_specifiers.contains(s) {
return true;
}
}
false
}
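The helper above walks a root's dependency graph and reports whether any local ("file:") module in that subtree is in the changed set. A sketch of how a watcher could use it to pick which roots to re-run, assuming graph, roots, and changed_specifiers come from the surrounding watcher code (not part of this diff):

fn roots_to_rerun(
  graph: &ModuleGraph,
  roots: &[ModuleSpecifier],
  changed_specifiers: &HashSet<ModuleSpecifier>,
) -> Vec<ModuleSpecifier> {
  // keep only the roots whose local dependency subtree intersects the changed set
  roots
    .iter()
    .filter(|root| {
      has_graph_root_local_dependent_changed(graph, root, changed_specifiers)
    })
    .cloned()
    .collect()
}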
/// A permit for updating the module graph. When complete and
/// everything looks fine, calling `.commit()` will store the
/// new graph in the ModuleGraphContainer.
@@ -521,6 +555,43 @@ impl<'a> ModuleGraphUpdatePermit<'a> {
}
}

#[derive(Clone, Debug)]
pub struct FileWatcherReporter {
sender: tokio::sync::mpsc::UnboundedSender<Vec<PathBuf>>,
file_paths: Arc<Mutex<Vec<PathBuf>>>,
}

impl FileWatcherReporter {
pub fn new(sender: tokio::sync::mpsc::UnboundedSender<Vec<PathBuf>>) -> Self {
Self {
sender,
file_paths: Default::default(),
}
}

pub fn as_reporter(&self) -> &dyn deno_graph::source::Reporter {
self
}
}

impl deno_graph::source::Reporter for FileWatcherReporter {
fn on_load(
&self,
specifier: &ModuleSpecifier,
modules_done: usize,
modules_total: usize,
) {
let mut file_paths = self.file_paths.lock();
if specifier.scheme() == "file" {
file_paths.push(specifier.to_file_path().unwrap());
}

if modules_done == modules_total {
self.sender.send(file_paths.drain(..).collect()).unwrap();
}
}
}
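FileWatcherReporter batches the "file:" specifiers seen during a graph build and sends them over an unbounded channel once the build completes (modules_done == modules_total). A sketch of the wiring, assuming the receiving side simply logs each batch; the real receiver lives in the watcher code:

async fn watch_graph_loads() {
  let (sender, mut receiver) =
    tokio::sync::mpsc::unbounded_channel::<Vec<std::path::PathBuf>>();
  let reporter = FileWatcherReporter::new(sender);
  // reporter.as_reporter() is what the graph_util hunks above now pass to
  // deno_graph as `reporter: maybe_file_watcher_reporter`.
  let _ = reporter.as_reporter();
  // one batch arrives per completed graph build
  while let Some(paths) = receiver.recv().await {
    println!("newly loaded local files: {}", paths.len());
  }
}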
#[cfg(test)]
mod test {
use std::sync::Arc;
cli/main.rs (33 changed lines)
@@ -23,7 +23,6 @@ mod tools;
mod tsc;
mod util;
mod version;
mod watcher;
mod worker;

use crate::args::flags_from_vec;

@@ -33,7 +32,6 @@ use crate::util::display;
use crate::util::v8::get_v8_flags_from_env;
use crate::util::v8::init_v8_flags;

use args::CliOptions;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::error::JsError;

@@ -84,13 +82,10 @@ fn spawn_subcommand<F: Future<Output = T> + 'static, T: SubcommandOutput>(
async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
let handle = match flags.subcommand.clone() {
DenoSubcommand::Bench(bench_flags) => spawn_subcommand(async {
let cli_options = CliOptions::from_flags(flags)?;
let bench_options = cli_options.resolve_bench_options(bench_flags)?;
if cli_options.watch_paths().is_some() {
tools::bench::run_benchmarks_with_watch(cli_options, bench_options)
.await
if flags.watch.is_some() {
tools::bench::run_benchmarks_with_watch(flags, bench_flags).await
} else {
tools::bench::run_benchmarks(cli_options, bench_options).await
tools::bench::run_benchmarks(flags, bench_flags).await
}
}),
DenoSubcommand::Bundle(bundle_flags) => spawn_subcommand(async {

@@ -125,11 +120,11 @@ async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
DenoSubcommand::Coverage(coverage_flags) => spawn_subcommand(async {
tools::coverage::cover_files(flags, coverage_flags).await
}),
DenoSubcommand::Fmt(fmt_flags) => spawn_subcommand(async move {
let cli_options = CliOptions::from_flags(flags.clone())?;
let fmt_options = cli_options.resolve_fmt_options(fmt_flags)?;
tools::fmt::format(cli_options, fmt_options).await
}),
DenoSubcommand::Fmt(fmt_flags) => {
spawn_subcommand(
async move { tools::fmt::format(flags, fmt_flags).await },
)
}
DenoSubcommand::Init(init_flags) => {
spawn_subcommand(async { tools::init::init_project(init_flags).await })
}

@@ -148,9 +143,7 @@ async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
tools::lint::print_rules_list(lint_flags.json);
Ok(())
} else {
let cli_options = CliOptions::from_flags(flags)?;
let lint_options = cli_options.resolve_lint_options(lint_flags)?;
tools::lint::lint(cli_options, lint_options).await
tools::lint::lint(flags, lint_flags).await
}
}),
DenoSubcommand::Repl(repl_flags) => {

@@ -178,13 +171,11 @@ async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
PathBuf::from(coverage_dir).canonicalize()?,
);
}
let cli_options = CliOptions::from_flags(flags)?;
let test_options = cli_options.resolve_test_options(test_flags)?;

if cli_options.watch_paths().is_some() {
tools::test::run_tests_with_watch(cli_options, test_options).await
if flags.watch.is_some() {
tools::test::run_tests_with_watch(flags, test_flags).await
} else {
tools::test::run_tests(cli_options, test_options).await
tools::test::run_tests(flags, test_flags).await
}
})
}
@@ -7,6 +7,7 @@ use crate::cache::ParsedSourceCache;
use crate::emit::Emitter;
use crate::graph_util::graph_lock_or_exit;
use crate::graph_util::graph_valid_with_cli_options;
use crate::graph_util::FileWatcherReporter;
use crate::graph_util::ModuleGraphBuilder;
use crate::graph_util::ModuleGraphContainer;
use crate::node;

@@ -17,7 +18,6 @@ use crate::tools::check::TypeChecker;
use crate::util::progress_bar::ProgressBar;
use crate::util::text_encoding::code_without_source_map;
use crate::util::text_encoding::source_map_from_code;
use crate::watcher::FileWatcherReporter;
use crate::worker::ModuleLoaderFactory;

use deno_ast::MediaType;

@@ -115,12 +115,10 @@ impl ModuleLoadPreparer {
let maybe_imports = self.options.to_maybe_imports()?;
let graph_resolver = self.resolver.as_graph_resolver();
let graph_npm_resolver = self.resolver.as_graph_npm_resolver();
let maybe_file_watcher_reporter: Option<&dyn deno_graph::source::Reporter> =
if let Some(reporter) = &self.maybe_file_watcher_reporter {
Some(reporter)
} else {
None
};
let maybe_file_watcher_reporter = self
.maybe_file_watcher_reporter
.as_ref()
.map(|r| r.as_reporter());

let analyzer = self.parsed_source_cache.as_analyzer();

@@ -800,10 +798,6 @@ impl NpmModuleLoader {
pub struct CjsResolutionStore(Mutex<HashSet<ModuleSpecifier>>);

impl CjsResolutionStore {
pub fn clear(&self) {
self.0.lock().clear();
}

pub fn contains(&self, specifier: &ModuleSpecifier) -> bool {
self.0.lock().contains(specifier)
}
@@ -1,7 +1,6 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.

use flaky_test::flaky_test;
use std::fs::write;
use test_util as util;
use test_util::assert_contains;
use test_util::TempDir;

@@ -508,7 +507,7 @@ async fn bundle_js_watch() {
// Test strategy extends this of test bundle_js by adding watcher
let t = TempDir::new();
let file_to_watch = t.path().join("file_to_watch.ts");
write(&file_to_watch, "console.log('Hello world');").unwrap();
file_to_watch.write("console.log('Hello world');");
assert!(file_to_watch.is_file());
let t = TempDir::new();
let bundle = t.path().join("mod6.bundle.js");

@@ -529,15 +528,14 @@ async fn bundle_js_watch() {
assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Warning");
assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "deno_emit");
assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Check");
assert_contains!(
next_line(&mut stderr_lines).await.unwrap(),
"Bundle started"
);
assert_contains!(
next_line(&mut stderr_lines).await.unwrap(),
"file_to_watch.ts"
);
let line = next_line(&mut stderr_lines).await.unwrap();
assert_contains!(line, "file_to_watch.ts");
assert_contains!(line, "Check");
assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Bundle");
assert_contains!(
next_line(&mut stderr_lines).await.unwrap(),
"mod6.bundle.js"

@@ -547,13 +545,13 @@ async fn bundle_js_watch() {
wait_contains("Bundle finished", &mut stderr_lines).await;

write(&file_to_watch, "console.log('Hello world2');").unwrap();
file_to_watch.write("console.log('Hello world2');");

assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Check");
let line = next_line(&mut stderr_lines).await.unwrap();
// Should not clear screen, as we are in non-TTY environment
assert_not_contains!(&line, CLEAR_SCREEN);
assert_contains!(&line, "File change detected!");
assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Check");
assert_contains!(
next_line(&mut stderr_lines).await.unwrap(),
"file_to_watch.ts"

@@ -567,7 +565,7 @@ async fn bundle_js_watch() {
wait_contains("Bundle finished", &mut stderr_lines).await;

// Confirm that the watcher keeps on working even if the file is updated and has invalid syntax
write(&file_to_watch, "syntax error ^^").unwrap();
file_to_watch.write("syntax error ^^");

assert_contains!(
next_line(&mut stderr_lines).await.unwrap(),

@@ -583,7 +581,7 @@ async fn bundle_js_watch() {
async fn bundle_watch_not_exit() {
let t = TempDir::new();
let file_to_watch = t.path().join("file_to_watch.ts");
write(&file_to_watch, "syntax error ^^").unwrap();
file_to_watch.write("syntax error ^^");
let target_file = t.path().join("target.js");

let mut deno = util::deno_cmd()

@@ -624,17 +622,17 @@ async fn bundle_watch_not_exit() {
assert!(!target_file.is_file());

// Make sure the watcher actually restarts and works fine with the proper syntax
write(&file_to_watch, "console.log(42);").unwrap();
file_to_watch.write("console.log(42);");

assert_contains!(
next_line(&mut stderr_lines).await.unwrap(),
"File change detected"
);
assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Check");
let line = next_line(&mut stderr_lines).await.unwrap();
// Should not clear screen, as we are in non-TTY environment
assert_not_contains!(&line, CLEAR_SCREEN);
assert_contains!(&line, "File change detected!");
assert_contains!(
next_line(&mut stderr_lines).await.unwrap(),
"file_to_watch.ts"
);
assert_contains!(line, "file_to_watch.ts");
assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "target.js");

wait_contains("Bundle finished", &mut stderr_lines).await;

@@ -648,7 +646,7 @@ async fn bundle_watch_not_exit() {
async fn run_watch_no_dynamic() {
let t = TempDir::new();
let file_to_watch = t.path().join("file_to_watch.js");
write(&file_to_watch, "console.log('Hello world');").unwrap();
file_to_watch.write("console.log('Hello world');");

let mut child = util::deno_cmd()
.current_dir(util::testdata_path())

@@ -669,7 +667,7 @@ async fn run_watch_no_dynamic() {
wait_for_watcher("file_to_watch.js", &mut stderr_lines).await;

// Change content of the file
write(&file_to_watch, "console.log('Hello world2');").unwrap();
file_to_watch.write("console.log('Hello world2');");

wait_contains("Restarting", &mut stderr_lines).await;
wait_contains("Hello world2", &mut stdout_lines).await;

@@ -677,51 +675,45 @@ async fn run_watch_no_dynamic() {

// Add dependency
let another_file = t.path().join("another_file.js");
write(&another_file, "export const foo = 0;").unwrap();
write(
&file_to_watch,
"import { foo } from './another_file.js'; console.log(foo);",
)
.unwrap();
another_file.write("export const foo = 0;");
file_to_watch
.write("import { foo } from './another_file.js'; console.log(foo);");

wait_contains("Restarting", &mut stderr_lines).await;
wait_contains("0", &mut stdout_lines).await;
wait_for_watcher("another_file.js", &mut stderr_lines).await;

// Confirm that restarting occurs when a new file is updated
write(&another_file, "export const foo = 42;").unwrap();
another_file.write("export const foo = 42;");

wait_contains("Restarting", &mut stderr_lines).await;
wait_contains("42", &mut stdout_lines).await;
wait_for_watcher("file_to_watch.js", &mut stderr_lines).await;

// Confirm that the watcher keeps on working even if the file is updated and has invalid syntax
write(&file_to_watch, "syntax error ^^").unwrap();
file_to_watch.write("syntax error ^^");

wait_contains("Restarting", &mut stderr_lines).await;
wait_contains("error:", &mut stderr_lines).await;
wait_for_watcher("file_to_watch.js", &mut stderr_lines).await;

// Then restore the file
write(
&file_to_watch,
"import { foo } from './another_file.js'; console.log(foo);",
)
.unwrap();
file_to_watch
.write("import { foo } from './another_file.js'; console.log(foo);");

wait_contains("Restarting", &mut stderr_lines).await;
wait_contains("42", &mut stdout_lines).await;
wait_for_watcher("another_file.js", &mut stderr_lines).await;

// Update the content of the imported file with invalid syntax
write(&another_file, "syntax error ^^").unwrap();
another_file.write("syntax error ^^");

wait_contains("Restarting", &mut stderr_lines).await;
wait_contains("error:", &mut stderr_lines).await;
wait_for_watcher("another_file.js", &mut stderr_lines).await;

// Modify the imported file and make sure that restarting occurs
write(&another_file, "export const foo = 'modified!';").unwrap();
another_file.write("export const foo = 'modified!';");

wait_contains("Restarting", &mut stderr_lines).await;
wait_contains("modified!", &mut stdout_lines).await;

@@ -737,10 +729,10 @@ async fn run_watch_no_dynamic() {
async fn run_watch_external_watch_files() {
let t = TempDir::new();
let file_to_watch = t.path().join("file_to_watch.js");
write(&file_to_watch, "console.log('Hello world');").unwrap();
file_to_watch.write("console.log('Hello world');");

let external_file_to_watch = t.path().join("external_file_to_watch.txt");
write(&external_file_to_watch, "Hello world").unwrap();
external_file_to_watch.write("Hello world");

let mut watch_arg = "--watch=".to_owned();
let external_file_to_watch_str = external_file_to_watch.to_string();

@@ -765,12 +757,12 @@ async fn run_watch_external_watch_files() {
wait_for_watcher("external_file_to_watch.txt", &mut stderr_lines).await;

// Change content of the external file
write(&external_file_to_watch, "Hello world2").unwrap();
external_file_to_watch.write("Hello world2");
wait_contains("Restarting", &mut stderr_lines).await;
wait_contains("Process finished", &mut stderr_lines).await;

// Again (https://github.com/denoland/deno/issues/17584)
write(&external_file_to_watch, "Hello world3").unwrap();
external_file_to_watch.write("Hello world3");
wait_contains("Restarting", &mut stderr_lines).await;
wait_contains("Process finished", &mut stderr_lines).await;
@@ -781,8 +773,7 @@ async fn run_watch_external_watch_files() {
async fn run_watch_load_unload_events() {
let t = TempDir::new();
let file_to_watch = t.path().join("file_to_watch.js");
write(
&file_to_watch,
file_to_watch.write(
r#"
setInterval(() => {}, 0);
window.addEventListener("load", () => {

@@ -793,8 +784,7 @@ async fn run_watch_load_unload_events() {
console.log("unload");
});
"#,
)
.unwrap();
);

let mut child = util::deno_cmd()
.current_dir(util::testdata_path())

@@ -816,8 +806,7 @@ async fn run_watch_load_unload_events() {
wait_for_watcher("file_to_watch.js", &mut stderr_lines).await;

// Change content of the file, this time without an interval to keep it alive.
write(
&file_to_watch,
file_to_watch.write(
r#"
window.addEventListener("load", () => {
console.log("load");

@@ -827,8 +816,7 @@ async fn run_watch_load_unload_events() {
console.log("unload");
});
"#,
)
.unwrap();
);

// Wait for the restart
wait_contains("Restarting", &mut stderr_lines).await;

@@ -849,7 +837,7 @@ async fn run_watch_load_unload_events() {
async fn run_watch_not_exit() {
let t = TempDir::new();
let file_to_watch = t.path().join("file_to_watch.js");
write(&file_to_watch, "syntax error ^^").unwrap();
file_to_watch.write("syntax error ^^");

let mut child = util::deno_cmd()
.current_dir(util::testdata_path())

@@ -871,7 +859,7 @@ async fn run_watch_not_exit() {
wait_for_watcher("file_to_watch.js", &mut stderr_lines).await;

// Make sure the watcher actually restarts and works fine with the proper syntax
write(&file_to_watch, "console.log(42);").unwrap();
file_to_watch.write("console.log(42);");

wait_contains("Restarting", &mut stderr_lines).await;
wait_contains("42", &mut stdout_lines).await;

@@ -887,7 +875,7 @@ async fn run_watch_with_import_map_and_relative_paths() {
filecontent: &'static str,
) -> std::path::PathBuf {
let absolute_path = directory.path().join(filename);
write(&absolute_path, filecontent).unwrap();
absolute_path.write(filecontent);
let relative_path = absolute_path
.as_path()
.strip_prefix(directory.path())

@@ -938,7 +926,7 @@ async fn run_watch_with_import_map_and_relative_paths() {
async fn run_watch_with_ext_flag() {
let t = TempDir::new();
let file_to_watch = t.path().join("file_to_watch");
write(&file_to_watch, "interface I{}; console.log(42);").unwrap();
file_to_watch.write("interface I{}; console.log(42);");

let mut child = util::deno_cmd()
.current_dir(util::testdata_path())

@@ -962,11 +950,7 @@ async fn run_watch_with_ext_flag() {
wait_for_watcher("file_to_watch", &mut stderr_lines).await;
wait_contains("Process finished", &mut stderr_lines).await;

write(
&file_to_watch,
"type Bear = 'polar' | 'grizzly'; console.log(123);",
)
.unwrap();
file_to_watch.write("type Bear = 'polar' | 'grizzly'; console.log(123);");

wait_contains("Restarting!", &mut stderr_lines).await;
wait_contains("123", &mut stdout_lines).await;

@@ -979,11 +963,8 @@ async fn run_watch_with_ext_flag() {
async fn run_watch_error_messages() {
let t = TempDir::new();
let file_to_watch = t.path().join("file_to_watch.js");
write(
&file_to_watch,
"throw SyntaxError(`outer`, {cause: TypeError(`inner`)})",
)
.unwrap();
file_to_watch
.write("throw SyntaxError(`outer`, {cause: TypeError(`inner`)})");

let mut child = util::deno_cmd()
.current_dir(util::testdata_path())

@@ -1000,13 +981,13 @@ async fn run_watch_error_messages() {
wait_contains("Process started", &mut stderr_lines).await;
wait_contains("error: Uncaught SyntaxError: outer", &mut stderr_lines).await;
wait_contains("Caused by: TypeError: inner", &mut stderr_lines).await;
wait_contains("Process finished", &mut stderr_lines).await;
wait_contains("Process failed", &mut stderr_lines).await;

check_alive_then_kill(child);
}

#[tokio::test]
async fn test_watch() {
async fn test_watch_basic() {
let t = TempDir::new();

let mut child = util::deno_cmd()

@@ -1034,18 +1015,10 @@ async fn test_watch() {
let bar_file = t.path().join("bar.js");
let foo_test = t.path().join("foo_test.js");
let bar_test = t.path().join("bar_test.js");
write(&foo_file, "export default function foo() { 1 + 1 }").unwrap();
write(&bar_file, "export default function bar() { 2 + 2 }").unwrap();
write(
&foo_test,
"import foo from './foo.js'; Deno.test('foo', foo);",
)
.unwrap();
write(
bar_test,
"import bar from './bar.js'; Deno.test('bar', bar);",
)
.unwrap();
foo_file.write("export default function foo() { 1 + 1 }");
bar_file.write("export default function bar() { 2 + 2 }");
foo_test.write("import foo from './foo.js'; Deno.test('foo', foo);");
bar_test.write("import bar from './bar.js'; Deno.test('bar', bar);");

assert_eq!(next_line(&mut stdout_lines).await.unwrap(), "");
assert_contains!(

@@ -1064,11 +1037,7 @@ async fn test_watch() {
wait_contains("Test finished", &mut stderr_lines).await;

// Change content of the file
write(
&foo_test,
"import foo from './foo.js'; Deno.test('foobar', foo);",
)
.unwrap();
foo_test.write("import foo from './foo.js'; Deno.test('foobar', foo);");

assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Restarting");
assert_contains!(

@@ -1083,7 +1052,7 @@ async fn test_watch() {

// Add test
let another_test = t.path().join("new_test.js");
write(&another_test, "Deno.test('another one', () => 3 + 3)").unwrap();
another_test.write("Deno.test('another one', () => 3 + 3)");
assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Restarting");
assert_contains!(
next_line(&mut stdout_lines).await.unwrap(),

@@ -1096,8 +1065,7 @@ async fn test_watch() {
wait_contains("Test finished", &mut stderr_lines).await;

// Confirm that restarting occurs when a new file is updated
write(&another_test, "Deno.test('another one', () => 3 + 3); Deno.test('another another one', () => 4 + 4)")
.unwrap();
another_test.write("Deno.test('another one', () => 3 + 3); Deno.test('another another one', () => 4 + 4)");
assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Restarting");
assert_contains!(
next_line(&mut stdout_lines).await.unwrap(),

@@ -1114,7 +1082,7 @@ async fn test_watch() {
wait_contains("Test finished", &mut stderr_lines).await;

// Confirm that the watcher keeps on working even if the file is updated and has invalid syntax
write(&another_test, "syntax error ^^").unwrap();
another_test.write("syntax error ^^");
assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Restarting");
assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "error:");
assert_eq!(next_line(&mut stderr_lines).await.unwrap(), "");

@@ -1129,7 +1097,7 @@ async fn test_watch() {
assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Test failed");

// Then restore the file
write(&another_test, "Deno.test('another one', () => 3 + 3)").unwrap();
another_test.write("Deno.test('another one', () => 3 + 3)");
assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Restarting");
assert_contains!(
next_line(&mut stdout_lines).await.unwrap(),

@@ -1143,11 +1111,8 @@ async fn test_watch() {

// Confirm that the watcher keeps on working even if the file is updated and the test fails
// This also confirms that it restarts when dependencies change
write(
&foo_file,
"export default function foo() { throw new Error('Whoops!'); }",
)
.unwrap();
foo_file
.write("export default function foo() { throw new Error('Whoops!'); }");
assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Restarting");
assert_contains!(
next_line(&mut stdout_lines).await.unwrap(),

@@ -1156,10 +1121,10 @@ async fn test_watch() {
assert_contains!(next_line(&mut stdout_lines).await.unwrap(), "FAILED");
wait_contains("FAILED", &mut stdout_lines).await;
next_line(&mut stdout_lines).await;
wait_contains("Test finished", &mut stderr_lines).await;
wait_contains("Test failed", &mut stderr_lines).await;

// Then restore the file
write(&foo_file, "export default function foo() { 1 + 1 }").unwrap();
foo_file.write("export default function foo() { 1 + 1 }");
assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Restarting");
assert_contains!(
next_line(&mut stdout_lines).await.unwrap(),

@@ -1172,16 +1137,8 @@ async fn test_watch() {
wait_contains("Test finished", &mut stderr_lines).await;

// Test that circular dependencies work fine
write(
&foo_file,
"import './bar.js'; export default function foo() { 1 + 1 }",
)
.unwrap();
write(
&bar_file,
"import './foo.js'; export default function bar() { 2 + 2 }",
)
.unwrap();
foo_file.write("import './bar.js'; export default function foo() { 1 + 1 }");
bar_file.write("import './foo.js'; export default function bar() { 2 + 2 }");
check_alive_then_kill(child);
}
@@ -1212,16 +1169,13 @@ async fn test_watch_doc() {
wait_contains("Test finished", &mut stderr_lines).await;

let foo_file = t.path().join("foo.ts");
write(
&foo_file,
foo_file.write(
r#"
export default function foo() {}
"#,
)
.unwrap();
);

write(
&foo_file,
foo_file.write(
r#"
/**
* ```ts

@@ -1230,8 +1184,7 @@ async fn test_watch_doc() {
*/
export default function foo() {}
"#,
)
.unwrap();
);

// We only need to scan for a Check file://.../foo.ts$3-6 line that
// corresponds to the documentation block being type-checked.

@@ -1243,7 +1196,7 @@ async fn test_watch_doc() {
async fn test_watch_module_graph_error_referrer() {
let t = TempDir::new();
let file_to_watch = t.path().join("file_to_watch.js");
write(&file_to_watch, "import './nonexistent.js';").unwrap();
file_to_watch.write("import './nonexistent.js';");
let mut child = util::deno_cmd()
.current_dir(util::testdata_path())
.arg("run")

@@ -1264,7 +1217,7 @@ async fn test_watch_module_graph_error_referrer() {
let line3 = next_line(&mut stderr_lines).await.unwrap();
assert_contains!(&line3, " at ");
assert_contains!(&line3, "file_to_watch.js");
wait_contains("Process finished", &mut stderr_lines).await;
wait_contains("Process failed", &mut stderr_lines).await;
check_alive_then_kill(child);
}

@@ -1273,8 +1226,7 @@ async fn test_watch_module_graph_error_referrer() {
async fn test_watch_unload_handler_error_on_drop() {
let t = TempDir::new();
let file_to_watch = t.path().join("file_to_watch.js");
write(
&file_to_watch,
file_to_watch.write(
r#"
addEventListener("unload", () => {
throw new Error("foo");

@@ -1283,8 +1235,7 @@ async fn test_watch_unload_handler_error_on_drop() {
throw new Error("bar");
});
"#,
)
.unwrap();
);
let mut child = util::deno_cmd()
.current_dir(util::testdata_path())
.arg("run")

@@ -1298,7 +1249,7 @@ async fn test_watch_unload_handler_error_on_drop() {
let (_, mut stderr_lines) = child_lines(&mut child);
wait_contains("Process started", &mut stderr_lines).await;
wait_contains("Uncaught Error: bar", &mut stderr_lines).await;
wait_contains("Process finished", &mut stderr_lines).await;
wait_contains("Process failed", &mut stderr_lines).await;
check_alive_then_kill(child);
}

@@ -1311,7 +1262,7 @@ async fn test_watch_sigint() {

let t = TempDir::new();
let file_to_watch = t.path().join("file_to_watch.js");
write(&file_to_watch, r#"Deno.test("foo", () => {});"#).unwrap();
file_to_watch.write(r#"Deno.test("foo", () => {});"#);
let mut child = util::deno_cmd()
.current_dir(util::testdata_path())
.arg("test")

@@ -1331,6 +1282,120 @@ async fn test_watch_sigint() {
assert_eq!(exit_status.code(), Some(130));
}

#[tokio::test]
async fn bench_watch_basic() {
let t = TempDir::new();

let mut child = util::deno_cmd()
.current_dir(util::testdata_path())
.arg("bench")
.arg("--watch")
.arg("--unstable")
.arg("--no-check")
.arg(t.path())
.env("NO_COLOR", "1")
.stdout(std::process::Stdio::piped())
.stderr(std::process::Stdio::piped())
.spawn()
.unwrap();
let (mut stdout_lines, mut stderr_lines) = child_lines(&mut child);

assert_contains!(
next_line(&mut stderr_lines).await.unwrap(),
"Bench started"
);
assert_contains!(
next_line(&mut stderr_lines).await.unwrap(),
"Bench finished"
);

let foo_file = t.path().join("foo.js");
let bar_file = t.path().join("bar.js");
let foo_bench = t.path().join("foo_bench.js");
let bar_bench = t.path().join("bar_bench.js");
foo_file.write("export default function foo() { 1 + 1 }");
bar_file.write("export default function bar() { 2 + 2 }");
foo_bench.write("import foo from './foo.js'; Deno.bench('foo bench', foo);");
bar_bench.write("import bar from './bar.js'; Deno.bench('bar bench', bar);");

wait_contains("bar_bench.js", &mut stdout_lines).await;
wait_contains("bar bench", &mut stdout_lines).await;
wait_contains("foo_bench.js", &mut stdout_lines).await;
wait_contains("foo bench", &mut stdout_lines).await;
wait_contains("Bench finished", &mut stderr_lines).await;

// Change content of the file
foo_bench.write("import foo from './foo.js'; Deno.bench('foo asdf', foo);");

assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Restarting");
loop {
let line = next_line(&mut stdout_lines).await.unwrap();
assert_not_contains!(line, "bar");
if line.contains("foo asdf") {
break; // last line
}
}
wait_contains("Bench finished", &mut stderr_lines).await;

// Add bench
let another_test = t.path().join("new_bench.js");
another_test.write("Deno.bench('another one', () => 3 + 3)");
loop {
let line = next_line(&mut stdout_lines).await.unwrap();
assert_not_contains!(line, "bar");
assert_not_contains!(line, "foo");
if line.contains("another one") {
break; // last line
}
}
wait_contains("Bench finished", &mut stderr_lines).await;

// Confirm that restarting occurs when a new file is updated
another_test.write("Deno.bench('another one', () => 3 + 3); Deno.bench('another another one', () => 4 + 4)");
loop {
let line = next_line(&mut stdout_lines).await.unwrap();
assert_not_contains!(line, "bar");
assert_not_contains!(line, "foo");
if line.contains("another another one") {
break; // last line
}
}
wait_contains("Bench finished", &mut stderr_lines).await;

// Confirm that the watcher keeps on working even if the file is updated and has invalid syntax
another_test.write("syntax error ^^");
assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Restarting");
assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "error:");
assert_eq!(next_line(&mut stderr_lines).await.unwrap(), "");
assert_eq!(
next_line(&mut stderr_lines).await.unwrap(),
" syntax error ^^"
);
assert_eq!(
next_line(&mut stderr_lines).await.unwrap(),
" ~~~~~"
);
assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Bench failed");

// Then restore the file
another_test.write("Deno.bench('another one', () => 3 + 3)");
assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Restarting");
loop {
let line = next_line(&mut stdout_lines).await.unwrap();
assert_not_contains!(line, "bar");
assert_not_contains!(line, "foo");
if line.contains("another one") {
break; // last line
}
}
wait_contains("Bench finished", &mut stderr_lines).await;

// Test that circular dependencies work fine
foo_file.write("import './bar.js'; export default function foo() { 1 + 1 }");
bar_file.write("import './foo.js'; export default function bar() { 2 + 2 }");
check_alive_then_kill(child);
}

// Regression test for https://github.com/denoland/deno/issues/15465.
#[tokio::test]
async fn run_watch_reload_once() {
@@ -1341,7 +1406,7 @@ async fn run_watch_reload_once() {
import { time } from "http://localhost:4545/dynamic_module.ts";
console.log(time);
"#;
write(&file_to_watch, file_content).unwrap();
file_to_watch.write(file_content);

let mut child = util::deno_cmd()
.current_dir(util::testdata_path())

@@ -1359,7 +1424,7 @@ async fn run_watch_reload_once() {
wait_contains("finished", &mut stderr_lines).await;
let first_output = next_line(&mut stdout_lines).await.unwrap();

write(&file_to_watch, file_content).unwrap();
file_to_watch.write(file_content);
// The remote dynamic module should not have been reloaded again.

wait_contains("finished", &mut stderr_lines).await;

@@ -1379,7 +1444,7 @@ async fn test_watch_serve() {
console.error("serving");
await Deno.serve({port: 4600, handler: () => new Response("hello")});
"#;
write(&file_to_watch, file_content).unwrap();
file_to_watch.write(file_content);

let mut child = util::deno_cmd()
.current_dir(util::testdata_path())

@@ -1401,7 +1466,7 @@ async fn test_watch_serve() {
// Note that we start serving very quickly, so we specifically want to wait for this message
wait_contains(r#"Watching paths: [""#, &mut stderr_lines).await;

write(&file_to_watch, file_content).unwrap();
file_to_watch.write(file_content);

wait_contains("serving", &mut stderr_lines).await;
wait_contains("Listening on", &mut stdout_lines).await;

@@ -1413,31 +1478,25 @@ async fn test_watch_serve() {
async fn run_watch_dynamic_imports() {
let t = TempDir::new();
let file_to_watch = t.path().join("file_to_watch.js");
write(
&file_to_watch,
file_to_watch.write(
r#"
console.log("Hopefully dynamic import will be watched...");
await import("./imported.js");
"#,
)
.unwrap();
);
let file_to_watch2 = t.path().join("imported.js");
write(
file_to_watch2,
file_to_watch2.write(
r#"
import "./imported2.js";
console.log("I'm dynamically imported and I cause restarts!");
"#,
)
.unwrap();
);
let file_to_watch3 = t.path().join("imported2.js");
write(
&file_to_watch3,
file_to_watch3.write(
r#"
console.log("I'm statically imported from the dynamic import");
"#,
)
.unwrap();
);

let mut child = util::deno_cmd()
.current_dir(util::testdata_path())

@@ -1454,8 +1513,8 @@ async fn run_watch_dynamic_imports() {
.spawn()
.unwrap();
let (mut stdout_lines, mut stderr_lines) = child_lines(&mut child);
wait_contains("No package.json file found", &mut stderr_lines).await;
wait_contains("Process started", &mut stderr_lines).await;
wait_contains("No package.json file found", &mut stderr_lines).await;

wait_contains(
"Hopefully dynamic import will be watched...",

@@ -1476,13 +1535,11 @@ async fn run_watch_dynamic_imports() {
wait_for_watcher("imported2.js", &mut stderr_lines).await;
wait_contains("finished", &mut stderr_lines).await;

write(
&file_to_watch3,
file_to_watch3.write(
r#"
console.log("I'm statically imported from the dynamic import and I've changed");
"#,
)
.unwrap();
);

wait_contains("Restarting", &mut stderr_lines).await;
wait_contains(
@@ -1,17 +1,19 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.

use crate::args::BenchOptions;
use crate::args::BenchFlags;
use crate::args::CliOptions;
use crate::args::Flags;
use crate::colors;
use crate::display::write_json_to_stdout;
use crate::factory::CliFactory;
use crate::factory::CliFactoryBuilder;
use crate::graph_util::graph_valid_with_cli_options;
use crate::graph_util::has_graph_root_local_dependent_changed;
use crate::module_loader::ModuleLoadPreparer;
use crate::ops;
use crate::tools::test::format_test_error;
use crate::tools::test::TestFilter;
use crate::util::file_watcher;
use crate::util::file_watcher::ResolutionResult;
use crate::util::fs::collect_specifiers;
use crate::util::path::is_supported_ext;
use crate::version::get_user_agent;

@@ -22,7 +24,6 @@ use deno_core::error::AnyError;
use deno_core::error::JsError;
use deno_core::futures::future;
use deno_core::futures::stream;
use deno_core::futures::FutureExt;
use deno_core::futures::StreamExt;
use deno_core::located_script_name;
use deno_core::serde_v8;

@@ -40,7 +41,6 @@ use serde::Deserialize;
use serde::Serialize;
use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use tokio::sync::mpsc::unbounded_channel;
use tokio::sync::mpsc::UnboundedSender;

@@ -630,9 +630,11 @@ fn is_supported_bench_path(path: &Path) -> bool {
}

pub async fn run_benchmarks(
cli_options: CliOptions,
bench_options: BenchOptions,
flags: Flags,
bench_flags: BenchFlags,
) -> Result<(), AnyError> {
let cli_options = CliOptions::from_flags(flags)?;
let bench_options = cli_options.resolve_bench_options(bench_flags)?;
let factory = CliFactory::from_cli_options(Arc::new(cli_options));
let cli_options = factory.cli_options();
// Various bench files should not share the same permissions in terms of

@@ -679,169 +681,102 @@ pub async fn run_benchmarks(

// TODO(bartlomieju): heavy duplication of code with `cli/tools/test.rs`
pub async fn run_benchmarks_with_watch(
cli_options: CliOptions,
bench_options: BenchOptions,
flags: Flags,
bench_flags: BenchFlags,
) -> Result<(), AnyError> {
let factory = CliFactory::from_cli_options(Arc::new(cli_options));
let cli_options = factory.cli_options();
let module_graph_builder = factory.module_graph_builder().await?;
let file_watcher = factory.file_watcher()?;
let module_load_preparer = factory.module_load_preparer().await?;
// Various bench files should not share the same permissions in terms of
// `PermissionsContainer` - otherwise granting/revoking permissions in one
// file would have impact on other files, which is undesirable.
let permissions =
Permissions::from_options(&cli_options.permissions_options())?;
let graph_kind = cli_options.type_check_mode().as_graph_kind();

let resolver = |changed: Option<Vec<PathBuf>>| {
let paths_to_watch = bench_options.files.include.clone();
let paths_to_watch_clone = paths_to_watch.clone();
let files_changed = changed.is_some();
let bench_options = &bench_options;
let module_graph_builder = module_graph_builder.clone();
let cli_options = cli_options.clone();

async move {
let bench_modules =
collect_specifiers(&bench_options.files, is_supported_bench_path)?;

let mut paths_to_watch = paths_to_watch_clone;
let mut modules_to_reload = if files_changed {
Vec::new()
} else {
bench_modules.clone()
};
let graph = module_graph_builder
.create_graph(graph_kind, bench_modules.clone())
.await?;
graph_valid_with_cli_options(&graph, &bench_modules, &cli_options)?;

// TODO(@kitsonk) - This should be totally derivable from the graph.
for specifier in bench_modules {
fn get_dependencies<'a>(
graph: &'a deno_graph::ModuleGraph,
maybe_module: Option<&'a deno_graph::Module>,
// This needs to be accessible to skip getting dependencies if they're already there,
// otherwise this will cause a stack overflow with circular dependencies
output: &mut HashSet<&'a ModuleSpecifier>,
) {
if let Some(module) = maybe_module.and_then(|m| m.esm()) {
for dep in module.dependencies.values() {
if let Some(specifier) = &dep.get_code() {
if !output.contains(specifier) {
output.insert(specifier);
get_dependencies(graph, graph.get(specifier), output);
}
}
if let Some(specifier) = &dep.get_type() {
if !output.contains(specifier) {
output.insert(specifier);
get_dependencies(graph, graph.get(specifier), output);
}
}
}
}
}

// This bench module and all it's dependencies
let mut modules = HashSet::new();
modules.insert(&specifier);
get_dependencies(&graph, graph.get(&specifier), &mut modules);

paths_to_watch.extend(
modules
.iter()
.filter_map(|specifier| specifier.to_file_path().ok()),
);

if let Some(changed) = &changed {
for path in changed
.iter()
.filter_map(|path| ModuleSpecifier::from_file_path(path).ok())
{
if modules.contains(&path) {
modules_to_reload.push(specifier);
break;
}
}
}
}

Ok((paths_to_watch, modules_to_reload))
}
.map(move |result| {
if files_changed
&& matches!(result, Ok((_, ref modules)) if modules.is_empty())
{
ResolutionResult::Ignore
} else {
match result {
Ok((paths_to_watch, modules_to_reload)) => {
ResolutionResult::Restart {
paths_to_watch,
result: Ok(modules_to_reload),
}
}
Err(e) => ResolutionResult::Restart {
paths_to_watch,
result: Err(e),
},
}
}
})
};

let create_cli_main_worker_factory =
factory.create_cli_main_worker_factory_func().await?;
let operation = |modules_to_reload: Vec<ModuleSpecifier>| {
let permissions = &permissions;
let bench_options = &bench_options;
file_watcher.reset();
let module_load_preparer = module_load_preparer.clone();
let cli_options = cli_options.clone();
let create_cli_main_worker_factory = create_cli_main_worker_factory.clone();

async move {
let worker_factory = Arc::new(create_cli_main_worker_factory());
let specifiers =
collect_specifiers(&bench_options.files, is_supported_bench_path)?
.into_iter()
.filter(|specifier| modules_to_reload.contains(specifier))
.collect::<Vec<ModuleSpecifier>>();

check_specifiers(&cli_options, &module_load_preparer, specifiers.clone())
.await?;

if bench_options.no_run {
return Ok(());
}

let log_level = cli_options.log_level();
bench_specifiers(
worker_factory,
permissions,
specifiers,
BenchSpecifierOptions {
filter: TestFilter::from_flag(&bench_options.filter),
json: bench_options.json,
log_level,
},
)
.await?;

Ok(())
}
};

let clear_screen = !cli_options.no_clear_screen();
let clear_screen = !flags.no_clear_screen;
file_watcher::watch_func(
resolver,
operation,
flags,
file_watcher::PrintConfig {
job_name: "Bench".to_string(),
clear_screen,
},
move |flags, sender, changed_paths| {
let bench_flags = bench_flags.clone();
Ok(async move {
let factory = CliFactoryBuilder::new()
.with_watcher(sender.clone())
.build_from_flags(flags)
|
||||
.await?;
|
||||
let cli_options = factory.cli_options();
|
||||
let bench_options = cli_options.resolve_bench_options(bench_flags)?;
|
||||
|
||||
if let Some(watch_paths) = cli_options.watch_paths() {
|
||||
let _ = sender.send(watch_paths);
|
||||
}
|
||||
let _ = sender.send(bench_options.files.include.clone());
|
||||
|
||||
let graph_kind = cli_options.type_check_mode().as_graph_kind();
|
||||
let module_graph_builder = factory.module_graph_builder().await?;
|
||||
let module_load_preparer = factory.module_load_preparer().await?;
|
||||
|
||||
let bench_modules =
|
||||
collect_specifiers(&bench_options.files, is_supported_bench_path)?;
|
||||
|
||||
// Various bench files should not share the same permissions in terms of
|
||||
// `PermissionsContainer` - otherwise granting/revoking permissions in one
|
||||
// file would have impact on other files, which is undesirable.
|
||||
let permissions =
|
||||
Permissions::from_options(&cli_options.permissions_options())?;
|
||||
|
||||
let graph = module_graph_builder
|
||||
.create_graph(graph_kind, bench_modules.clone())
|
||||
.await?;
|
||||
graph_valid_with_cli_options(&graph, &bench_modules, cli_options)?;
|
||||
|
||||
let bench_modules_to_reload = if let Some(changed_paths) = changed_paths
|
||||
{
|
||||
let changed_specifiers = changed_paths
|
||||
.into_iter()
|
||||
.filter_map(|p| ModuleSpecifier::from_file_path(p).ok())
|
||||
.collect::<HashSet<_>>();
|
||||
let mut result = Vec::new();
|
||||
for bench_module_specifier in bench_modules {
|
||||
if has_graph_root_local_dependent_changed(
|
||||
&graph,
|
||||
&bench_module_specifier,
|
||||
&changed_specifiers,
|
||||
) {
|
||||
result.push(bench_module_specifier.clone());
|
||||
}
|
||||
}
|
||||
result
|
||||
} else {
|
||||
bench_modules.clone()
|
||||
};
|
||||
|
||||
let worker_factory =
|
||||
Arc::new(factory.create_cli_main_worker_factory().await?);
|
||||
|
||||
let specifiers =
|
||||
collect_specifiers(&bench_options.files, is_supported_bench_path)?
|
||||
.into_iter()
|
||||
.filter(|specifier| bench_modules_to_reload.contains(specifier))
|
||||
.collect::<Vec<ModuleSpecifier>>();
|
||||
|
||||
check_specifiers(cli_options, module_load_preparer, specifiers.clone())
|
||||
.await?;
|
||||
|
||||
if bench_options.no_run {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let log_level = cli_options.log_level();
|
||||
bench_specifiers(
|
||||
worker_factory,
|
||||
&permissions,
|
||||
specifiers,
|
||||
BenchSpecifierOptions {
|
||||
filter: TestFilter::from_flag(&bench_options.filter),
|
||||
json: bench_options.json,
|
||||
log_level,
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
|
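
With this rewrite, the bench watcher no longer reloads everything on each change: it asks has_graph_root_local_dependent_changed whether any changed file sits in a bench root's dependency closure and re-runs only those roots. As a rough mental model only (the toy graph and the root_depends_on_changed helper below are invented for illustration and are not Deno's implementation), the check reduces to a cycle-safe walk over a root's dependencies:

use std::collections::{HashMap, HashSet};

// Toy dependency graph: module -> its direct dependencies.
type Graph = HashMap<&'static str, Vec<&'static str>>;

// True if `root` or anything it transitively depends on is in `changed`.
// The `seen` set is what keeps a circular import from recursing forever,
// the same concern the in-tree comment about stack overflows points at.
fn root_depends_on_changed(
    graph: &Graph,
    root: &'static str,
    changed: &HashSet<&'static str>,
) -> bool {
    let mut seen = HashSet::new();
    let mut stack = vec![root];
    while let Some(module) = stack.pop() {
        if !seen.insert(module) {
            continue;
        }
        if changed.contains(module) {
            return true;
        }
        if let Some(deps) = graph.get(module) {
            stack.extend(deps.iter().copied());
        }
    }
    false
}

fn main() {
    let mut graph = Graph::new();
    graph.insert("bench.ts", vec!["util.ts"]);
    graph.insert("util.ts", vec!["bench.ts"]); // a cycle, handled by `seen`
    let changed: HashSet<&'static str> = ["util.ts"].into_iter().collect();
    assert!(root_depends_on_changed(&graph, "bench.ts", &changed));
    println!("bench.ts would be re-run");
}
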
|
|
@ -1,10 +1,8 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures::FutureExt;
|
||||
use deno_graph::Module;
|
||||
use deno_runtime::colors;
|
||||
|
||||
|
@ -13,17 +11,15 @@ use crate::args::CliOptions;
|
|||
use crate::args::Flags;
|
||||
use crate::args::TsConfigType;
|
||||
use crate::factory::CliFactory;
|
||||
use crate::factory::CliFactoryBuilder;
|
||||
use crate::graph_util::error_for_any_npm_specifier;
|
||||
use crate::util;
|
||||
use crate::util::display;
|
||||
use crate::util::file_watcher::ResolutionResult;
|
||||
|
||||
pub async fn bundle(
|
||||
flags: Flags,
|
||||
bundle_flags: BundleFlags,
|
||||
) -> Result<(), AnyError> {
|
||||
let cli_options = Arc::new(CliOptions::from_flags(flags)?);
|
||||
|
||||
log::info!(
|
||||
"{} \"deno bundle\" is deprecated and will be removed in the future.",
|
||||
colors::yellow("Warning"),
|
||||
|
@ -32,122 +28,115 @@ pub async fn bundle(
|
|||
"Use alternative bundlers like \"deno_emit\", \"esbuild\" or \"rollup\" instead."
|
||||
);
|
||||
|
||||
let module_specifier = cli_options.resolve_main_module()?;
|
||||
|
||||
let resolver = |_| {
|
||||
let cli_options = cli_options.clone();
|
||||
let module_specifier = &module_specifier;
|
||||
async move {
|
||||
log::debug!(">>>>> bundle START");
|
||||
let factory = CliFactory::from_cli_options(cli_options);
|
||||
let module_graph_builder = factory.module_graph_builder().await?;
|
||||
let cli_options = factory.cli_options();
|
||||
|
||||
let graph = module_graph_builder
|
||||
.create_graph_and_maybe_check(vec![module_specifier.clone()])
|
||||
.await?;
|
||||
|
||||
let mut paths_to_watch: Vec<PathBuf> = graph
|
||||
.specifiers()
|
||||
.filter_map(|(_, r)| {
|
||||
r.ok().and_then(|module| match module {
|
||||
Module::Esm(m) => m.specifier.to_file_path().ok(),
|
||||
Module::Json(m) => m.specifier.to_file_path().ok(),
|
||||
// nothing to watch
|
||||
Module::Node(_) | Module::Npm(_) | Module::External(_) => None,
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
if let Ok(Some(import_map_path)) = cli_options
|
||||
.resolve_import_map_specifier()
|
||||
.map(|ms| ms.and_then(|ref s| s.to_file_path().ok()))
|
||||
{
|
||||
paths_to_watch.push(import_map_path);
|
||||
}
|
||||
|
||||
Ok((paths_to_watch, graph, cli_options.clone()))
|
||||
}
|
||||
.map(move |result| match result {
|
||||
Ok((paths_to_watch, graph, ps)) => ResolutionResult::Restart {
|
||||
paths_to_watch,
|
||||
result: Ok((ps, graph)),
|
||||
},
|
||||
Err(e) => ResolutionResult::Restart {
|
||||
paths_to_watch: vec![module_specifier.to_file_path().unwrap()],
|
||||
result: Err(e),
|
||||
},
|
||||
})
|
||||
};
|
||||
|
||||
let operation =
|
||||
|(cli_options, graph): (Arc<CliOptions>, Arc<deno_graph::ModuleGraph>)| {
|
||||
let out_file = &bundle_flags.out_file;
|
||||
async move {
|
||||
// at the moment, we don't support npm specifiers in deno bundle, so show an error
|
||||
error_for_any_npm_specifier(&graph)?;
|
||||
|
||||
let bundle_output = bundle_module_graph(graph.as_ref(), &cli_options)?;
|
||||
log::debug!(">>>>> bundle END");
|
||||
|
||||
if let Some(out_file) = out_file {
|
||||
let output_bytes = bundle_output.code.as_bytes();
|
||||
let output_len = output_bytes.len();
|
||||
util::fs::write_file(out_file, output_bytes, 0o644)?;
|
||||
log::info!(
|
||||
"{} {:?} ({})",
|
||||
colors::green("Emit"),
|
||||
out_file,
|
||||
colors::gray(display::human_size(output_len as f64))
|
||||
);
|
||||
if let Some(bundle_map) = bundle_output.maybe_map {
|
||||
let map_bytes = bundle_map.as_bytes();
|
||||
let map_len = map_bytes.len();
|
||||
let ext = if let Some(curr_ext) = out_file.extension() {
|
||||
format!("{}.map", curr_ext.to_string_lossy())
|
||||
} else {
|
||||
"map".to_string()
|
||||
};
|
||||
let map_out_file = out_file.with_extension(ext);
|
||||
util::fs::write_file(&map_out_file, map_bytes, 0o644)?;
|
||||
log::info!(
|
||||
"{} {:?} ({})",
|
||||
colors::green("Emit"),
|
||||
map_out_file,
|
||||
colors::gray(display::human_size(map_len as f64))
|
||||
);
|
||||
}
|
||||
} else {
|
||||
println!("{}", bundle_output.code);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
};
|
||||
|
||||
if cli_options.watch_paths().is_some() {
|
||||
if flags.watch.is_some() {
|
||||
let clear_screen = !flags.no_clear_screen;
|
||||
util::file_watcher::watch_func(
|
||||
resolver,
|
||||
operation,
|
||||
flags,
|
||||
util::file_watcher::PrintConfig {
|
||||
job_name: "Bundle".to_string(),
|
||||
clear_screen: !cli_options.no_clear_screen(),
|
||||
clear_screen,
|
||||
},
|
||||
move |flags, sender, _changed_paths| {
|
||||
let bundle_flags = bundle_flags.clone();
|
||||
Ok(async move {
|
||||
let factory = CliFactoryBuilder::new()
|
||||
.with_watcher(sender.clone())
|
||||
.build_from_flags(flags)
|
||||
.await?;
|
||||
let cli_options = factory.cli_options();
|
||||
|
||||
if let Some(watch_paths) = cli_options.watch_paths() {
|
||||
let _ = sender.send(watch_paths);
|
||||
}
|
||||
|
||||
bundle_action(factory, &bundle_flags).await?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
} else {
|
||||
let module_graph =
|
||||
if let ResolutionResult::Restart { result, .. } = resolver(None).await {
|
||||
result?
|
||||
} else {
|
||||
unreachable!();
|
||||
};
|
||||
operation(module_graph).await?;
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
bundle_action(factory, &bundle_flags).await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn bundle_action(
|
||||
factory: CliFactory,
|
||||
bundle_flags: &BundleFlags,
|
||||
) -> Result<(), AnyError> {
|
||||
let cli_options = factory.cli_options();
|
||||
let module_specifier = cli_options.resolve_main_module()?;
|
||||
log::debug!(">>>>> bundle START");
|
||||
let module_graph_builder = factory.module_graph_builder().await?;
|
||||
let cli_options = factory.cli_options();
|
||||
|
||||
let graph = module_graph_builder
|
||||
.create_graph_and_maybe_check(vec![module_specifier.clone()])
|
||||
.await?;
|
||||
|
||||
let mut paths_to_watch: Vec<PathBuf> = graph
|
||||
.specifiers()
|
||||
.filter_map(|(_, r)| {
|
||||
r.ok().and_then(|module| match module {
|
||||
Module::Esm(m) => m.specifier.to_file_path().ok(),
|
||||
Module::Json(m) => m.specifier.to_file_path().ok(),
|
||||
// nothing to watch
|
||||
Module::Node(_) | Module::Npm(_) | Module::External(_) => None,
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
if let Ok(Some(import_map_path)) = cli_options
|
||||
.resolve_import_map_specifier()
|
||||
.map(|ms| ms.and_then(|ref s| s.to_file_path().ok()))
|
||||
{
|
||||
paths_to_watch.push(import_map_path);
|
||||
}
|
||||
|
||||
// at the moment, we don't support npm specifiers in deno bundle, so show an error
|
||||
error_for_any_npm_specifier(&graph)?;
|
||||
|
||||
let bundle_output = bundle_module_graph(graph.as_ref(), cli_options)?;
|
||||
log::debug!(">>>>> bundle END");
|
||||
let out_file = &bundle_flags.out_file;
|
||||
|
||||
if let Some(out_file) = out_file {
|
||||
let output_bytes = bundle_output.code.as_bytes();
|
||||
let output_len = output_bytes.len();
|
||||
util::fs::write_file(out_file, output_bytes, 0o644)?;
|
||||
log::info!(
|
||||
"{} {:?} ({})",
|
||||
colors::green("Emit"),
|
||||
out_file,
|
||||
colors::gray(display::human_size(output_len as f64))
|
||||
);
|
||||
if let Some(bundle_map) = bundle_output.maybe_map {
|
||||
let map_bytes = bundle_map.as_bytes();
|
||||
let map_len = map_bytes.len();
|
||||
let ext = if let Some(curr_ext) = out_file.extension() {
|
||||
format!("{}.map", curr_ext.to_string_lossy())
|
||||
} else {
|
||||
"map".to_string()
|
||||
};
|
||||
let map_out_file = out_file.with_extension(ext);
|
||||
util::fs::write_file(&map_out_file, map_bytes, 0o644)?;
|
||||
log::info!(
|
||||
"{} {:?} ({})",
|
||||
colors::green("Emit"),
|
||||
map_out_file,
|
||||
colors::gray(display::human_size(map_len as f64))
|
||||
);
|
||||
}
|
||||
} else {
|
||||
println!("{}", bundle_output.code);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn bundle_module_graph(
|
||||
graph: &deno_graph::ModuleGraph,
|
||||
cli_options: &CliOptions,
|
||||
|
|
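
The source-map emit above derives the map file name by appending ".map" to the bundle's existing extension, falling back to plain "map" when there is none. A minimal standalone sketch of that extension handling (the map_path_for helper name is ours, not part of this change):

use std::path::{Path, PathBuf};

// "dist/out.js" -> "dist/out.js.map", "out" -> "out.map".
fn map_path_for(out_file: &Path) -> PathBuf {
    let ext = if let Some(curr_ext) = out_file.extension() {
        format!("{}.map", curr_ext.to_string_lossy())
    } else {
        "map".to_string()
    };
    out_file.with_extension(ext)
}

fn main() {
    assert_eq!(map_path_for(Path::new("dist/out.js")), PathBuf::from("dist/out.js.map"));
    assert_eq!(map_path_for(Path::new("out")), PathBuf::from("out.map"));
    println!("source map paths derived as expected");
}
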
147 cli/tools/fmt.rs

@ -9,6 +9,8 @@

use crate::args::CliOptions;
use crate::args::FilesConfig;
use crate::args::Flags;
use crate::args::FmtFlags;
use crate::args::FmtOptions;
use crate::args::FmtOptionsConfig;
use crate::args::ProseWrap;

@ -16,7 +18,6 @@ use crate::colors;
use crate::factory::CliFactory;
use crate::util::diff::diff;
use crate::util::file_watcher;
use crate::util::file_watcher::ResolutionResult;
use crate::util::fs::FileCollector;
use crate::util::path::get_extension;
use crate::util::text_encoding;

@ -46,11 +47,10 @@ use std::sync::Arc;
use crate::cache::IncrementalCache;

/// Format JavaScript/TypeScript files.
pub async fn format(
cli_options: CliOptions,
fmt_options: FmtOptions,
) -> Result<(), AnyError> {
if fmt_options.is_stdin {
pub async fn format(flags: Flags, fmt_flags: FmtFlags) -> Result<(), AnyError> {
if fmt_flags.is_stdin() {
let cli_options = CliOptions::from_flags(flags)?;
let fmt_options = cli_options.resolve_fmt_options(fmt_flags)?;
return format_stdin(
fmt_options,
cli_options
@ -61,90 +61,93 @@ pub async fn format(
|
|||
);
|
||||
}
|
||||
|
||||
let files = fmt_options.files;
|
||||
let check = fmt_options.check;
|
||||
let fmt_config_options = fmt_options.options;
|
||||
|
||||
let resolver = |changed: Option<Vec<PathBuf>>| {
|
||||
let files_changed = changed.is_some();
|
||||
|
||||
let result = collect_fmt_files(&files).map(|files| {
|
||||
let refmt_files = if let Some(paths) = changed {
|
||||
if check {
|
||||
files
|
||||
.iter()
|
||||
.any(|path| paths.contains(path))
|
||||
.then_some(files)
|
||||
.unwrap_or_else(|| [].to_vec())
|
||||
} else {
|
||||
files
|
||||
.into_iter()
|
||||
.filter(|path| paths.contains(path))
|
||||
.collect::<Vec<_>>()
|
||||
}
|
||||
} else {
|
||||
files
|
||||
};
|
||||
(refmt_files, fmt_config_options.clone())
|
||||
});
|
||||
|
||||
let paths_to_watch = files.include.clone();
|
||||
async move {
|
||||
if files_changed
|
||||
&& matches!(result, Ok((ref files, _)) if files.is_empty())
|
||||
{
|
||||
ResolutionResult::Ignore
|
||||
} else {
|
||||
ResolutionResult::Restart {
|
||||
paths_to_watch,
|
||||
result,
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
let factory = CliFactory::from_cli_options(Arc::new(cli_options));
|
||||
let cli_options = factory.cli_options();
|
||||
let caches = factory.caches()?;
|
||||
let operation = |(paths, fmt_options): (Vec<PathBuf>, FmtOptionsConfig)| async {
|
||||
let incremental_cache = Arc::new(IncrementalCache::new(
|
||||
caches.fmt_incremental_cache_db(),
|
||||
&fmt_options,
|
||||
&paths,
|
||||
));
|
||||
if check {
|
||||
check_source_files(paths, fmt_options, incremental_cache.clone()).await?;
|
||||
} else {
|
||||
format_source_files(paths, fmt_options, incremental_cache.clone())
|
||||
.await?;
|
||||
}
|
||||
incremental_cache.wait_completion().await;
|
||||
Ok(())
|
||||
};
|
||||
|
||||
if cli_options.watch_paths().is_some() {
|
||||
if flags.watch.is_some() {
|
||||
let clear_screen = !flags.no_clear_screen;
|
||||
file_watcher::watch_func(
|
||||
resolver,
|
||||
operation,
|
||||
flags,
|
||||
file_watcher::PrintConfig {
|
||||
job_name: "Fmt".to_string(),
|
||||
clear_screen: !cli_options.no_clear_screen(),
|
||||
clear_screen,
|
||||
},
|
||||
move |flags, sender, changed_paths| {
|
||||
let fmt_flags = fmt_flags.clone();
|
||||
Ok(async move {
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let cli_options = factory.cli_options();
|
||||
let fmt_options = cli_options.resolve_fmt_options(fmt_flags)?;
|
||||
let files =
|
||||
collect_fmt_files(&fmt_options.files).and_then(|files| {
|
||||
if files.is_empty() {
|
||||
Err(generic_error("No target files found."))
|
||||
} else {
|
||||
Ok(files)
|
||||
}
|
||||
})?;
|
||||
_ = sender.send(files.clone());
|
||||
let refmt_files = if let Some(paths) = changed_paths {
|
||||
if fmt_options.check {
|
||||
// check all files on any changed (https://github.com/denoland/deno/issues/12446)
|
||||
files
|
||||
.iter()
|
||||
.any(|path| paths.contains(path))
|
||||
.then_some(files)
|
||||
.unwrap_or_else(|| [].to_vec())
|
||||
} else {
|
||||
files
|
||||
.into_iter()
|
||||
.filter(|path| paths.contains(path))
|
||||
.collect::<Vec<_>>()
|
||||
}
|
||||
} else {
|
||||
files
|
||||
};
|
||||
format_files(factory, fmt_options, refmt_files).await?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
} else {
|
||||
let files = collect_fmt_files(&files).and_then(|files| {
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let cli_options = factory.cli_options();
|
||||
let fmt_options = cli_options.resolve_fmt_options(fmt_flags)?;
|
||||
let files = collect_fmt_files(&fmt_options.files).and_then(|files| {
|
||||
if files.is_empty() {
|
||||
Err(generic_error("No target files found."))
|
||||
} else {
|
||||
Ok(files)
|
||||
}
|
||||
})?;
|
||||
operation((files, fmt_config_options)).await?;
|
||||
format_files(factory, fmt_options, files).await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn format_files(
|
||||
factory: CliFactory,
|
||||
fmt_options: FmtOptions,
|
||||
paths: Vec<PathBuf>,
|
||||
) -> Result<(), AnyError> {
|
||||
let caches = factory.caches()?;
|
||||
let check = fmt_options.check;
|
||||
let incremental_cache = Arc::new(IncrementalCache::new(
|
||||
caches.fmt_incremental_cache_db(),
|
||||
&fmt_options.options,
|
||||
&paths,
|
||||
));
|
||||
if check {
|
||||
check_source_files(paths, fmt_options.options, incremental_cache.clone())
|
||||
.await?;
|
||||
} else {
|
||||
format_source_files(paths, fmt_options.options, incremental_cache.clone())
|
||||
.await?;
|
||||
}
|
||||
incremental_cache.wait_completion().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn collect_fmt_files(files: &FilesConfig) -> Result<Vec<PathBuf>, AnyError> {
|
||||
FileCollector::new(is_supported_ext_fmt)
|
||||
.ignore_git_folder()
|
||||
|
|
|
@ -2,12 +2,9 @@

//! This module provides file linting utilities using
//! [`deno_lint`](https://github.com/denoland/deno_lint).
//!
//! At the moment it is only consumed using CLI but in
//! the future it can be easily extended to provide
//! the same functions as ops available in JS runtime.
use crate::args::CliOptions;
use crate::args::FilesConfig;
use crate::args::Flags;
use crate::args::LintFlags;
use crate::args::LintOptions;
use crate::args::LintReporterKind;
use crate::args::LintRulesConfig;

@ -15,9 +12,9 @@ use crate::colors;
use crate::factory::CliFactory;
use crate::tools::fmt::run_parallelized;
use crate::util::file_watcher;
use crate::util::file_watcher::ResolutionResult;
use crate::util::fs::FileCollector;
use crate::util::path::is_supported_ext;
use crate::util::sync::AtomicFlag;
use deno_ast::MediaType;
use deno_core::anyhow::bail;
use deno_core::error::generic_error;
@ -38,8 +35,6 @@ use std::io::stdin;
|
|||
use std::io::Read;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use std::sync::atomic::Ordering;
|
||||
use std::sync::Arc;
|
||||
use std::sync::Mutex;
|
||||
|
||||
|
@ -55,133 +50,70 @@ fn create_reporter(kind: LintReporterKind) -> Box<dyn LintReporter + Send> {
|
|||
}
|
||||
}
|
||||
|
||||
pub async fn lint(
|
||||
cli_options: CliOptions,
|
||||
lint_options: LintOptions,
|
||||
) -> Result<(), AnyError> {
|
||||
// Try to get lint rules. If none were set use recommended rules.
|
||||
let lint_rules = get_configured_rules(lint_options.rules);
|
||||
|
||||
if lint_rules.is_empty() {
|
||||
bail!("No rules have been configured")
|
||||
}
|
||||
|
||||
let files = lint_options.files;
|
||||
let reporter_kind = lint_options.reporter_kind;
|
||||
|
||||
let resolver = |changed: Option<Vec<PathBuf>>| {
|
||||
let files_changed = changed.is_some();
|
||||
let result = collect_lint_files(&files).map(|files| {
|
||||
if let Some(paths) = changed {
|
||||
files
|
||||
.iter()
|
||||
.any(|path| paths.contains(path))
|
||||
.then_some(files)
|
||||
.unwrap_or_else(|| [].to_vec())
|
||||
} else {
|
||||
files
|
||||
}
|
||||
});
|
||||
|
||||
let paths_to_watch = files.include.clone();
|
||||
|
||||
async move {
|
||||
if files_changed && matches!(result, Ok(ref files) if files.is_empty()) {
|
||||
ResolutionResult::Ignore
|
||||
} else {
|
||||
ResolutionResult::Restart {
|
||||
paths_to_watch,
|
||||
result,
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let has_error = Arc::new(AtomicBool::new(false));
|
||||
let factory = CliFactory::from_cli_options(Arc::new(cli_options));
|
||||
let cli_options = factory.cli_options();
|
||||
let caches = factory.caches()?;
|
||||
let operation = |paths: Vec<PathBuf>| async {
|
||||
let incremental_cache = Arc::new(IncrementalCache::new(
|
||||
caches.lint_incremental_cache_db(),
|
||||
// use a hash of the rule names in order to bust the cache
|
||||
&{
|
||||
// ensure this is stable by sorting it
|
||||
let mut names = lint_rules.iter().map(|r| r.code()).collect::<Vec<_>>();
|
||||
names.sort_unstable();
|
||||
names
|
||||
},
|
||||
&paths,
|
||||
));
|
||||
let target_files_len = paths.len();
|
||||
let reporter_lock =
|
||||
Arc::new(Mutex::new(create_reporter(reporter_kind.clone())));
|
||||
|
||||
run_parallelized(paths, {
|
||||
let has_error = has_error.clone();
|
||||
let lint_rules = lint_rules.clone();
|
||||
let reporter_lock = reporter_lock.clone();
|
||||
let incremental_cache = incremental_cache.clone();
|
||||
move |file_path| {
|
||||
let file_text = fs::read_to_string(&file_path)?;
|
||||
|
||||
// don't bother rechecking this file if it didn't have any diagnostics before
|
||||
if incremental_cache.is_file_same(&file_path, &file_text) {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let r = lint_file(&file_path, file_text, lint_rules);
|
||||
if let Ok((file_diagnostics, file_text)) = &r {
|
||||
if file_diagnostics.is_empty() {
|
||||
// update the incremental cache if there were no diagnostics
|
||||
incremental_cache.update_file(&file_path, file_text)
|
||||
}
|
||||
}
|
||||
|
||||
handle_lint_result(
|
||||
&file_path.to_string_lossy(),
|
||||
r,
|
||||
reporter_lock.clone(),
|
||||
has_error,
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
})
|
||||
.await?;
|
||||
incremental_cache.wait_completion().await;
|
||||
reporter_lock.lock().unwrap().close(target_files_len);
|
||||
|
||||
Ok(())
|
||||
};
|
||||
if cli_options.watch_paths().is_some() {
|
||||
if lint_options.is_stdin {
|
||||
pub async fn lint(flags: Flags, lint_flags: LintFlags) -> Result<(), AnyError> {
|
||||
if flags.watch.is_some() {
|
||||
if lint_flags.is_stdin() {
|
||||
return Err(generic_error(
|
||||
"Lint watch on standard input is not supported.",
|
||||
));
|
||||
}
|
||||
let clear_screen = !flags.no_clear_screen;
|
||||
file_watcher::watch_func(
|
||||
resolver,
|
||||
operation,
|
||||
flags,
|
||||
file_watcher::PrintConfig {
|
||||
job_name: "Lint".to_string(),
|
||||
clear_screen: !cli_options.no_clear_screen(),
|
||||
clear_screen,
|
||||
},
|
||||
move |flags, sender, changed_paths| {
|
||||
let lint_flags = lint_flags.clone();
|
||||
Ok(async move {
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let cli_options = factory.cli_options();
|
||||
let lint_options = cli_options.resolve_lint_options(lint_flags)?;
|
||||
let files =
|
||||
collect_lint_files(&lint_options.files).and_then(|files| {
|
||||
if files.is_empty() {
|
||||
Err(generic_error("No target files found."))
|
||||
} else {
|
||||
Ok(files)
|
||||
}
|
||||
})?;
|
||||
_ = sender.send(files.clone());
|
||||
|
||||
let lint_paths = if let Some(paths) = changed_paths {
|
||||
// lint all files on any changed (https://github.com/denoland/deno/issues/12446)
|
||||
files
|
||||
.iter()
|
||||
.any(|path| paths.contains(path))
|
||||
.then_some(files)
|
||||
.unwrap_or_else(|| [].to_vec())
|
||||
} else {
|
||||
files
|
||||
};
|
||||
|
||||
lint_files(factory, lint_options, lint_paths).await?;
|
||||
Ok(())
|
||||
})
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
} else {
|
||||
if lint_options.is_stdin {
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let cli_options = factory.cli_options();
|
||||
let is_stdin = lint_flags.is_stdin();
|
||||
let lint_options = cli_options.resolve_lint_options(lint_flags)?;
|
||||
let files = &lint_options.files;
|
||||
let success = if is_stdin {
|
||||
let reporter_kind = lint_options.reporter_kind;
|
||||
let reporter_lock = Arc::new(Mutex::new(create_reporter(reporter_kind)));
|
||||
let lint_rules = get_config_rules_err_empty(lint_options.rules)?;
|
||||
let r = lint_stdin(lint_rules);
|
||||
handle_lint_result(
|
||||
STDIN_FILE_NAME,
|
||||
r,
|
||||
reporter_lock.clone(),
|
||||
has_error.clone(),
|
||||
);
|
||||
let success =
|
||||
handle_lint_result(STDIN_FILE_NAME, r, reporter_lock.clone());
|
||||
reporter_lock.lock().unwrap().close(1);
|
||||
success
|
||||
} else {
|
||||
let target_files = collect_lint_files(&files).and_then(|files| {
|
||||
let target_files = collect_lint_files(files).and_then(|files| {
|
||||
if files.is_empty() {
|
||||
Err(generic_error("No target files found."))
|
||||
} else {
|
||||
|
@ -189,10 +121,9 @@ pub async fn lint(
|
|||
}
|
||||
})?;
|
||||
debug!("Found {} files", target_files.len());
|
||||
operation(target_files).await?;
|
||||
lint_files(factory, lint_options, target_files).await?
|
||||
};
|
||||
let has_error = has_error.load(Ordering::Relaxed);
|
||||
if has_error {
|
||||
if !success {
|
||||
std::process::exit(1);
|
||||
}
|
||||
}
|
||||
|
@ -200,6 +131,70 @@ pub async fn lint(
|
|||
Ok(())
|
||||
}
|
||||
|
||||
async fn lint_files(
|
||||
factory: CliFactory,
|
||||
lint_options: LintOptions,
|
||||
paths: Vec<PathBuf>,
|
||||
) -> Result<bool, AnyError> {
|
||||
let caches = factory.caches()?;
|
||||
let lint_rules = get_config_rules_err_empty(lint_options.rules)?;
|
||||
let incremental_cache = Arc::new(IncrementalCache::new(
|
||||
caches.lint_incremental_cache_db(),
|
||||
// use a hash of the rule names in order to bust the cache
|
||||
&{
|
||||
// ensure this is stable by sorting it
|
||||
let mut names = lint_rules.iter().map(|r| r.code()).collect::<Vec<_>>();
|
||||
names.sort_unstable();
|
||||
names
|
||||
},
|
||||
&paths,
|
||||
));
|
||||
let target_files_len = paths.len();
|
||||
let reporter_kind = lint_options.reporter_kind;
|
||||
let reporter_lock =
|
||||
Arc::new(Mutex::new(create_reporter(reporter_kind.clone())));
|
||||
let has_error = Arc::new(AtomicFlag::default());
|
||||
|
||||
run_parallelized(paths, {
|
||||
let has_error = has_error.clone();
|
||||
let lint_rules = lint_rules.clone();
|
||||
let reporter_lock = reporter_lock.clone();
|
||||
let incremental_cache = incremental_cache.clone();
|
||||
move |file_path| {
|
||||
let file_text = fs::read_to_string(&file_path)?;
|
||||
|
||||
// don't bother rechecking this file if it didn't have any diagnostics before
|
||||
if incremental_cache.is_file_same(&file_path, &file_text) {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let r = lint_file(&file_path, file_text, lint_rules);
|
||||
if let Ok((file_diagnostics, file_text)) = &r {
|
||||
if file_diagnostics.is_empty() {
|
||||
// update the incremental cache if there were no diagnostics
|
||||
incremental_cache.update_file(&file_path, file_text)
|
||||
}
|
||||
}
|
||||
|
||||
let success = handle_lint_result(
|
||||
&file_path.to_string_lossy(),
|
||||
r,
|
||||
reporter_lock.clone(),
|
||||
);
|
||||
if !success {
|
||||
has_error.raise();
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
})
|
||||
.await?;
|
||||
incremental_cache.wait_completion().await;
|
||||
reporter_lock.lock().unwrap().close(target_files_len);
|
||||
|
||||
Ok(!has_error.is_raised())
|
||||
}
|
||||
|
||||
fn collect_lint_files(files: &FilesConfig) -> Result<Vec<PathBuf>, AnyError> {
|
||||
FileCollector::new(is_supported_ext)
|
||||
.ignore_git_folder()
|
||||
|
@ -286,21 +281,20 @@ fn handle_lint_result(
|
|||
file_path: &str,
|
||||
result: Result<(Vec<LintDiagnostic>, String), AnyError>,
|
||||
reporter_lock: Arc<Mutex<Box<dyn LintReporter + Send>>>,
|
||||
has_error: Arc<AtomicBool>,
|
||||
) {
|
||||
) -> bool {
|
||||
let mut reporter = reporter_lock.lock().unwrap();
|
||||
|
||||
match result {
|
||||
Ok((mut file_diagnostics, source)) => {
|
||||
sort_diagnostics(&mut file_diagnostics);
|
||||
for d in file_diagnostics.iter() {
|
||||
has_error.store(true, Ordering::Relaxed);
|
||||
reporter.visit_diagnostic(d, source.split('\n').collect());
|
||||
}
|
||||
file_diagnostics.is_empty()
|
||||
}
|
||||
Err(err) => {
|
||||
has_error.store(true, Ordering::Relaxed);
|
||||
reporter.visit_error(file_path, &err);
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -534,6 +528,16 @@ fn sort_diagnostics(diagnostics: &mut [LintDiagnostic]) {
|
|||
});
|
||||
}
|
||||
|
||||
fn get_config_rules_err_empty(
|
||||
rules: LintRulesConfig,
|
||||
) -> Result<Vec<&'static dyn LintRule>, AnyError> {
|
||||
let lint_rules = get_configured_rules(rules);
|
||||
if lint_rules.is_empty() {
|
||||
bail!("No rules have been configured")
|
||||
}
|
||||
Ok(lint_rules)
|
||||
}
|
||||
|
||||
pub fn get_configured_rules(
|
||||
rules: LintRulesConfig,
|
||||
) -> Vec<&'static dyn LintRule> {
|
||||
|
|
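
The lint incremental cache above is keyed by the configured rule codes, sorted first so that the key does not depend on the order rules were listed in. A small illustration of why the sort matters (joining into a string is only for this example; the real cache derives its key differently):

// Two configurations with the same rules in a different order produce the same key.
fn stable_rules_key(rules: &[&str]) -> String {
    let mut names: Vec<&str> = rules.to_vec();
    names.sort_unstable();
    names.join(",")
}

fn main() {
    let a = stable_rules_key(&["no-unused-vars", "eqeqeq"]);
    let b = stable_rules_key(&["eqeqeq", "no-unused-vars"]);
    assert_eq!(a, b);
    println!("key: {a}");
}
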
|
@ -3,7 +3,6 @@
|
|||
use std::io::Read;
|
||||
|
||||
use deno_ast::MediaType;
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_runtime::permissions::Permissions;
|
||||
use deno_runtime::permissions::PermissionsContainer;
|
||||
|
@ -98,45 +97,42 @@ pub async fn run_from_stdin(flags: Flags) -> Result<i32, AnyError> {
|
|||
// TODO(bartlomieju): this function is not handling `exit_code` set by the runtime
|
||||
// code properly.
|
||||
async fn run_with_watch(flags: Flags) -> Result<i32, AnyError> {
|
||||
let (sender, receiver) = tokio::sync::mpsc::unbounded_channel();
|
||||
let factory = CliFactoryBuilder::new()
|
||||
.with_watcher(sender.clone())
|
||||
.build_from_flags(flags)
|
||||
.await?;
|
||||
let file_watcher = factory.file_watcher()?;
|
||||
let cli_options = factory.cli_options();
|
||||
let clear_screen = !cli_options.no_clear_screen();
|
||||
let main_module = cli_options.resolve_main_module()?;
|
||||
let clear_screen = !flags.no_clear_screen;
|
||||
|
||||
maybe_npm_install(&factory).await?;
|
||||
|
||||
let create_cli_main_worker_factory =
|
||||
factory.create_cli_main_worker_factory_func().await?;
|
||||
let operation = |main_module: ModuleSpecifier| {
|
||||
file_watcher.reset();
|
||||
let permissions = PermissionsContainer::new(Permissions::from_options(
|
||||
&cli_options.permissions_options(),
|
||||
)?);
|
||||
let create_cli_main_worker_factory = create_cli_main_worker_factory.clone();
|
||||
|
||||
Ok(async move {
|
||||
let worker = create_cli_main_worker_factory()
|
||||
.create_main_worker(main_module, permissions)
|
||||
.await?;
|
||||
worker.run_for_watcher().await?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
};
|
||||
|
||||
util::file_watcher::watch_func2(
|
||||
receiver,
|
||||
operation,
|
||||
main_module,
|
||||
util::file_watcher::watch_func(
|
||||
flags,
|
||||
util::file_watcher::PrintConfig {
|
||||
job_name: "Process".to_string(),
|
||||
clear_screen,
|
||||
},
|
||||
move |flags, sender, _changed_paths| {
|
||||
Ok(async move {
|
||||
let factory = CliFactoryBuilder::new()
|
||||
.with_watcher(sender.clone())
|
||||
.build_from_flags(flags)
|
||||
.await?;
|
||||
let cli_options = factory.cli_options();
|
||||
let main_module = cli_options.resolve_main_module()?;
|
||||
|
||||
maybe_npm_install(&factory).await?;
|
||||
|
||||
if let Some(watch_paths) = cli_options.watch_paths() {
|
||||
let _ = sender.send(watch_paths);
|
||||
}
|
||||
|
||||
let permissions = PermissionsContainer::new(Permissions::from_options(
|
||||
&cli_options.permissions_options(),
|
||||
)?);
|
||||
let worker = factory
|
||||
.create_cli_main_worker_factory()
|
||||
.await?
|
||||
.create_main_worker(main_module, permissions)
|
||||
.await?;
|
||||
worker.run_for_watcher().await?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
|
|
|
@ -2,18 +2,20 @@
|
|||
|
||||
use crate::args::CliOptions;
|
||||
use crate::args::FilesConfig;
|
||||
use crate::args::TestOptions;
|
||||
use crate::args::Flags;
|
||||
use crate::args::TestFlags;
|
||||
use crate::colors;
|
||||
use crate::display;
|
||||
use crate::factory::CliFactory;
|
||||
use crate::factory::CliFactoryBuilder;
|
||||
use crate::file_fetcher::File;
|
||||
use crate::file_fetcher::FileFetcher;
|
||||
use crate::graph_util::graph_valid_with_cli_options;
|
||||
use crate::graph_util::has_graph_root_local_dependent_changed;
|
||||
use crate::module_loader::ModuleLoadPreparer;
|
||||
use crate::ops;
|
||||
use crate::util::checksum;
|
||||
use crate::util::file_watcher;
|
||||
use crate::util::file_watcher::ResolutionResult;
|
||||
use crate::util::fs::collect_specifiers;
|
||||
use crate::util::path::get_extension;
|
||||
use crate::util::path::is_supported_ext;
|
||||
|
@ -62,7 +64,6 @@ use std::io::Read;
|
|||
use std::io::Write;
|
||||
use std::num::NonZeroUsize;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use std::sync::atomic::AtomicUsize;
|
||||
use std::sync::atomic::Ordering;
|
||||
|
@ -1641,11 +1642,12 @@ async fn fetch_specifiers_with_test_mode(
|
|||
}
|
||||
|
||||
pub async fn run_tests(
|
||||
cli_options: CliOptions,
|
||||
test_options: TestOptions,
|
||||
flags: Flags,
|
||||
test_flags: TestFlags,
|
||||
) -> Result<(), AnyError> {
|
||||
let factory = CliFactory::from_cli_options(Arc::new(cli_options));
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let cli_options = factory.cli_options();
|
||||
let test_options = cli_options.resolve_test_options(test_flags)?;
|
||||
let file_fetcher = factory.file_fetcher()?;
|
||||
let module_load_preparer = factory.module_load_preparer().await?;
|
||||
// Various test files should not share the same permissions in terms of
|
||||
|
@ -1708,186 +1710,9 @@ pub async fn run_tests(
|
|||
}
|
||||
|
||||
pub async fn run_tests_with_watch(
|
||||
cli_options: CliOptions,
|
||||
test_options: TestOptions,
|
||||
flags: Flags,
|
||||
test_flags: TestFlags,
|
||||
) -> Result<(), AnyError> {
|
||||
let factory = CliFactory::from_cli_options(Arc::new(cli_options));
|
||||
let cli_options = factory.cli_options();
|
||||
let module_graph_builder = factory.module_graph_builder().await?;
|
||||
let module_load_preparer = factory.module_load_preparer().await?;
|
||||
let file_fetcher = factory.file_fetcher()?;
|
||||
let file_watcher = factory.file_watcher()?;
|
||||
// Various test files should not share the same permissions in terms of
|
||||
// `PermissionsContainer` - otherwise granting/revoking permissions in one
|
||||
// file would have impact on other files, which is undesirable.
|
||||
let permissions =
|
||||
Permissions::from_options(&cli_options.permissions_options())?;
|
||||
let graph_kind = cli_options.type_check_mode().as_graph_kind();
|
||||
let log_level = cli_options.log_level();
|
||||
|
||||
let resolver = |changed: Option<Vec<PathBuf>>| {
|
||||
let paths_to_watch = test_options.files.include.clone();
|
||||
let paths_to_watch_clone = paths_to_watch.clone();
|
||||
let files_changed = changed.is_some();
|
||||
let test_options = &test_options;
|
||||
let cli_options = cli_options.clone();
|
||||
let module_graph_builder = module_graph_builder.clone();
|
||||
|
||||
async move {
|
||||
let test_modules = if test_options.doc {
|
||||
collect_specifiers(&test_options.files, is_supported_test_ext)
|
||||
} else {
|
||||
collect_specifiers(&test_options.files, is_supported_test_path)
|
||||
}?;
|
||||
|
||||
let mut paths_to_watch = paths_to_watch_clone;
|
||||
let mut modules_to_reload = if files_changed {
|
||||
Vec::new()
|
||||
} else {
|
||||
test_modules.clone()
|
||||
};
|
||||
let graph = module_graph_builder
|
||||
.create_graph(graph_kind, test_modules.clone())
|
||||
.await?;
|
||||
graph_valid_with_cli_options(&graph, &test_modules, &cli_options)?;
|
||||
|
||||
// TODO(@kitsonk) - This should be totally derivable from the graph.
|
||||
for specifier in test_modules {
|
||||
fn get_dependencies<'a>(
|
||||
graph: &'a deno_graph::ModuleGraph,
|
||||
maybe_module: Option<&'a deno_graph::Module>,
|
||||
// This needs to be accessible to skip getting dependencies if they're already there,
|
||||
// otherwise this will cause a stack overflow with circular dependencies
|
||||
output: &mut HashSet<&'a ModuleSpecifier>,
|
||||
) {
|
||||
if let Some(module) = maybe_module.and_then(|m| m.esm()) {
|
||||
for dep in module.dependencies.values() {
|
||||
if let Some(specifier) = &dep.get_code() {
|
||||
if !output.contains(specifier) {
|
||||
output.insert(specifier);
|
||||
get_dependencies(graph, graph.get(specifier), output);
|
||||
}
|
||||
}
|
||||
if let Some(specifier) = &dep.get_type() {
|
||||
if !output.contains(specifier) {
|
||||
output.insert(specifier);
|
||||
get_dependencies(graph, graph.get(specifier), output);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// This test module and all it's dependencies
|
||||
let mut modules = HashSet::new();
|
||||
modules.insert(&specifier);
|
||||
get_dependencies(&graph, graph.get(&specifier), &mut modules);
|
||||
|
||||
paths_to_watch.extend(
|
||||
modules
|
||||
.iter()
|
||||
.filter_map(|specifier| specifier.to_file_path().ok()),
|
||||
);
|
||||
|
||||
if let Some(changed) = &changed {
|
||||
for path in changed
|
||||
.iter()
|
||||
.filter_map(|path| ModuleSpecifier::from_file_path(path).ok())
|
||||
{
|
||||
if modules.contains(&path) {
|
||||
modules_to_reload.push(specifier);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok((paths_to_watch, modules_to_reload))
|
||||
}
|
||||
.map(move |result| {
|
||||
if files_changed
|
||||
&& matches!(result, Ok((_, ref modules)) if modules.is_empty())
|
||||
{
|
||||
ResolutionResult::Ignore
|
||||
} else {
|
||||
match result {
|
||||
Ok((paths_to_watch, modules_to_reload)) => {
|
||||
ResolutionResult::Restart {
|
||||
paths_to_watch,
|
||||
result: Ok(modules_to_reload),
|
||||
}
|
||||
}
|
||||
Err(e) => ResolutionResult::Restart {
|
||||
paths_to_watch,
|
||||
result: Err(e),
|
||||
},
|
||||
}
|
||||
}
|
||||
})
|
||||
};
|
||||
|
||||
let create_cli_main_worker_factory =
|
||||
factory.create_cli_main_worker_factory_func().await?;
|
||||
let operation = |modules_to_reload: Vec<ModuleSpecifier>| {
|
||||
let permissions = &permissions;
|
||||
let test_options = &test_options;
|
||||
file_watcher.reset();
|
||||
let cli_options = cli_options.clone();
|
||||
let file_fetcher = file_fetcher.clone();
|
||||
let module_load_preparer = module_load_preparer.clone();
|
||||
let create_cli_main_worker_factory = create_cli_main_worker_factory.clone();
|
||||
|
||||
async move {
|
||||
let worker_factory = Arc::new(create_cli_main_worker_factory());
|
||||
let specifiers_with_mode = fetch_specifiers_with_test_mode(
|
||||
&file_fetcher,
|
||||
&test_options.files,
|
||||
&test_options.doc,
|
||||
)
|
||||
.await?
|
||||
.into_iter()
|
||||
.filter(|(specifier, _)| modules_to_reload.contains(specifier))
|
||||
.collect::<Vec<(ModuleSpecifier, TestMode)>>();
|
||||
|
||||
check_specifiers(
|
||||
&cli_options,
|
||||
&file_fetcher,
|
||||
&module_load_preparer,
|
||||
specifiers_with_mode.clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
if test_options.no_run {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
test_specifiers(
|
||||
worker_factory,
|
||||
permissions,
|
||||
specifiers_with_mode
|
||||
.into_iter()
|
||||
.filter_map(|(s, m)| match m {
|
||||
TestMode::Documentation => None,
|
||||
_ => Some(s),
|
||||
})
|
||||
.collect(),
|
||||
TestSpecifiersOptions {
|
||||
concurrent_jobs: test_options.concurrent_jobs,
|
||||
fail_fast: test_options.fail_fast,
|
||||
log_level,
|
||||
specifier: TestSpecifierOptions {
|
||||
filter: TestFilter::from_flag(&test_options.filter),
|
||||
shuffle: test_options.shuffle,
|
||||
trace_ops: test_options.trace_ops,
|
||||
},
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
};
|
||||
|
||||
// On top of the sigint handlers which are added and unbound for each test
|
||||
// run, a process-scoped basic exit handler is required due to a tokio
|
||||
// limitation where it doesn't unbind its own handler for the entire process
|
||||
|
@ -1901,14 +1726,118 @@ pub async fn run_tests_with_watch(
|
|||
}
|
||||
});
|
||||
|
||||
let clear_screen = !cli_options.no_clear_screen();
|
||||
let clear_screen = !flags.no_clear_screen;
|
||||
file_watcher::watch_func(
|
||||
resolver,
|
||||
operation,
|
||||
flags,
|
||||
file_watcher::PrintConfig {
|
||||
job_name: "Test".to_string(),
|
||||
clear_screen,
|
||||
},
|
||||
move |flags, sender, changed_paths| {
|
||||
let test_flags = test_flags.clone();
|
||||
Ok(async move {
|
||||
let factory = CliFactoryBuilder::new()
|
||||
.with_watcher(sender.clone())
|
||||
.build_from_flags(flags)
|
||||
.await?;
|
||||
let cli_options = factory.cli_options();
|
||||
let test_options = cli_options.resolve_test_options(test_flags)?;
|
||||
|
||||
if let Some(watch_paths) = cli_options.watch_paths() {
|
||||
let _ = sender.send(watch_paths);
|
||||
}
|
||||
let _ = sender.send(test_options.files.include.clone());
|
||||
|
||||
let graph_kind = cli_options.type_check_mode().as_graph_kind();
|
||||
let log_level = cli_options.log_level();
|
||||
let cli_options = cli_options.clone();
|
||||
let module_graph_builder = factory.module_graph_builder().await?;
|
||||
let file_fetcher = factory.file_fetcher()?;
|
||||
let test_modules = if test_options.doc {
|
||||
collect_specifiers(&test_options.files, is_supported_test_ext)
|
||||
} else {
|
||||
collect_specifiers(&test_options.files, is_supported_test_path)
|
||||
}?;
|
||||
let permissions =
|
||||
Permissions::from_options(&cli_options.permissions_options())?;
|
||||
|
||||
let graph = module_graph_builder
|
||||
.create_graph(graph_kind, test_modules.clone())
|
||||
.await?;
|
||||
graph_valid_with_cli_options(&graph, &test_modules, &cli_options)?;
|
||||
|
||||
let test_modules_to_reload = if let Some(changed_paths) = changed_paths
|
||||
{
|
||||
let changed_specifiers = changed_paths
|
||||
.into_iter()
|
||||
.filter_map(|p| ModuleSpecifier::from_file_path(p).ok())
|
||||
.collect::<HashSet<_>>();
|
||||
let mut result = Vec::new();
|
||||
for test_module_specifier in test_modules {
|
||||
if has_graph_root_local_dependent_changed(
|
||||
&graph,
|
||||
&test_module_specifier,
|
||||
&changed_specifiers,
|
||||
) {
|
||||
result.push(test_module_specifier.clone());
|
||||
}
|
||||
}
|
||||
result
|
||||
} else {
|
||||
test_modules.clone()
|
||||
};
|
||||
|
||||
let worker_factory =
|
||||
Arc::new(factory.create_cli_main_worker_factory().await?);
|
||||
let module_load_preparer = factory.module_load_preparer().await?;
|
||||
let specifiers_with_mode = fetch_specifiers_with_test_mode(
|
||||
file_fetcher,
|
||||
&test_options.files,
|
||||
&test_options.doc,
|
||||
)
|
||||
.await?
|
||||
.into_iter()
|
||||
.filter(|(specifier, _)| test_modules_to_reload.contains(specifier))
|
||||
.collect::<Vec<(ModuleSpecifier, TestMode)>>();
|
||||
|
||||
check_specifiers(
|
||||
&cli_options,
|
||||
file_fetcher,
|
||||
module_load_preparer,
|
||||
specifiers_with_mode.clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
if test_options.no_run {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
test_specifiers(
|
||||
worker_factory,
|
||||
&permissions,
|
||||
specifiers_with_mode
|
||||
.into_iter()
|
||||
.filter_map(|(s, m)| match m {
|
||||
TestMode::Documentation => None,
|
||||
_ => Some(s),
|
||||
})
|
||||
.collect(),
|
||||
TestSpecifiersOptions {
|
||||
concurrent_jobs: test_options.concurrent_jobs,
|
||||
fail_fast: test_options.fail_fast,
|
||||
log_level,
|
||||
specifier: TestSpecifierOptions {
|
||||
filter: TestFilter::from_flag(&test_options.filter),
|
||||
shuffle: test_options.shuffle,
|
||||
trace_ops: test_options.trace_ops,
|
||||
},
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
|
|
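
The note in run_tests_with_watch above about needing a process-scoped exit handler (tokio does not unbind its own signal handler for the whole process, while the per-run test handlers come and go) is easiest to picture as a fallback task installed once at startup. A sketch of one common shape for such a handler, not taken from this diff (assumes tokio = { version = "1", features = ["full"] }):

#[tokio::main]
async fn main() {
    // Installed once per process; individual watch/test runs can add and remove
    // their own handlers without losing a way out on Ctrl-C.
    tokio::spawn(async {
        if tokio::signal::ctrl_c().await.is_ok() {
            // 130 is the conventional exit code for termination by SIGINT.
            std::process::exit(130);
        }
    });

    // ... the watch / test loop would run here; a sleep stands in for it ...
    tokio::time::sleep(std::time::Duration::from_secs(5)).await;
}
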
|
@ -1,5 +1,6 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use crate::args::Flags;
|
||||
use crate::colors;
|
||||
use crate::util::fs::canonicalize_path;
|
||||
|
||||
|
@ -21,6 +22,7 @@ use std::time::Duration;
|
|||
use tokio::select;
|
||||
use tokio::sync::mpsc;
|
||||
use tokio::sync::mpsc::UnboundedReceiver;
|
||||
use tokio::sync::mpsc::UnboundedSender;
|
||||
use tokio::time::sleep;
|
||||
|
||||
const CLEAR_SCREEN: &str = "\x1B[2J\x1B[1;1H";
|
||||
|
@ -66,7 +68,7 @@ impl DebouncedReceiver {
}
}

async fn error_handler<F>(watch_future: F)
async fn error_handler<F>(watch_future: F) -> bool
where
F: Future<Output = Result<(), AnyError>>,
{
@ -81,42 +83,9 @@ where
|
|||
colors::red_bold("error"),
|
||||
error_string.trim_start_matches("error: ")
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
pub enum ResolutionResult<T> {
|
||||
Restart {
|
||||
paths_to_watch: Vec<PathBuf>,
|
||||
result: Result<T, AnyError>,
|
||||
},
|
||||
Ignore,
|
||||
}
|
||||
|
||||
async fn next_restart<R, T, F>(
|
||||
resolver: &mut R,
|
||||
debounced_receiver: &mut DebouncedReceiver,
|
||||
) -> (Vec<PathBuf>, Result<T, AnyError>)
|
||||
where
|
||||
R: FnMut(Option<Vec<PathBuf>>) -> F,
|
||||
F: Future<Output = ResolutionResult<T>>,
|
||||
{
|
||||
loop {
|
||||
let changed = debounced_receiver.recv().await;
|
||||
match resolver(changed).await {
|
||||
ResolutionResult::Ignore => {
|
||||
log::debug!("File change ignored")
|
||||
}
|
||||
ResolutionResult::Restart {
|
||||
mut paths_to_watch,
|
||||
result,
|
||||
} => {
|
||||
// watch the current directory when empty
|
||||
if paths_to_watch.is_empty() {
|
||||
paths_to_watch.push(PathBuf::from("."));
|
||||
}
|
||||
return (paths_to_watch, result);
|
||||
}
|
||||
}
|
||||
false
|
||||
} else {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -139,138 +108,26 @@ fn create_print_after_restart_fn(clear_screen: bool) -> impl Fn() {
|
|||
}
|
||||
}
|
||||
|
||||
/// Creates a file watcher, which will call `resolver` with every file change.
|
||||
///
|
||||
/// - `resolver` is used for resolving file paths to be watched at every restarting
|
||||
/// of the watcher, and can also return a value to be passed to `operation`.
|
||||
/// It returns a [`ResolutionResult`], which can either instruct the watcher to restart or ignore the change.
|
||||
/// This always contains paths to watch;
|
||||
///
|
||||
/// - `operation` is the actual operation we want to run every time the watcher detects file
|
||||
/// changes. For example, in the case where we would like to bundle, then `operation` would
|
||||
/// have the logic for it like bundling the code.
|
||||
pub async fn watch_func<R, O, T, F1, F2>(
|
||||
mut resolver: R,
|
||||
mut operation: O,
|
||||
print_config: PrintConfig,
|
||||
) -> Result<(), AnyError>
|
||||
where
|
||||
R: FnMut(Option<Vec<PathBuf>>) -> F1,
|
||||
O: FnMut(T) -> F2,
|
||||
F1: Future<Output = ResolutionResult<T>>,
|
||||
F2: Future<Output = Result<(), AnyError>>,
|
||||
{
|
||||
let (sender, mut receiver) = DebouncedReceiver::new_with_sender();
|
||||
|
||||
let PrintConfig {
|
||||
job_name,
|
||||
clear_screen,
|
||||
} = print_config;
|
||||
|
||||
// Store previous data. If module resolution fails at some point, the watcher will try to
|
||||
// continue watching files using these data.
|
||||
let mut paths_to_watch;
|
||||
let mut resolution_result;
|
||||
|
||||
let print_after_restart = create_print_after_restart_fn(clear_screen);
|
||||
|
||||
match resolver(None).await {
|
||||
ResolutionResult::Ignore => {
|
||||
// The only situation where it makes sense to ignore the initial 'change'
|
||||
// is if the command isn't supposed to do anything until something changes,
|
||||
// e.g. a variant of `deno test` which doesn't run the entire test suite to start with,
|
||||
// but instead does nothing until you make a change.
|
||||
//
|
||||
// In that case, this is probably the correct output.
|
||||
info!(
|
||||
"{} Waiting for file changes...",
|
||||
colors::intense_blue("Watcher"),
|
||||
);
|
||||
|
||||
let (paths, result) = next_restart(&mut resolver, &mut receiver).await;
|
||||
paths_to_watch = paths;
|
||||
resolution_result = result;
|
||||
|
||||
print_after_restart();
|
||||
}
|
||||
ResolutionResult::Restart {
|
||||
paths_to_watch: mut paths,
|
||||
result,
|
||||
} => {
|
||||
// watch the current directory when empty
|
||||
if paths.is_empty() {
|
||||
paths.push(PathBuf::from("."));
|
||||
}
|
||||
paths_to_watch = paths;
|
||||
resolution_result = result;
|
||||
}
|
||||
};
|
||||
|
||||
info!("{} {} started.", colors::intense_blue("Watcher"), job_name,);
|
||||
|
||||
loop {
|
||||
let mut watcher = new_watcher(sender.clone())?;
|
||||
add_paths_to_watcher(&mut watcher, &paths_to_watch);
|
||||
|
||||
match resolution_result {
|
||||
Ok(operation_arg) => {
|
||||
let fut = error_handler(operation(operation_arg));
|
||||
select! {
|
||||
(paths, result) = next_restart(&mut resolver, &mut receiver) => {
|
||||
if result.is_ok() {
|
||||
paths_to_watch = paths;
|
||||
}
|
||||
resolution_result = result;
|
||||
|
||||
print_after_restart();
|
||||
continue;
|
||||
},
|
||||
_ = fut => {},
|
||||
};
|
||||
|
||||
info!(
|
||||
"{} {} finished. Restarting on file change...",
|
||||
colors::intense_blue("Watcher"),
|
||||
job_name,
|
||||
);
|
||||
}
|
||||
Err(error) => {
|
||||
eprintln!("{}: {}", colors::red_bold("error"), error);
|
||||
info!(
|
||||
"{} {} failed. Restarting on file change...",
|
||||
colors::intense_blue("Watcher"),
|
||||
job_name,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let (paths, result) = next_restart(&mut resolver, &mut receiver).await;
|
||||
if result.is_ok() {
|
||||
paths_to_watch = paths;
|
||||
}
|
||||
resolution_result = result;
|
||||
|
||||
print_after_restart();
|
||||
|
||||
drop(watcher);
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a file watcher.
///
/// - `operation` is the actual operation we want to run every time the watcher detects file
/// changes. For example, in the case where we would like to bundle, then `operation` would
/// have the logic for it like bundling the code.
pub async fn watch_func2<T: Clone, O, F>(
mut paths_to_watch_receiver: UnboundedReceiver<Vec<PathBuf>>,
mut operation: O,
operation_args: T,
pub async fn watch_func<O, F>(
mut flags: Flags,
print_config: PrintConfig,
mut operation: O,
) -> Result<(), AnyError>
where
O: FnMut(T) -> Result<F, AnyError>,
O: FnMut(
Flags,
UnboundedSender<Vec<PathBuf>>,
Option<Vec<PathBuf>>,
) -> Result<F, AnyError>,
F: Future<Output = Result<(), AnyError>>,
{
let (paths_to_watch_sender, mut paths_to_watch_receiver) =
tokio::sync::mpsc::unbounded_channel();
let (watcher_sender, mut watcher_receiver) =
DebouncedReceiver::new_with_sender();

@ -303,6 +160,7 @@ where
|
|||
}
|
||||
}
|
||||
|
||||
let mut changed_paths = None;
|
||||
loop {
|
||||
// We may need to give the runtime a tick to settle, as cancellations may need to propagate
|
||||
// to tasks. We choose yielding 10 times to the runtime as a decent heuristic. If watch tests
|
||||
|
@ -320,21 +178,34 @@ where
|
|||
add_paths_to_watcher(&mut watcher, &maybe_paths.unwrap());
|
||||
}
|
||||
};
|
||||
let operation_future = error_handler(operation(operation_args.clone())?);
|
||||
let operation_future = error_handler(operation(
|
||||
flags.clone(),
|
||||
paths_to_watch_sender.clone(),
|
||||
changed_paths.take(),
|
||||
)?);
|
||||
|
||||
// don't reload dependencies after the first run
|
||||
flags.reload = false;
|
||||
|
||||
select! {
|
||||
_ = receiver_future => {},
|
||||
_ = watcher_receiver.recv() => {
|
||||
received_changed_paths = watcher_receiver.recv() => {
|
||||
print_after_restart();
|
||||
changed_paths = received_changed_paths;
|
||||
continue;
|
||||
},
|
||||
_ = operation_future => {
|
||||
success = operation_future => {
|
||||
consume_paths_to_watch(&mut watcher, &mut paths_to_watch_receiver);
|
||||
// TODO(bartlomieju): print exit code here?
|
||||
info!(
|
||||
"{} {} finished. Restarting on file change...",
|
||||
"{} {} {}. Restarting on file change...",
|
||||
colors::intense_blue("Watcher"),
|
||||
job_name,
|
||||
if success {
|
||||
"finished"
|
||||
} else {
|
||||
"failed"
|
||||
}
|
||||
);
|
||||
},
|
||||
};
|
||||
|
@ -347,8 +218,9 @@ where
|
|||
};
|
||||
select! {
|
||||
_ = receiver_future => {},
|
||||
_ = watcher_receiver.recv() => {
|
||||
received_changed_paths = watcher_receiver.recv() => {
|
||||
print_after_restart();
|
||||
changed_paths = received_changed_paths;
|
||||
continue;
|
||||
},
|
||||
};
|
||||
|
|
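
After this change, watch_func is callback-shaped: on every iteration the operation receives the current Flags, a sender for reporting extra paths to watch, and the paths that changed since the previous run, and flags.reload is cleared after the first run. A stripped-down, self-contained sketch of driving an API with this shape (ToyFlags, watch_loop and the fixed two-iteration loop are invented for illustration; this is not the code from the diff; assumes tokio = { version = "1", features = ["full"] }):

use std::future::Future;
use std::path::PathBuf;
use tokio::sync::mpsc::UnboundedSender;

#[derive(Clone)]
struct ToyFlags {
    reload: bool,
}

async fn watch_loop<O, F>(mut flags: ToyFlags, mut operation: O) -> Result<(), std::io::Error>
where
    O: FnMut(
        ToyFlags,
        UnboundedSender<Vec<PathBuf>>,
        Option<Vec<PathBuf>>,
    ) -> Result<F, std::io::Error>,
    F: Future<Output = Result<(), std::io::Error>>,
{
    let (paths_sender, _paths_receiver) = tokio::sync::mpsc::unbounded_channel();
    let mut changed: Option<Vec<PathBuf>> = None;
    for _ in 0..2 {
        // Each run gets a fresh copy of the flags, the watch-path channel and
        // whatever changed since last time.
        operation(flags.clone(), paths_sender.clone(), changed.take())?.await?;
        flags.reload = false; // like the diff: only the first run honours --reload
        changed = Some(vec![PathBuf::from("main.ts")]); // pretend a file changed
    }
    Ok(())
}

#[tokio::main]
async fn main() -> Result<(), std::io::Error> {
    watch_loop(ToyFlags { reload: true }, |flags, sender, changed| {
        Ok(async move {
            let _ = sender.send(vec![PathBuf::from("deno.json")]); // extra path to watch
            println!("reload={} changed={:?}", flags.reload, changed);
            Ok(())
        })
    })
    .await
}
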
|
@ -1,99 +0,0 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use crate::args::CliOptions;
|
||||
use crate::cache::ParsedSourceCache;
|
||||
use crate::graph_util::ModuleGraphContainer;
|
||||
use crate::module_loader::CjsResolutionStore;
|
||||
|
||||
use deno_core::parking_lot::Mutex;
|
||||
use deno_core::ModuleSpecifier;
|
||||
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
pub struct FileWatcher {
|
||||
cli_options: Arc<CliOptions>,
|
||||
cjs_resolutions: Arc<CjsResolutionStore>,
|
||||
graph_container: Arc<ModuleGraphContainer>,
|
||||
maybe_reporter: Option<FileWatcherReporter>,
|
||||
parsed_source_cache: Arc<ParsedSourceCache>,
|
||||
}
|
||||
|
||||
impl FileWatcher {
|
||||
pub fn new(
|
||||
cli_options: Arc<CliOptions>,
|
||||
cjs_resolutions: Arc<CjsResolutionStore>,
|
||||
graph_container: Arc<ModuleGraphContainer>,
|
||||
maybe_reporter: Option<FileWatcherReporter>,
|
||||
parsed_source_cache: Arc<ParsedSourceCache>,
|
||||
) -> Self {
|
||||
Self {
|
||||
cli_options,
|
||||
cjs_resolutions,
|
||||
parsed_source_cache,
|
||||
graph_container,
|
||||
maybe_reporter,
|
||||
}
|
||||
}
|
||||
/// Reset all runtime state to its default. This should be used on file
|
||||
/// watcher restarts.
|
||||
pub fn reset(&self) {
|
||||
self.cjs_resolutions.clear();
|
||||
self.parsed_source_cache.clear();
|
||||
self.graph_container.clear();
|
||||
|
||||
self.init_watcher();
|
||||
}
|
||||
|
||||
// Add invariant files like the import map and explicit watch flag list to
|
||||
// the watcher. Dedup for build_for_file_watcher and reset_for_file_watcher.
|
||||
pub fn init_watcher(&self) {
|
||||
let files_to_watch_sender = match &self.maybe_reporter {
|
||||
Some(reporter) => &reporter.sender,
|
||||
None => return,
|
||||
};
|
||||
if let Some(watch_paths) = self.cli_options.watch_paths() {
|
||||
files_to_watch_sender.send(watch_paths.clone()).unwrap();
|
||||
}
|
||||
if let Ok(Some(import_map_path)) = self
|
||||
.cli_options
|
||||
.resolve_import_map_specifier()
|
||||
.map(|ms| ms.and_then(|ref s| s.to_file_path().ok()))
|
||||
{
|
||||
files_to_watch_sender.send(vec![import_map_path]).unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct FileWatcherReporter {
|
||||
sender: tokio::sync::mpsc::UnboundedSender<Vec<PathBuf>>,
|
||||
file_paths: Arc<Mutex<Vec<PathBuf>>>,
|
||||
}
|
||||
|
||||
impl FileWatcherReporter {
|
||||
pub fn new(sender: tokio::sync::mpsc::UnboundedSender<Vec<PathBuf>>) -> Self {
|
||||
Self {
|
||||
sender,
|
||||
file_paths: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl deno_graph::source::Reporter for FileWatcherReporter {
|
||||
fn on_load(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
modules_done: usize,
|
||||
modules_total: usize,
|
||||
) {
|
||||
let mut file_paths = self.file_paths.lock();
|
||||
if specifier.scheme() == "file" {
|
||||
file_paths.push(specifier.to_file_path().unwrap());
|
||||
}
|
||||
|
||||
if modules_done == modules_total {
|
||||
self.sender.send(file_paths.drain(..).collect()).unwrap();
|
||||
}
|
||||
}
|
||||
}