
fix: reload config files on watcher restarts (#19487)

Closes #19468
David Sherret authored on 2023-06-14 18:29:19 -04:00; committed by GitHub
parent 48c6f71787
commit 84c793275b
17 changed files with 917 additions and 1244 deletions

View file

@@ -6,8 +6,9 @@ rustflags = [
   "-C",
   "target-feature=+crt-static",
   "-C",
-  # increase the stack size to prevent swc overflowing the stack in debug
-  "link-arg=/STACK:3145728",
+  # increase the stack size to prevent overflowing the
+  # stack in debug when launching sub commands
+  "link-arg=/STACK:4194304",
 ]
 [target.aarch64-apple-darwin]

View file

@@ -117,6 +117,13 @@ pub struct FmtFlags {
   pub no_semicolons: Option<bool>,
 }
 
+impl FmtFlags {
+  pub fn is_stdin(&self) -> bool {
+    let args = &self.files.include;
+    args.len() == 1 && args[0].to_string_lossy() == "-"
+  }
+}
+
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub struct InitFlags {
   pub dir: Option<String>,
@@ -154,6 +161,13 @@ pub struct LintFlags {
   pub compact: bool,
 }
 
+impl LintFlags {
+  pub fn is_stdin(&self) -> bool {
+    let args = &self.files.include;
+    args.len() == 1 && args[0].to_string_lossy() == "-"
+  }
+}
+
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub struct ReplFlags {
   pub eval_files: Option<Vec<String>>,

View file

@@ -148,7 +148,6 @@ impl BenchOptions {
 #[derive(Clone, Debug, Default)]
 pub struct FmtOptions {
-  pub is_stdin: bool,
   pub check: bool,
   pub options: FmtOptionsConfig,
   pub files: FilesConfig,
@@ -157,24 +156,12 @@ pub struct FmtOptions {
 impl FmtOptions {
   pub fn resolve(
     maybe_fmt_config: Option<FmtConfig>,
-    mut maybe_fmt_flags: Option<FmtFlags>,
+    maybe_fmt_flags: Option<FmtFlags>,
   ) -> Result<Self, AnyError> {
-    let is_stdin = if let Some(fmt_flags) = maybe_fmt_flags.as_mut() {
-      let args = &mut fmt_flags.files.include;
-      if args.len() == 1 && args[0].to_string_lossy() == "-" {
-        args.pop(); // remove the "-" arg
-        true
-      } else {
-        false
-      }
-    } else {
-      false
-    };
     let (maybe_config_options, maybe_config_files) =
       maybe_fmt_config.map(|c| (c.options, c.files)).unzip();
     Ok(Self {
-      is_stdin,
       check: maybe_fmt_flags.as_ref().map(|f| f.check).unwrap_or(false),
       options: resolve_fmt_options(
         maybe_fmt_flags.as_ref(),
@@ -280,27 +267,14 @@ pub enum LintReporterKind {
 pub struct LintOptions {
   pub rules: LintRulesConfig,
   pub files: FilesConfig,
-  pub is_stdin: bool,
   pub reporter_kind: LintReporterKind,
 }
 
 impl LintOptions {
   pub fn resolve(
     maybe_lint_config: Option<LintConfig>,
-    mut maybe_lint_flags: Option<LintFlags>,
+    maybe_lint_flags: Option<LintFlags>,
   ) -> Result<Self, AnyError> {
-    let is_stdin = if let Some(lint_flags) = maybe_lint_flags.as_mut() {
-      let args = &mut lint_flags.files.include;
-      if args.len() == 1 && args[0].to_string_lossy() == "-" {
-        args.pop(); // remove the "-" arg
-        true
-      } else {
-        false
-      }
-    } else {
-      false
-    };
     let mut maybe_reporter_kind =
       maybe_lint_flags.as_ref().and_then(|lint_flags| {
         if lint_flags.json {
@@ -347,7 +321,6 @@ impl LintOptions {
       maybe_lint_config.map(|c| (c.files, c.rules)).unzip();
     Ok(Self {
       reporter_kind: maybe_reporter_kind.unwrap_or_default(),
-      is_stdin,
       files: resolve_files(maybe_config_files, Some(maybe_file_flags))?,
       rules: resolve_lint_rules_options(
         maybe_config_rules,
@@ -1112,10 +1085,6 @@ impl CliOptions {
     &self.flags.cache_path
   }
 
-  pub fn no_clear_screen(&self) -> bool {
-    self.flags.no_clear_screen
-  }
-
   pub fn no_prompt(&self) -> bool {
     resolve_no_prompt(&self.flags)
   }
@@ -1170,8 +1139,25 @@ impl CliOptions {
     &self.flags.v8_flags
   }
 
-  pub fn watch_paths(&self) -> &Option<Vec<PathBuf>> {
-    &self.flags.watch
+  pub fn watch_paths(&self) -> Option<Vec<PathBuf>> {
+    if let Some(mut paths) = self.flags.watch.clone() {
+      if let Ok(Some(import_map_path)) = self
+        .resolve_import_map_specifier()
+        .map(|ms| ms.and_then(|ref s| s.to_file_path().ok()))
+      {
+        paths.push(import_map_path);
+      }
+      if let Some(specifier) = self.maybe_config_file_specifier() {
+        if specifier.scheme() == "file" {
+          if let Ok(path) = specifier.to_file_path() {
+            paths.push(path);
+          }
+        }
+      }
+      Some(paths)
+    } else {
+      None
+    }
   }
 }
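A minimal sketch of how the reshaped `watch_paths()` is meant to be consumed (the helper and `discovered_module_paths` are illustrative, not code from this commit): the watcher seeds its path set from the `--watch` flags plus the resolved config file and import map, and extends it with module paths discovered during a run.

use std::path::PathBuf;

fn collect_watch_paths(
  options: &CliOptions,
  discovered_module_paths: &[PathBuf],
) -> Vec<PathBuf> {
  // watch_paths() now folds the config file and import map into the --watch
  // paths, so editing either of them also triggers a restart.
  let mut paths = options.watch_paths().unwrap_or_default();
  paths.extend(discovered_module_paths.iter().cloned());
  paths
}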

View file

@@ -94,10 +94,6 @@ impl ParsedSourceCache {
     }
   }
 
-  pub fn clear(&self) {
-    self.sources.0.lock().clear();
-  }
-
   pub fn get_parsed_source_from_esm_module(
     &self,
     module: &deno_graph::EsmModule,

View file

@@ -17,6 +17,7 @@ use crate::cache::NodeAnalysisCache;
 use crate::cache::ParsedSourceCache;
 use crate::emit::Emitter;
 use crate::file_fetcher::FileFetcher;
+use crate::graph_util::FileWatcherReporter;
 use crate::graph_util::ModuleGraphBuilder;
 use crate::graph_util::ModuleGraphContainer;
 use crate::http_util::HttpClient;
@@ -39,8 +40,6 @@ use crate::standalone::DenoCompileBinaryWriter;
 use crate::tools::check::TypeChecker;
 use crate::util::progress_bar::ProgressBar;
 use crate::util::progress_bar::ProgressBarStyle;
-use crate::watcher::FileWatcher;
-use crate::watcher::FileWatcherReporter;
 use crate::worker::CliMainWorkerFactory;
 use crate::worker::CliMainWorkerOptions;
 use crate::worker::HasNodeSpecifierChecker;
@@ -148,7 +147,6 @@ struct CliFactoryServices {
   blob_store: Deferred<BlobStore>,
   parsed_source_cache: Deferred<Arc<ParsedSourceCache>>,
   resolver: Deferred<Arc<CliGraphResolver>>,
-  file_watcher: Deferred<Arc<FileWatcher>>,
   maybe_file_watcher_reporter: Deferred<Option<FileWatcherReporter>>,
   module_graph_builder: Deferred<Arc<ModuleGraphBuilder>>,
   module_load_preparer: Deferred<Arc<ModuleLoadPreparer>>,
@@ -412,20 +410,6 @@ impl CliFactory {
       .await
   }
 
-  pub fn file_watcher(&self) -> Result<&Arc<FileWatcher>, AnyError> {
-    self.services.file_watcher.get_or_try_init(|| {
-      let watcher = FileWatcher::new(
-        self.options.clone(),
-        self.cjs_resolutions().clone(),
-        self.graph_container().clone(),
-        self.maybe_file_watcher_reporter().clone(),
-        self.parsed_source_cache()?.clone(),
-      );
-      watcher.init_watcher();
-      Ok(Arc::new(watcher))
-    })
-  }
-
   pub fn maybe_file_watcher_reporter(&self) -> &Option<FileWatcherReporter> {
     let maybe_sender = self.maybe_sender.borrow_mut().take();
     self
@@ -531,6 +515,7 @@ impl CliFactory {
         self.npm_resolver().await?.clone(),
         self.parsed_source_cache()?.clone(),
         self.maybe_lockfile().clone(),
+        self.maybe_file_watcher_reporter().clone(),
         self.emit_cache()?.clone(),
         self.file_fetcher()?.clone(),
         self.type_checker().await?.clone(),
@@ -600,57 +585,6 @@ impl CliFactory {
     ))
   }
 
-  /// Gets a function that can be used to create a CliMainWorkerFactory
-  /// for a file watcher.
-  pub async fn create_cli_main_worker_factory_func(
-    &self,
-  ) -> Result<Arc<dyn Fn() -> CliMainWorkerFactory>, AnyError> {
-    let emitter = self.emitter()?.clone();
-    let graph_container = self.graph_container().clone();
-    let module_load_preparer = self.module_load_preparer().await?.clone();
-    let parsed_source_cache = self.parsed_source_cache()?.clone();
-    let resolver = self.resolver().await?.clone();
-    let blob_store = self.blob_store().clone();
-    let cjs_resolutions = self.cjs_resolutions().clone();
-    let node_code_translator = self.node_code_translator().await?.clone();
-    let options = self.cli_options().clone();
-    let main_worker_options = self.create_cli_main_worker_options()?;
-    let fs = self.fs().clone();
-    let root_cert_store_provider = self.root_cert_store_provider().clone();
-    let node_resolver = self.node_resolver().await?.clone();
-    let npm_resolver = self.npm_resolver().await?.clone();
-    let maybe_inspector_server = self.maybe_inspector_server().clone();
-    let maybe_lockfile = self.maybe_lockfile().clone();
-    Ok(Arc::new(move || {
-      CliMainWorkerFactory::new(
-        StorageKeyResolver::from_options(&options),
-        npm_resolver.clone(),
-        node_resolver.clone(),
-        Box::new(CliHasNodeSpecifierChecker(graph_container.clone())),
-        blob_store.clone(),
-        Box::new(CliModuleLoaderFactory::new(
-          &options,
-          emitter.clone(),
-          graph_container.clone(),
-          module_load_preparer.clone(),
-          parsed_source_cache.clone(),
-          resolver.clone(),
-          NpmModuleLoader::new(
-            cjs_resolutions.clone(),
-            node_code_translator.clone(),
-            fs.clone(),
-            node_resolver.clone(),
-          ),
-        )),
-        root_cert_store_provider.clone(),
-        fs.clone(),
-        maybe_inspector_server.clone(),
-        maybe_lockfile.clone(),
-        main_worker_options.clone(),
-      )
-    }))
-  }
-
   pub async fn create_cli_main_worker_factory(
     &self,
   ) -> Result<CliMainWorkerFactory, AnyError> {

View file

@@ -34,6 +34,7 @@ use deno_runtime::permissions::PermissionsContainer;
 use import_map::ImportMapError;
 use std::collections::HashMap;
 use std::collections::HashSet;
+use std::path::PathBuf;
 use std::sync::Arc;
 
 #[derive(Clone, Copy)]
@@ -169,6 +170,7 @@ pub struct ModuleGraphBuilder {
   npm_resolver: Arc<CliNpmResolver>,
   parsed_source_cache: Arc<ParsedSourceCache>,
   lockfile: Option<Arc<Mutex<Lockfile>>>,
+  maybe_file_watcher_reporter: Option<FileWatcherReporter>,
   emit_cache: cache::EmitCache,
   file_fetcher: Arc<FileFetcher>,
   type_checker: Arc<TypeChecker>,
@@ -182,6 +184,7 @@ impl ModuleGraphBuilder {
     npm_resolver: Arc<CliNpmResolver>,
     parsed_source_cache: Arc<ParsedSourceCache>,
     lockfile: Option<Arc<Mutex<Lockfile>>>,
+    maybe_file_watcher_reporter: Option<FileWatcherReporter>,
     emit_cache: cache::EmitCache,
     file_fetcher: Arc<FileFetcher>,
     type_checker: Arc<TypeChecker>,
@@ -192,6 +195,7 @@ impl ModuleGraphBuilder {
       npm_resolver,
       parsed_source_cache,
       lockfile,
+      maybe_file_watcher_reporter,
       emit_cache,
       file_fetcher,
       type_checker,
@@ -210,6 +214,10 @@ impl ModuleGraphBuilder {
     let graph_resolver = cli_resolver.as_graph_resolver();
     let graph_npm_resolver = cli_resolver.as_graph_npm_resolver();
     let analyzer = self.parsed_source_cache.as_analyzer();
+    let maybe_file_watcher_reporter = self
+      .maybe_file_watcher_reporter
+      .as_ref()
+      .map(|r| r.as_reporter());
 
     let mut graph = ModuleGraph::new(graph_kind);
     self
@@ -223,7 +231,7 @@ impl ModuleGraphBuilder {
           resolver: Some(graph_resolver),
           npm_resolver: Some(graph_npm_resolver),
           module_analyzer: Some(&*analyzer),
-          reporter: None,
+          reporter: maybe_file_watcher_reporter,
         },
       )
       .await?;
@@ -250,6 +258,11 @@ impl ModuleGraphBuilder {
     let analyzer = self.parsed_source_cache.as_analyzer();
     let graph_kind = self.options.type_check_mode().as_graph_kind();
     let mut graph = ModuleGraph::new(graph_kind);
+    let maybe_file_watcher_reporter = self
+      .maybe_file_watcher_reporter
+      .as_ref()
+      .map(|r| r.as_reporter());
+
     self
       .build_graph_with_npm_resolution(
         &mut graph,
@@ -261,7 +274,7 @@ impl ModuleGraphBuilder {
           resolver: Some(graph_resolver),
           npm_resolver: Some(graph_npm_resolver),
           module_analyzer: Some(&*analyzer),
-          reporter: None,
+          reporter: maybe_file_watcher_reporter,
         },
       )
       .await?;
@@ -415,7 +428,6 @@ struct GraphData {
 
 /// Holds the `ModuleGraph` and what parts of it are type checked.
 pub struct ModuleGraphContainer {
-  graph_kind: GraphKind,
   // Allow only one request to update the graph data at a time,
   // but allow other requests to read from it at any time even
   // while another request is updating the data.
@@ -426,7 +438,6 @@ pub struct ModuleGraphContainer {
 impl ModuleGraphContainer {
   pub fn new(graph_kind: GraphKind) -> Self {
     Self {
-      graph_kind,
       update_queue: Default::default(),
       graph_data: Arc::new(RwLock::new(GraphData {
         graph: Arc::new(ModuleGraph::new(graph_kind)),
@@ -435,10 +446,6 @@ impl ModuleGraphContainer {
     }
   }
 
-  pub fn clear(&self) {
-    self.graph_data.write().graph = Arc::new(ModuleGraph::new(self.graph_kind));
-  }
-
   /// Acquires a permit to modify the module graph without other code
   /// having the chance to modify it. In the meantime, other code may
   /// still read from the existing module graph.
@@ -496,6 +503,33 @@ impl ModuleGraphContainer {
   }
 }
 
+/// Gets if any of the specified root's "file:" dependents are in the
+/// provided changed set.
+pub fn has_graph_root_local_dependent_changed(
+  graph: &ModuleGraph,
+  root: &ModuleSpecifier,
+  changed_specifiers: &HashSet<ModuleSpecifier>,
+) -> bool {
+  let roots = vec![root.clone()];
+  let mut dependent_specifiers = graph.walk(
+    &roots,
+    deno_graph::WalkOptions {
+      follow_dynamic: true,
+      follow_type_only: true,
+      check_js: true,
+    },
+  );
+  while let Some((s, _)) = dependent_specifiers.next() {
+    if s.scheme() != "file" {
+      // skip walking this remote module's dependencies
+      dependent_specifiers.skip_previous_dependencies();
+    } else if changed_specifiers.contains(s) {
+      return true;
+    }
+  }
+  false
+}
+
 /// A permit for updating the module graph. When complete and
 /// everything looks fine, calling `.commit()` will store the
 /// new graph in the ModuleGraphContainer.
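A minimal usage sketch for the helper added above (the function and its inputs are illustrative; only `has_graph_root_local_dependent_changed` itself comes from this diff): given the file paths the watcher reported as changed, keep only the roots whose local "file:" dependency graph touches one of them, so that only affected modules are re-run.

use std::collections::HashSet;
use std::path::PathBuf;

use deno_core::ModuleSpecifier;
use deno_graph::ModuleGraph;

fn roots_to_rerun(
  graph: &ModuleGraph,
  roots: &[ModuleSpecifier],
  changed_paths: &[PathBuf],
) -> Vec<ModuleSpecifier> {
  // Convert changed file paths into "file:" specifiers so they can be
  // compared against the specifiers stored in the module graph.
  let changed_specifiers: HashSet<ModuleSpecifier> = changed_paths
    .iter()
    .filter_map(|p| ModuleSpecifier::from_file_path(p).ok())
    .collect();
  roots
    .iter()
    .filter(|root| {
      has_graph_root_local_dependent_changed(graph, root, &changed_specifiers)
    })
    .cloned()
    .collect()
}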
@@ -521,6 +555,43 @@ impl<'a> ModuleGraphUpdatePermit<'a> {
   }
 }
 
+#[derive(Clone, Debug)]
+pub struct FileWatcherReporter {
+  sender: tokio::sync::mpsc::UnboundedSender<Vec<PathBuf>>,
+  file_paths: Arc<Mutex<Vec<PathBuf>>>,
+}
+
+impl FileWatcherReporter {
+  pub fn new(sender: tokio::sync::mpsc::UnboundedSender<Vec<PathBuf>>) -> Self {
+    Self {
+      sender,
+      file_paths: Default::default(),
+    }
+  }
+
+  pub fn as_reporter(&self) -> &dyn deno_graph::source::Reporter {
+    self
+  }
+}
+
+impl deno_graph::source::Reporter for FileWatcherReporter {
+  fn on_load(
+    &self,
+    specifier: &ModuleSpecifier,
+    modules_done: usize,
+    modules_total: usize,
+  ) {
+    let mut file_paths = self.file_paths.lock();
+    if specifier.scheme() == "file" {
+      file_paths.push(specifier.to_file_path().unwrap());
+    }
+
+    if modules_done == modules_total {
+      self.sender.send(file_paths.drain(..).collect()).unwrap();
+    }
+  }
+}
+
 #[cfg(test)]
 mod test {
   use std::sync::Arc;
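A small sketch of how `FileWatcherReporter` is wired up (only the reporter type is from this diff; the helper and the loop are illustrative): the watcher keeps the receiving end of an unbounded channel, the reporter travels into `ModuleGraphBuilder`, and each completed graph load sends back the local files that were loaded so they can be added to the watched set.

use std::path::PathBuf;

use tokio::sync::mpsc::UnboundedReceiver;

fn wire_up_reporter() -> (FileWatcherReporter, UnboundedReceiver<Vec<PathBuf>>) {
  let (sender, receiver) = tokio::sync::mpsc::unbounded_channel();
  (FileWatcherReporter::new(sender), receiver)
}

// In the watcher loop the received paths extend the watched set, roughly:
//   while let Some(paths) = receiver.recv().await {
//     watched_paths.extend(paths);
//   }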

View file

@@ -23,7 +23,6 @@ mod tools;
 mod tsc;
 mod util;
 mod version;
-mod watcher;
 mod worker;
 
 use crate::args::flags_from_vec;
@@ -33,7 +32,6 @@ use crate::util::display;
 use crate::util::v8::get_v8_flags_from_env;
 use crate::util::v8::init_v8_flags;
-use args::CliOptions;
 use deno_core::anyhow::Context;
 use deno_core::error::AnyError;
 use deno_core::error::JsError;
@@ -84,13 +82,10 @@ fn spawn_subcommand<F: Future<Output = T> + 'static, T: SubcommandOutput>(
 async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
   let handle = match flags.subcommand.clone() {
     DenoSubcommand::Bench(bench_flags) => spawn_subcommand(async {
-      let cli_options = CliOptions::from_flags(flags)?;
-      let bench_options = cli_options.resolve_bench_options(bench_flags)?;
-      if cli_options.watch_paths().is_some() {
-        tools::bench::run_benchmarks_with_watch(cli_options, bench_options)
-          .await
+      if flags.watch.is_some() {
+        tools::bench::run_benchmarks_with_watch(flags, bench_flags).await
       } else {
-        tools::bench::run_benchmarks(cli_options, bench_options).await
+        tools::bench::run_benchmarks(flags, bench_flags).await
       }
     }),
     DenoSubcommand::Bundle(bundle_flags) => spawn_subcommand(async {
@@ -125,11 +120,11 @@ async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
     DenoSubcommand::Coverage(coverage_flags) => spawn_subcommand(async {
       tools::coverage::cover_files(flags, coverage_flags).await
     }),
-    DenoSubcommand::Fmt(fmt_flags) => spawn_subcommand(async move {
-      let cli_options = CliOptions::from_flags(flags.clone())?;
-      let fmt_options = cli_options.resolve_fmt_options(fmt_flags)?;
-      tools::fmt::format(cli_options, fmt_options).await
-    }),
+    DenoSubcommand::Fmt(fmt_flags) => {
+      spawn_subcommand(
+        async move { tools::fmt::format(flags, fmt_flags).await },
+      )
+    }
     DenoSubcommand::Init(init_flags) => {
       spawn_subcommand(async { tools::init::init_project(init_flags).await })
     }
@@ -148,9 +143,7 @@ async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
         tools::lint::print_rules_list(lint_flags.json);
         Ok(())
       } else {
-        let cli_options = CliOptions::from_flags(flags)?;
-        let lint_options = cli_options.resolve_lint_options(lint_flags)?;
-        tools::lint::lint(cli_options, lint_options).await
+        tools::lint::lint(flags, lint_flags).await
       }
     }),
     DenoSubcommand::Repl(repl_flags) => {
@@ -178,13 +171,11 @@ async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
           PathBuf::from(coverage_dir).canonicalize()?,
         );
       }
-      let cli_options = CliOptions::from_flags(flags)?;
-      let test_options = cli_options.resolve_test_options(test_flags)?;
 
-      if cli_options.watch_paths().is_some() {
-        tools::test::run_tests_with_watch(cli_options, test_options).await
+      if flags.watch.is_some() {
+        tools::test::run_tests_with_watch(flags, test_flags).await
       } else {
-        tools::test::run_tests(cli_options, test_options).await
+        tools::test::run_tests(flags, test_flags).await
      }
     })
   }
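The effect of passing `Flags` (rather than a pre-resolved `CliOptions`) into the watch-capable subcommands is that option resolution can happen again on every restart. A minimal sketch of that per-restart step, using only calls visible in the old code above (the helper itself is illustrative, and it assumes `resolve_bench_options` returns a `BenchOptions`):

fn resolve_per_restart(
  flags: Flags,
  bench_flags: BenchFlags,
) -> Result<(CliOptions, BenchOptions), AnyError> {
  // Re-reads the config file and import map each time the watcher restarts,
  // instead of resolving them once at startup.
  let cli_options = CliOptions::from_flags(flags)?;
  let bench_options = cli_options.resolve_bench_options(bench_flags)?;
  Ok((cli_options, bench_options))
}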

View file

@@ -7,6 +7,7 @@ use crate::cache::ParsedSourceCache;
 use crate::emit::Emitter;
 use crate::graph_util::graph_lock_or_exit;
 use crate::graph_util::graph_valid_with_cli_options;
+use crate::graph_util::FileWatcherReporter;
 use crate::graph_util::ModuleGraphBuilder;
 use crate::graph_util::ModuleGraphContainer;
 use crate::node;
@@ -17,7 +18,6 @@ use crate::tools::check::TypeChecker;
 use crate::util::progress_bar::ProgressBar;
 use crate::util::text_encoding::code_without_source_map;
 use crate::util::text_encoding::source_map_from_code;
-use crate::watcher::FileWatcherReporter;
 use crate::worker::ModuleLoaderFactory;
 
 use deno_ast::MediaType;
@@ -115,12 +115,10 @@ impl ModuleLoadPreparer {
     let maybe_imports = self.options.to_maybe_imports()?;
     let graph_resolver = self.resolver.as_graph_resolver();
     let graph_npm_resolver = self.resolver.as_graph_npm_resolver();
-    let maybe_file_watcher_reporter: Option<&dyn deno_graph::source::Reporter> =
-      if let Some(reporter) = &self.maybe_file_watcher_reporter {
-        Some(reporter)
-      } else {
-        None
-      };
+    let maybe_file_watcher_reporter = self
+      .maybe_file_watcher_reporter
+      .as_ref()
+      .map(|r| r.as_reporter());
 
     let analyzer = self.parsed_source_cache.as_analyzer();
@@ -800,10 +798,6 @@ impl NpmModuleLoader {
 pub struct CjsResolutionStore(Mutex<HashSet<ModuleSpecifier>>);
 
 impl CjsResolutionStore {
-  pub fn clear(&self) {
-    self.0.lock().clear();
-  }
-
   pub fn contains(&self, specifier: &ModuleSpecifier) -> bool {
     self.0.lock().contains(specifier)
   }

View file

@@ -1,7 +1,6 @@
 // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
 
 use flaky_test::flaky_test;
-use std::fs::write;
 use test_util as util;
 use test_util::assert_contains;
 use test_util::TempDir;
@@ -508,7 +507,7 @@ async fn bundle_js_watch() {
   // Test strategy extends this of test bundle_js by adding watcher
   let t = TempDir::new();
   let file_to_watch = t.path().join("file_to_watch.ts");
-  write(&file_to_watch, "console.log('Hello world');").unwrap();
+  file_to_watch.write("console.log('Hello world');");
   assert!(file_to_watch.is_file());
   let t = TempDir::new();
   let bundle = t.path().join("mod6.bundle.js");
@@ -529,15 +528,14 @@ async fn bundle_js_watch() {
   assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Warning");
   assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "deno_emit");
 
-  assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Check");
   assert_contains!(
     next_line(&mut stderr_lines).await.unwrap(),
    "Bundle started"
   );
-  assert_contains!(
-    next_line(&mut stderr_lines).await.unwrap(),
-    "file_to_watch.ts"
-  );
+  let line = next_line(&mut stderr_lines).await.unwrap();
+  assert_contains!(line, "file_to_watch.ts");
+  assert_contains!(line, "Check");
+  assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Bundle");
   assert_contains!(
     next_line(&mut stderr_lines).await.unwrap(),
     "mod6.bundle.js"
@@ -547,13 +545,13 @@ async fn bundle_js_watch() {
   wait_contains("Bundle finished", &mut stderr_lines).await;
 
-  write(&file_to_watch, "console.log('Hello world2');").unwrap();
+  file_to_watch.write("console.log('Hello world2');");
 
-  assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Check");
   let line = next_line(&mut stderr_lines).await.unwrap();
   // Should not clear screen, as we are in non-TTY environment
   assert_not_contains!(&line, CLEAR_SCREEN);
   assert_contains!(&line, "File change detected!");
+  assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Check");
   assert_contains!(
     next_line(&mut stderr_lines).await.unwrap(),
     "file_to_watch.ts"
@@ -567,7 +565,7 @@ async fn bundle_js_watch() {
   wait_contains("Bundle finished", &mut stderr_lines).await;
 
   // Confirm that the watcher keeps on working even if the file is updated and has invalid syntax
-  write(&file_to_watch, "syntax error ^^").unwrap();
+  file_to_watch.write("syntax error ^^");
 
   assert_contains!(
     next_line(&mut stderr_lines).await.unwrap(),
@@ -583,7 +581,7 @@
 async fn bundle_watch_not_exit() {
   let t = TempDir::new();
   let file_to_watch = t.path().join("file_to_watch.ts");
-  write(&file_to_watch, "syntax error ^^").unwrap();
+  file_to_watch.write("syntax error ^^");
   let target_file = t.path().join("target.js");
 
   let mut deno = util::deno_cmd()
@@ -624,17 +622,17 @@ async fn bundle_watch_not_exit() {
   assert!(!target_file.is_file());
 
   // Make sure the watcher actually restarts and works fine with the proper syntax
-  write(&file_to_watch, "console.log(42);").unwrap();
+  file_to_watch.write("console.log(42);");
 
+  assert_contains!(
+    next_line(&mut stderr_lines).await.unwrap(),
+    "File change detected"
+  );
   assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Check");
   let line = next_line(&mut stderr_lines).await.unwrap();
   // Should not clear screen, as we are in non-TTY environment
   assert_not_contains!(&line, CLEAR_SCREEN);
-  assert_contains!(&line, "File change detected!");
-  assert_contains!(
-    next_line(&mut stderr_lines).await.unwrap(),
-    "file_to_watch.ts"
-  );
+  assert_contains!(line, "file_to_watch.ts");
   assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "target.js");
   wait_contains("Bundle finished", &mut stderr_lines).await;
@@ -648,7 +646,7 @@ async fn bundle_watch_not_exit() {
 async fn run_watch_no_dynamic() {
   let t = TempDir::new();
   let file_to_watch = t.path().join("file_to_watch.js");
-  write(&file_to_watch, "console.log('Hello world');").unwrap();
+  file_to_watch.write("console.log('Hello world');");
 
   let mut child = util::deno_cmd()
     .current_dir(util::testdata_path())
@@ -669,7 +667,7 @@ async fn run_watch_no_dynamic() {
   wait_for_watcher("file_to_watch.js", &mut stderr_lines).await;
 
   // Change content of the file
-  write(&file_to_watch, "console.log('Hello world2');").unwrap();
+  file_to_watch.write("console.log('Hello world2');");
 
   wait_contains("Restarting", &mut stderr_lines).await;
   wait_contains("Hello world2", &mut stdout_lines).await;
@@ -677,51 +675,45 @@ async fn run_watch_no_dynamic() {
   // Add dependency
   let another_file = t.path().join("another_file.js");
-  write(&another_file, "export const foo = 0;").unwrap();
-  write(
-    &file_to_watch,
-    "import { foo } from './another_file.js'; console.log(foo);",
-  )
-  .unwrap();
+  another_file.write("export const foo = 0;");
+  file_to_watch
+    .write("import { foo } from './another_file.js'; console.log(foo);");
 
   wait_contains("Restarting", &mut stderr_lines).await;
   wait_contains("0", &mut stdout_lines).await;
   wait_for_watcher("another_file.js", &mut stderr_lines).await;
 
   // Confirm that restarting occurs when a new file is updated
-  write(&another_file, "export const foo = 42;").unwrap();
+  another_file.write("export const foo = 42;");
   wait_contains("Restarting", &mut stderr_lines).await;
   wait_contains("42", &mut stdout_lines).await;
   wait_for_watcher("file_to_watch.js", &mut stderr_lines).await;
 
   // Confirm that the watcher keeps on working even if the file is updated and has invalid syntax
-  write(&file_to_watch, "syntax error ^^").unwrap();
+  file_to_watch.write("syntax error ^^");
   wait_contains("Restarting", &mut stderr_lines).await;
   wait_contains("error:", &mut stderr_lines).await;
   wait_for_watcher("file_to_watch.js", &mut stderr_lines).await;
 
   // Then restore the file
-  write(
-    &file_to_watch,
-    "import { foo } from './another_file.js'; console.log(foo);",
-  )
-  .unwrap();
+  file_to_watch
+    .write("import { foo } from './another_file.js'; console.log(foo);");
   wait_contains("Restarting", &mut stderr_lines).await;
   wait_contains("42", &mut stdout_lines).await;
   wait_for_watcher("another_file.js", &mut stderr_lines).await;
 
   // Update the content of the imported file with invalid syntax
-  write(&another_file, "syntax error ^^").unwrap();
+  another_file.write("syntax error ^^");
   wait_contains("Restarting", &mut stderr_lines).await;
   wait_contains("error:", &mut stderr_lines).await;
   wait_for_watcher("another_file.js", &mut stderr_lines).await;
 
   // Modify the imported file and make sure that restarting occurs
-  write(&another_file, "export const foo = 'modified!';").unwrap();
+  another_file.write("export const foo = 'modified!';");
   wait_contains("Restarting", &mut stderr_lines).await;
   wait_contains("modified!", &mut stdout_lines).await;
@@ -737,10 +729,10 @@ async fn run_watch_no_dynamic() {
 async fn run_watch_external_watch_files() {
   let t = TempDir::new();
   let file_to_watch = t.path().join("file_to_watch.js");
-  write(&file_to_watch, "console.log('Hello world');").unwrap();
+  file_to_watch.write("console.log('Hello world');");
 
   let external_file_to_watch = t.path().join("external_file_to_watch.txt");
-  write(&external_file_to_watch, "Hello world").unwrap();
+  external_file_to_watch.write("Hello world");
 
   let mut watch_arg = "--watch=".to_owned();
   let external_file_to_watch_str = external_file_to_watch.to_string();
@@ -765,12 +757,12 @@ async fn run_watch_external_watch_files() {
   wait_for_watcher("external_file_to_watch.txt", &mut stderr_lines).await;
 
   // Change content of the external file
-  write(&external_file_to_watch, "Hello world2").unwrap();
+  external_file_to_watch.write("Hello world2");
   wait_contains("Restarting", &mut stderr_lines).await;
   wait_contains("Process finished", &mut stderr_lines).await;
 
   // Again (https://github.com/denoland/deno/issues/17584)
-  write(&external_file_to_watch, "Hello world3").unwrap();
+  external_file_to_watch.write("Hello world3");
   wait_contains("Restarting", &mut stderr_lines).await;
   wait_contains("Process finished", &mut stderr_lines).await;
@@ -781,8 +773,7 @@ async fn run_watch_external_watch_files() {
 async fn run_watch_load_unload_events() {
   let t = TempDir::new();
   let file_to_watch = t.path().join("file_to_watch.js");
-  write(
-    &file_to_watch,
+  file_to_watch.write(
     r#"
       setInterval(() => {}, 0);
       window.addEventListener("load", () => {
@@ -793,8 +784,7 @@ async fn run_watch_load_unload_events() {
         console.log("unload");
       });
     "#,
-  )
-  .unwrap();
+  );
 
   let mut child = util::deno_cmd()
     .current_dir(util::testdata_path())
@@ -816,8 +806,7 @@ async fn run_watch_load_unload_events() {
   wait_for_watcher("file_to_watch.js", &mut stderr_lines).await;
 
   // Change content of the file, this time without an interval to keep it alive.
-  write(
-    &file_to_watch,
+  file_to_watch.write(
     r#"
       window.addEventListener("load", () => {
         console.log("load");
@@ -827,8 +816,7 @@ async fn run_watch_load_unload_events() {
         console.log("unload");
       });
     "#,
-  )
-  .unwrap();
+  );
 
   // Wait for the restart
   wait_contains("Restarting", &mut stderr_lines).await;
@@ -849,7 +837,7 @@ async fn run_watch_load_unload_events() {
 async fn run_watch_not_exit() {
   let t = TempDir::new();
   let file_to_watch = t.path().join("file_to_watch.js");
-  write(&file_to_watch, "syntax error ^^").unwrap();
+  file_to_watch.write("syntax error ^^");
 
   let mut child = util::deno_cmd()
     .current_dir(util::testdata_path())
@@ -871,7 +859,7 @@ async fn run_watch_not_exit() {
   wait_for_watcher("file_to_watch.js", &mut stderr_lines).await;
 
   // Make sure the watcher actually restarts and works fine with the proper syntax
-  write(&file_to_watch, "console.log(42);").unwrap();
+  file_to_watch.write("console.log(42);");
 
   wait_contains("Restarting", &mut stderr_lines).await;
   wait_contains("42", &mut stdout_lines).await;
@@ -887,7 +875,7 @@ async fn run_watch_with_import_map_and_relative_paths() {
     filecontent: &'static str,
   ) -> std::path::PathBuf {
     let absolute_path = directory.path().join(filename);
-    write(&absolute_path, filecontent).unwrap();
+    absolute_path.write(filecontent);
     let relative_path = absolute_path
       .as_path()
       .strip_prefix(directory.path())
@@ -938,7 +926,7 @@ async fn run_watch_with_import_map_and_relative_paths() {
 async fn run_watch_with_ext_flag() {
   let t = TempDir::new();
   let file_to_watch = t.path().join("file_to_watch");
-  write(&file_to_watch, "interface I{}; console.log(42);").unwrap();
+  file_to_watch.write("interface I{}; console.log(42);");
 
   let mut child = util::deno_cmd()
     .current_dir(util::testdata_path())
@@ -962,11 +950,7 @@ async fn run_watch_with_ext_flag() {
   wait_for_watcher("file_to_watch", &mut stderr_lines).await;
   wait_contains("Process finished", &mut stderr_lines).await;
 
-  write(
-    &file_to_watch,
-    "type Bear = 'polar' | 'grizzly'; console.log(123);",
-  )
-  .unwrap();
+  file_to_watch.write("type Bear = 'polar' | 'grizzly'; console.log(123);");
 
   wait_contains("Restarting!", &mut stderr_lines).await;
   wait_contains("123", &mut stdout_lines).await;
@@ -979,11 +963,8 @@ async fn run_watch_with_ext_flag() {
 async fn run_watch_error_messages() {
   let t = TempDir::new();
   let file_to_watch = t.path().join("file_to_watch.js");
-  write(
-    &file_to_watch,
-    "throw SyntaxError(`outer`, {cause: TypeError(`inner`)})",
-  )
-  .unwrap();
+  file_to_watch
+    .write("throw SyntaxError(`outer`, {cause: TypeError(`inner`)})");
 
   let mut child = util::deno_cmd()
     .current_dir(util::testdata_path())
@@ -1000,13 +981,13 @@ async fn run_watch_error_messages() {
   wait_contains("Process started", &mut stderr_lines).await;
   wait_contains("error: Uncaught SyntaxError: outer", &mut stderr_lines).await;
   wait_contains("Caused by: TypeError: inner", &mut stderr_lines).await;
-  wait_contains("Process finished", &mut stderr_lines).await;
+  wait_contains("Process failed", &mut stderr_lines).await;
 
   check_alive_then_kill(child);
 }
 
 #[tokio::test]
-async fn test_watch() {
+async fn test_watch_basic() {
   let t = TempDir::new();
 
   let mut child = util::deno_cmd()
@@ -1034,18 +1015,10 @@ async fn test_watch() {
   let bar_file = t.path().join("bar.js");
   let foo_test = t.path().join("foo_test.js");
   let bar_test = t.path().join("bar_test.js");
-  write(&foo_file, "export default function foo() { 1 + 1 }").unwrap();
-  write(&bar_file, "export default function bar() { 2 + 2 }").unwrap();
-  write(
-    &foo_test,
-    "import foo from './foo.js'; Deno.test('foo', foo);",
-  )
-  .unwrap();
-  write(
-    bar_test,
-    "import bar from './bar.js'; Deno.test('bar', bar);",
-  )
-  .unwrap();
+  foo_file.write("export default function foo() { 1 + 1 }");
+  bar_file.write("export default function bar() { 2 + 2 }");
+  foo_test.write("import foo from './foo.js'; Deno.test('foo', foo);");
+  bar_test.write("import bar from './bar.js'; Deno.test('bar', bar);");
 
   assert_eq!(next_line(&mut stdout_lines).await.unwrap(), "");
   assert_contains!(
@@ -1064,11 +1037,7 @@ async fn test_watch() {
   wait_contains("Test finished", &mut stderr_lines).await;
 
   // Change content of the file
-  write(
-    &foo_test,
-    "import foo from './foo.js'; Deno.test('foobar', foo);",
-  )
-  .unwrap();
+  foo_test.write("import foo from './foo.js'; Deno.test('foobar', foo);");
 
   assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Restarting");
   assert_contains!(
@@ -1083,7 +1052,7 @@ async fn test_watch() {
   // Add test
   let another_test = t.path().join("new_test.js");
-  write(&another_test, "Deno.test('another one', () => 3 + 3)").unwrap();
+  another_test.write("Deno.test('another one', () => 3 + 3)");
   assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Restarting");
   assert_contains!(
     next_line(&mut stdout_lines).await.unwrap(),
@@ -1096,8 +1065,7 @@ async fn test_watch() {
   wait_contains("Test finished", &mut stderr_lines).await;
 
   // Confirm that restarting occurs when a new file is updated
-  write(&another_test, "Deno.test('another one', () => 3 + 3); Deno.test('another another one', () => 4 + 4)")
-    .unwrap();
+  another_test.write("Deno.test('another one', () => 3 + 3); Deno.test('another another one', () => 4 + 4)");
   assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Restarting");
   assert_contains!(
     next_line(&mut stdout_lines).await.unwrap(),
@@ -1114,7 +1082,7 @@ async fn test_watch() {
   wait_contains("Test finished", &mut stderr_lines).await;
 
   // Confirm that the watcher keeps on working even if the file is updated and has invalid syntax
-  write(&another_test, "syntax error ^^").unwrap();
+  another_test.write("syntax error ^^");
   assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Restarting");
   assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "error:");
   assert_eq!(next_line(&mut stderr_lines).await.unwrap(), "");
@@ -1129,7 +1097,7 @@ async fn test_watch() {
   assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Test failed");
 
   // Then restore the file
-  write(&another_test, "Deno.test('another one', () => 3 + 3)").unwrap();
+  another_test.write("Deno.test('another one', () => 3 + 3)");
   assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Restarting");
   assert_contains!(
     next_line(&mut stdout_lines).await.unwrap(),
@@ -1143,11 +1111,8 @@ async fn test_watch() {
   // Confirm that the watcher keeps on working even if the file is updated and the test fails
   // This also confirms that it restarts when dependencies change
-  write(
-    &foo_file,
-    "export default function foo() { throw new Error('Whoops!'); }",
-  )
-  .unwrap();
+  foo_file
+    .write("export default function foo() { throw new Error('Whoops!'); }");
   assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Restarting");
   assert_contains!(
     next_line(&mut stdout_lines).await.unwrap(),
@@ -1156,10 +1121,10 @@ async fn test_watch() {
   assert_contains!(next_line(&mut stdout_lines).await.unwrap(), "FAILED");
   wait_contains("FAILED", &mut stdout_lines).await;
   next_line(&mut stdout_lines).await;
-  wait_contains("Test finished", &mut stderr_lines).await;
+  wait_contains("Test failed", &mut stderr_lines).await;
 
   // Then restore the file
-  write(&foo_file, "export default function foo() { 1 + 1 }").unwrap();
+  foo_file.write("export default function foo() { 1 + 1 }");
   assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Restarting");
   assert_contains!(
     next_line(&mut stdout_lines).await.unwrap(),
@@ -1172,16 +1137,8 @@ async fn test_watch() {
   wait_contains("Test finished", &mut stderr_lines).await;
 
   // Test that circular dependencies work fine
-  write(
-    &foo_file,
-    "import './bar.js'; export default function foo() { 1 + 1 }",
-  )
-  .unwrap();
-  write(
-    &bar_file,
-    "import './foo.js'; export default function bar() { 2 + 2 }",
-  )
-  .unwrap();
+  foo_file.write("import './bar.js'; export default function foo() { 1 + 1 }");
+  bar_file.write("import './foo.js'; export default function bar() { 2 + 2 }");
   check_alive_then_kill(child);
 }
@@ -1212,16 +1169,13 @@ async fn test_watch_doc() {
   wait_contains("Test finished", &mut stderr_lines).await;
 
   let foo_file = t.path().join("foo.ts");
-  write(
-    &foo_file,
+  foo_file.write(
     r#"
     export default function foo() {}
   "#,
-  )
-  .unwrap();
+  );
 
-  write(
-    &foo_file,
+  foo_file.write(
     r#"
     /**
      * ```ts
@@ -1230,8 +1184,7 @@ async fn test_watch_doc() {
      */
     export default function foo() {}
   "#,
-  )
-  .unwrap();
+  );
 
   // We only need to scan for a Check file://.../foo.ts$3-6 line that
   // corresponds to the documentation block being type-checked.
@@ -1243,7 +1196,7 @@ async fn test_watch_doc() {
 async fn test_watch_module_graph_error_referrer() {
   let t = TempDir::new();
   let file_to_watch = t.path().join("file_to_watch.js");
-  write(&file_to_watch, "import './nonexistent.js';").unwrap();
+  file_to_watch.write("import './nonexistent.js';");
   let mut child = util::deno_cmd()
     .current_dir(util::testdata_path())
     .arg("run")
@@ -1264,7 +1217,7 @@ async fn test_watch_module_graph_error_referrer() {
   let line3 = next_line(&mut stderr_lines).await.unwrap();
   assert_contains!(&line3, " at ");
   assert_contains!(&line3, "file_to_watch.js");
-  wait_contains("Process finished", &mut stderr_lines).await;
+  wait_contains("Process failed", &mut stderr_lines).await;
   check_alive_then_kill(child);
 }
 
@@ -1273,8 +1226,7 @@ async fn test_watch_module_graph_error_referrer() {
 async fn test_watch_unload_handler_error_on_drop() {
   let t = TempDir::new();
   let file_to_watch = t.path().join("file_to_watch.js");
-  write(
-    &file_to_watch,
+  file_to_watch.write(
     r#"
       addEventListener("unload", () => {
         throw new Error("foo");
@@ -1283,8 +1235,7 @@ async fn test_watch_unload_handler_error_on_drop() {
       throw new Error("bar");
     });
     "#,
-  )
-  .unwrap();
+  );
   let mut child = util::deno_cmd()
     .current_dir(util::testdata_path())
     .arg("run")
@@ -1298,7 +1249,7 @@ async fn test_watch_unload_handler_error_on_drop() {
   let (_, mut stderr_lines) = child_lines(&mut child);
   wait_contains("Process started", &mut stderr_lines).await;
   wait_contains("Uncaught Error: bar", &mut stderr_lines).await;
-  wait_contains("Process finished", &mut stderr_lines).await;
+  wait_contains("Process failed", &mut stderr_lines).await;
   check_alive_then_kill(child);
 }
 
@@ -1311,7 +1262,7 @@ async fn test_watch_sigint() {
   let t = TempDir::new();
   let file_to_watch = t.path().join("file_to_watch.js");
-  write(&file_to_watch, r#"Deno.test("foo", () => {});"#).unwrap();
+  file_to_watch.write(r#"Deno.test("foo", () => {});"#);
   let mut child = util::deno_cmd()
     .current_dir(util::testdata_path())
     .arg("test")
@ -1331,6 +1282,120 @@ async fn test_watch_sigint() {
assert_eq!(exit_status.code(), Some(130)); assert_eq!(exit_status.code(), Some(130));
} }
#[tokio::test]
async fn bench_watch_basic() {
let t = TempDir::new();
let mut child = util::deno_cmd()
.current_dir(util::testdata_path())
.arg("bench")
.arg("--watch")
.arg("--unstable")
.arg("--no-check")
.arg(t.path())
.env("NO_COLOR", "1")
.stdout(std::process::Stdio::piped())
.stderr(std::process::Stdio::piped())
.spawn()
.unwrap();
let (mut stdout_lines, mut stderr_lines) = child_lines(&mut child);
assert_contains!(
next_line(&mut stderr_lines).await.unwrap(),
"Bench started"
);
assert_contains!(
next_line(&mut stderr_lines).await.unwrap(),
"Bench finished"
);
let foo_file = t.path().join("foo.js");
let bar_file = t.path().join("bar.js");
let foo_bench = t.path().join("foo_bench.js");
let bar_bench = t.path().join("bar_bench.js");
foo_file.write("export default function foo() { 1 + 1 }");
bar_file.write("export default function bar() { 2 + 2 }");
foo_bench.write("import foo from './foo.js'; Deno.bench('foo bench', foo);");
bar_bench.write("import bar from './bar.js'; Deno.bench('bar bench', bar);");
wait_contains("bar_bench.js", &mut stdout_lines).await;
wait_contains("bar bench", &mut stdout_lines).await;
wait_contains("foo_bench.js", &mut stdout_lines).await;
wait_contains("foo bench", &mut stdout_lines).await;
wait_contains("Bench finished", &mut stderr_lines).await;
// Change content of the file
foo_bench.write("import foo from './foo.js'; Deno.bench('foo asdf', foo);");
assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Restarting");
loop {
let line = next_line(&mut stdout_lines).await.unwrap();
assert_not_contains!(line, "bar");
if line.contains("foo asdf") {
break; // last line
}
}
wait_contains("Bench finished", &mut stderr_lines).await;
// Add bench
let another_test = t.path().join("new_bench.js");
another_test.write("Deno.bench('another one', () => 3 + 3)");
loop {
let line = next_line(&mut stdout_lines).await.unwrap();
assert_not_contains!(line, "bar");
assert_not_contains!(line, "foo");
if line.contains("another one") {
break; // last line
}
}
wait_contains("Bench finished", &mut stderr_lines).await;
// Confirm that restarting occurs when a new file is updated
another_test.write("Deno.bench('another one', () => 3 + 3); Deno.bench('another another one', () => 4 + 4)");
loop {
let line = next_line(&mut stdout_lines).await.unwrap();
assert_not_contains!(line, "bar");
assert_not_contains!(line, "foo");
if line.contains("another another one") {
break; // last line
}
}
wait_contains("Bench finished", &mut stderr_lines).await;
// Confirm that the watcher keeps on working even if the file is updated and has invalid syntax
another_test.write("syntax error ^^");
assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Restarting");
assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "error:");
assert_eq!(next_line(&mut stderr_lines).await.unwrap(), "");
assert_eq!(
next_line(&mut stderr_lines).await.unwrap(),
" syntax error ^^"
);
assert_eq!(
next_line(&mut stderr_lines).await.unwrap(),
" ~~~~~"
);
assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Bench failed");
// Then restore the file
another_test.write("Deno.bench('another one', () => 3 + 3)");
assert_contains!(next_line(&mut stderr_lines).await.unwrap(), "Restarting");
loop {
let line = next_line(&mut stdout_lines).await.unwrap();
assert_not_contains!(line, "bar");
assert_not_contains!(line, "foo");
if line.contains("another one") {
break; // last line
}
}
wait_contains("Bench finished", &mut stderr_lines).await;
// Test that circular dependencies work fine
foo_file.write("import './bar.js'; export default function foo() { 1 + 1 }");
bar_file.write("import './foo.js'; export default function bar() { 2 + 2 }");
check_alive_then_kill(child);
}
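
The watch tests above drive the spawned `deno bench --watch` process entirely through its piped stdout/stderr, using helpers such as `child_lines`, `next_line` and `wait_contains` from the test utilities. Purely as an illustration of that pattern (the names and signatures below are assumptions, not the real test_util API), a line-polling helper over a child pipe could look like this:

// Hypothetical sketch of a `wait_contains`-style helper; the real helpers
// live in Deno's test utilities and may differ.
use tokio::io::{AsyncBufReadExt, AsyncRead, BufReader, Lines};

/// Wrap a raw pipe (e.g. `child.stdout.take().unwrap()`) into a line stream.
fn into_lines<R: AsyncRead + Unpin>(reader: R) -> Lines<BufReader<R>> {
    BufReader::new(reader).lines()
}

/// Poll the child's output line by line until one containing `needle` appears.
async fn wait_for_line_containing<R: AsyncRead + Unpin>(
    lines: &mut Lines<BufReader<R>>,
    needle: &str,
) -> String {
    loop {
        let line = lines
            .next_line()
            .await
            .unwrap()
            .expect("child closed its output before the expected line appeared");
        if line.contains(needle) {
            return line;
        }
    }
}

Polling until a marker line shows up keeps the assertions tolerant of interleaved watcher output, which is also why the test loops with assert_not_contains! until the expected bench name appears.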
// Regression test for https://github.com/denoland/deno/issues/15465. // Regression test for https://github.com/denoland/deno/issues/15465.
#[tokio::test] #[tokio::test]
async fn run_watch_reload_once() { async fn run_watch_reload_once() {
@ -1341,7 +1406,7 @@ async fn run_watch_reload_once() {
import { time } from "http://localhost:4545/dynamic_module.ts"; import { time } from "http://localhost:4545/dynamic_module.ts";
console.log(time); console.log(time);
"#; "#;
write(&file_to_watch, file_content).unwrap(); file_to_watch.write(file_content);
let mut child = util::deno_cmd() let mut child = util::deno_cmd()
.current_dir(util::testdata_path()) .current_dir(util::testdata_path())
@ -1359,7 +1424,7 @@ async fn run_watch_reload_once() {
wait_contains("finished", &mut stderr_lines).await; wait_contains("finished", &mut stderr_lines).await;
let first_output = next_line(&mut stdout_lines).await.unwrap(); let first_output = next_line(&mut stdout_lines).await.unwrap();
write(&file_to_watch, file_content).unwrap(); file_to_watch.write(file_content);
// The remote dynamic module should not have been reloaded again. // The remote dynamic module should not have been reloaded again.
wait_contains("finished", &mut stderr_lines).await; wait_contains("finished", &mut stderr_lines).await;
@ -1379,7 +1444,7 @@ async fn test_watch_serve() {
console.error("serving"); console.error("serving");
await Deno.serve({port: 4600, handler: () => new Response("hello")}); await Deno.serve({port: 4600, handler: () => new Response("hello")});
"#; "#;
write(&file_to_watch, file_content).unwrap(); file_to_watch.write(file_content);
let mut child = util::deno_cmd() let mut child = util::deno_cmd()
.current_dir(util::testdata_path()) .current_dir(util::testdata_path())
@ -1401,7 +1466,7 @@ async fn test_watch_serve() {
// Note that we start serving very quickly, so we specifically want to wait for this message // Note that we start serving very quickly, so we specifically want to wait for this message
wait_contains(r#"Watching paths: [""#, &mut stderr_lines).await; wait_contains(r#"Watching paths: [""#, &mut stderr_lines).await;
write(&file_to_watch, file_content).unwrap(); file_to_watch.write(file_content);
wait_contains("serving", &mut stderr_lines).await; wait_contains("serving", &mut stderr_lines).await;
wait_contains("Listening on", &mut stdout_lines).await; wait_contains("Listening on", &mut stdout_lines).await;
@ -1413,31 +1478,25 @@ async fn test_watch_serve() {
async fn run_watch_dynamic_imports() { async fn run_watch_dynamic_imports() {
let t = TempDir::new(); let t = TempDir::new();
let file_to_watch = t.path().join("file_to_watch.js"); let file_to_watch = t.path().join("file_to_watch.js");
write( file_to_watch.write(
&file_to_watch,
r#" r#"
console.log("Hopefully dynamic import will be watched..."); console.log("Hopefully dynamic import will be watched...");
await import("./imported.js"); await import("./imported.js");
"#, "#,
) );
.unwrap();
let file_to_watch2 = t.path().join("imported.js"); let file_to_watch2 = t.path().join("imported.js");
write( file_to_watch2.write(
file_to_watch2,
r#" r#"
import "./imported2.js"; import "./imported2.js";
console.log("I'm dynamically imported and I cause restarts!"); console.log("I'm dynamically imported and I cause restarts!");
"#, "#,
) );
.unwrap();
let file_to_watch3 = t.path().join("imported2.js"); let file_to_watch3 = t.path().join("imported2.js");
write( file_to_watch3.write(
&file_to_watch3,
r#" r#"
console.log("I'm statically imported from the dynamic import"); console.log("I'm statically imported from the dynamic import");
"#, "#,
) );
.unwrap();
let mut child = util::deno_cmd() let mut child = util::deno_cmd()
.current_dir(util::testdata_path()) .current_dir(util::testdata_path())
@ -1454,8 +1513,8 @@ async fn run_watch_dynamic_imports() {
.spawn() .spawn()
.unwrap(); .unwrap();
let (mut stdout_lines, mut stderr_lines) = child_lines(&mut child); let (mut stdout_lines, mut stderr_lines) = child_lines(&mut child);
wait_contains("No package.json file found", &mut stderr_lines).await;
wait_contains("Process started", &mut stderr_lines).await; wait_contains("Process started", &mut stderr_lines).await;
wait_contains("No package.json file found", &mut stderr_lines).await;
wait_contains( wait_contains(
"Hopefully dynamic import will be watched...", "Hopefully dynamic import will be watched...",
@ -1476,13 +1535,11 @@ async fn run_watch_dynamic_imports() {
wait_for_watcher("imported2.js", &mut stderr_lines).await; wait_for_watcher("imported2.js", &mut stderr_lines).await;
wait_contains("finished", &mut stderr_lines).await; wait_contains("finished", &mut stderr_lines).await;
write( file_to_watch3.write(
&file_to_watch3,
r#" r#"
console.log("I'm statically imported from the dynamic import and I've changed"); console.log("I'm statically imported from the dynamic import and I've changed");
"#, "#,
) );
.unwrap();
wait_contains("Restarting", &mut stderr_lines).await; wait_contains("Restarting", &mut stderr_lines).await;
wait_contains( wait_contains(

View file

@ -1,17 +1,19 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
use crate::args::BenchOptions; use crate::args::BenchFlags;
use crate::args::CliOptions; use crate::args::CliOptions;
use crate::args::Flags;
use crate::colors; use crate::colors;
use crate::display::write_json_to_stdout; use crate::display::write_json_to_stdout;
use crate::factory::CliFactory; use crate::factory::CliFactory;
use crate::factory::CliFactoryBuilder;
use crate::graph_util::graph_valid_with_cli_options; use crate::graph_util::graph_valid_with_cli_options;
use crate::graph_util::has_graph_root_local_dependent_changed;
use crate::module_loader::ModuleLoadPreparer; use crate::module_loader::ModuleLoadPreparer;
use crate::ops; use crate::ops;
use crate::tools::test::format_test_error; use crate::tools::test::format_test_error;
use crate::tools::test::TestFilter; use crate::tools::test::TestFilter;
use crate::util::file_watcher; use crate::util::file_watcher;
use crate::util::file_watcher::ResolutionResult;
use crate::util::fs::collect_specifiers; use crate::util::fs::collect_specifiers;
use crate::util::path::is_supported_ext; use crate::util::path::is_supported_ext;
use crate::version::get_user_agent; use crate::version::get_user_agent;
@ -22,7 +24,6 @@ use deno_core::error::AnyError;
use deno_core::error::JsError; use deno_core::error::JsError;
use deno_core::futures::future; use deno_core::futures::future;
use deno_core::futures::stream; use deno_core::futures::stream;
use deno_core::futures::FutureExt;
use deno_core::futures::StreamExt; use deno_core::futures::StreamExt;
use deno_core::located_script_name; use deno_core::located_script_name;
use deno_core::serde_v8; use deno_core::serde_v8;
@ -40,7 +41,6 @@ use serde::Deserialize;
use serde::Serialize; use serde::Serialize;
use std::collections::HashSet; use std::collections::HashSet;
use std::path::Path; use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use tokio::sync::mpsc::unbounded_channel; use tokio::sync::mpsc::unbounded_channel;
use tokio::sync::mpsc::UnboundedSender; use tokio::sync::mpsc::UnboundedSender;
@ -630,9 +630,11 @@ fn is_supported_bench_path(path: &Path) -> bool {
} }
pub async fn run_benchmarks( pub async fn run_benchmarks(
cli_options: CliOptions, flags: Flags,
bench_options: BenchOptions, bench_flags: BenchFlags,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let cli_options = CliOptions::from_flags(flags)?;
let bench_options = cli_options.resolve_bench_options(bench_flags)?;
let factory = CliFactory::from_cli_options(Arc::new(cli_options)); let factory = CliFactory::from_cli_options(Arc::new(cli_options));
let cli_options = factory.cli_options(); let cli_options = factory.cli_options();
// Various bench files should not share the same permissions in terms of // Various bench files should not share the same permissions in terms of
@ -679,169 +681,102 @@ pub async fn run_benchmarks(
// TODO(bartlomieju): heavy duplication of code with `cli/tools/test.rs` // TODO(bartlomieju): heavy duplication of code with `cli/tools/test.rs`
pub async fn run_benchmarks_with_watch( pub async fn run_benchmarks_with_watch(
cli_options: CliOptions, flags: Flags,
bench_options: BenchOptions, bench_flags: BenchFlags,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let factory = CliFactory::from_cli_options(Arc::new(cli_options)); let clear_screen = !flags.no_clear_screen;
let cli_options = factory.cli_options();
let module_graph_builder = factory.module_graph_builder().await?;
let file_watcher = factory.file_watcher()?;
let module_load_preparer = factory.module_load_preparer().await?;
// Various bench files should not share the same permissions in terms of
// `PermissionsContainer` - otherwise granting/revoking permissions in one
// file would have impact on other files, which is undesirable.
let permissions =
Permissions::from_options(&cli_options.permissions_options())?;
let graph_kind = cli_options.type_check_mode().as_graph_kind();
let resolver = |changed: Option<Vec<PathBuf>>| {
let paths_to_watch = bench_options.files.include.clone();
let paths_to_watch_clone = paths_to_watch.clone();
let files_changed = changed.is_some();
let bench_options = &bench_options;
let module_graph_builder = module_graph_builder.clone();
let cli_options = cli_options.clone();
async move {
let bench_modules =
collect_specifiers(&bench_options.files, is_supported_bench_path)?;
let mut paths_to_watch = paths_to_watch_clone;
let mut modules_to_reload = if files_changed {
Vec::new()
} else {
bench_modules.clone()
};
let graph = module_graph_builder
.create_graph(graph_kind, bench_modules.clone())
.await?;
graph_valid_with_cli_options(&graph, &bench_modules, &cli_options)?;
// TODO(@kitsonk) - This should be totally derivable from the graph.
for specifier in bench_modules {
fn get_dependencies<'a>(
graph: &'a deno_graph::ModuleGraph,
maybe_module: Option<&'a deno_graph::Module>,
// This needs to be accessible to skip getting dependencies if they're already there,
// otherwise this will cause a stack overflow with circular dependencies
output: &mut HashSet<&'a ModuleSpecifier>,
) {
if let Some(module) = maybe_module.and_then(|m| m.esm()) {
for dep in module.dependencies.values() {
if let Some(specifier) = &dep.get_code() {
if !output.contains(specifier) {
output.insert(specifier);
get_dependencies(graph, graph.get(specifier), output);
}
}
if let Some(specifier) = &dep.get_type() {
if !output.contains(specifier) {
output.insert(specifier);
get_dependencies(graph, graph.get(specifier), output);
}
}
}
}
}
// This bench module and all its dependencies
let mut modules = HashSet::new();
modules.insert(&specifier);
get_dependencies(&graph, graph.get(&specifier), &mut modules);
paths_to_watch.extend(
modules
.iter()
.filter_map(|specifier| specifier.to_file_path().ok()),
);
if let Some(changed) = &changed {
for path in changed
.iter()
.filter_map(|path| ModuleSpecifier::from_file_path(path).ok())
{
if modules.contains(&path) {
modules_to_reload.push(specifier);
break;
}
}
}
}
Ok((paths_to_watch, modules_to_reload))
}
.map(move |result| {
if files_changed
&& matches!(result, Ok((_, ref modules)) if modules.is_empty())
{
ResolutionResult::Ignore
} else {
match result {
Ok((paths_to_watch, modules_to_reload)) => {
ResolutionResult::Restart {
paths_to_watch,
result: Ok(modules_to_reload),
}
}
Err(e) => ResolutionResult::Restart {
paths_to_watch,
result: Err(e),
},
}
}
})
};
let create_cli_main_worker_factory =
factory.create_cli_main_worker_factory_func().await?;
let operation = |modules_to_reload: Vec<ModuleSpecifier>| {
let permissions = &permissions;
let bench_options = &bench_options;
file_watcher.reset();
let module_load_preparer = module_load_preparer.clone();
let cli_options = cli_options.clone();
let create_cli_main_worker_factory = create_cli_main_worker_factory.clone();
async move {
let worker_factory = Arc::new(create_cli_main_worker_factory());
let specifiers =
collect_specifiers(&bench_options.files, is_supported_bench_path)?
.into_iter()
.filter(|specifier| modules_to_reload.contains(specifier))
.collect::<Vec<ModuleSpecifier>>();
check_specifiers(&cli_options, &module_load_preparer, specifiers.clone())
.await?;
if bench_options.no_run {
return Ok(());
}
let log_level = cli_options.log_level();
bench_specifiers(
worker_factory,
permissions,
specifiers,
BenchSpecifierOptions {
filter: TestFilter::from_flag(&bench_options.filter),
json: bench_options.json,
log_level,
},
)
.await?;
Ok(())
}
};
let clear_screen = !cli_options.no_clear_screen();
file_watcher::watch_func( file_watcher::watch_func(
resolver, flags,
operation,
file_watcher::PrintConfig { file_watcher::PrintConfig {
job_name: "Bench".to_string(), job_name: "Bench".to_string(),
clear_screen, clear_screen,
}, },
move |flags, sender, changed_paths| {
let bench_flags = bench_flags.clone();
Ok(async move {
let factory = CliFactoryBuilder::new()
.with_watcher(sender.clone())
.build_from_flags(flags)
.await?;
let cli_options = factory.cli_options();
let bench_options = cli_options.resolve_bench_options(bench_flags)?;
if let Some(watch_paths) = cli_options.watch_paths() {
let _ = sender.send(watch_paths);
}
let _ = sender.send(bench_options.files.include.clone());
let graph_kind = cli_options.type_check_mode().as_graph_kind();
let module_graph_builder = factory.module_graph_builder().await?;
let module_load_preparer = factory.module_load_preparer().await?;
let bench_modules =
collect_specifiers(&bench_options.files, is_supported_bench_path)?;
// Various bench files should not share the same permissions in terms of
// `PermissionsContainer` - otherwise granting/revoking permissions in one
// file would have impact on other files, which is undesirable.
let permissions =
Permissions::from_options(&cli_options.permissions_options())?;
let graph = module_graph_builder
.create_graph(graph_kind, bench_modules.clone())
.await?;
graph_valid_with_cli_options(&graph, &bench_modules, cli_options)?;
let bench_modules_to_reload = if let Some(changed_paths) = changed_paths
{
let changed_specifiers = changed_paths
.into_iter()
.filter_map(|p| ModuleSpecifier::from_file_path(p).ok())
.collect::<HashSet<_>>();
let mut result = Vec::new();
for bench_module_specifier in bench_modules {
if has_graph_root_local_dependent_changed(
&graph,
&bench_module_specifier,
&changed_specifiers,
) {
result.push(bench_module_specifier.clone());
}
}
result
} else {
bench_modules.clone()
};
let worker_factory =
Arc::new(factory.create_cli_main_worker_factory().await?);
let specifiers =
collect_specifiers(&bench_options.files, is_supported_bench_path)?
.into_iter()
.filter(|specifier| bench_modules_to_reload.contains(specifier))
.collect::<Vec<ModuleSpecifier>>();
check_specifiers(cli_options, module_load_preparer, specifiers.clone())
.await?;
if bench_options.no_run {
return Ok(());
}
let log_level = cli_options.log_level();
bench_specifiers(
worker_factory,
&permissions,
specifiers,
BenchSpecifierOptions {
filter: TestFilter::from_flag(&bench_options.filter),
json: bench_options.json,
log_level,
},
)
.await?;
Ok(())
})
},
) )
.await?; .await?;
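
The closure above re-runs only the bench modules whose dependency closure intersects the changed paths, delegating the graph walk to `has_graph_root_local_dependent_changed`. A minimal, self-contained sketch of that idea (using a plain adjacency map instead of the real `deno_graph::ModuleGraph`, so everything here is illustrative):

// Simplified "did anything this root depends on change?" check.
use std::collections::{HashMap, HashSet};

fn root_depends_on_changed(
    graph: &HashMap<String, Vec<String>>, // specifier -> direct dependencies
    root: &str,
    changed: &HashSet<String>,
) -> bool {
    let mut stack = vec![root.to_string()];
    let mut seen = HashSet::new();
    while let Some(specifier) = stack.pop() {
        if !seen.insert(specifier.clone()) {
            continue; // already visited; also guards against import cycles
        }
        if changed.contains(&specifier) {
            return true;
        }
        if let Some(deps) = graph.get(&specifier) {
            stack.extend(deps.iter().cloned());
        }
    }
    false
}

The `seen` set plays the same role the comment in the removed resolver described: without it, circular imports would make the walk loop forever.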

View file

@ -1,10 +1,8 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_graph::Module; use deno_graph::Module;
use deno_runtime::colors; use deno_runtime::colors;
@ -13,17 +11,15 @@ use crate::args::CliOptions;
use crate::args::Flags; use crate::args::Flags;
use crate::args::TsConfigType; use crate::args::TsConfigType;
use crate::factory::CliFactory; use crate::factory::CliFactory;
use crate::factory::CliFactoryBuilder;
use crate::graph_util::error_for_any_npm_specifier; use crate::graph_util::error_for_any_npm_specifier;
use crate::util; use crate::util;
use crate::util::display; use crate::util::display;
use crate::util::file_watcher::ResolutionResult;
pub async fn bundle( pub async fn bundle(
flags: Flags, flags: Flags,
bundle_flags: BundleFlags, bundle_flags: BundleFlags,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let cli_options = Arc::new(CliOptions::from_flags(flags)?);
log::info!( log::info!(
"{} \"deno bundle\" is deprecated and will be removed in the future.", "{} \"deno bundle\" is deprecated and will be removed in the future.",
colors::yellow("Warning"), colors::yellow("Warning"),
@ -32,122 +28,115 @@ pub async fn bundle(
"Use alternative bundlers like \"deno_emit\", \"esbuild\" or \"rollup\" instead." "Use alternative bundlers like \"deno_emit\", \"esbuild\" or \"rollup\" instead."
); );
let module_specifier = cli_options.resolve_main_module()?; if flags.watch.is_some() {
let clear_screen = !flags.no_clear_screen;
let resolver = |_| {
let cli_options = cli_options.clone();
let module_specifier = &module_specifier;
async move {
log::debug!(">>>>> bundle START");
let factory = CliFactory::from_cli_options(cli_options);
let module_graph_builder = factory.module_graph_builder().await?;
let cli_options = factory.cli_options();
let graph = module_graph_builder
.create_graph_and_maybe_check(vec![module_specifier.clone()])
.await?;
let mut paths_to_watch: Vec<PathBuf> = graph
.specifiers()
.filter_map(|(_, r)| {
r.ok().and_then(|module| match module {
Module::Esm(m) => m.specifier.to_file_path().ok(),
Module::Json(m) => m.specifier.to_file_path().ok(),
// nothing to watch
Module::Node(_) | Module::Npm(_) | Module::External(_) => None,
})
})
.collect();
if let Ok(Some(import_map_path)) = cli_options
.resolve_import_map_specifier()
.map(|ms| ms.and_then(|ref s| s.to_file_path().ok()))
{
paths_to_watch.push(import_map_path);
}
Ok((paths_to_watch, graph, cli_options.clone()))
}
.map(move |result| match result {
Ok((paths_to_watch, graph, ps)) => ResolutionResult::Restart {
paths_to_watch,
result: Ok((ps, graph)),
},
Err(e) => ResolutionResult::Restart {
paths_to_watch: vec![module_specifier.to_file_path().unwrap()],
result: Err(e),
},
})
};
let operation =
|(cli_options, graph): (Arc<CliOptions>, Arc<deno_graph::ModuleGraph>)| {
let out_file = &bundle_flags.out_file;
async move {
// at the moment, we don't support npm specifiers in deno bundle, so show an error
error_for_any_npm_specifier(&graph)?;
let bundle_output = bundle_module_graph(graph.as_ref(), &cli_options)?;
log::debug!(">>>>> bundle END");
if let Some(out_file) = out_file {
let output_bytes = bundle_output.code.as_bytes();
let output_len = output_bytes.len();
util::fs::write_file(out_file, output_bytes, 0o644)?;
log::info!(
"{} {:?} ({})",
colors::green("Emit"),
out_file,
colors::gray(display::human_size(output_len as f64))
);
if let Some(bundle_map) = bundle_output.maybe_map {
let map_bytes = bundle_map.as_bytes();
let map_len = map_bytes.len();
let ext = if let Some(curr_ext) = out_file.extension() {
format!("{}.map", curr_ext.to_string_lossy())
} else {
"map".to_string()
};
let map_out_file = out_file.with_extension(ext);
util::fs::write_file(&map_out_file, map_bytes, 0o644)?;
log::info!(
"{} {:?} ({})",
colors::green("Emit"),
map_out_file,
colors::gray(display::human_size(map_len as f64))
);
}
} else {
println!("{}", bundle_output.code);
}
Ok(())
}
};
if cli_options.watch_paths().is_some() {
util::file_watcher::watch_func( util::file_watcher::watch_func(
resolver, flags,
operation,
util::file_watcher::PrintConfig { util::file_watcher::PrintConfig {
job_name: "Bundle".to_string(), job_name: "Bundle".to_string(),
clear_screen: !cli_options.no_clear_screen(), clear_screen,
},
move |flags, sender, _changed_paths| {
let bundle_flags = bundle_flags.clone();
Ok(async move {
let factory = CliFactoryBuilder::new()
.with_watcher(sender.clone())
.build_from_flags(flags)
.await?;
let cli_options = factory.cli_options();
if let Some(watch_paths) = cli_options.watch_paths() {
let _ = sender.send(watch_paths);
}
bundle_action(factory, &bundle_flags).await?;
Ok(())
})
}, },
) )
.await?; .await?;
} else { } else {
let module_graph = let factory = CliFactory::from_flags(flags).await?;
if let ResolutionResult::Restart { result, .. } = resolver(None).await { bundle_action(factory, &bundle_flags).await?;
result?
} else {
unreachable!();
};
operation(module_graph).await?;
} }
Ok(()) Ok(())
} }
async fn bundle_action(
factory: CliFactory,
bundle_flags: &BundleFlags,
) -> Result<(), AnyError> {
let cli_options = factory.cli_options();
let module_specifier = cli_options.resolve_main_module()?;
log::debug!(">>>>> bundle START");
let module_graph_builder = factory.module_graph_builder().await?;
let cli_options = factory.cli_options();
let graph = module_graph_builder
.create_graph_and_maybe_check(vec![module_specifier.clone()])
.await?;
let mut paths_to_watch: Vec<PathBuf> = graph
.specifiers()
.filter_map(|(_, r)| {
r.ok().and_then(|module| match module {
Module::Esm(m) => m.specifier.to_file_path().ok(),
Module::Json(m) => m.specifier.to_file_path().ok(),
// nothing to watch
Module::Node(_) | Module::Npm(_) | Module::External(_) => None,
})
})
.collect();
if let Ok(Some(import_map_path)) = cli_options
.resolve_import_map_specifier()
.map(|ms| ms.and_then(|ref s| s.to_file_path().ok()))
{
paths_to_watch.push(import_map_path);
}
// at the moment, we don't support npm specifiers in deno bundle, so show an error
error_for_any_npm_specifier(&graph)?;
let bundle_output = bundle_module_graph(graph.as_ref(), cli_options)?;
log::debug!(">>>>> bundle END");
let out_file = &bundle_flags.out_file;
if let Some(out_file) = out_file {
let output_bytes = bundle_output.code.as_bytes();
let output_len = output_bytes.len();
util::fs::write_file(out_file, output_bytes, 0o644)?;
log::info!(
"{} {:?} ({})",
colors::green("Emit"),
out_file,
colors::gray(display::human_size(output_len as f64))
);
if let Some(bundle_map) = bundle_output.maybe_map {
let map_bytes = bundle_map.as_bytes();
let map_len = map_bytes.len();
let ext = if let Some(curr_ext) = out_file.extension() {
format!("{}.map", curr_ext.to_string_lossy())
} else {
"map".to_string()
};
let map_out_file = out_file.with_extension(ext);
util::fs::write_file(&map_out_file, map_bytes, 0o644)?;
log::info!(
"{} {:?} ({})",
colors::green("Emit"),
map_out_file,
colors::gray(display::human_size(map_len as f64))
);
}
} else {
println!("{}", bundle_output.code);
}
Ok(())
}
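
The source-map naming in `bundle_action` keeps the out file's existing extension and appends `.map` to it, falling back to a bare `map` extension when there is none. A small standalone check of that rule (the helper name is hypothetical; the logic mirrors the code above):

use std::path::{Path, PathBuf};

fn map_file_for(out_file: &Path) -> PathBuf {
    let ext = match out_file.extension() {
        Some(curr_ext) => format!("{}.map", curr_ext.to_string_lossy()),
        None => "map".to_string(),
    };
    out_file.with_extension(ext)
}

fn main() {
    assert_eq!(map_file_for(Path::new("bundle.js")), PathBuf::from("bundle.js.map"));
    assert_eq!(map_file_for(Path::new("bundle")), PathBuf::from("bundle.map"));
}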
fn bundle_module_graph( fn bundle_module_graph(
graph: &deno_graph::ModuleGraph, graph: &deno_graph::ModuleGraph,
cli_options: &CliOptions, cli_options: &CliOptions,

View file

@ -9,6 +9,8 @@
use crate::args::CliOptions; use crate::args::CliOptions;
use crate::args::FilesConfig; use crate::args::FilesConfig;
use crate::args::Flags;
use crate::args::FmtFlags;
use crate::args::FmtOptions; use crate::args::FmtOptions;
use crate::args::FmtOptionsConfig; use crate::args::FmtOptionsConfig;
use crate::args::ProseWrap; use crate::args::ProseWrap;
@ -16,7 +18,6 @@ use crate::colors;
use crate::factory::CliFactory; use crate::factory::CliFactory;
use crate::util::diff::diff; use crate::util::diff::diff;
use crate::util::file_watcher; use crate::util::file_watcher;
use crate::util::file_watcher::ResolutionResult;
use crate::util::fs::FileCollector; use crate::util::fs::FileCollector;
use crate::util::path::get_extension; use crate::util::path::get_extension;
use crate::util::text_encoding; use crate::util::text_encoding;
@ -46,11 +47,10 @@ use std::sync::Arc;
use crate::cache::IncrementalCache; use crate::cache::IncrementalCache;
/// Format JavaScript/TypeScript files. /// Format JavaScript/TypeScript files.
pub async fn format( pub async fn format(flags: Flags, fmt_flags: FmtFlags) -> Result<(), AnyError> {
cli_options: CliOptions, if fmt_flags.is_stdin() {
fmt_options: FmtOptions, let cli_options = CliOptions::from_flags(flags)?;
) -> Result<(), AnyError> { let fmt_options = cli_options.resolve_fmt_options(fmt_flags)?;
if fmt_options.is_stdin {
return format_stdin( return format_stdin(
fmt_options, fmt_options,
cli_options cli_options
@ -61,90 +61,93 @@ pub async fn format(
); );
} }
let files = fmt_options.files; if flags.watch.is_some() {
let check = fmt_options.check; let clear_screen = !flags.no_clear_screen;
let fmt_config_options = fmt_options.options;
let resolver = |changed: Option<Vec<PathBuf>>| {
let files_changed = changed.is_some();
let result = collect_fmt_files(&files).map(|files| {
let refmt_files = if let Some(paths) = changed {
if check {
files
.iter()
.any(|path| paths.contains(path))
.then_some(files)
.unwrap_or_else(|| [].to_vec())
} else {
files
.into_iter()
.filter(|path| paths.contains(path))
.collect::<Vec<_>>()
}
} else {
files
};
(refmt_files, fmt_config_options.clone())
});
let paths_to_watch = files.include.clone();
async move {
if files_changed
&& matches!(result, Ok((ref files, _)) if files.is_empty())
{
ResolutionResult::Ignore
} else {
ResolutionResult::Restart {
paths_to_watch,
result,
}
}
}
};
let factory = CliFactory::from_cli_options(Arc::new(cli_options));
let cli_options = factory.cli_options();
let caches = factory.caches()?;
let operation = |(paths, fmt_options): (Vec<PathBuf>, FmtOptionsConfig)| async {
let incremental_cache = Arc::new(IncrementalCache::new(
caches.fmt_incremental_cache_db(),
&fmt_options,
&paths,
));
if check {
check_source_files(paths, fmt_options, incremental_cache.clone()).await?;
} else {
format_source_files(paths, fmt_options, incremental_cache.clone())
.await?;
}
incremental_cache.wait_completion().await;
Ok(())
};
if cli_options.watch_paths().is_some() {
file_watcher::watch_func( file_watcher::watch_func(
resolver, flags,
operation,
file_watcher::PrintConfig { file_watcher::PrintConfig {
job_name: "Fmt".to_string(), job_name: "Fmt".to_string(),
clear_screen: !cli_options.no_clear_screen(), clear_screen,
},
move |flags, sender, changed_paths| {
let fmt_flags = fmt_flags.clone();
Ok(async move {
let factory = CliFactory::from_flags(flags).await?;
let cli_options = factory.cli_options();
let fmt_options = cli_options.resolve_fmt_options(fmt_flags)?;
let files =
collect_fmt_files(&fmt_options.files).and_then(|files| {
if files.is_empty() {
Err(generic_error("No target files found."))
} else {
Ok(files)
}
})?;
_ = sender.send(files.clone());
let refmt_files = if let Some(paths) = changed_paths {
if fmt_options.check {
// check all files when any file changes (https://github.com/denoland/deno/issues/12446)
files
.iter()
.any(|path| paths.contains(path))
.then_some(files)
.unwrap_or_else(|| [].to_vec())
} else {
files
.into_iter()
.filter(|path| paths.contains(path))
.collect::<Vec<_>>()
}
} else {
files
};
format_files(factory, fmt_options, refmt_files).await?;
Ok(())
})
}, },
) )
.await?; .await?;
} else { } else {
let files = collect_fmt_files(&files).and_then(|files| { let factory = CliFactory::from_flags(flags).await?;
let cli_options = factory.cli_options();
let fmt_options = cli_options.resolve_fmt_options(fmt_flags)?;
let files = collect_fmt_files(&fmt_options.files).and_then(|files| {
if files.is_empty() { if files.is_empty() {
Err(generic_error("No target files found.")) Err(generic_error("No target files found."))
} else { } else {
Ok(files) Ok(files)
} }
})?; })?;
operation((files, fmt_config_options)).await?; format_files(factory, fmt_options, files).await?;
} }
Ok(()) Ok(())
} }
async fn format_files(
factory: CliFactory,
fmt_options: FmtOptions,
paths: Vec<PathBuf>,
) -> Result<(), AnyError> {
let caches = factory.caches()?;
let check = fmt_options.check;
let incremental_cache = Arc::new(IncrementalCache::new(
caches.fmt_incremental_cache_db(),
&fmt_options.options,
&paths,
));
if check {
check_source_files(paths, fmt_options.options, incremental_cache.clone())
.await?;
} else {
format_source_files(paths, fmt_options.options, incremental_cache.clone())
.await?;
}
incremental_cache.wait_completion().await;
Ok(())
}
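
`format_files` short-circuits unchanged files through the `IncrementalCache` held by the factory's caches. Conceptually (this is not the real cache, which is persisted to disk and keyed by a hash of the formatting options), the check-and-update cycle amounts to:

use std::collections::HashMap;
use std::path::{Path, PathBuf};

// Naive in-memory stand-in for the incremental cache, for illustration only.
#[derive(Default)]
struct NaiveIncrementalCache {
    last_clean_text: HashMap<PathBuf, String>,
}

impl NaiveIncrementalCache {
    /// True when the file's current text matches what was last recorded for it.
    fn is_file_same(&self, path: &Path, text: &str) -> bool {
        self.last_clean_text
            .get(path)
            .map(|t| t.as_str() == text)
            .unwrap_or(false)
    }

    /// Record the text of a file that was formatted (or linted) cleanly.
    fn update_file(&mut self, path: &Path, text: &str) {
        self.last_clean_text.insert(path.to_path_buf(), text.to_string());
    }
}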
fn collect_fmt_files(files: &FilesConfig) -> Result<Vec<PathBuf>, AnyError> { fn collect_fmt_files(files: &FilesConfig) -> Result<Vec<PathBuf>, AnyError> {
FileCollector::new(is_supported_ext_fmt) FileCollector::new(is_supported_ext_fmt)
.ignore_git_folder() .ignore_git_folder()

View file

@ -2,12 +2,9 @@
//! This module provides file linting utilities using //! This module provides file linting utilities using
//! [`deno_lint`](https://github.com/denoland/deno_lint). //! [`deno_lint`](https://github.com/denoland/deno_lint).
//!
//! At the moment it is only consumed using CLI but in
//! the future it can be easily extended to provide
//! the same functions as ops available in JS runtime.
use crate::args::CliOptions;
use crate::args::FilesConfig; use crate::args::FilesConfig;
use crate::args::Flags;
use crate::args::LintFlags;
use crate::args::LintOptions; use crate::args::LintOptions;
use crate::args::LintReporterKind; use crate::args::LintReporterKind;
use crate::args::LintRulesConfig; use crate::args::LintRulesConfig;
@ -15,9 +12,9 @@ use crate::colors;
use crate::factory::CliFactory; use crate::factory::CliFactory;
use crate::tools::fmt::run_parallelized; use crate::tools::fmt::run_parallelized;
use crate::util::file_watcher; use crate::util::file_watcher;
use crate::util::file_watcher::ResolutionResult;
use crate::util::fs::FileCollector; use crate::util::fs::FileCollector;
use crate::util::path::is_supported_ext; use crate::util::path::is_supported_ext;
use crate::util::sync::AtomicFlag;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_core::anyhow::bail; use deno_core::anyhow::bail;
use deno_core::error::generic_error; use deno_core::error::generic_error;
@ -38,8 +35,6 @@ use std::io::stdin;
use std::io::Read; use std::io::Read;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;
use std::sync::Arc; use std::sync::Arc;
use std::sync::Mutex; use std::sync::Mutex;
@ -55,133 +50,70 @@ fn create_reporter(kind: LintReporterKind) -> Box<dyn LintReporter + Send> {
} }
} }
pub async fn lint( pub async fn lint(flags: Flags, lint_flags: LintFlags) -> Result<(), AnyError> {
cli_options: CliOptions, if flags.watch.is_some() {
lint_options: LintOptions, if lint_flags.is_stdin() {
) -> Result<(), AnyError> {
// Try to get lint rules. If none were set use recommended rules.
let lint_rules = get_configured_rules(lint_options.rules);
if lint_rules.is_empty() {
bail!("No rules have been configured")
}
let files = lint_options.files;
let reporter_kind = lint_options.reporter_kind;
let resolver = |changed: Option<Vec<PathBuf>>| {
let files_changed = changed.is_some();
let result = collect_lint_files(&files).map(|files| {
if let Some(paths) = changed {
files
.iter()
.any(|path| paths.contains(path))
.then_some(files)
.unwrap_or_else(|| [].to_vec())
} else {
files
}
});
let paths_to_watch = files.include.clone();
async move {
if files_changed && matches!(result, Ok(ref files) if files.is_empty()) {
ResolutionResult::Ignore
} else {
ResolutionResult::Restart {
paths_to_watch,
result,
}
}
}
};
let has_error = Arc::new(AtomicBool::new(false));
let factory = CliFactory::from_cli_options(Arc::new(cli_options));
let cli_options = factory.cli_options();
let caches = factory.caches()?;
let operation = |paths: Vec<PathBuf>| async {
let incremental_cache = Arc::new(IncrementalCache::new(
caches.lint_incremental_cache_db(),
// use a hash of the rule names in order to bust the cache
&{
// ensure this is stable by sorting it
let mut names = lint_rules.iter().map(|r| r.code()).collect::<Vec<_>>();
names.sort_unstable();
names
},
&paths,
));
let target_files_len = paths.len();
let reporter_lock =
Arc::new(Mutex::new(create_reporter(reporter_kind.clone())));
run_parallelized(paths, {
let has_error = has_error.clone();
let lint_rules = lint_rules.clone();
let reporter_lock = reporter_lock.clone();
let incremental_cache = incremental_cache.clone();
move |file_path| {
let file_text = fs::read_to_string(&file_path)?;
// don't bother rechecking this file if it didn't have any diagnostics before
if incremental_cache.is_file_same(&file_path, &file_text) {
return Ok(());
}
let r = lint_file(&file_path, file_text, lint_rules);
if let Ok((file_diagnostics, file_text)) = &r {
if file_diagnostics.is_empty() {
// update the incremental cache if there were no diagnostics
incremental_cache.update_file(&file_path, file_text)
}
}
handle_lint_result(
&file_path.to_string_lossy(),
r,
reporter_lock.clone(),
has_error,
);
Ok(())
}
})
.await?;
incremental_cache.wait_completion().await;
reporter_lock.lock().unwrap().close(target_files_len);
Ok(())
};
if cli_options.watch_paths().is_some() {
if lint_options.is_stdin {
return Err(generic_error( return Err(generic_error(
"Lint watch on standard input is not supported.", "Lint watch on standard input is not supported.",
)); ));
} }
let clear_screen = !flags.no_clear_screen;
file_watcher::watch_func( file_watcher::watch_func(
resolver, flags,
operation,
file_watcher::PrintConfig { file_watcher::PrintConfig {
job_name: "Lint".to_string(), job_name: "Lint".to_string(),
clear_screen: !cli_options.no_clear_screen(), clear_screen,
},
move |flags, sender, changed_paths| {
let lint_flags = lint_flags.clone();
Ok(async move {
let factory = CliFactory::from_flags(flags).await?;
let cli_options = factory.cli_options();
let lint_options = cli_options.resolve_lint_options(lint_flags)?;
let files =
collect_lint_files(&lint_options.files).and_then(|files| {
if files.is_empty() {
Err(generic_error("No target files found."))
} else {
Ok(files)
}
})?;
_ = sender.send(files.clone());
let lint_paths = if let Some(paths) = changed_paths {
// lint all files when any file changes (https://github.com/denoland/deno/issues/12446)
files
.iter()
.any(|path| paths.contains(path))
.then_some(files)
.unwrap_or_else(|| [].to_vec())
} else {
files
};
lint_files(factory, lint_options, lint_paths).await?;
Ok(())
})
}, },
) )
.await?; .await?;
} else { } else {
if lint_options.is_stdin { let factory = CliFactory::from_flags(flags).await?;
let cli_options = factory.cli_options();
let is_stdin = lint_flags.is_stdin();
let lint_options = cli_options.resolve_lint_options(lint_flags)?;
let files = &lint_options.files;
let success = if is_stdin {
let reporter_kind = lint_options.reporter_kind;
let reporter_lock = Arc::new(Mutex::new(create_reporter(reporter_kind))); let reporter_lock = Arc::new(Mutex::new(create_reporter(reporter_kind)));
let lint_rules = get_config_rules_err_empty(lint_options.rules)?;
let r = lint_stdin(lint_rules); let r = lint_stdin(lint_rules);
handle_lint_result( let success =
STDIN_FILE_NAME, handle_lint_result(STDIN_FILE_NAME, r, reporter_lock.clone());
r,
reporter_lock.clone(),
has_error.clone(),
);
reporter_lock.lock().unwrap().close(1); reporter_lock.lock().unwrap().close(1);
success
} else { } else {
let target_files = collect_lint_files(&files).and_then(|files| { let target_files = collect_lint_files(files).and_then(|files| {
if files.is_empty() { if files.is_empty() {
Err(generic_error("No target files found.")) Err(generic_error("No target files found."))
} else { } else {
@ -189,10 +121,9 @@ pub async fn lint(
} }
})?; })?;
debug!("Found {} files", target_files.len()); debug!("Found {} files", target_files.len());
operation(target_files).await?; lint_files(factory, lint_options, target_files).await?
}; };
let has_error = has_error.load(Ordering::Relaxed); if !success {
if has_error {
std::process::exit(1); std::process::exit(1);
} }
} }
@ -200,6 +131,70 @@ pub async fn lint(
Ok(()) Ok(())
} }
async fn lint_files(
factory: CliFactory,
lint_options: LintOptions,
paths: Vec<PathBuf>,
) -> Result<bool, AnyError> {
let caches = factory.caches()?;
let lint_rules = get_config_rules_err_empty(lint_options.rules)?;
let incremental_cache = Arc::new(IncrementalCache::new(
caches.lint_incremental_cache_db(),
// use a hash of the rule names in order to bust the cache
&{
// ensure this is stable by sorting it
let mut names = lint_rules.iter().map(|r| r.code()).collect::<Vec<_>>();
names.sort_unstable();
names
},
&paths,
));
let target_files_len = paths.len();
let reporter_kind = lint_options.reporter_kind;
let reporter_lock =
Arc::new(Mutex::new(create_reporter(reporter_kind.clone())));
let has_error = Arc::new(AtomicFlag::default());
run_parallelized(paths, {
let has_error = has_error.clone();
let lint_rules = lint_rules.clone();
let reporter_lock = reporter_lock.clone();
let incremental_cache = incremental_cache.clone();
move |file_path| {
let file_text = fs::read_to_string(&file_path)?;
// don't bother rechecking this file if it didn't have any diagnostics before
if incremental_cache.is_file_same(&file_path, &file_text) {
return Ok(());
}
let r = lint_file(&file_path, file_text, lint_rules);
if let Ok((file_diagnostics, file_text)) = &r {
if file_diagnostics.is_empty() {
// update the incremental cache if there were no diagnostics
incremental_cache.update_file(&file_path, file_text)
}
}
let success = handle_lint_result(
&file_path.to_string_lossy(),
r,
reporter_lock.clone(),
);
if !success {
has_error.raise();
}
Ok(())
}
})
.await?;
incremental_cache.wait_completion().await;
reporter_lock.lock().unwrap().close(target_files_len);
Ok(!has_error.is_raised())
}
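
`lint_files` now aggregates failures through the `AtomicFlag` from `crate::util::sync` (`raise`/`is_raised`) instead of threading an `AtomicBool` and explicit orderings through every closure. Assuming the utility is a thin wrapper over `AtomicBool` (the orderings below are chosen for simplicity and may not match the real implementation), a minimal equivalent looks like:

use std::sync::atomic::{AtomicBool, Ordering};

#[derive(Default)]
pub struct AtomicFlag(AtomicBool);

impl AtomicFlag {
    /// Raise the flag; once raised it stays raised.
    pub fn raise(&self) {
        self.0.store(true, Ordering::SeqCst);
    }

    /// Check whether any thread has raised the flag.
    pub fn is_raised(&self) -> bool {
        self.0.load(Ordering::SeqCst)
    }
}

Because the flag only transitions one way, `handle_lint_result` can simply return a bool and leave the aggregation to its caller, as in the new code above.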
fn collect_lint_files(files: &FilesConfig) -> Result<Vec<PathBuf>, AnyError> { fn collect_lint_files(files: &FilesConfig) -> Result<Vec<PathBuf>, AnyError> {
FileCollector::new(is_supported_ext) FileCollector::new(is_supported_ext)
.ignore_git_folder() .ignore_git_folder()
@ -286,21 +281,20 @@ fn handle_lint_result(
file_path: &str, file_path: &str,
result: Result<(Vec<LintDiagnostic>, String), AnyError>, result: Result<(Vec<LintDiagnostic>, String), AnyError>,
reporter_lock: Arc<Mutex<Box<dyn LintReporter + Send>>>, reporter_lock: Arc<Mutex<Box<dyn LintReporter + Send>>>,
has_error: Arc<AtomicBool>, ) -> bool {
) {
let mut reporter = reporter_lock.lock().unwrap(); let mut reporter = reporter_lock.lock().unwrap();
match result { match result {
Ok((mut file_diagnostics, source)) => { Ok((mut file_diagnostics, source)) => {
sort_diagnostics(&mut file_diagnostics); sort_diagnostics(&mut file_diagnostics);
for d in file_diagnostics.iter() { for d in file_diagnostics.iter() {
has_error.store(true, Ordering::Relaxed);
reporter.visit_diagnostic(d, source.split('\n').collect()); reporter.visit_diagnostic(d, source.split('\n').collect());
} }
file_diagnostics.is_empty()
} }
Err(err) => { Err(err) => {
has_error.store(true, Ordering::Relaxed);
reporter.visit_error(file_path, &err); reporter.visit_error(file_path, &err);
false
} }
} }
} }
@ -534,6 +528,16 @@ fn sort_diagnostics(diagnostics: &mut [LintDiagnostic]) {
}); });
} }
fn get_config_rules_err_empty(
rules: LintRulesConfig,
) -> Result<Vec<&'static dyn LintRule>, AnyError> {
let lint_rules = get_configured_rules(rules);
if lint_rules.is_empty() {
bail!("No rules have been configured")
}
Ok(lint_rules)
}
pub fn get_configured_rules( pub fn get_configured_rules(
rules: LintRulesConfig, rules: LintRulesConfig,
) -> Vec<&'static dyn LintRule> { ) -> Vec<&'static dyn LintRule> {

View file

@ -3,7 +3,6 @@
use std::io::Read; use std::io::Read;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_runtime::permissions::Permissions; use deno_runtime::permissions::Permissions;
use deno_runtime::permissions::PermissionsContainer; use deno_runtime::permissions::PermissionsContainer;
@ -98,45 +97,42 @@ pub async fn run_from_stdin(flags: Flags) -> Result<i32, AnyError> {
// TODO(bartlomieju): this function is not handling `exit_code` set by the runtime // TODO(bartlomieju): this function is not handling `exit_code` set by the runtime
// code properly. // code properly.
async fn run_with_watch(flags: Flags) -> Result<i32, AnyError> { async fn run_with_watch(flags: Flags) -> Result<i32, AnyError> {
let (sender, receiver) = tokio::sync::mpsc::unbounded_channel(); let clear_screen = !flags.no_clear_screen;
let factory = CliFactoryBuilder::new()
.with_watcher(sender.clone())
.build_from_flags(flags)
.await?;
let file_watcher = factory.file_watcher()?;
let cli_options = factory.cli_options();
let clear_screen = !cli_options.no_clear_screen();
let main_module = cli_options.resolve_main_module()?;
maybe_npm_install(&factory).await?; util::file_watcher::watch_func(
flags,
let create_cli_main_worker_factory =
factory.create_cli_main_worker_factory_func().await?;
let operation = |main_module: ModuleSpecifier| {
file_watcher.reset();
let permissions = PermissionsContainer::new(Permissions::from_options(
&cli_options.permissions_options(),
)?);
let create_cli_main_worker_factory = create_cli_main_worker_factory.clone();
Ok(async move {
let worker = create_cli_main_worker_factory()
.create_main_worker(main_module, permissions)
.await?;
worker.run_for_watcher().await?;
Ok(())
})
};
util::file_watcher::watch_func2(
receiver,
operation,
main_module,
util::file_watcher::PrintConfig { util::file_watcher::PrintConfig {
job_name: "Process".to_string(), job_name: "Process".to_string(),
clear_screen, clear_screen,
}, },
move |flags, sender, _changed_paths| {
Ok(async move {
let factory = CliFactoryBuilder::new()
.with_watcher(sender.clone())
.build_from_flags(flags)
.await?;
let cli_options = factory.cli_options();
let main_module = cli_options.resolve_main_module()?;
maybe_npm_install(&factory).await?;
if let Some(watch_paths) = cli_options.watch_paths() {
let _ = sender.send(watch_paths);
}
let permissions = PermissionsContainer::new(Permissions::from_options(
&cli_options.permissions_options(),
)?);
let worker = factory
.create_cli_main_worker_factory()
.await?
.create_main_worker(main_module, permissions)
.await?;
worker.run_for_watcher().await?;
Ok(())
})
},
) )
.await?; .await?;

View file

@ -2,18 +2,20 @@
use crate::args::CliOptions; use crate::args::CliOptions;
use crate::args::FilesConfig; use crate::args::FilesConfig;
use crate::args::TestOptions; use crate::args::Flags;
use crate::args::TestFlags;
use crate::colors; use crate::colors;
use crate::display; use crate::display;
use crate::factory::CliFactory; use crate::factory::CliFactory;
use crate::factory::CliFactoryBuilder;
use crate::file_fetcher::File; use crate::file_fetcher::File;
use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileFetcher;
use crate::graph_util::graph_valid_with_cli_options; use crate::graph_util::graph_valid_with_cli_options;
use crate::graph_util::has_graph_root_local_dependent_changed;
use crate::module_loader::ModuleLoadPreparer; use crate::module_loader::ModuleLoadPreparer;
use crate::ops; use crate::ops;
use crate::util::checksum; use crate::util::checksum;
use crate::util::file_watcher; use crate::util::file_watcher;
use crate::util::file_watcher::ResolutionResult;
use crate::util::fs::collect_specifiers; use crate::util::fs::collect_specifiers;
use crate::util::path::get_extension; use crate::util::path::get_extension;
use crate::util::path::is_supported_ext; use crate::util::path::is_supported_ext;
@ -62,7 +64,6 @@ use std::io::Read;
use std::io::Write; use std::io::Write;
use std::num::NonZeroUsize; use std::num::NonZeroUsize;
use std::path::Path; use std::path::Path;
use std::path::PathBuf;
use std::sync::atomic::AtomicBool; use std::sync::atomic::AtomicBool;
use std::sync::atomic::AtomicUsize; use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering; use std::sync::atomic::Ordering;
@ -1641,11 +1642,12 @@ async fn fetch_specifiers_with_test_mode(
} }
pub async fn run_tests( pub async fn run_tests(
cli_options: CliOptions, flags: Flags,
test_options: TestOptions, test_flags: TestFlags,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let factory = CliFactory::from_cli_options(Arc::new(cli_options)); let factory = CliFactory::from_flags(flags).await?;
let cli_options = factory.cli_options(); let cli_options = factory.cli_options();
let test_options = cli_options.resolve_test_options(test_flags)?;
let file_fetcher = factory.file_fetcher()?; let file_fetcher = factory.file_fetcher()?;
let module_load_preparer = factory.module_load_preparer().await?; let module_load_preparer = factory.module_load_preparer().await?;
// Various test files should not share the same permissions in terms of // Various test files should not share the same permissions in terms of
@ -1708,186 +1710,9 @@ pub async fn run_tests(
} }
pub async fn run_tests_with_watch( pub async fn run_tests_with_watch(
cli_options: CliOptions, flags: Flags,
test_options: TestOptions, test_flags: TestFlags,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let factory = CliFactory::from_cli_options(Arc::new(cli_options));
let cli_options = factory.cli_options();
let module_graph_builder = factory.module_graph_builder().await?;
let module_load_preparer = factory.module_load_preparer().await?;
let file_fetcher = factory.file_fetcher()?;
let file_watcher = factory.file_watcher()?;
// Various test files should not share the same permissions in terms of
// `PermissionsContainer` - otherwise granting/revoking permissions in one
// file would have impact on other files, which is undesirable.
let permissions =
Permissions::from_options(&cli_options.permissions_options())?;
let graph_kind = cli_options.type_check_mode().as_graph_kind();
let log_level = cli_options.log_level();
let resolver = |changed: Option<Vec<PathBuf>>| {
let paths_to_watch = test_options.files.include.clone();
let paths_to_watch_clone = paths_to_watch.clone();
let files_changed = changed.is_some();
let test_options = &test_options;
let cli_options = cli_options.clone();
let module_graph_builder = module_graph_builder.clone();
async move {
let test_modules = if test_options.doc {
collect_specifiers(&test_options.files, is_supported_test_ext)
} else {
collect_specifiers(&test_options.files, is_supported_test_path)
}?;
let mut paths_to_watch = paths_to_watch_clone;
let mut modules_to_reload = if files_changed {
Vec::new()
} else {
test_modules.clone()
};
let graph = module_graph_builder
.create_graph(graph_kind, test_modules.clone())
.await?;
graph_valid_with_cli_options(&graph, &test_modules, &cli_options)?;
// TODO(@kitsonk) - This should be totally derivable from the graph.
for specifier in test_modules {
fn get_dependencies<'a>(
graph: &'a deno_graph::ModuleGraph,
maybe_module: Option<&'a deno_graph::Module>,
// This needs to be accessible to skip getting dependencies if they're already there,
// otherwise this will cause a stack overflow with circular dependencies
output: &mut HashSet<&'a ModuleSpecifier>,
) {
if let Some(module) = maybe_module.and_then(|m| m.esm()) {
for dep in module.dependencies.values() {
if let Some(specifier) = &dep.get_code() {
if !output.contains(specifier) {
output.insert(specifier);
get_dependencies(graph, graph.get(specifier), output);
}
}
if let Some(specifier) = &dep.get_type() {
if !output.contains(specifier) {
output.insert(specifier);
get_dependencies(graph, graph.get(specifier), output);
}
}
}
}
}
// This test module and all its dependencies
let mut modules = HashSet::new();
modules.insert(&specifier);
get_dependencies(&graph, graph.get(&specifier), &mut modules);
paths_to_watch.extend(
modules
.iter()
.filter_map(|specifier| specifier.to_file_path().ok()),
);
if let Some(changed) = &changed {
for path in changed
.iter()
.filter_map(|path| ModuleSpecifier::from_file_path(path).ok())
{
if modules.contains(&path) {
modules_to_reload.push(specifier);
break;
}
}
}
}
Ok((paths_to_watch, modules_to_reload))
}
.map(move |result| {
if files_changed
&& matches!(result, Ok((_, ref modules)) if modules.is_empty())
{
ResolutionResult::Ignore
} else {
match result {
Ok((paths_to_watch, modules_to_reload)) => {
ResolutionResult::Restart {
paths_to_watch,
result: Ok(modules_to_reload),
}
}
Err(e) => ResolutionResult::Restart {
paths_to_watch,
result: Err(e),
},
}
}
})
};
let create_cli_main_worker_factory =
factory.create_cli_main_worker_factory_func().await?;
let operation = |modules_to_reload: Vec<ModuleSpecifier>| {
let permissions = &permissions;
let test_options = &test_options;
file_watcher.reset();
let cli_options = cli_options.clone();
let file_fetcher = file_fetcher.clone();
let module_load_preparer = module_load_preparer.clone();
let create_cli_main_worker_factory = create_cli_main_worker_factory.clone();
async move {
let worker_factory = Arc::new(create_cli_main_worker_factory());
let specifiers_with_mode = fetch_specifiers_with_test_mode(
&file_fetcher,
&test_options.files,
&test_options.doc,
)
.await?
.into_iter()
.filter(|(specifier, _)| modules_to_reload.contains(specifier))
.collect::<Vec<(ModuleSpecifier, TestMode)>>();
check_specifiers(
&cli_options,
&file_fetcher,
&module_load_preparer,
specifiers_with_mode.clone(),
)
.await?;
if test_options.no_run {
return Ok(());
}
test_specifiers(
worker_factory,
permissions,
specifiers_with_mode
.into_iter()
.filter_map(|(s, m)| match m {
TestMode::Documentation => None,
_ => Some(s),
})
.collect(),
TestSpecifiersOptions {
concurrent_jobs: test_options.concurrent_jobs,
fail_fast: test_options.fail_fast,
log_level,
specifier: TestSpecifierOptions {
filter: TestFilter::from_flag(&test_options.filter),
shuffle: test_options.shuffle,
trace_ops: test_options.trace_ops,
},
},
)
.await?;
Ok(())
}
};
// On top of the sigint handlers which are added and unbound for each test // On top of the sigint handlers which are added and unbound for each test
// run, a process-scoped basic exit handler is required due to a tokio // run, a process-scoped basic exit handler is required due to a tokio
// limitation where it doesn't unbind its own handler for the entire process // limitation where it doesn't unbind its own handler for the entire process
@ -1901,14 +1726,118 @@ pub async fn run_tests_with_watch(
} }
}); });
let clear_screen = !cli_options.no_clear_screen(); let clear_screen = !flags.no_clear_screen;
file_watcher::watch_func( file_watcher::watch_func(
resolver, flags,
operation,
file_watcher::PrintConfig { file_watcher::PrintConfig {
job_name: "Test".to_string(), job_name: "Test".to_string(),
clear_screen, clear_screen,
}, },
move |flags, sender, changed_paths| {
let test_flags = test_flags.clone();
Ok(async move {
let factory = CliFactoryBuilder::new()
.with_watcher(sender.clone())
.build_from_flags(flags)
.await?;
let cli_options = factory.cli_options();
let test_options = cli_options.resolve_test_options(test_flags)?;
if let Some(watch_paths) = cli_options.watch_paths() {
let _ = sender.send(watch_paths);
}
let _ = sender.send(test_options.files.include.clone());
let graph_kind = cli_options.type_check_mode().as_graph_kind();
let log_level = cli_options.log_level();
let cli_options = cli_options.clone();
let module_graph_builder = factory.module_graph_builder().await?;
let file_fetcher = factory.file_fetcher()?;
let test_modules = if test_options.doc {
collect_specifiers(&test_options.files, is_supported_test_ext)
} else {
collect_specifiers(&test_options.files, is_supported_test_path)
}?;
let permissions =
Permissions::from_options(&cli_options.permissions_options())?;
let graph = module_graph_builder
.create_graph(graph_kind, test_modules.clone())
.await?;
graph_valid_with_cli_options(&graph, &test_modules, &cli_options)?;
let test_modules_to_reload = if let Some(changed_paths) = changed_paths
{
let changed_specifiers = changed_paths
.into_iter()
.filter_map(|p| ModuleSpecifier::from_file_path(p).ok())
.collect::<HashSet<_>>();
let mut result = Vec::new();
for test_module_specifier in test_modules {
if has_graph_root_local_dependent_changed(
&graph,
&test_module_specifier,
&changed_specifiers,
) {
result.push(test_module_specifier.clone());
}
}
result
} else {
test_modules.clone()
};
let worker_factory =
Arc::new(factory.create_cli_main_worker_factory().await?);
let module_load_preparer = factory.module_load_preparer().await?;
let specifiers_with_mode = fetch_specifiers_with_test_mode(
file_fetcher,
&test_options.files,
&test_options.doc,
)
.await?
.into_iter()
.filter(|(specifier, _)| test_modules_to_reload.contains(specifier))
.collect::<Vec<(ModuleSpecifier, TestMode)>>();
check_specifiers(
&cli_options,
file_fetcher,
module_load_preparer,
specifiers_with_mode.clone(),
)
.await?;
if test_options.no_run {
return Ok(());
}
test_specifiers(
worker_factory,
&permissions,
specifiers_with_mode
.into_iter()
.filter_map(|(s, m)| match m {
TestMode::Documentation => None,
_ => Some(s),
})
.collect(),
TestSpecifiersOptions {
concurrent_jobs: test_options.concurrent_jobs,
fail_fast: test_options.fail_fast,
log_level,
specifier: TestSpecifierOptions {
filter: TestFilter::from_flag(&test_options.filter),
shuffle: test_options.shuffle,
trace_ops: test_options.trace_ops,
},
},
)
.await?;
Ok(())
})
},
) )
.await?; .await?;

View file

@ -1,5 +1,6 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
use crate::args::Flags;
use crate::colors; use crate::colors;
use crate::util::fs::canonicalize_path; use crate::util::fs::canonicalize_path;
@ -21,6 +22,7 @@ use std::time::Duration;
use tokio::select; use tokio::select;
use tokio::sync::mpsc; use tokio::sync::mpsc;
use tokio::sync::mpsc::UnboundedReceiver; use tokio::sync::mpsc::UnboundedReceiver;
use tokio::sync::mpsc::UnboundedSender;
use tokio::time::sleep; use tokio::time::sleep;
const CLEAR_SCREEN: &str = "\x1B[2J\x1B[1;1H"; const CLEAR_SCREEN: &str = "\x1B[2J\x1B[1;1H";
@ -66,7 +68,7 @@ impl DebouncedReceiver {
} }
} }
async fn error_handler<F>(watch_future: F) async fn error_handler<F>(watch_future: F) -> bool
where where
F: Future<Output = Result<(), AnyError>>, F: Future<Output = Result<(), AnyError>>,
{ {
@ -81,42 +83,9 @@ where
colors::red_bold("error"), colors::red_bold("error"),
error_string.trim_start_matches("error: ") error_string.trim_start_matches("error: ")
); );
} false
} } else {
true
pub enum ResolutionResult<T> {
Restart {
paths_to_watch: Vec<PathBuf>,
result: Result<T, AnyError>,
},
Ignore,
}
async fn next_restart<R, T, F>(
resolver: &mut R,
debounced_receiver: &mut DebouncedReceiver,
) -> (Vec<PathBuf>, Result<T, AnyError>)
where
R: FnMut(Option<Vec<PathBuf>>) -> F,
F: Future<Output = ResolutionResult<T>>,
{
loop {
let changed = debounced_receiver.recv().await;
match resolver(changed).await {
ResolutionResult::Ignore => {
log::debug!("File change ignored")
}
ResolutionResult::Restart {
mut paths_to_watch,
result,
} => {
// watch the current directory when empty
if paths_to_watch.is_empty() {
paths_to_watch.push(PathBuf::from("."));
}
return (paths_to_watch, result);
}
}
} }
} }
@ -139,138 +108,26 @@ fn create_print_after_restart_fn(clear_screen: bool) -> impl Fn() {
   }
 }

-/// Creates a file watcher, which will call `resolver` with every file change.
-///
-/// - `resolver` is used for resolving file paths to be watched at every restarting
-///   of the watcher, and can also return a value to be passed to `operation`.
-///   It returns a [`ResolutionResult`], which can either instruct the watcher to restart or ignore the change.
-///   This always contains paths to watch;
-///
-/// - `operation` is the actual operation we want to run every time the watcher detects file
-///   changes. For example, in the case where we would like to bundle, then `operation` would
-///   have the logic for it like bundling the code.
-pub async fn watch_func<R, O, T, F1, F2>(
-  mut resolver: R,
-  mut operation: O,
-  print_config: PrintConfig,
-) -> Result<(), AnyError>
-where
-  R: FnMut(Option<Vec<PathBuf>>) -> F1,
-  O: FnMut(T) -> F2,
-  F1: Future<Output = ResolutionResult<T>>,
-  F2: Future<Output = Result<(), AnyError>>,
-{
-  let (sender, mut receiver) = DebouncedReceiver::new_with_sender();
-
-  let PrintConfig {
-    job_name,
-    clear_screen,
-  } = print_config;
-
-  // Store previous data. If module resolution fails at some point, the watcher will try to
-  // continue watching files using these data.
-  let mut paths_to_watch;
-  let mut resolution_result;
-
-  let print_after_restart = create_print_after_restart_fn(clear_screen);
-
-  match resolver(None).await {
-    ResolutionResult::Ignore => {
-      // The only situation where it makes sense to ignore the initial 'change'
-      // is if the command isn't supposed to do anything until something changes,
-      // e.g. a variant of `deno test` which doesn't run the entire test suite to start with,
-      // but instead does nothing until you make a change.
-      //
-      // In that case, this is probably the correct output.
-      info!(
-        "{} Waiting for file changes...",
-        colors::intense_blue("Watcher"),
-      );
-
-      let (paths, result) = next_restart(&mut resolver, &mut receiver).await;
-      paths_to_watch = paths;
-      resolution_result = result;
-
-      print_after_restart();
-    }
-    ResolutionResult::Restart {
-      paths_to_watch: mut paths,
-      result,
-    } => {
-      // watch the current directory when empty
-      if paths.is_empty() {
-        paths.push(PathBuf::from("."));
-      }
-      paths_to_watch = paths;
-      resolution_result = result;
-    }
-  };
-
-  info!("{} {} started.", colors::intense_blue("Watcher"), job_name,);
-
-  loop {
-    let mut watcher = new_watcher(sender.clone())?;
-    add_paths_to_watcher(&mut watcher, &paths_to_watch);
-
-    match resolution_result {
-      Ok(operation_arg) => {
-        let fut = error_handler(operation(operation_arg));
-        select! {
-          (paths, result) = next_restart(&mut resolver, &mut receiver) => {
-            if result.is_ok() {
-              paths_to_watch = paths;
-            }
-            resolution_result = result;
-            print_after_restart();
-            continue;
-          },
-          _ = fut => {},
-        };
-
-        info!(
-          "{} {} finished. Restarting on file change...",
-          colors::intense_blue("Watcher"),
-          job_name,
-        );
-      }
-      Err(error) => {
-        eprintln!("{}: {}", colors::red_bold("error"), error);
-        info!(
-          "{} {} failed. Restarting on file change...",
-          colors::intense_blue("Watcher"),
-          job_name,
-        );
-      }
-    }
-
-    let (paths, result) = next_restart(&mut resolver, &mut receiver).await;
-    if result.is_ok() {
-      paths_to_watch = paths;
-    }
-    resolution_result = result;
-    print_after_restart();
-
-    drop(watcher);
-  }
-}

 /// Creates a file watcher.
 ///
 /// - `operation` is the actual operation we want to run every time the watcher detects file
 ///   changes. For example, in the case where we would like to bundle, then `operation` would
 ///   have the logic for it like bundling the code.
-pub async fn watch_func2<T: Clone, O, F>(
-  mut paths_to_watch_receiver: UnboundedReceiver<Vec<PathBuf>>,
-  mut operation: O,
-  operation_args: T,
+pub async fn watch_func<O, F>(
+  mut flags: Flags,
   print_config: PrintConfig,
+  mut operation: O,
 ) -> Result<(), AnyError>
 where
-  O: FnMut(T) -> Result<F, AnyError>,
+  O: FnMut(
+    Flags,
+    UnboundedSender<Vec<PathBuf>>,
+    Option<Vec<PathBuf>>,
+  ) -> Result<F, AnyError>,
   F: Future<Output = Result<(), AnyError>>,
 {
+  let (paths_to_watch_sender, mut paths_to_watch_receiver) =
+    tokio::sync::mpsc::unbounded_channel();
   let (watcher_sender, mut watcher_receiver) =
     DebouncedReceiver::new_with_sender();
@ -303,6 +160,7 @@ where
     }
   }

+  let mut changed_paths = None;
   loop {
     // We may need to give the runtime a tick to settle, as cancellations may need to propagate
     // to tasks. We choose yielding 10 times to the runtime as a decent heuristic. If watch tests
@ -320,21 +178,34 @@ where
         add_paths_to_watcher(&mut watcher, &maybe_paths.unwrap());
       }
     };

-    let operation_future = error_handler(operation(operation_args.clone())?);
+    let operation_future = error_handler(operation(
+      flags.clone(),
+      paths_to_watch_sender.clone(),
+      changed_paths.take(),
+    )?);
+    // don't reload dependencies after the first run
+    flags.reload = false;

     select! {
       _ = receiver_future => {},
-      _ = watcher_receiver.recv() => {
+      received_changed_paths = watcher_receiver.recv() => {
         print_after_restart();
+        changed_paths = received_changed_paths;
         continue;
       },
-      _ = operation_future => {
+      success = operation_future => {
         consume_paths_to_watch(&mut watcher, &mut paths_to_watch_receiver);
         // TODO(bartlomieju): print exit code here?
         info!(
-          "{} {} finished. Restarting on file change...",
+          "{} {} {}. Restarting on file change...",
           colors::intense_blue("Watcher"),
           job_name,
+          if success {
+            "finished"
+          } else {
+            "failed"
+          }
         );
       },
     };
@ -347,8 +218,9 @@ where
     };
     select! {
       _ = receiver_future => {},
-      _ = watcher_receiver.recv() => {
+      received_changed_paths = watcher_receiver.recv() => {
         print_after_restart();
+        changed_paths = received_changed_paths;
         continue;
       },
     };
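Taken together, the new `watch_func` owns the paths-to-watch channel itself and re-invokes the operation factory on every restart with a fresh clone of the `Flags` and the paths that changed, clearing `flags.reload` after the first run; this per-restart re-invocation is what lets configuration be re-resolved when the watcher restarts. The following stand-alone sketch mimics that loop shape with simplified stand-in types (`tokio` and `std` only); the names `Flags` and `run_once` are illustrative, not Deno's actual items.

// Sketch only: a simplified restart loop in the shape of the new API.
use std::path::PathBuf;
use tokio::sync::mpsc::{self, UnboundedSender};

#[derive(Clone, Debug)]
struct Flags {
  reload: bool,
}

// The per-run "operation": it receives a fresh copy of the flags and the
// paths that changed since the previous run, so anything derived from them
// (for example, config files) is re-resolved on every restart.
async fn run_once(
  flags: Flags,
  _paths_to_watch: UnboundedSender<Vec<PathBuf>>,
  changed: Option<Vec<PathBuf>>,
) {
  println!("running with reload={}, changed={changed:?}", flags.reload);
}

#[tokio::main]
async fn main() {
  let mut flags = Flags { reload: true };
  let (paths_to_watch_sender, _paths_to_watch_receiver) =
    mpsc::unbounded_channel::<Vec<PathBuf>>();
  let mut changed_paths: Option<Vec<PathBuf>> = None;

  for _restart in 0..2 {
    run_once(
      flags.clone(),
      paths_to_watch_sender.clone(),
      changed_paths.take(),
    )
    .await;
    // Mirrors `flags.reload = false;` in the diff: don't force a
    // dependency reload after the first run.
    flags.reload = false;
    // In the real watcher these arrive from the file-change notification.
    changed_paths = Some(vec![PathBuf::from("deno.json")]);
  }
}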
View file
@ -1,99 +0,0 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.

use crate::args::CliOptions;
use crate::cache::ParsedSourceCache;
use crate::graph_util::ModuleGraphContainer;
use crate::module_loader::CjsResolutionStore;

use deno_core::parking_lot::Mutex;
use deno_core::ModuleSpecifier;
use std::path::PathBuf;
use std::sync::Arc;

pub struct FileWatcher {
  cli_options: Arc<CliOptions>,
  cjs_resolutions: Arc<CjsResolutionStore>,
  graph_container: Arc<ModuleGraphContainer>,
  maybe_reporter: Option<FileWatcherReporter>,
  parsed_source_cache: Arc<ParsedSourceCache>,
}

impl FileWatcher {
  pub fn new(
    cli_options: Arc<CliOptions>,
    cjs_resolutions: Arc<CjsResolutionStore>,
    graph_container: Arc<ModuleGraphContainer>,
    maybe_reporter: Option<FileWatcherReporter>,
    parsed_source_cache: Arc<ParsedSourceCache>,
  ) -> Self {
    Self {
      cli_options,
      cjs_resolutions,
      parsed_source_cache,
      graph_container,
      maybe_reporter,
    }
  }

  /// Reset all runtime state to its default. This should be used on file
  /// watcher restarts.
  pub fn reset(&self) {
    self.cjs_resolutions.clear();
    self.parsed_source_cache.clear();
    self.graph_container.clear();

    self.init_watcher();
  }

  // Add invariant files like the import map and explicit watch flag list to
  // the watcher. Dedup for build_for_file_watcher and reset_for_file_watcher.
  pub fn init_watcher(&self) {
    let files_to_watch_sender = match &self.maybe_reporter {
      Some(reporter) => &reporter.sender,
      None => return,
    };
    if let Some(watch_paths) = self.cli_options.watch_paths() {
      files_to_watch_sender.send(watch_paths.clone()).unwrap();
    }
    if let Ok(Some(import_map_path)) = self
      .cli_options
      .resolve_import_map_specifier()
      .map(|ms| ms.and_then(|ref s| s.to_file_path().ok()))
    {
      files_to_watch_sender.send(vec![import_map_path]).unwrap();
    }
  }
}

#[derive(Clone, Debug)]
pub struct FileWatcherReporter {
  sender: tokio::sync::mpsc::UnboundedSender<Vec<PathBuf>>,
  file_paths: Arc<Mutex<Vec<PathBuf>>>,
}

impl FileWatcherReporter {
  pub fn new(sender: tokio::sync::mpsc::UnboundedSender<Vec<PathBuf>>) -> Self {
    Self {
      sender,
      file_paths: Default::default(),
    }
  }
}

impl deno_graph::source::Reporter for FileWatcherReporter {
  fn on_load(
    &self,
    specifier: &ModuleSpecifier,
    modules_done: usize,
    modules_total: usize,
  ) {
    let mut file_paths = self.file_paths.lock();
    if specifier.scheme() == "file" {
      file_paths.push(specifier.to_file_path().unwrap());
    }
    if modules_done == modules_total {
      self.sender.send(file_paths.drain(..).collect()).unwrap();
    }
  }
}
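The removed `FileWatcherReporter` illustrates the pattern the watcher relies on: a `deno_graph` reporter collects local `file:` specifiers as the module graph loads and hands the whole batch to the watcher once loading finishes, while `init_watcher` re-sends invariant paths such as the import map and explicit watch-flag entries. Below is a stand-alone sketch of that batching idea using plain `std`/`tokio` types; the `WatchPathCollector` name and its `on_load` signature are illustrative stand-ins for `deno_graph::source::Reporter` and `ModuleSpecifier`, not Deno's code.

// Sketch only: batch up watchable paths and flush them when loading is done.
use std::path::PathBuf;
use std::sync::Mutex;
use tokio::sync::mpsc::UnboundedSender;

struct WatchPathCollector {
  sender: UnboundedSender<Vec<PathBuf>>,
  file_paths: Mutex<Vec<PathBuf>>,
}

impl WatchPathCollector {
  fn on_load(&self, path: Option<PathBuf>, modules_done: usize, modules_total: usize) {
    let mut file_paths = self.file_paths.lock().unwrap();
    // Only local files can be watched; remote modules pass `None`.
    if let Some(path) = path {
      file_paths.push(path);
    }
    // Flush the batch once the whole module graph has been visited.
    if modules_done == modules_total {
      let _ = self.sender.send(file_paths.drain(..).collect());
    }
  }
}

fn main() {
  let (sender, mut receiver) = tokio::sync::mpsc::unbounded_channel();
  let collector = WatchPathCollector {
    sender,
    file_paths: Mutex::new(Vec::new()),
  };
  collector.on_load(Some(PathBuf::from("main.ts")), 1, 2);
  collector.on_load(None, 2, 2); // e.g. a remote module
  assert_eq!(receiver.try_recv().unwrap(), vec![PathBuf::from("main.ts")]);
}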