diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0cc3ab7485..d3a42c12ba 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -259,7 +259,7 @@ jobs: ~/.cargo/registry/index ~/.cargo/registry/cache ~/.cargo/git/db - key: 3-cargo-home-${{ matrix.os }}-${{ hashFiles('Cargo.lock') }} + key: 4-cargo-home-${{ matrix.os }}-${{ hashFiles('Cargo.lock') }} # In main branch, always creates fresh cache - name: Cache build output (main) @@ -275,7 +275,7 @@ jobs: !./target/*/*.zip !./target/*/*.tar.gz key: | - 3-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ github.sha }} + 4-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ github.sha }} # Restore cache from the latest 'main' branch build. - name: Cache build output (PR) @@ -291,7 +291,7 @@ jobs: !./target/*/*.tar.gz key: never_saved restore-keys: | - 3-cargo-target-${{ matrix.os }}-${{ matrix.profile }}- + 4-cargo-target-${{ matrix.os }}-${{ matrix.profile }}- # Don't save cache after building PRs or branches other than 'main'. - name: Skip save cache (PR) diff --git a/Cargo.lock b/Cargo.lock index a4e461dee3..f7a7e8cf6d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -773,7 +773,6 @@ dependencies = [ "lspower", "nix", "notify", - "num_cpus", "once_cell", "os_pipe", "percent-encoding", diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 36a03c964c..b50efa09ef 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -75,7 +75,6 @@ libc = "=0.2.106" log = { version = "=0.4.14", features = ["serde"] } lspower = "=1.4.0" notify = "=5.0.0-pre.12" -num_cpus = "=1.13.0" once_cell = "=1.9.0" percent-encoding = "=2.1.0" pin-project = "=1.0.8" diff --git a/cli/cache.rs b/cli/cache.rs index 8bd40fc8a4..586912495a 100644 --- a/cli/cache.rs +++ b/cli/cache.rs @@ -198,8 +198,7 @@ impl Cacher for FetchCacher { .disk_cache .get(&filename) .ok() - .map(|b| String::from_utf8(b).ok()) - .flatten() + .and_then(|b| String::from_utf8(b).ok()) } fn set( diff --git a/cli/compat/esm_resolver.rs b/cli/compat/esm_resolver.rs index 68824ab118..1ac12ca148 100644 --- a/cli/compat/esm_resolver.rs +++ b/cli/compat/esm_resolver.rs @@ -549,7 +549,7 @@ fn resolve_package_target_string( if invalid_segment_re.is_match(&subpath) { let request = if pattern { - match_.replace("*", &subpath) + match_.replace('*', &subpath) } else { format!("{}{}", match_, subpath) }; diff --git a/cli/compat/mod.rs b/cli/compat/mod.rs index feeca2f4ae..0c30a58fc8 100644 --- a/cli/compat/mod.rs +++ b/cli/compat/mod.rs @@ -138,7 +138,7 @@ pub(crate) fn add_global_require( } fn escape_for_single_quote_string(text: &str) -> String { - text.replace(r"\", r"\\").replace("'", r"\'") + text.replace('\\', r"\\").replace('\'', r"\'") } pub fn setup_builtin_modules( diff --git a/cli/config_file.rs b/cli/config_file.rs index 3ca00b0f4c..205c789fbf 100644 --- a/cli/config_file.rs +++ b/cli/config_file.rs @@ -563,8 +563,7 @@ impl ConfigFile { .json .compiler_options .as_ref() - .map(|co| co.get("checkJs").map(|v| v.as_bool()).flatten()) - .flatten() + .and_then(|co| co.get("checkJs").and_then(|v| v.as_bool())) .unwrap_or(false) } diff --git a/cli/disk_cache.rs b/cli/disk_cache.rs index d3d294c18b..349b786b7d 100644 --- a/cli/disk_cache.rs +++ b/cli/disk_cache.rs @@ -91,7 +91,7 @@ impl DiskCache { | Prefix::VerbatimUNC(server, share) => { out.push("UNC"); let host = Host::parse(server.to_str().unwrap()).unwrap(); - let host = host.to_string().replace(":", "_"); + let host = host.to_string().replace(':', "_"); out.push(host); out.push(share); } diff --git a/cli/flags.rs 
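Most of the Rust churn in this diff is one mechanical rewrite: `Option::map` followed by `.flatten()` collapsed into `and_then`, which is what clippy's `map_flatten` lint asks for (see the `cli/cache.rs` and `cli/config_file.rs` hunks above). A minimal standalone sketch, not taken from the codebase, of why the two spellings are interchangeable whenever the closure itself returns an `Option`:

```rust
fn main() {
    let bytes: Option<Vec<u8>> = Some(vec![104, 105]);

    // Old shape, as in the removed lines: map to Option<Option<String>>, then flatten.
    let old: Option<String> = bytes.clone().map(|b| String::from_utf8(b).ok()).flatten();

    // New shape, as in the added lines: and_then short-circuits on None and
    // produces Option<String> directly.
    let new: Option<String> = bytes.and_then(|b| String::from_utf8(b).ok());

    assert_eq!(old, new);
    assert_eq!(new.as_deref(), Some("hi"));
}
```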
b/cli/flags.rs index ab8153c4ed..316425247c 100644 --- a/cli/flags.rs +++ b/cli/flags.rs @@ -2232,8 +2232,8 @@ fn test_parse(flags: &mut Flags, matches: &clap::ArgMatches) { if let Some(value) = matches.value_of("jobs") { value.parse().unwrap() } else { - // TODO(caspervonb) drop the dependency on num_cpus when https://doc.rust-lang.org/std/thread/fn.available_concurrency.html becomes stable. - NonZeroUsize::new(num_cpus::get()).unwrap() + std::thread::available_parallelism() + .unwrap_or(NonZeroUsize::new(1).unwrap()) } } else { NonZeroUsize::new(1).unwrap() diff --git a/cli/lsp/cache.rs b/cli/lsp/cache.rs index 8b2c85dec7..f94faa4192 100644 --- a/cli/lsp/cache.rs +++ b/cli/lsp/cache.rs @@ -61,13 +61,10 @@ impl CacheServer { .unwrap(); let maybe_import_map_resolver = maybe_import_map.map(ImportMapResolver::new); - let maybe_jsx_resolver = maybe_config_file - .as_ref() - .map(|cf| { - cf.to_maybe_jsx_import_source_module() - .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone())) - }) - .flatten(); + let maybe_jsx_resolver = maybe_config_file.as_ref().and_then(|cf| { + cf.to_maybe_jsx_import_source_module() + .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone())) + }); let maybe_resolver = if maybe_jsx_resolver.is_some() { maybe_jsx_resolver.as_ref().map(|jr| jr.as_resolver()) } else { @@ -76,8 +73,7 @@ impl CacheServer { .map(|im| im.as_resolver()) }; let maybe_imports = maybe_config_file - .map(|cf| cf.to_maybe_imports().ok()) - .flatten() + .and_then(|cf| cf.to_maybe_imports().ok()) .flatten(); let mut cache = FetchCacher::new( ps.dir.gen_cache.clone(), @@ -187,10 +183,9 @@ impl CacheMetadata { let version = self .cache .get_cache_filename(specifier) - .map(|ref path| calculate_fs_version(path)) - .flatten(); + .and_then(|ref path| calculate_fs_version(path)); let metadata = self.metadata.lock().get(specifier).cloned(); - if metadata.as_ref().map(|m| m.version.clone()).flatten() != version { + if metadata.as_ref().and_then(|m| m.version.clone()) != version { self.refresh(specifier).map(|m| m.values) } else { metadata.map(|m| m.values) diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs index 39f7f1c489..8801af7225 100644 --- a/cli/lsp/diagnostics.rs +++ b/cli/lsp/diagnostics.rs @@ -538,8 +538,7 @@ async fn generate_ts_diagnostics( let version = snapshot .documents .get(&specifier) - .map(|d| d.maybe_lsp_version()) - .flatten(); + .and_then(|d| d.maybe_lsp_version()); // check if the specifier is enabled again just in case TS returns us // diagnostics for a disabled specifier let ts_diagnostics = if config.specifier_enabled(&specifier) { @@ -555,8 +554,7 @@ async fn generate_ts_diagnostics( let version = snapshot .documents .get(&specifier) - .map(|d| d.maybe_lsp_version()) - .flatten(); + .and_then(|d| d.maybe_lsp_version()); diagnostics_vec.push((specifier, version, Vec::new())); } Ok(diagnostics_vec) diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs index 9a302bb9ee..cc320db461 100644 --- a/cli/lsp/documents.rs +++ b/cli/lsp/documents.rs @@ -199,18 +199,17 @@ impl AssetOrDocument { ) -> Option<(String, deno_graph::Dependency, deno_graph::Range)> { self .document() - .map(|d| d.get_maybe_dependency(position)) - .flatten() + .and_then(|d| d.get_maybe_dependency(position)) } pub fn maybe_parsed_source( &self, ) -> Option> { - self.document().map(|d| d.maybe_parsed_source()).flatten() + self.document().and_then(|d| d.maybe_parsed_source()) } pub fn document_lsp_version(&self) -> Option { - self.document().map(|d| d.maybe_lsp_version()).flatten() + 
self.document().and_then(|d| d.maybe_lsp_version()) } } @@ -347,8 +346,7 @@ impl Document { .0 .maybe_language_id .as_ref() - .map(|li| li.as_headers()) - .flatten(); + .and_then(|li| li.as_headers()); let parser = SourceParser::default(); Some(deno_graph::parse_module( &self.0.specifier, @@ -865,8 +863,7 @@ impl Documents { } else { let mut file_system_docs = self.file_system_docs.lock(); let fs_version = get_document_path(&self.cache, &specifier) - .map(|path| calculate_fs_version(&path)) - .flatten(); + .and_then(|path| calculate_fs_version(&path)); let file_system_doc = file_system_docs.docs.get(&specifier); if file_system_doc.map(|d| d.fs_version().to_string()) != fs_version { // attempt to update the file on the file system @@ -1009,12 +1006,10 @@ impl Documents { ) { // TODO(@kitsonk) update resolved dependencies? self.maybe_import_map = maybe_import_map.map(ImportMapResolver::new); - self.maybe_jsx_resolver = maybe_config_file - .map(|cf| { - cf.to_maybe_jsx_import_source_module() - .map(|im| JsxResolver::new(im, self.maybe_import_map.clone())) - }) - .flatten(); + self.maybe_jsx_resolver = maybe_config_file.and_then(|cf| { + cf.to_maybe_jsx_import_source_module() + .map(|im| JsxResolver::new(im, self.maybe_import_map.clone())) + }); self.imports = Arc::new( if let Some(Ok(Some(imports))) = maybe_config_file.map(|cf| cf.to_maybe_imports()) @@ -1094,14 +1089,12 @@ impl Documents { specifier: &ModuleSpecifier, ) -> Option<(ModuleSpecifier, MediaType)> { let doc = self.get(specifier)?; - let maybe_module = doc.maybe_module().map(|r| r.as_ref().ok()).flatten(); - let maybe_types_dependency = maybe_module - .map(|m| { - m.maybe_types_dependency - .as_ref() - .map(|(_, resolved)| resolved.clone()) - }) - .flatten(); + let maybe_module = doc.maybe_module().and_then(|r| r.as_ref().ok()); + let maybe_types_dependency = maybe_module.and_then(|m| { + m.maybe_types_dependency + .as_ref() + .map(|(_, resolved)| resolved.clone()) + }); if let Some(Resolved::Ok { specifier, .. 
}) = maybe_types_dependency { self.resolve_dependency(&specifier) } else { diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index e4dd8b3d6a..05b6fcc03f 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -833,8 +833,7 @@ impl Inner { params .settings .as_object() - .map(|settings| settings.get(SETTINGS_SECTION)) - .flatten() + .and_then(|settings| settings.get(SETTINGS_SECTION)) .cloned() }; @@ -1037,8 +1036,7 @@ impl Inner { { let dep_maybe_types_dependency = dep .get_code() - .map(|s| self.documents.get(s)) - .flatten() + .and_then(|s| self.documents.get(s)) .map(|d| d.maybe_types_dependency()); let value = match (dep.maybe_code.is_none(), dep.maybe_type.is_none(), &dep_maybe_types_dependency) { (false, false, None) => format!( @@ -1204,7 +1202,7 @@ impl Inner { &specifier, diagnostic, asset_or_doc.document().map(|d| d.text_info()), - asset_or_doc.maybe_parsed_source().map(|r| r.ok()).flatten(), + asset_or_doc.maybe_parsed_source().and_then(|r| r.ok()), ) .map_err(|err| { error!("Unable to fix lint error: {}", err); @@ -1388,8 +1386,7 @@ impl Inner { error!("Error getting code lenses for \"{}\": {}", specifier, err); LspError::internal_error() })?; - let parsed_source = - asset_or_doc.maybe_parsed_source().map(|r| r.ok()).flatten(); + let parsed_source = asset_or_doc.maybe_parsed_source().and_then(|r| r.ok()); let line_index = asset_or_doc.line_index(); let code_lenses = code_lens::collect( &specifier, @@ -1463,8 +1460,7 @@ impl Inner { if let Some(document_highlights) = maybe_document_highlights { let result = document_highlights .into_iter() - .map(|dh| dh.to_highlight(line_index.clone())) - .flatten() + .flat_map(|dh| dh.to_highlight(line_index.clone())) .collect(); self.performance.measure(mark); Ok(Some(result)) diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index f2d733e6fb..a0f2008a4e 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -1402,9 +1402,7 @@ impl FileTextChanges { ops.push(lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit { text_document: lsp::OptionalVersionedTextDocumentIdentifier { uri: specifier.clone(), - version: maybe_asset_or_document - .map(|d| d.document_lsp_version()) - .flatten(), + version: maybe_asset_or_document.and_then(|d| d.document_lsp_version()), }, edits, })); @@ -2064,7 +2062,7 @@ impl CompletionEntry { return Some(insert_text.clone()); } } else { - return Some(self.name.replace("#", "")); + return Some(self.name.replace('#', "")); } } diff --git a/cli/main.rs b/cli/main.rs index 597845daed..f5c1b78d33 100644 --- a/cli/main.rs +++ b/cli/main.rs @@ -146,7 +146,9 @@ fn create_web_worker_callback(ps: ProcState) -> Arc { bootstrap: BootstrapOptions { args: ps.flags.argv.clone(), apply_source_maps: true, - cpu_count: num_cpus::get(), + cpu_count: std::thread::available_parallelism() + .map(|p| p.get()) + .unwrap_or(1), debug_flag: ps .flags .log_level @@ -247,7 +249,9 @@ pub fn create_main_worker( bootstrap: BootstrapOptions { apply_source_maps: true, args: ps.flags.argv.clone(), - cpu_count: num_cpus::get(), + cpu_count: std::thread::available_parallelism() + .map(|p| p.get()) + .unwrap_or(1), debug_flag: ps.flags.log_level.map_or(false, |l| l == log::Level::Debug), enable_testing_features: ps.flags.enable_testing_features, location: ps.flags.location.clone(), @@ -477,14 +481,10 @@ async fn info_command( let maybe_locker = lockfile::as_maybe_locker(ps.lockfile.clone()); let maybe_import_map_resolver = ps.maybe_import_map.clone().map(ImportMapResolver::new); - let maybe_jsx_resolver = ps - 
.maybe_config_file - .as_ref() - .map(|cf| { - cf.to_maybe_jsx_import_source_module() - .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone())) - }) - .flatten(); + let maybe_jsx_resolver = ps.maybe_config_file.as_ref().and_then(|cf| { + cf.to_maybe_jsx_import_source_module() + .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone())) + }); let maybe_resolver = if maybe_jsx_resolver.is_some() { maybe_jsx_resolver.as_ref().map(|jr| jr.as_resolver()) } else { @@ -649,14 +649,10 @@ async fn create_graph_and_maybe_check( }; let maybe_import_map_resolver = ps.maybe_import_map.clone().map(ImportMapResolver::new); - let maybe_jsx_resolver = ps - .maybe_config_file - .as_ref() - .map(|cf| { - cf.to_maybe_jsx_import_source_module() - .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone())) - }) - .flatten(); + let maybe_jsx_resolver = ps.maybe_config_file.as_ref().and_then(|cf| { + cf.to_maybe_jsx_import_source_module() + .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone())) + }); let maybe_resolver = if maybe_jsx_resolver.is_some() { maybe_jsx_resolver.as_ref().map(|jr| jr.as_resolver()) } else { @@ -802,10 +798,7 @@ async fn bundle_command( .specifiers() .iter() .filter_map(|(_, r)| { - r.as_ref() - .ok() - .map(|(s, _, _)| s.to_file_path().ok()) - .flatten() + r.as_ref().ok().and_then(|(s, _, _)| s.to_file_path().ok()) }) .collect(); @@ -1003,14 +996,10 @@ async fn run_with_watch(flags: Flags, script: String) -> Result { }; let maybe_import_map_resolver = ps.maybe_import_map.clone().map(ImportMapResolver::new); - let maybe_jsx_resolver = ps - .maybe_config_file - .as_ref() - .map(|cf| { - cf.to_maybe_jsx_import_source_module() - .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone())) - }) - .flatten(); + let maybe_jsx_resolver = ps.maybe_config_file.as_ref().and_then(|cf| { + cf.to_maybe_jsx_import_source_module() + .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone())) + }); let maybe_resolver = if maybe_jsx_resolver.is_some() { maybe_jsx_resolver.as_ref().map(|jr| jr.as_resolver()) } else { @@ -1041,10 +1030,7 @@ async fn run_with_watch(flags: Flags, script: String) -> Result { .specifiers() .iter() .filter_map(|(_, r)| { - r.as_ref() - .ok() - .map(|(s, _, _)| s.to_file_path().ok()) - .flatten() + r.as_ref().ok().and_then(|(s, _, _)| s.to_file_path().ok()) }) .collect(); diff --git a/cli/proc_state.rs b/cli/proc_state.rs index 12bdd3149c..fee9e747e7 100644 --- a/cli/proc_state.rs +++ b/cli/proc_state.rs @@ -190,13 +190,10 @@ impl ProcState { ); let maybe_import_map_resolver = maybe_import_map.clone().map(ImportMapResolver::new); - let maybe_jsx_resolver = maybe_config_file - .as_ref() - .map(|cf| { - cf.to_maybe_jsx_import_source_module() - .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone())) - }) - .flatten(); + let maybe_jsx_resolver = maybe_config_file.as_ref().and_then(|cf| { + cf.to_maybe_jsx_import_source_module() + .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone())) + }); let maybe_resolver: Option< Arc, > = if flags.compat { diff --git a/cli/standalone.rs b/cli/standalone.rs index b7c026cece..bd7bef8acb 100644 --- a/cli/standalone.rs +++ b/cli/standalone.rs @@ -269,7 +269,9 @@ pub async fn run( bootstrap: BootstrapOptions { apply_source_maps: false, args: metadata.argv, - cpu_count: num_cpus::get(), + cpu_count: std::thread::available_parallelism() + .map(|p| p.get()) + .unwrap_or(1), debug_flag: metadata.log_level.map_or(false, |l| l == log::Level::Debug), enable_testing_features: false, 
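The `num_cpus` crate is dropped because `std::thread::available_parallelism` is stable as of Rust 1.59.0 (hence the toolchain bump at the bottom of this diff). Unlike `num_cpus::get()`, it returns an `io::Result<NonZeroUsize>`, so each call site needs an explicit fallback. Shown in isolation, the two shapes used in `cli/flags.rs`, `cli/main.rs` and `cli/standalone.rs` are:

```rust
use std::num::NonZeroUsize;
use std::thread;

// Used where a plain usize is needed (BootstrapOptions::cpu_count):
// fall back to 1 if the parallelism query fails.
fn cpu_count() -> usize {
    thread::available_parallelism().map(|p| p.get()).unwrap_or(1)
}

// Used for the default of the `deno test --jobs` flag, which wants a NonZeroUsize.
fn default_jobs() -> NonZeroUsize {
    thread::available_parallelism().unwrap_or(NonZeroUsize::new(1).unwrap())
}

fn main() {
    println!("cpu_count = {}", cpu_count());
    println!("default --jobs = {}", default_jobs());
}
```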
location: metadata.location, diff --git a/cli/tools/coverage/mod.rs b/cli/tools/coverage/mod.rs index 35001db1e9..26f36adaec 100644 --- a/cli/tools/coverage/mod.rs +++ b/cli/tools/coverage/mod.rs @@ -312,7 +312,7 @@ fn generate_coverage_report( let mut found_lines = line_counts .iter() .enumerate() - .map(|(index, count)| { + .flat_map(|(index, count)| { // get all the mappings from this destination line to a different src line let mut results = source_map .tokens() @@ -324,7 +324,6 @@ fn generate_coverage_report( results.dedup_by_key(|(index, _)| *index); results.into_iter() }) - .flatten() .collect::>(); found_lines.sort_unstable_by_key(|(index, _)| *index); @@ -400,8 +399,7 @@ impl CoverageReporter for LcovCoverageReporter { .url .to_file_path() .ok() - .map(|p| p.to_str().map(|p| p.to_string())) - .flatten() + .and_then(|p| p.to_str().map(|p| p.to_string())) .unwrap_or_else(|| coverage_report.url.to_string()); writeln!(out_writer, "SF:{}", file_path)?; diff --git a/cli/tools/fmt.rs b/cli/tools/fmt.rs index 45cf69b864..4fba9ea13d 100644 --- a/cli/tools/fmt.rs +++ b/cli/tools/fmt.rs @@ -234,7 +234,7 @@ pub fn format_file( file_text: &str, fmt_options: FmtOptionsConfig, ) -> Result { - let ext = get_extension(file_path).unwrap_or_else(String::new); + let ext = get_extension(file_path).unwrap_or_default(); if matches!( ext.as_str(), "md" | "mkd" | "mkdn" | "mdwn" | "mdown" | "markdown" @@ -589,8 +589,7 @@ where let mut errors = join_results.into_iter().filter_map(|join_result| { join_result .ok() - .map(|handle_result| handle_result.err()) - .flatten() + .and_then(|handle_result| handle_result.err()) }); if let Some(e) = errors.next() { diff --git a/cli/tools/installer.rs b/cli/tools/installer.rs index 4cbbb7cf51..8318c72bf7 100644 --- a/cli/tools/installer.rs +++ b/cli/tools/installer.rs @@ -56,7 +56,7 @@ fn generate_executable_file(shim_data: &ShimData) -> Result<(), AnyError> { "% generated by deno install %\n@deno {} %*\n", args .iter() - .map(|arg| arg.replace("%", "%%")) + .map(|arg| arg.replace('%', "%%")) .collect::>() .join(" ") ); @@ -646,7 +646,7 @@ mod tests { let bin_dir = temp_dir.path().join("bin"); std::fs::create_dir(&bin_dir).unwrap(); let original_install_root = env::var_os("DENO_INSTALL_ROOT"); - env::set_var("DENO_INSTALL_ROOT", temp_dir.path().to_path_buf()); + env::set_var("DENO_INSTALL_ROOT", temp_dir.path()); let shim_data = resolve_shim_data( &Flags::default(), @@ -876,7 +876,7 @@ mod tests { let config_file_name = "echo_test.tsconfig.json"; - let file_path = bin_dir.join(config_file_name.to_string()); + let file_path = bin_dir.join(config_file_name); assert!(file_path.exists()); let content = fs::read_to_string(file_path).unwrap(); assert!(content == "{}"); diff --git a/cli/tools/repl/editor.rs b/cli/tools/repl/editor.rs index ea62bd49ff..825934b5ec 100644 --- a/cli/tools/repl/editor.rs +++ b/cli/tools/repl/editor.rs @@ -70,7 +70,7 @@ impl EditorHelper { self .get_object_expr_properties(object_expr) - .unwrap_or_else(Vec::new) + .unwrap_or_default() } fn get_expression_type(&self, expr: &str) -> Option { diff --git a/cli/tools/test.rs b/cli/tools/test.rs index 101dcb6b6a..97a7ddf4f2 100644 --- a/cli/tools/test.rs +++ b/cli/tools/test.rs @@ -1082,14 +1082,10 @@ pub async fn run_tests_with_watch( let maybe_import_map_resolver = ps.maybe_import_map.clone().map(ImportMapResolver::new); - let maybe_jsx_resolver = ps - .maybe_config_file - .as_ref() - .map(|cf| { - cf.to_maybe_jsx_import_source_module() - .map(|im| JsxResolver::new(im, 
maybe_import_map_resolver.clone())) - }) - .flatten(); + let maybe_jsx_resolver = ps.maybe_config_file.as_ref().and_then(|cf| { + cf.to_maybe_jsx_import_source_module() + .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone())) + }); let maybe_locker = lockfile::as_maybe_locker(ps.lockfile.clone()); let maybe_imports = ps .maybe_config_file diff --git a/cli/tools/upgrade.rs b/cli/tools/upgrade.rs index e82d51a6e1..dfcb871d1c 100644 --- a/cli/tools/upgrade.rs +++ b/cli/tools/upgrade.rs @@ -151,7 +151,7 @@ async fn get_latest_release_version( .await?; let version = res.url().path_segments().unwrap().last().unwrap(); - Ok(version.replace("v", "")) + Ok(version.replace('v', "")) } async fn get_latest_canary_version( diff --git a/cli/tools/vendor/mod.rs b/cli/tools/vendor/mod.rs index eb9c91071b..3a5455aae6 100644 --- a/cli/tools/vendor/mod.rs +++ b/cli/tools/vendor/mod.rs @@ -137,14 +137,10 @@ async fn create_graph( }; let maybe_import_map_resolver = ps.maybe_import_map.clone().map(ImportMapResolver::new); - let maybe_jsx_resolver = ps - .maybe_config_file - .as_ref() - .map(|cf| { - cf.to_maybe_jsx_import_source_module() - .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone())) - }) - .flatten(); + let maybe_jsx_resolver = ps.maybe_config_file.as_ref().and_then(|cf| { + cf.to_maybe_jsx_import_source_module() + .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone())) + }); let maybe_resolver = if maybe_jsx_resolver.is_some() { maybe_jsx_resolver.as_ref().map(|jr| jr.as_resolver()) } else { diff --git a/cli/tools/vendor/test.rs b/cli/tools/vendor/test.rs index b37e2b3b0b..7f4c18fca3 100644 --- a/cli/tools/vendor/test.rs +++ b/cli/tools/vendor/test.rs @@ -223,7 +223,7 @@ fn make_path(text: &str) -> PathBuf { // a cross platform path here assert!(text.starts_with('/')); if cfg!(windows) { - PathBuf::from(format!("C:{}", text.replace("/", "\\"))) + PathBuf::from(format!("C:{}", text.replace('/', "\\"))) } else { PathBuf::from(text) } diff --git a/cli/tsc.rs b/cli/tsc.rs index b679b049e7..9500aae74d 100644 --- a/cli/tsc.rs +++ b/cli/tsc.rs @@ -737,7 +737,7 @@ mod tests { .to_string() .replace(":///", "_") .replace("://", "_") - .replace("/", "-"); + .replace('/', "-"); let source_path = self.fixtures.join(specifier_text); let response = fs::read_to_string(&source_path) .map(|c| { diff --git a/core/async_cancel.rs b/core/async_cancel.rs index 4c1cded0e1..e8f25136cf 100644 --- a/core/async_cancel.rs +++ b/core/async_cancel.rs @@ -460,18 +460,18 @@ mod internal { /// must refer to a head (`CancelHandle`) node. fn cancel(&mut self) { let mut head_nn = NonNull::from(self); - let mut item_nn; // Mark the head node as canceled. - match replace(unsafe { head_nn.as_mut() }, NodeInner::Canceled) { - NodeInner::Linked { - kind: NodeKind::Head { .. }, - next: next_nn, - .. - } => item_nn = next_nn, - NodeInner::Unlinked | NodeInner::Canceled => return, - _ => unreachable!(), - }; + let mut item_nn = + match replace(unsafe { head_nn.as_mut() }, NodeInner::Canceled) { + NodeInner::Linked { + kind: NodeKind::Head { .. }, + next: next_nn, + .. + } => next_nn, + NodeInner::Unlinked | NodeInner::Canceled => return, + _ => unreachable!(), + }; // Cancel all item nodes in the chain, waking each stored `Waker`. while item_nn != head_nn { diff --git a/core/module_specifier.rs b/core/module_specifier.rs index d09c4afb58..ecdebbd74a 100644 --- a/core/module_specifier.rs +++ b/core/module_specifier.rs @@ -397,7 +397,7 @@ mod tests { // Relative local path. 
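The scattered `.replace("x", ..)` → `.replace('x', ..)` edits (installer.rs, upgrade.rs, vendor/test.rs, tsc.rs, module_specifier.rs, …) follow clippy's `single_char_pattern` lint: for a one-character needle a `char` pattern gives the same result and can be matched more cheaply than a one-character `&str`. A quick sketch with made-up inputs:

```rust
fn main() {
    // Same result either way; the char form is the lint-preferred spelling.
    let tag = "v1.19.0";
    assert_eq!(tag.replace("v", ""), tag.replace('v', ""));
    assert_eq!(tag.replace('v', ""), "1.19.0");

    // Mirrors the path fix-ups in the test helpers: '/' replaced with a backslash.
    let path = "tests/006_url_imports.ts";
    assert_eq!(path.replace('/', "\\"), "tests\\006_url_imports.ts");
}
```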
let expected_url = format!( "file:///{}/tests/006_url_imports.ts", - cwd_str.replace("\\", "/") + cwd_str.replace('\\', "/") ); tests.extend(vec![ (r"tests/006_url_imports.ts", expected_url.to_string()), diff --git a/runtime/ops/web_worker/sync_fetch.rs b/runtime/ops/web_worker/sync_fetch.rs index 9f146b67e7..76791ded58 100644 --- a/runtime/ops/web_worker/sync_fetch.rs +++ b/runtime/ops/web_worker/sync_fetch.rs @@ -16,7 +16,7 @@ use tokio::task::JoinHandle; // TODO(andreubotella) Properly parse the MIME type fn mime_type_essence(mime_type: &str) -> String { - let essence = match mime_type.split_once(";") { + let essence = match mime_type.split_once(';') { Some((essence, _)) => essence, None => mime_type, }; diff --git a/runtime/permissions.rs b/runtime/permissions.rs index 7e18fe217f..c4a9d144f9 100644 --- a/runtime/permissions.rs +++ b/runtime/permissions.rs @@ -2220,7 +2220,7 @@ mod tests { ]; for (host, port) in domain_tests { - assert!(!perms.net.check(&(host, Some(port))).is_ok()); + assert!(perms.net.check(&(host, Some(port))).is_err()); } } @@ -2396,13 +2396,13 @@ mod tests { assert_eq!(perms2.net.query::<&str>(None), PermissionState::Prompt); assert_eq!(perms2.net.query(Some(&("127.0.0.1", Some(8000)))), PermissionState::Granted); assert_eq!(perms1.env.query(None), PermissionState::Granted); - assert_eq!(perms1.env.query(Some(&"HOME".to_string())), PermissionState::Granted); + assert_eq!(perms1.env.query(Some("HOME")), PermissionState::Granted); assert_eq!(perms2.env.query(None), PermissionState::Prompt); - assert_eq!(perms2.env.query(Some(&"HOME".to_string())), PermissionState::Granted); + assert_eq!(perms2.env.query(Some("HOME")), PermissionState::Granted); assert_eq!(perms1.run.query(None), PermissionState::Granted); - assert_eq!(perms1.run.query(Some(&"deno".to_string())), PermissionState::Granted); + assert_eq!(perms1.run.query(Some("deno")), PermissionState::Granted); assert_eq!(perms2.run.query(None), PermissionState::Prompt); - assert_eq!(perms2.run.query(Some(&"deno".to_string())), PermissionState::Granted); + assert_eq!(perms2.run.query(Some("deno")), PermissionState::Granted); assert_eq!(perms1.ffi.query(None), PermissionState::Granted); assert_eq!(perms1.ffi.query(Some(Path::new("deno"))), PermissionState::Granted); assert_eq!(perms2.ffi.query(None), PermissionState::Prompt); @@ -2433,15 +2433,15 @@ mod tests { prompt_value.set(false); assert_eq!(perms.net.request(Some(&("127.0.0.1", Some(8000)))), PermissionState::Granted); prompt_value.set(true); - assert_eq!(perms.env.request(Some(&"HOME".to_string())), PermissionState::Granted); + assert_eq!(perms.env.request(Some("HOME")), PermissionState::Granted); assert_eq!(perms.env.query(None), PermissionState::Prompt); prompt_value.set(false); - assert_eq!(perms.env.request(Some(&"HOME".to_string())), PermissionState::Granted); + assert_eq!(perms.env.request(Some("HOME")), PermissionState::Granted); prompt_value.set(true); - assert_eq!(perms.run.request(Some(&"deno".to_string())), PermissionState::Granted); + assert_eq!(perms.run.request(Some("deno")), PermissionState::Granted); assert_eq!(perms.run.query(None), PermissionState::Prompt); prompt_value.set(false); - assert_eq!(perms.run.request(Some(&"deno".to_string())), PermissionState::Granted); + assert_eq!(perms.run.request(Some("deno")), PermissionState::Granted); prompt_value.set(true); assert_eq!(perms.ffi.request(Some(Path::new("deno"))), PermissionState::Granted); assert_eq!(perms.ffi.query(None), PermissionState::Prompt); @@ -2506,8 +2506,8 @@ mod tests { 
assert_eq!(perms.net.revoke(Some(&("127.0.0.1", Some(9000)))), PermissionState::Prompt); assert_eq!(perms.net.query(Some(&("127.0.0.1", None))), PermissionState::Prompt); assert_eq!(perms.net.query(Some(&("127.0.0.1", Some(8000)))), PermissionState::Granted); - assert_eq!(perms.env.revoke(Some(&"HOME".to_string())), PermissionState::Prompt); - assert_eq!(perms.run.revoke(Some(&"deno".to_string())), PermissionState::Prompt); + assert_eq!(perms.env.revoke(Some("HOME")), PermissionState::Prompt); + assert_eq!(perms.run.revoke(Some("deno")), PermissionState::Prompt); assert_eq!(perms.ffi.revoke(Some(Path::new("deno"))), PermissionState::Prompt); assert_eq!(perms.hrtime.revoke(), PermissionState::Denied); }; @@ -2641,10 +2641,7 @@ mod tests { assert!(perms.env.check("HOME").is_ok()); assert!(perms.env.check("hOmE").is_ok()); - assert_eq!( - perms.env.revoke(Some(&"HomE".to_string())), - PermissionState::Prompt - ); + assert_eq!(perms.env.revoke(Some("HomE")), PermissionState::Prompt); } #[test] diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 89378c52de..60883f424c 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,2 +1,2 @@ [toolchain] -channel = "1.58.1" +channel = "1.59.0"
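Two of the hunks are the iterator-side version of the same cleanup: `.map(..).flatten()` on an iterator becomes `flat_map` (`cli/tools/coverage/mod.rs`, `cli/lsp/language_server.rs`); a standalone sketch of that equivalence with throwaway data follows. The toolchain bump from 1.58.1 to 1.59.0 is also what puts `available_parallelism` on stable, and the CI cache keys moving from `3-…` to `4-…` presumably just force fresh caches instead of restoring ones built with the older compiler.

```rust
fn main() {
    let lines = ["a b", "c", "d e f"];

    // Old shape: each closure call yields an iterator, then flatten joins them.
    let old: Vec<&str> = lines.iter().map(|l| l.split(' ')).flatten().collect();

    // New shape: flat_map fuses the two adaptors into one.
    let new: Vec<&str> = lines.iter().flat_map(|l| l.split(' ')).collect();

    assert_eq!(old, new);
    assert_eq!(new, ["a", "b", "c", "d", "e", "f"]);
}
```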