Mirror of https://github.com/denoland/deno.git

chore: upgrade to Rust 1.59 (#13767)

David Sherret 2022-02-24 20:03:12 -05:00 committed by GitHub
parent c59152e400
commit 3b12afd072
31 changed files with 114 additions and 167 deletions
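
This commit bumps the pinned toolchain from Rust 1.58.1 to 1.59.0 (rust-toolchain.toml, last hunk below), bumps the CI cache-key prefixes from 3- to 4- so caches built with the old compiler are not reused, drops the num_cpus dependency in favor of std::thread::available_parallelism() (stabilized in Rust 1.59), and mechanically rewrites patterns covered by clippy's map_flatten and single_char_pattern lints: .map(..).flatten() becomes .and_then(..) or .flat_map(..), and single-character string patterns such as "#" become char literals such as '#'. There are also assorted small cleanups (unwrap_or_default(), is_err(), dropping redundant to_string()/to_path_buf() calls). A minimal standalone sketch of the two recurring rewrites (illustrative only, not code from the commit):

  use std::num::NonZeroUsize;
  use std::thread;

  #[allow(clippy::map_flatten)]
  fn main() {
    // 1. On Option, `.map(..).flatten()` with a closure that itself returns an
    //    Option is equivalent to `.and_then(..)`.
    let raw: Option<&str> = Some("42");
    let via_map_flatten: Option<u32> = raw.map(|s| s.parse().ok()).flatten();
    let via_and_then: Option<u32> = raw.and_then(|s| s.parse().ok());
    assert_eq!(via_map_flatten, via_and_then);

    // 2. `num_cpus::get()` is replaced by `std::thread::available_parallelism()`,
    //    which returns an io::Result, so the commit falls back to 1 when the
    //    value cannot be determined.
    let cpu_count: NonZeroUsize = thread::available_parallelism()
      .unwrap_or(NonZeroUsize::new(1).unwrap());
    assert!(cpu_count.get() >= 1);
  }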

@@ -259,7 +259,7 @@ jobs:
             ~/.cargo/registry/index
             ~/.cargo/registry/cache
             ~/.cargo/git/db
-          key: 3-cargo-home-${{ matrix.os }}-${{ hashFiles('Cargo.lock') }}
+          key: 4-cargo-home-${{ matrix.os }}-${{ hashFiles('Cargo.lock') }}

       # In main branch, always creates fresh cache
       - name: Cache build output (main)
@@ -275,7 +275,7 @@ jobs:
             !./target/*/*.zip
             !./target/*/*.tar.gz
           key: |
-            3-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ github.sha }}
+            4-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ github.sha }}

       # Restore cache from the latest 'main' branch build.
       - name: Cache build output (PR)
@@ -291,7 +291,7 @@ jobs:
             !./target/*/*.tar.gz
           key: never_saved
           restore-keys: |
-            3-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-
+            4-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-

       # Don't save cache after building PRs or branches other than 'main'.
       - name: Skip save cache (PR)

Cargo.lock generated
@@ -773,7 +773,6 @@ dependencies = [
  "lspower",
  "nix",
  "notify",
- "num_cpus",
  "once_cell",
  "os_pipe",
  "percent-encoding",

@@ -75,7 +75,6 @@ libc = "=0.2.106"
 log = { version = "=0.4.14", features = ["serde"] }
 lspower = "=1.4.0"
 notify = "=5.0.0-pre.12"
-num_cpus = "=1.13.0"
 once_cell = "=1.9.0"
 percent-encoding = "=2.1.0"
 pin-project = "=1.0.8"

@@ -198,8 +198,7 @@ impl Cacher for FetchCacher {
       .disk_cache
       .get(&filename)
       .ok()
-      .map(|b| String::from_utf8(b).ok())
-      .flatten()
+      .and_then(|b| String::from_utf8(b).ok())
   }

   fn set(

@@ -549,7 +549,7 @@ fn resolve_package_target_string(
   if invalid_segment_re.is_match(&subpath) {
     let request = if pattern {
-      match_.replace("*", &subpath)
+      match_.replace('*', &subpath)
     } else {
       format!("{}{}", match_, subpath)
     };

@@ -138,7 +138,7 @@ pub(crate) fn add_global_require(
 }

 fn escape_for_single_quote_string(text: &str) -> String {
-  text.replace(r"\", r"\\").replace("'", r"\'")
+  text.replace('\\', r"\\").replace('\'', r"\'")
 }

 pub fn setup_builtin_modules(

@@ -614,8 +614,7 @@ impl ConfigFile {
       .json
       .compiler_options
       .as_ref()
-      .map(|co| co.get("checkJs").map(|v| v.as_bool()).flatten())
-      .flatten()
+      .and_then(|co| co.get("checkJs").and_then(|v| v.as_bool()))
       .unwrap_or(false)
   }

@@ -91,7 +91,7 @@ impl DiskCache {
         | Prefix::VerbatimUNC(server, share) => {
           out.push("UNC");
           let host = Host::parse(server.to_str().unwrap()).unwrap();
-          let host = host.to_string().replace(":", "_");
+          let host = host.to_string().replace(':', "_");
           out.push(host);
           out.push(share);
         }

@@ -2232,8 +2232,8 @@ fn test_parse(flags: &mut Flags, matches: &clap::ArgMatches) {
     if let Some(value) = matches.value_of("jobs") {
       value.parse().unwrap()
     } else {
-      // TODO(caspervonb) drop the dependency on num_cpus when https://doc.rust-lang.org/std/thread/fn.available_concurrency.html becomes stable.
-      NonZeroUsize::new(num_cpus::get()).unwrap()
+      std::thread::available_parallelism()
+        .unwrap_or(NonZeroUsize::new(1).unwrap())
     }
   } else {
     NonZeroUsize::new(1).unwrap()

@@ -61,13 +61,10 @@ impl CacheServer {
       .unwrap();
     let maybe_import_map_resolver =
       maybe_import_map.map(ImportMapResolver::new);
-    let maybe_jsx_resolver = maybe_config_file
-      .as_ref()
-      .map(|cf| {
-        cf.to_maybe_jsx_import_source_module()
-          .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone()))
-      })
-      .flatten();
+    let maybe_jsx_resolver = maybe_config_file.as_ref().and_then(|cf| {
+      cf.to_maybe_jsx_import_source_module()
+        .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone()))
+    });
     let maybe_resolver = if maybe_jsx_resolver.is_some() {
       maybe_jsx_resolver.as_ref().map(|jr| jr.as_resolver())
     } else {
@@ -76,8 +73,7 @@ impl CacheServer {
         .map(|im| im.as_resolver())
     };
     let maybe_imports = maybe_config_file
-      .map(|cf| cf.to_maybe_imports().ok())
-      .flatten()
+      .and_then(|cf| cf.to_maybe_imports().ok())
       .flatten();
     let mut cache = FetchCacher::new(
       ps.dir.gen_cache.clone(),
@@ -187,10 +183,9 @@ impl CacheMetadata {
     let version = self
       .cache
       .get_cache_filename(specifier)
-      .map(|ref path| calculate_fs_version(path))
-      .flatten();
+      .and_then(|ref path| calculate_fs_version(path));
     let metadata = self.metadata.lock().get(specifier).cloned();
-    if metadata.as_ref().map(|m| m.version.clone()).flatten() != version {
+    if metadata.as_ref().and_then(|m| m.version.clone()) != version {
       self.refresh(specifier).map(|m| m.values)
     } else {
       metadata.map(|m| m.values)

@@ -538,8 +538,7 @@ async fn generate_ts_diagnostics(
     let version = snapshot
       .documents
       .get(&specifier)
-      .map(|d| d.maybe_lsp_version())
-      .flatten();
+      .and_then(|d| d.maybe_lsp_version());
     // check if the specifier is enabled again just in case TS returns us
     // diagnostics for a disabled specifier
     let ts_diagnostics = if config.specifier_enabled(&specifier) {
@@ -555,8 +554,7 @@ async fn generate_ts_diagnostics(
     let version = snapshot
       .documents
       .get(&specifier)
-      .map(|d| d.maybe_lsp_version())
-      .flatten();
+      .and_then(|d| d.maybe_lsp_version());
     diagnostics_vec.push((specifier, version, Vec::new()));
   }
   Ok(diagnostics_vec)

@@ -199,18 +199,17 @@ impl AssetOrDocument {
   ) -> Option<(String, deno_graph::Dependency, deno_graph::Range)> {
     self
       .document()
-      .map(|d| d.get_maybe_dependency(position))
-      .flatten()
+      .and_then(|d| d.get_maybe_dependency(position))
   }

   pub fn maybe_parsed_source(
     &self,
   ) -> Option<Result<deno_ast::ParsedSource, deno_graph::ModuleGraphError>> {
-    self.document().map(|d| d.maybe_parsed_source()).flatten()
+    self.document().and_then(|d| d.maybe_parsed_source())
   }

   pub fn document_lsp_version(&self) -> Option<i32> {
-    self.document().map(|d| d.maybe_lsp_version()).flatten()
+    self.document().and_then(|d| d.maybe_lsp_version())
   }
 }
@@ -347,8 +346,7 @@ impl Document {
       .0
       .maybe_language_id
       .as_ref()
-      .map(|li| li.as_headers())
-      .flatten();
+      .and_then(|li| li.as_headers());
     let parser = SourceParser::default();
     Some(deno_graph::parse_module(
       &self.0.specifier,
@@ -865,8 +863,7 @@ impl Documents {
     } else {
       let mut file_system_docs = self.file_system_docs.lock();
       let fs_version = get_document_path(&self.cache, &specifier)
-        .map(|path| calculate_fs_version(&path))
-        .flatten();
+        .and_then(|path| calculate_fs_version(&path));
       let file_system_doc = file_system_docs.docs.get(&specifier);
       if file_system_doc.map(|d| d.fs_version().to_string()) != fs_version {
         // attempt to update the file on the file system
@@ -1009,12 +1006,10 @@ impl Documents {
   ) {
     // TODO(@kitsonk) update resolved dependencies?
     self.maybe_import_map = maybe_import_map.map(ImportMapResolver::new);
-    self.maybe_jsx_resolver = maybe_config_file
-      .map(|cf| {
-        cf.to_maybe_jsx_import_source_module()
-          .map(|im| JsxResolver::new(im, self.maybe_import_map.clone()))
-      })
-      .flatten();
+    self.maybe_jsx_resolver = maybe_config_file.and_then(|cf| {
+      cf.to_maybe_jsx_import_source_module()
+        .map(|im| JsxResolver::new(im, self.maybe_import_map.clone()))
+    });
     self.imports = Arc::new(
       if let Some(Ok(Some(imports))) =
         maybe_config_file.map(|cf| cf.to_maybe_imports())
@@ -1094,14 +1089,12 @@ impl Documents {
     specifier: &ModuleSpecifier,
   ) -> Option<(ModuleSpecifier, MediaType)> {
     let doc = self.get(specifier)?;
-    let maybe_module = doc.maybe_module().map(|r| r.as_ref().ok()).flatten();
-    let maybe_types_dependency = maybe_module
-      .map(|m| {
-        m.maybe_types_dependency
-          .as_ref()
-          .map(|(_, resolved)| resolved.clone())
-      })
-      .flatten();
+    let maybe_module = doc.maybe_module().and_then(|r| r.as_ref().ok());
+    let maybe_types_dependency = maybe_module.and_then(|m| {
+      m.maybe_types_dependency
+        .as_ref()
+        .map(|(_, resolved)| resolved.clone())
+    });
     if let Some(Resolved::Ok { specifier, .. }) = maybe_types_dependency {
       self.resolve_dependency(&specifier)
     } else {

@@ -870,8 +870,7 @@ impl Inner {
       params
         .settings
         .as_object()
-        .map(|settings| settings.get(SETTINGS_SECTION))
-        .flatten()
+        .and_then(|settings| settings.get(SETTINGS_SECTION))
         .cloned()
     };
@@ -1075,8 +1074,7 @@ impl Inner {
     {
       let dep_maybe_types_dependency = dep
         .get_code()
-        .map(|s| self.documents.get(s))
-        .flatten()
+        .and_then(|s| self.documents.get(s))
         .map(|d| d.maybe_types_dependency());
       let value = match (dep.maybe_code.is_none(), dep.maybe_type.is_none(), &dep_maybe_types_dependency) {
         (false, false, None) => format!(
@@ -1242,7 +1240,7 @@ impl Inner {
         &specifier,
         diagnostic,
         asset_or_doc.document().map(|d| d.text_info()),
-        asset_or_doc.maybe_parsed_source().map(|r| r.ok()).flatten(),
+        asset_or_doc.maybe_parsed_source().and_then(|r| r.ok()),
       )
       .map_err(|err| {
         error!("Unable to fix lint error: {}", err);
@@ -1426,8 +1424,7 @@ impl Inner {
       error!("Error getting code lenses for \"{}\": {}", specifier, err);
       LspError::internal_error()
     })?;
-    let parsed_source =
-      asset_or_doc.maybe_parsed_source().map(|r| r.ok()).flatten();
+    let parsed_source = asset_or_doc.maybe_parsed_source().and_then(|r| r.ok());
     let line_index = asset_or_doc.line_index();
     let code_lenses = code_lens::collect(
       &specifier,
@@ -1501,8 +1498,7 @@ impl Inner {
     if let Some(document_highlights) = maybe_document_highlights {
       let result = document_highlights
         .into_iter()
-        .map(|dh| dh.to_highlight(line_index.clone()))
-        .flatten()
+        .flat_map(|dh| dh.to_highlight(line_index.clone()))
         .collect();
       self.performance.measure(mark);
       Ok(Some(result))

@@ -1402,9 +1402,7 @@ impl FileTextChanges {
     ops.push(lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
       text_document: lsp::OptionalVersionedTextDocumentIdentifier {
         uri: specifier.clone(),
-        version: maybe_asset_or_document
-          .map(|d| d.document_lsp_version())
-          .flatten(),
+        version: maybe_asset_or_document.and_then(|d| d.document_lsp_version()),
       },
       edits,
     }));
@@ -2064,7 +2062,7 @@ impl CompletionEntry {
         return Some(insert_text.clone());
       }
     } else {
-      return Some(self.name.replace("#", ""));
+      return Some(self.name.replace('#', ""));
     }
   }

@@ -146,7 +146,9 @@ fn create_web_worker_callback(ps: ProcState) -> Arc<CreateWebWorkerCb> {
     bootstrap: BootstrapOptions {
       args: ps.flags.argv.clone(),
       apply_source_maps: true,
-      cpu_count: num_cpus::get(),
+      cpu_count: std::thread::available_parallelism()
+        .map(|p| p.get())
+        .unwrap_or(1),
       debug_flag: ps
         .flags
         .log_level
@@ -247,7 +249,9 @@ pub fn create_main_worker(
     bootstrap: BootstrapOptions {
       apply_source_maps: true,
       args: ps.flags.argv.clone(),
-      cpu_count: num_cpus::get(),
+      cpu_count: std::thread::available_parallelism()
+        .map(|p| p.get())
+        .unwrap_or(1),
       debug_flag: ps.flags.log_level.map_or(false, |l| l == log::Level::Debug),
       enable_testing_features: ps.flags.enable_testing_features,
       location: ps.flags.location.clone(),
@@ -477,14 +481,10 @@ async fn info_command(
   let maybe_locker = lockfile::as_maybe_locker(ps.lockfile.clone());
   let maybe_import_map_resolver =
     ps.maybe_import_map.clone().map(ImportMapResolver::new);
-  let maybe_jsx_resolver = ps
-    .maybe_config_file
-    .as_ref()
-    .map(|cf| {
-      cf.to_maybe_jsx_import_source_module()
-        .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone()))
-    })
-    .flatten();
+  let maybe_jsx_resolver = ps.maybe_config_file.as_ref().and_then(|cf| {
+    cf.to_maybe_jsx_import_source_module()
+      .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone()))
+  });
   let maybe_resolver = if maybe_jsx_resolver.is_some() {
     maybe_jsx_resolver.as_ref().map(|jr| jr.as_resolver())
   } else {
@@ -649,14 +649,10 @@ async fn create_graph_and_maybe_check(
   };
   let maybe_import_map_resolver =
     ps.maybe_import_map.clone().map(ImportMapResolver::new);
-  let maybe_jsx_resolver = ps
-    .maybe_config_file
-    .as_ref()
-    .map(|cf| {
-      cf.to_maybe_jsx_import_source_module()
-        .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone()))
-    })
-    .flatten();
+  let maybe_jsx_resolver = ps.maybe_config_file.as_ref().and_then(|cf| {
+    cf.to_maybe_jsx_import_source_module()
+      .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone()))
+  });
   let maybe_resolver = if maybe_jsx_resolver.is_some() {
     maybe_jsx_resolver.as_ref().map(|jr| jr.as_resolver())
   } else {
@@ -802,10 +798,7 @@ async fn bundle_command(
       .specifiers()
      .iter()
       .filter_map(|(_, r)| {
-        r.as_ref()
-          .ok()
-          .map(|(s, _, _)| s.to_file_path().ok())
-          .flatten()
+        r.as_ref().ok().and_then(|(s, _, _)| s.to_file_path().ok())
       })
      .collect();
@@ -814,7 +807,7 @@ async fn bundle_command(
      ps.flags.import_map_path.as_deref(),
      ps.maybe_config_file.as_ref(),
    )
-    .map(|ms| ms.map(|ref s| s.to_file_path().ok()).flatten())
+    .map(|ms| ms.and_then(|ref s| s.to_file_path().ok()))
    {
      paths_to_watch.push(import_map_path);
    }
@@ -1008,14 +1001,10 @@ async fn run_with_watch(flags: Flags, script: String) -> Result<i32, AnyError> {
  };
  let maybe_import_map_resolver =
    ps.maybe_import_map.clone().map(ImportMapResolver::new);
-  let maybe_jsx_resolver = ps
-    .maybe_config_file
-    .as_ref()
-    .map(|cf| {
-      cf.to_maybe_jsx_import_source_module()
-        .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone()))
-    })
-    .flatten();
+  let maybe_jsx_resolver = ps.maybe_config_file.as_ref().and_then(|cf| {
+    cf.to_maybe_jsx_import_source_module()
+      .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone()))
+  });
  let maybe_resolver = if maybe_jsx_resolver.is_some() {
    maybe_jsx_resolver.as_ref().map(|jr| jr.as_resolver())
  } else {
@@ -1046,10 +1035,7 @@ async fn run_with_watch(flags: Flags, script: String) -> Result<i32, AnyError> {
      .specifiers()
      .iter()
      .filter_map(|(_, r)| {
-        r.as_ref()
-          .ok()
-          .map(|(s, _, _)| s.to_file_path().ok())
-          .flatten()
+        r.as_ref().ok().and_then(|(s, _, _)| s.to_file_path().ok())
      })
      .collect();
@@ -1063,7 +1049,7 @@ async fn run_with_watch(flags: Flags, script: String) -> Result<i32, AnyError> {
      ps.flags.import_map_path.as_deref(),
      ps.maybe_config_file.as_ref(),
    )
-    .map(|ms| ms.map(|ref s| s.to_file_path().ok()).flatten())
+    .map(|ms| ms.and_then(|ref s| s.to_file_path().ok()))
    {
      paths_to_watch.push(import_map_path);
    }

@@ -190,13 +190,10 @@ impl ProcState {
     );
     let maybe_import_map_resolver =
       maybe_import_map.clone().map(ImportMapResolver::new);
-    let maybe_jsx_resolver = maybe_config_file
-      .as_ref()
-      .map(|cf| {
-        cf.to_maybe_jsx_import_source_module()
-          .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone()))
-      })
-      .flatten();
+    let maybe_jsx_resolver = maybe_config_file.as_ref().and_then(|cf| {
+      cf.to_maybe_jsx_import_source_module()
+        .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone()))
+    });
     let maybe_resolver: Option<
       Arc<dyn deno_graph::source::Resolver + Send + Sync>,
     > = if flags.compat {

@@ -269,7 +269,9 @@ pub async fn run(
     bootstrap: BootstrapOptions {
       apply_source_maps: false,
       args: metadata.argv,
-      cpu_count: num_cpus::get(),
+      cpu_count: std::thread::available_parallelism()
+        .map(|p| p.get())
+        .unwrap_or(1),
       debug_flag: metadata.log_level.map_or(false, |l| l == log::Level::Debug),
       enable_testing_features: false,
       location: metadata.location,

@@ -312,7 +312,7 @@ fn generate_coverage_report(
   let mut found_lines = line_counts
     .iter()
     .enumerate()
-    .map(|(index, count)| {
+    .flat_map(|(index, count)| {
       // get all the mappings from this destination line to a different src line
       let mut results = source_map
         .tokens()
@@ -324,7 +324,6 @@ fn generate_coverage_report(
       results.dedup_by_key(|(index, _)| *index);
       results.into_iter()
     })
-    .flatten()
     .collect::<Vec<(usize, i64)>>();
   found_lines.sort_unstable_by_key(|(index, _)| *index);
@@ -400,8 +399,7 @@ impl CoverageReporter for LcovCoverageReporter {
       .url
       .to_file_path()
       .ok()
-      .map(|p| p.to_str().map(|p| p.to_string()))
-      .flatten()
+      .and_then(|p| p.to_str().map(|p| p.to_string()))
       .unwrap_or_else(|| coverage_report.url.to_string());
     writeln!(out_writer, "SF:{}", file_path)?;

@@ -234,7 +234,7 @@ pub fn format_file(
   file_text: &str,
   fmt_options: FmtOptionsConfig,
 ) -> Result<String, AnyError> {
-  let ext = get_extension(file_path).unwrap_or_else(String::new);
+  let ext = get_extension(file_path).unwrap_or_default();
   if matches!(
     ext.as_str(),
     "md" | "mkd" | "mkdn" | "mdwn" | "mdown" | "markdown"
@@ -589,8 +589,7 @@ where
   let mut errors = join_results.into_iter().filter_map(|join_result| {
     join_result
       .ok()
-      .map(|handle_result| handle_result.err())
-      .flatten()
+      .and_then(|handle_result| handle_result.err())
   });

   if let Some(e) = errors.next() {

@@ -56,7 +56,7 @@ fn generate_executable_file(shim_data: &ShimData) -> Result<(), AnyError> {
     "% generated by deno install %\n@deno {} %*\n",
     args
       .iter()
-      .map(|arg| arg.replace("%", "%%"))
+      .map(|arg| arg.replace('%', "%%"))
       .collect::<Vec<_>>()
       .join(" ")
   );
@@ -646,7 +646,7 @@ mod tests {
     let bin_dir = temp_dir.path().join("bin");
     std::fs::create_dir(&bin_dir).unwrap();
     let original_install_root = env::var_os("DENO_INSTALL_ROOT");
-    env::set_var("DENO_INSTALL_ROOT", temp_dir.path().to_path_buf());
+    env::set_var("DENO_INSTALL_ROOT", temp_dir.path());

     let shim_data = resolve_shim_data(
       &Flags::default(),
@@ -876,7 +876,7 @@ mod tests {
     let config_file_name = "echo_test.tsconfig.json";

-    let file_path = bin_dir.join(config_file_name.to_string());
+    let file_path = bin_dir.join(config_file_name);
     assert!(file_path.exists());
     let content = fs::read_to_string(file_path).unwrap();
     assert!(content == "{}");

@@ -70,7 +70,7 @@ impl EditorHelper {
     self
       .get_object_expr_properties(object_expr)
-      .unwrap_or_else(Vec::new)
+      .unwrap_or_default()
   }

   fn get_expression_type(&self, expr: &str) -> Option<String> {

@@ -1082,14 +1082,10 @@ pub async fn run_tests_with_watch(
   let maybe_import_map_resolver =
     ps.maybe_import_map.clone().map(ImportMapResolver::new);
-  let maybe_jsx_resolver = ps
-    .maybe_config_file
-    .as_ref()
-    .map(|cf| {
-      cf.to_maybe_jsx_import_source_module()
-        .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone()))
-    })
-    .flatten();
+  let maybe_jsx_resolver = ps.maybe_config_file.as_ref().and_then(|cf| {
+    cf.to_maybe_jsx_import_source_module()
+      .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone()))
+  });
   let maybe_locker = lockfile::as_maybe_locker(ps.lockfile.clone());
   let maybe_imports = ps
     .maybe_config_file

@@ -151,7 +151,7 @@ async fn get_latest_release_version(
     .await?;
   let version = res.url().path_segments().unwrap().last().unwrap();
-  Ok(version.replace("v", ""))
+  Ok(version.replace('v', ""))
 }

 async fn get_latest_canary_version(

@@ -137,14 +137,10 @@ async fn create_graph(
   };
   let maybe_import_map_resolver =
     ps.maybe_import_map.clone().map(ImportMapResolver::new);
-  let maybe_jsx_resolver = ps
-    .maybe_config_file
-    .as_ref()
-    .map(|cf| {
-      cf.to_maybe_jsx_import_source_module()
-        .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone()))
-    })
-    .flatten();
+  let maybe_jsx_resolver = ps.maybe_config_file.as_ref().and_then(|cf| {
+    cf.to_maybe_jsx_import_source_module()
+      .map(|im| JsxResolver::new(im, maybe_import_map_resolver.clone()))
+  });
   let maybe_resolver = if maybe_jsx_resolver.is_some() {
     maybe_jsx_resolver.as_ref().map(|jr| jr.as_resolver())
   } else {

@@ -223,7 +223,7 @@ fn make_path(text: &str) -> PathBuf {
   // a cross platform path here
   assert!(text.starts_with('/'));
   if cfg!(windows) {
-    PathBuf::from(format!("C:{}", text.replace("/", "\\")))
+    PathBuf::from(format!("C:{}", text.replace('/', "\\")))
   } else {
     PathBuf::from(text)
   }

@@ -737,7 +737,7 @@ mod tests {
         .to_string()
         .replace(":///", "_")
         .replace("://", "_")
-        .replace("/", "-");
+        .replace('/', "-");
       let source_path = self.fixtures.join(specifier_text);
       let response = fs::read_to_string(&source_path)
         .map(|c| {

@@ -460,18 +460,18 @@ mod internal {
     /// must refer to a head (`CancelHandle`) node.
     fn cancel(&mut self) {
       let mut head_nn = NonNull::from(self);
-      let mut item_nn;

       // Mark the head node as canceled.
-      match replace(unsafe { head_nn.as_mut() }, NodeInner::Canceled) {
-        NodeInner::Linked {
-          kind: NodeKind::Head { .. },
-          next: next_nn,
-          ..
-        } => item_nn = next_nn,
-        NodeInner::Unlinked | NodeInner::Canceled => return,
-        _ => unreachable!(),
-      };
+      let mut item_nn =
+        match replace(unsafe { head_nn.as_mut() }, NodeInner::Canceled) {
+          NodeInner::Linked {
+            kind: NodeKind::Head { .. },
+            next: next_nn,
+            ..
+          } => next_nn,
+          NodeInner::Unlinked | NodeInner::Canceled => return,
+          _ => unreachable!(),
+        };

       // Cancel all item nodes in the chain, waking each stored `Waker`.
       while item_nn != head_nn {

@@ -397,7 +397,7 @@ mod tests {
     // Relative local path.
     let expected_url = format!(
       "file:///{}/tests/006_url_imports.ts",
-      cwd_str.replace("\\", "/")
+      cwd_str.replace('\\', "/")
     );
     tests.extend(vec![
       (r"tests/006_url_imports.ts", expected_url.to_string()),

@@ -16,7 +16,7 @@ use tokio::task::JoinHandle;
 // TODO(andreubotella) Properly parse the MIME type
 fn mime_type_essence(mime_type: &str) -> String {
-  let essence = match mime_type.split_once(";") {
+  let essence = match mime_type.split_once(';') {
     Some((essence, _)) => essence,
     None => mime_type,
   };

@@ -2220,7 +2220,7 @@ mod tests {
     ];
     for (host, port) in domain_tests {
-      assert!(!perms.net.check(&(host, Some(port))).is_ok());
+      assert!(perms.net.check(&(host, Some(port))).is_err());
     }
   }
@@ -2396,13 +2396,13 @@ mod tests {
     assert_eq!(perms2.net.query::<&str>(None), PermissionState::Prompt);
     assert_eq!(perms2.net.query(Some(&("127.0.0.1", Some(8000)))), PermissionState::Granted);
     assert_eq!(perms1.env.query(None), PermissionState::Granted);
-    assert_eq!(perms1.env.query(Some(&"HOME".to_string())), PermissionState::Granted);
+    assert_eq!(perms1.env.query(Some("HOME")), PermissionState::Granted);
     assert_eq!(perms2.env.query(None), PermissionState::Prompt);
-    assert_eq!(perms2.env.query(Some(&"HOME".to_string())), PermissionState::Granted);
+    assert_eq!(perms2.env.query(Some("HOME")), PermissionState::Granted);
     assert_eq!(perms1.run.query(None), PermissionState::Granted);
-    assert_eq!(perms1.run.query(Some(&"deno".to_string())), PermissionState::Granted);
+    assert_eq!(perms1.run.query(Some("deno")), PermissionState::Granted);
     assert_eq!(perms2.run.query(None), PermissionState::Prompt);
-    assert_eq!(perms2.run.query(Some(&"deno".to_string())), PermissionState::Granted);
+    assert_eq!(perms2.run.query(Some("deno")), PermissionState::Granted);
     assert_eq!(perms1.ffi.query(None), PermissionState::Granted);
     assert_eq!(perms1.ffi.query(Some(Path::new("deno"))), PermissionState::Granted);
     assert_eq!(perms2.ffi.query(None), PermissionState::Prompt);
@@ -2433,15 +2433,15 @@ mod tests {
     prompt_value.set(false);
     assert_eq!(perms.net.request(Some(&("127.0.0.1", Some(8000)))), PermissionState::Granted);
     prompt_value.set(true);
-    assert_eq!(perms.env.request(Some(&"HOME".to_string())), PermissionState::Granted);
+    assert_eq!(perms.env.request(Some("HOME")), PermissionState::Granted);
     assert_eq!(perms.env.query(None), PermissionState::Prompt);
     prompt_value.set(false);
-    assert_eq!(perms.env.request(Some(&"HOME".to_string())), PermissionState::Granted);
+    assert_eq!(perms.env.request(Some("HOME")), PermissionState::Granted);
     prompt_value.set(true);
-    assert_eq!(perms.run.request(Some(&"deno".to_string())), PermissionState::Granted);
+    assert_eq!(perms.run.request(Some("deno")), PermissionState::Granted);
     assert_eq!(perms.run.query(None), PermissionState::Prompt);
     prompt_value.set(false);
-    assert_eq!(perms.run.request(Some(&"deno".to_string())), PermissionState::Granted);
+    assert_eq!(perms.run.request(Some("deno")), PermissionState::Granted);
     prompt_value.set(true);
     assert_eq!(perms.ffi.request(Some(Path::new("deno"))), PermissionState::Granted);
     assert_eq!(perms.ffi.query(None), PermissionState::Prompt);
@@ -2506,8 +2506,8 @@ mod tests {
     assert_eq!(perms.net.revoke(Some(&("127.0.0.1", Some(9000)))), PermissionState::Prompt);
     assert_eq!(perms.net.query(Some(&("127.0.0.1", None))), PermissionState::Prompt);
     assert_eq!(perms.net.query(Some(&("127.0.0.1", Some(8000)))), PermissionState::Granted);
-    assert_eq!(perms.env.revoke(Some(&"HOME".to_string())), PermissionState::Prompt);
-    assert_eq!(perms.run.revoke(Some(&"deno".to_string())), PermissionState::Prompt);
+    assert_eq!(perms.env.revoke(Some("HOME")), PermissionState::Prompt);
+    assert_eq!(perms.run.revoke(Some("deno")), PermissionState::Prompt);
     assert_eq!(perms.ffi.revoke(Some(Path::new("deno"))), PermissionState::Prompt);
     assert_eq!(perms.hrtime.revoke(), PermissionState::Denied);
   };
@@ -2641,10 +2641,7 @@ mod tests {
     assert!(perms.env.check("HOME").is_ok());
     assert!(perms.env.check("hOmE").is_ok());
-    assert_eq!(
-      perms.env.revoke(Some(&"HomE".to_string())),
-      PermissionState::Prompt
-    );
+    assert_eq!(perms.env.revoke(Some("HomE")), PermissionState::Prompt);
   }

   #[test]

@@ -1,2 +1,2 @@
 [toolchain]
-channel = "1.58.1"
+channel = "1.59.0"