1
0
Fork 0
mirror of https://github.com/denoland/deno.git synced 2024-11-21 15:04:11 -05:00

chore: upgrade to Rust 1.67 (#17548)

Co-authored-by: Bartek Iwańczuk <biwanczuk@gmail.com>
This commit is contained in:
David Sherret 2023-01-27 10:43:16 -05:00 committed by GitHub
parent 1a1faff2f6
commit f5840bdcd3
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
148 changed files with 576 additions and 681 deletions

View file

@ -438,7 +438,7 @@ const ci = {
].join("\n"),
key: "never_saved",
"restore-keys":
"18-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-",
"19-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-",
},
},
{

View file

@ -266,7 +266,7 @@ jobs:
!./target/*/*.zip
!./target/*/*.tar.gz
key: never_saved
restore-keys: '18-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-'
restore-keys: '19-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-'
- name: Apply and update mtime cache
if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (steps.exit_early.outputs.EXIT_EARLY != ''true'' && (!startsWith(github.ref, ''refs/tags/'')))'
uses: ./.github/mtime_cache

View file

@ -15,7 +15,7 @@ pub fn create_js_runtime(setup: impl FnOnce() -> Vec<Extension>) -> JsRuntime {
}
fn loop_code(iters: u64, src: &str) -> String {
format!(r#"for(let i=0; i < {}; i++) {{ {} }}"#, iters, src,)
format!(r#"for(let i=0; i < {iters}; i++) {{ {src} }}"#,)
}
#[derive(Copy, Clone)]

View file

@ -163,7 +163,7 @@ pub const IGNORED_COMPILER_OPTIONS: &[&str] = &[
/// A function that works like JavaScript's `Object.assign()`.
pub fn json_merge(a: &mut Value, b: &Value) {
match (a, b) {
(&mut Value::Object(ref mut a), &Value::Object(ref b)) => {
(&mut Value::Object(ref mut a), Value::Object(b)) => {
for (k, v) in b {
json_merge(a.entry(k.clone()).or_insert(Value::Null), v);
}

View file

@ -1904,7 +1904,7 @@ fn permission_args(app: Command) -> Command {
.validator(|keys| {
for key in keys.split(',') {
if key.is_empty() || key.contains(&['=', '\0'] as &[char]) {
return Err(format!("invalid key \"{}\"", key));
return Err(format!("invalid key \"{key}\""));
}
}
Ok(())
@ -3164,7 +3164,7 @@ fn seed_arg_parse(flags: &mut Flags, matches: &ArgMatches) {
let seed = seed_string.parse::<u64>().unwrap();
flags.seed = Some(seed);
flags.v8_flags.push(format!("--random-seed={}", seed));
flags.v8_flags.push(format!("--random-seed={seed}"));
}
}
@ -3293,7 +3293,7 @@ pub fn resolve_urls(urls: Vec<String>) -> Vec<String> {
}
out.push(full_url);
} else {
panic!("Bad Url: {}", urlstr);
panic!("Bad Url: {urlstr}");
}
}
out

View file

@ -27,13 +27,13 @@ impl FromStr for BarePort {
}
pub fn validator(host_and_port: &str) -> Result<(), String> {
if Url::parse(&format!("deno://{}", host_and_port)).is_ok()
if Url::parse(&format!("deno://{host_and_port}")).is_ok()
|| host_and_port.parse::<IpAddr>().is_ok()
|| host_and_port.parse::<BarePort>().is_ok()
{
Ok(())
} else {
Err(format!("Bad host:port pair: {}", host_and_port))
Err(format!("Bad host:port pair: {host_and_port}"))
}
}
@ -43,7 +43,7 @@ pub fn validator(host_and_port: &str) -> Result<(), String> {
pub fn parse(paths: Vec<String>) -> clap::Result<Vec<String>> {
let mut out: Vec<String> = vec![];
for host_and_port in paths.iter() {
if Url::parse(&format!("deno://{}", host_and_port)).is_ok()
if Url::parse(&format!("deno://{host_and_port}")).is_ok()
|| host_and_port.parse::<IpAddr>().is_ok()
{
out.push(host_and_port.to_owned())
@ -55,7 +55,7 @@ pub fn parse(paths: Vec<String>) -> clap::Result<Vec<String>> {
} else {
return Err(clap::Error::raw(
clap::ErrorKind::InvalidValue,
format!("Bad host:port pair: {}", host_and_port),
format!("Bad host:port pair: {host_and_port}"),
));
}
}

View file

@ -56,7 +56,7 @@ fn print_import_map_diagnostics(diagnostics: &[ImportMapDiagnostic]) {
"Import map diagnostics:\n{}",
diagnostics
.iter()
.map(|d| format!(" - {}", d))
.map(|d| format!(" - {d}"))
.collect::<Vec<_>>()
.join("\n")
);

View file

@ -90,7 +90,7 @@ impl CacheSetting {
if list.iter().any(|i| i == "npm:") {
return false;
}
let specifier = format!("npm:{}", package_name);
let specifier = format!("npm:{package_name}");
if list.contains(&specifier) {
return false;
}
@ -491,7 +491,7 @@ impl CliOptions {
format!("for: {}", insecure_allowlist.join(", "))
};
let msg =
format!("DANGER: TLS certificate validation is disabled {}", domains);
format!("DANGER: TLS certificate validation is disabled {domains}");
// use eprintln instead of log::warn so this always gets shown
eprintln!("{}", colors::yellow(msg));
}
@ -579,8 +579,7 @@ impl CliOptions {
)
.await
.context(format!(
"Unable to load '{}' import map",
import_map_specifier
"Unable to load '{import_map_specifier}' import map"
))
.map(Some)
}
@ -929,7 +928,7 @@ fn resolve_import_map_specifier(
}
}
let specifier = deno_core::resolve_url_or_path(import_map_path)
.context(format!("Bad URL (\"{}\") for import map.", import_map_path))?;
.context(format!("Bad URL (\"{import_map_path}\") for import map."))?;
return Ok(Some(specifier));
} else if let Some(config_file) = &maybe_config_file {
// if the config file is an import map we prefer to use it, over `importMap`
@ -970,8 +969,7 @@ fn resolve_import_map_specifier(
} else {
deno_core::resolve_import(&import_map_path, config_file.specifier.as_str())
.context(format!(
"Bad URL (\"{}\") for import map.",
import_map_path
"Bad URL (\"{import_map_path}\") for import map."
))?
};
return Ok(Some(specifier));

View file

@ -20,9 +20,9 @@ pub struct AuthToken {
impl fmt::Display for AuthToken {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match &self.token {
AuthTokenData::Bearer(token) => write!(f, "Bearer {}", token),
AuthTokenData::Bearer(token) => write!(f, "Bearer {token}"),
AuthTokenData::Basic { username, password } => {
let credentials = format!("{}:{}", username, password);
let credentials = format!("{username}:{password}");
write!(f, "Basic {}", base64::encode(credentials))
}
}

View file

@ -44,7 +44,7 @@ pub fn benchmark(
let name = entry.file_name().into_string().unwrap();
let file_stem = pathbuf.file_stem().unwrap().to_str().unwrap();
let lua_script = http_dir.join(format!("{}.lua", file_stem));
let lua_script = http_dir.join(format!("{file_stem}.lua"));
let mut maybe_lua = None;
if lua_script.exists() {
maybe_lua = Some(lua_script.to_str().unwrap());
@ -158,7 +158,7 @@ fn run(
let wrk = test_util::prebuilt_tool_path("wrk");
assert!(wrk.is_file());
let addr = format!("http://127.0.0.1:{}/", port);
let addr = format!("http://127.0.0.1:{port}/");
let mut wrk_cmd =
vec![wrk.to_str().unwrap(), "-d", DURATION, "--latency", &addr];
@ -172,7 +172,7 @@ fn run(
std::thread::sleep(Duration::from_secs(1)); // wait to capture failure. TODO racy.
println!("{}", output);
println!("{output}");
assert!(
server.try_wait()?.map_or(true, |s| s.success()),
"server ended with error"
@ -194,7 +194,7 @@ fn get_port() -> u16 {
}
fn server_addr(port: u16) -> String {
format!("0.0.0.0:{}", port)
format!("0.0.0.0:{port}")
}
fn core_http_json_ops(exe: &str) -> Result<HttpBenchmarkResult> {

View file

@ -202,7 +202,7 @@ fn bench_find_replace(deno_exe: &Path) -> Result<Duration, AnyError> {
"textDocument/didOpen",
json!({
"textDocument": {
"uri": format!("file:///a/file_{}.ts", i),
"uri": format!("file:///a/file_{i}.ts"),
"languageId": "typescript",
"version": 1,
"text": "console.log(\"000\");\n"
@ -223,7 +223,7 @@ fn bench_find_replace(deno_exe: &Path) -> Result<Duration, AnyError> {
}
for i in 0..10 {
let file_name = format!("file:///a/file_{}.ts", i);
let file_name = format!("file:///a/file_{i}.ts");
client.write_notification(
"textDocument/didChange",
lsp::DidChangeTextDocumentParams {
@ -250,7 +250,7 @@ fn bench_find_replace(deno_exe: &Path) -> Result<Duration, AnyError> {
}
for i in 0..10 {
let file_name = format!("file:///a/file_{}.ts", i);
let file_name = format!("file:///a/file_{i}.ts");
let (maybe_res, maybe_err) = client.write_request::<_, _, Value>(
"textDocument/formatting",
lsp::DocumentFormattingParams {

View file

@ -55,7 +55,7 @@ fn incremental_change_wait(bench: &mut Bencher) {
let mut document_version: u64 = 0;
bench.iter(|| {
let text = format!("m{:05}", document_version);
let text = format!("m{document_version:05}");
client
.write_notification(
"textDocument/didChange",

View file

@ -189,7 +189,7 @@ fn run_exec_time(
let ret_code_test = if let Some(code) = return_code {
// Bash test which asserts the return code value of the previous command
// $? contains the return code of the previous command
format!("; test $? -eq {}", code)
format!("; test $? -eq {code}")
} else {
"".to_string()
};
@ -244,11 +244,11 @@ fn rlib_size(target_dir: &std::path::Path, prefix: &str) -> i64 {
if name.starts_with(prefix) && name.ends_with(".rlib") {
let start = name.split('-').next().unwrap().to_string();
if seen.contains(&start) {
println!("skip {}", name);
println!("skip {name}");
} else {
seen.insert(start);
size += entry.metadata().unwrap().len();
println!("check size {} {}", name, size);
println!("check size {name} {size}");
}
}
}
@ -269,11 +269,11 @@ fn get_binary_sizes(target_dir: &Path) -> Result<HashMap<String, i64>> {
// add up size for everything in target/release/deps/libswc*
let swc_size = rlib_size(target_dir, "libswc");
println!("swc {} bytes", swc_size);
println!("swc {swc_size} bytes");
sizes.insert("swc_rlib".to_string(), swc_size);
let v8_size = rlib_size(target_dir, "libv8");
println!("v8 {} bytes", v8_size);
println!("v8 {v8_size} bytes");
sizes.insert("rusty_v8_rlib".to_string(), v8_size);
// Because cargo's OUT_DIR is not predictable, search the build tree for
@ -314,7 +314,7 @@ fn bundle_benchmark(deno_exe: &Path) -> Result<HashMap<String, i64>> {
let mut sizes = HashMap::<String, i64>::new();
for (name, url) in BUNDLES {
let path = format!("{}.bundle.js", name);
let path = format!("{name}.bundle.js");
test_util::run(
&[
deno_exe.to_str().unwrap(),
@ -374,7 +374,7 @@ fn cargo_deps() -> usize {
count += 1
}
}
println!("cargo_deps {}", count);
println!("cargo_deps {count}");
assert!(count > 10); // Sanity check.
count
}

View file

@ -130,7 +130,7 @@ mod ts {
for name in libs.iter() {
println!(
"cargo:rerun-if-changed={}",
path_dts.join(format!("lib.{}.d.ts", name)).display()
path_dts.join(format!("lib.{name}.d.ts")).display()
);
}
println!(
@ -229,7 +229,7 @@ mod ts {
PathBuf::from(op_crate_lib).canonicalize()?
// otherwise we are will generate the path ourself
} else {
path_dts.join(format!("lib.{}.d.ts", lib))
path_dts.join(format!("lib.{lib}.d.ts"))
};
let data = std::fs::read_to_string(path)?;
Ok(json!({
@ -431,7 +431,7 @@ fn main() {
// op_fetch_asset::trace_serializer();
if let Ok(c) = env::var("DENO_CANARY") {
println!("cargo:rustc-env=DENO_CANARY={}", c);
println!("cargo:rustc-env=DENO_CANARY={c}");
}
println!("cargo:rerun-if-env-changed=DENO_CANARY");

6
cli/cache/check.rs vendored
View file

@ -71,7 +71,7 @@ impl TypeCheckCache {
Ok(val) => val,
Err(err) => {
if cfg!(debug_assertions) {
panic!("Error retrieving hash: {}", err);
panic!("Error retrieving hash: {err}");
} else {
log::debug!("Error retrieving hash: {}", err);
// fail silently when not debugging
@ -94,7 +94,7 @@ impl TypeCheckCache {
pub fn add_check_hash(&self, check_hash: u64) {
if let Err(err) = self.add_check_hash_result(check_hash) {
if cfg!(debug_assertions) {
panic!("Error saving check hash: {}", err);
panic!("Error saving check hash: {err}");
} else {
log::debug!("Error saving check hash: {}", err);
}
@ -134,7 +134,7 @@ impl TypeCheckCache {
if let Err(err) = self.set_tsbuildinfo_result(specifier, text) {
// should never error here, but if it ever does don't fail
if cfg!(debug_assertions) {
panic!("Error saving tsbuildinfo: {}", err);
panic!("Error saving tsbuildinfo: {err}");
} else {
log::debug!("Error saving tsbuildinfo: {}", err);
}

View file

@ -43,8 +43,7 @@ impl DiskCache {
}
fs::create_dir_all(path).map_err(|e| {
io::Error::new(e.kind(), format!(
"Could not create TypeScript compiler cache location: {:?}\nCheck the permission of the directory.",
path
"Could not create TypeScript compiler cache location: {path:?}\nCheck the permission of the directory."
))
})
}
@ -61,7 +60,7 @@ impl DiskCache {
let host_port = match url.port() {
// Windows doesn't support ":" in filenames, so we represent port using a
// special string.
Some(port) => format!("{}_PORT{}", host, port),
Some(port) => format!("{host}_PORT{port}"),
None => host.to_string(),
};
out.push(host_port);
@ -128,7 +127,7 @@ impl DiskCache {
None => Some(base.with_extension(extension)),
Some(ext) => {
let original_extension = OsStr::to_str(ext).unwrap();
let final_extension = format!("{}.{}", original_extension, extension);
let final_extension = format!("{original_extension}.{extension}");
Some(base.with_extension(final_extension))
}
}

2
cli/cache/emit.rs vendored
View file

@ -90,7 +90,7 @@ impl EmitCache {
if let Err(err) = self.set_emit_code_result(specifier, source_hash, code) {
// should never error here, but if it ever does don't fail
if cfg!(debug_assertions) {
panic!("Error saving emit data ({}): {}", specifier, err);
panic!("Error saving emit data ({specifier}): {err}");
} else {
log::debug!("Error saving emit data({}): {}", specifier, err);
}

View file

@ -35,7 +35,7 @@ fn base_url_to_filename(url: &Url) -> Option<PathBuf> {
"http" | "https" => {
let host = url.host_str().unwrap();
let host_port = match url.port() {
Some(port) => format!("{}_PORT{}", host, port),
Some(port) => format!("{host}_PORT{port}"),
None => host.to_string(),
};
out.push(host_port);
@ -128,8 +128,7 @@ impl HttpCache {
io::Error::new(
e.kind(),
format!(
"Could not create remote modules cache location: {:?}\nCheck the permission of the directory.",
path
"Could not create remote modules cache location: {path:?}\nCheck the permission of the directory."
),
)
})
@ -231,7 +230,7 @@ mod tests {
headers.insert("etag".to_string(), "as5625rqdsfb".to_string());
let content = b"Hello world";
let r = cache.set(&url, headers, content);
eprintln!("result {:?}", r);
eprintln!("result {r:?}");
assert!(r.is_ok());
let r = cache.get(&url);
assert!(r.is_ok());

View file

@ -185,7 +185,7 @@ impl SqlIncrementalCache {
Ok(option) => option,
Err(err) => {
if cfg!(debug_assertions) {
panic!("Error retrieving hash: {}", err);
panic!("Error retrieving hash: {err}");
} else {
// fail silently when not debugging
None

4
cli/cache/node.rs vendored
View file

@ -108,7 +108,7 @@ impl NodeAnalysisCache {
Err(err) => {
// should never error here, but if it ever does don't fail
if cfg!(debug_assertions) {
panic!("Error creating node analysis cache: {:#}", err);
panic!("Error creating node analysis cache: {err:#}");
} else {
log::debug!("Error creating node analysis cache: {:#}", err);
None
@ -124,7 +124,7 @@ impl NodeAnalysisCache {
Err(err) => {
// should never error here, but if it ever does don't fail
if cfg!(debug_assertions) {
panic!("Error using esm analysis: {:#}", err);
panic!("Error using esm analysis: {err:#}");
} else {
log::debug!("Error using esm analysis: {:#}", err);
}

View file

@ -65,9 +65,7 @@ pub fn get_error_class_name(e: &AnyError) -> &'static str {
eprintln!(
"Error '{}' contains boxed error of unknown type:{}",
e,
e.chain()
.map(|e| format!("\n {:?}", e))
.collect::<String>()
e.chain().map(|e| format!("\n {e:?}")).collect::<String>()
);
"Error"
})

View file

@ -88,7 +88,7 @@ impl FileCache {
/// Fetch a source file from the local file system.
fn fetch_local(specifier: &ModuleSpecifier) -> Result<File, AnyError> {
let local = specifier.to_file_path().map_err(|_| {
uri_error(format!("Invalid file path.\n Specifier: {}", specifier))
uri_error(format!("Invalid file path.\n Specifier: {specifier}"))
})?;
let bytes = fs::read(&local)?;
let charset = text_encoding::detect_charset(&bytes).to_string();
@ -111,13 +111,13 @@ pub fn get_source_from_data_url(
specifier: &ModuleSpecifier,
) -> Result<(String, String), AnyError> {
let data_url = DataUrl::process(specifier.as_str())
.map_err(|e| uri_error(format!("{:?}", e)))?;
.map_err(|e| uri_error(format!("{e:?}")))?;
let mime = data_url.mime_type();
let charset = mime.get_parameter("charset").map(|v| v.to_string());
let (bytes, _) = data_url
.decode_to_vec()
.map_err(|e| uri_error(format!("{:?}", e)))?;
Ok((get_source_from_bytes(bytes, charset)?, format!("{}", mime)))
.map_err(|e| uri_error(format!("{e:?}")))?;
Ok((get_source_from_bytes(bytes, charset)?, format!("{mime}")))
}
/// Given a vector of bytes and optionally a charset, decode the bytes to a
@ -142,8 +142,7 @@ fn get_validated_scheme(
let scheme = specifier.scheme();
if !SUPPORTED_SCHEMES.contains(&scheme) {
Err(generic_error(format!(
"Unsupported scheme \"{}\" for module \"{}\". Supported schemes: {:#?}",
scheme, specifier, SUPPORTED_SCHEMES
"Unsupported scheme \"{scheme}\" for module \"{specifier}\". Supported schemes: {SUPPORTED_SCHEMES:#?}"
)))
} else {
Ok(scheme.to_string())
@ -301,8 +300,7 @@ impl FileFetcher {
return Err(custom_error(
"NotCached",
format!(
"Specifier not found in cache: \"{}\", --cached-only is specified.",
specifier
"Specifier not found in cache: \"{specifier}\", --cached-only is specified."
),
));
}
@ -349,8 +347,7 @@ impl FileFetcher {
return Err(custom_error(
"NotCached",
format!(
"Specifier not found in cache: \"{}\", --cached-only is specified.",
specifier
"Specifier not found in cache: \"{specifier}\", --cached-only is specified."
),
));
}
@ -362,7 +359,7 @@ impl FileFetcher {
.ok_or_else(|| {
custom_error(
"NotFound",
format!("Blob URL not found: \"{}\".", specifier),
format!("Blob URL not found: \"{specifier}\"."),
)
})?
};
@ -435,8 +432,7 @@ impl FileFetcher {
return futures::future::err(custom_error(
"NotCached",
format!(
"Specifier not found in cache: \"{}\", --cached-only is specified.",
specifier
"Specifier not found in cache: \"{specifier}\", --cached-only is specified."
),
))
.boxed();
@ -580,7 +576,7 @@ impl FileFetcher {
} else if !self.allow_remote {
Err(custom_error(
"NoRemote",
format!("A remote specifier was requested: \"{}\", but --no-remote is specified.", specifier),
format!("A remote specifier was requested: \"{specifier}\", but --no-remote is specified."),
))
} else {
let result = self
@ -818,19 +814,19 @@ mod tests {
charset: &str,
expected: &str,
) {
let url_str = format!("http://127.0.0.1:4545/encoding/{}", fixture);
let url_str = format!("http://127.0.0.1:4545/encoding/{fixture}");
let specifier = resolve_url(&url_str).unwrap();
let (file, headers) = test_fetch_remote(&specifier).await;
assert_eq!(&*file.source, expected);
assert_eq!(file.media_type, MediaType::TypeScript);
assert_eq!(
headers.get("content-type").unwrap(),
&format!("application/typescript;charset={}", charset)
&format!("application/typescript;charset={charset}")
);
}
async fn test_fetch_local_encoded(charset: &str, expected: String) {
let p = test_util::testdata_path().join(format!("encoding/{}.ts", charset));
let p = test_util::testdata_path().join(format!("encoding/{charset}.ts"));
let specifier = resolve_url_or_path(p.to_str().unwrap()).unwrap();
let (file, _) = test_fetch(&specifier).await;
assert_eq!(&*file.source, expected);
@ -2016,7 +2012,7 @@ mod tests {
)
.await;
println!("{:?}", result);
println!("{result:?}");
if let Ok(FetchOnceResult::Code(body, _headers)) = result {
assert!(!body.is_empty());
} else {

View file

@ -630,12 +630,12 @@ fn handle_check_error(
let mut message = if let Some(err) = error.downcast_ref::<ResolutionError>() {
enhanced_resolution_error_message(err)
} else {
format!("{}", error)
format!("{error}")
};
if let Some(range) = maybe_range {
if !range.specifier.as_str().contains("$deno") {
message.push_str(&format!("\n at {}", range));
message.push_str(&format!("\n at {range}"));
}
}
@ -644,7 +644,7 @@ fn handle_check_error(
/// Adds more explanatory information to a resolution error.
pub fn enhanced_resolution_error_message(error: &ResolutionError) -> String {
let mut message = format!("{}", error);
let mut message = format!("{error}");
if let ResolutionError::InvalidSpecifier {
error: SpecifierError::ImportPrefixMissing(specifier, _),
@ -653,8 +653,7 @@ pub fn enhanced_resolution_error_message(error: &ResolutionError) -> String {
{
if crate::node::resolve_builtin_node_module(specifier).is_ok() {
message.push_str(&format!(
"\nIf you want to use a built-in Node module, add a \"node:\" prefix (ex. \"node:{}\").",
specifier
"\nIf you want to use a built-in Node module, add a \"node:\" prefix (ex. \"node:{specifier}\")."
));
}
}

View file

@ -59,8 +59,7 @@ pub fn resolve_redirect_from_response(
Ok(new_url)
} else {
Err(generic_error(format!(
"Redirection from '{}' did not provide location header",
request_url
"Redirection from '{request_url}' did not provide location header"
)))
}
}
@ -290,7 +289,7 @@ impl HttpClient {
"Bad response: {:?}{}",
status,
match maybe_response_text {
Some(text) => format!("\n\n{}", text),
Some(text) => format!("\n\n{text}"),
None => String::new(),
}
);

View file

@ -158,7 +158,7 @@ fn check_specifier(
documents: &Documents,
) -> Option<String> {
for ext in SUPPORTED_EXTENSIONS {
let specifier_with_ext = format!("{}{}", specifier, ext);
let specifier_with_ext = format!("{specifier}{ext}");
if documents.contains_import(&specifier_with_ext, referrer) {
return Some(specifier_with_ext);
}
@ -398,7 +398,7 @@ impl CodeActionCollection {
specifier.clone(),
vec![lsp::TextEdit {
new_text: prepend_whitespace(
format!("// deno-lint-ignore {}\n", code),
format!("// deno-lint-ignore {code}\n"),
line_content,
),
range: lsp::Range {
@ -414,7 +414,7 @@ impl CodeActionCollection {
}],
);
let ignore_error_action = lsp::CodeAction {
title: format!("Disable {} for this line", code),
title: format!("Disable {code} for this line"),
kind: Some(lsp::CodeActionKind::QUICKFIX),
diagnostics: Some(vec![diagnostic.clone()]),
command: None,
@ -447,7 +447,7 @@ impl CodeActionCollection {
})
});
let mut new_text = format!("// deno-lint-ignore-file {}\n", code);
let mut new_text = format!("// deno-lint-ignore-file {code}\n");
let mut range = lsp::Range {
start: lsp::Position {
line: 0,
@ -461,7 +461,7 @@ impl CodeActionCollection {
// If ignore file comment already exists, append the lint code
// to the existing comment.
if let Some(ignore_comment) = maybe_ignore_comment {
new_text = format!(" {}", code);
new_text = format!(" {code}");
// Get the end position of the comment.
let line = maybe_parsed_source
.unwrap()
@ -479,7 +479,7 @@ impl CodeActionCollection {
let mut changes = HashMap::new();
changes.insert(specifier.clone(), vec![lsp::TextEdit { new_text, range }]);
let ignore_file_action = lsp::CodeAction {
title: format!("Disable {} for the entire file", code),
title: format!("Disable {code} for the entire file"),
kind: Some(lsp::CodeActionKind::QUICKFIX),
diagnostics: Some(vec![diagnostic.clone()]),
command: None,

View file

@ -107,7 +107,7 @@ impl Client {
) {
self
.0
.show_message(message_type, format!("{}", message))
.show_message(message_type, format!("{message}"))
.await
}

View file

@ -394,7 +394,7 @@ fn get_local_completions(
let filter_text = if full_text.starts_with(current) {
Some(full_text)
} else {
Some(format!("{}{}", current, label))
Some(format!("{current}{label}"))
};
match de.file_type() {
Ok(file_type) if file_type.is_dir() => Some(lsp::CompletionItem {

View file

@ -670,17 +670,14 @@ impl DenoDiagnostic {
let DiagnosticDataImportMapRemap { from, to } =
serde_json::from_value(data)?;
lsp::CodeAction {
title: format!(
"Update \"{}\" to \"{}\" to use import map.",
from, to
),
title: format!("Update \"{from}\" to \"{to}\" to use import map."),
kind: Some(lsp::CodeActionKind::QUICKFIX),
diagnostics: Some(vec![diagnostic.clone()]),
edit: Some(lsp::WorkspaceEdit {
changes: Some(HashMap::from([(
specifier.clone(),
vec![lsp::TextEdit {
new_text: format!("\"{}\"", to),
new_text: format!("\"{to}\""),
range: diagnostic.range,
}],
)])),
@ -821,15 +818,15 @@ impl DenoDiagnostic {
pub fn to_lsp_diagnostic(&self, range: &lsp::Range) -> lsp::Diagnostic {
let (severity, message, data) = match self {
Self::DenoWarn(message) => (lsp::DiagnosticSeverity::WARNING, message.to_string(), None),
Self::ImportMapRemap { from, to } => (lsp::DiagnosticSeverity::HINT, format!("The import specifier can be remapped to \"{}\" which will resolve it via the active import map.", to), Some(json!({ "from": from, "to": to }))),
Self::InvalidAssertType(assert_type) => (lsp::DiagnosticSeverity::ERROR, format!("The module is a JSON module and expected an assertion type of \"json\". Instead got \"{}\".", assert_type), None),
Self::ImportMapRemap { from, to } => (lsp::DiagnosticSeverity::HINT, format!("The import specifier can be remapped to \"{to}\" which will resolve it via the active import map."), Some(json!({ "from": from, "to": to }))),
Self::InvalidAssertType(assert_type) => (lsp::DiagnosticSeverity::ERROR, format!("The module is a JSON module and expected an assertion type of \"json\". Instead got \"{assert_type}\"."), None),
Self::NoAssertType => (lsp::DiagnosticSeverity::ERROR, "The module is a JSON module and not being imported with an import assertion. Consider adding `assert { type: \"json\" }` to the import statement.".to_string(), None),
Self::NoCache(specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing remote URL: \"{}\".", specifier), Some(json!({ "specifier": specifier }))),
Self::NoCache(specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing remote URL: \"{specifier}\"."), Some(json!({ "specifier": specifier }))),
Self::NoCacheBlob => (lsp::DiagnosticSeverity::ERROR, "Uncached blob URL.".to_string(), None),
Self::NoCacheData(specifier) => (lsp::DiagnosticSeverity::ERROR, "Uncached data URL.".to_string(), Some(json!({ "specifier": specifier }))),
Self::NoCacheNpm(pkg_ref, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Uncached or missing npm package: \"{}\".", pkg_ref.req), Some(json!({ "specifier": specifier }))),
Self::NoLocal(specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Unable to load a local module: \"{}\".\n Please check the file path.", specifier), None),
Self::Redirect { from, to} => (lsp::DiagnosticSeverity::INFORMATION, format!("The import of \"{}\" was redirected to \"{}\".", from, to), Some(json!({ "specifier": from, "redirect": to }))),
Self::NoLocal(specifier) => (lsp::DiagnosticSeverity::ERROR, format!("Unable to load a local module: \"{specifier}\".\n Please check the file path."), None),
Self::Redirect { from, to} => (lsp::DiagnosticSeverity::INFORMATION, format!("The import of \"{from}\" was redirected to \"{to}\"."), Some(json!({ "specifier": from, "redirect": to }))),
Self::ResolutionError(err) => (
lsp::DiagnosticSeverity::ERROR,
enhanced_resolution_error_message(err),

View file

@ -838,7 +838,7 @@ impl Documents {
|| {
Err(custom_error(
"NotFound",
format!("The specifier \"{}\" was not found.", specifier),
format!("The specifier \"{specifier}\" was not found."),
))
},
Ok,
@ -862,7 +862,7 @@ impl Documents {
} else {
return Err(custom_error(
"NotFound",
format!("The specifier \"{}\" was not found.", specifier),
format!("The specifier \"{specifier}\" was not found."),
));
}
}
@ -1100,7 +1100,7 @@ impl Documents {
} else {
return Err(custom_error(
"NotFound",
format!("Specifier not found {}", specifier),
format!("Specifier not found {specifier}"),
));
}
}

View file

@ -375,8 +375,7 @@ impl Inner {
self.get_maybe_asset_or_document(specifier).map_or_else(
|| {
Err(LspError::invalid_params(format!(
"Unable to find asset or document for: {}",
specifier
"Unable to find asset or document for: {specifier}"
)))
},
Ok,
@ -1296,7 +1295,7 @@ impl Inner {
Ok(Some(text_edits))
}
} else {
self.client.show_message(MessageType::WARNING, format!("Unable to format \"{}\". Likely due to unrecoverable syntax errors in the file.", specifier)).await;
self.client.show_message(MessageType::WARNING, format!("Unable to format \"{specifier}\". Likely due to unrecoverable syntax errors in the file.")).await;
Ok(None)
}
}
@ -1354,7 +1353,7 @@ impl Inner {
};
let value =
if let Some(docs) = self.module_registries.get_hover(&dep).await {
format!("{}\n\n---\n\n{}", value, docs)
format!("{value}\n\n---\n\n{docs}")
} else {
value
};

View file

@ -220,8 +220,8 @@ pub enum StringOrNumber {
impl fmt::Display for StringOrNumber {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match &self {
Self::Number(n) => write!(f, "{}", n),
Self::String(s) => write!(f, "{}", s),
Self::Number(n) => write!(f, "{n}"),
Self::String(s) => write!(f, "{s}"),
}
}
}
@ -269,9 +269,9 @@ impl StringOrVec {
let mut s = String::new();
for (i, segment) in v.iter().enumerate() {
if omit_initial_prefix && i == 0 {
write!(s, "{}{}", segment, suffix).unwrap();
write!(s, "{segment}{suffix}").unwrap();
} else {
write!(s, "{}{}{}", prefix, segment, suffix).unwrap();
write!(s, "{prefix}{segment}{suffix}").unwrap();
}
}
s
@ -610,7 +610,7 @@ pub fn tokens_to_regex(
}
} else {
let modifier = key.modifier.clone().unwrap_or_default();
format!(r"(?:{}{}){}", prefix, suffix, modifier)
format!(r"(?:{prefix}{suffix}){modifier}")
}
}
};
@ -619,10 +619,10 @@ pub fn tokens_to_regex(
if end {
if !strict {
write!(route, r"{}?", delimiter).unwrap();
write!(route, r"{delimiter}?").unwrap();
}
if has_ends_with {
write!(route, r"(?={})", ends_with).unwrap();
write!(route, r"(?={ends_with})").unwrap();
} else {
route.push('$');
}
@ -640,16 +640,16 @@ pub fn tokens_to_regex(
};
if !strict {
write!(route, r"(?:{}(?={}))?", delimiter, ends_with).unwrap();
write!(route, r"(?:{delimiter}(?={ends_with}))?").unwrap();
}
if !is_end_deliminated {
write!(route, r"(?={}|{})", delimiter, ends_with).unwrap();
write!(route, r"(?={delimiter}|{ends_with})").unwrap();
}
}
let flags = if sensitive { "" } else { "(?i)" };
let re = FancyRegex::new(&format!("{}{}", flags, route))?;
let re = FancyRegex::new(&format!("{flags}{route}"))?;
let maybe_keys = if keys.is_empty() { None } else { Some(keys) };
Ok((re, maybe_keys))
@ -754,7 +754,7 @@ impl Compiler {
}
}
}
write!(path, "{}{}{}", prefix, segment, suffix).unwrap();
write!(path, "{prefix}{segment}{suffix}").unwrap();
}
}
}
@ -773,7 +773,7 @@ impl Compiler {
}
let prefix = k.prefix.clone().unwrap_or_default();
let suffix = k.suffix.clone().unwrap_or_default();
write!(path, "{}{}{}", prefix, s, suffix).unwrap();
write!(path, "{prefix}{s}{suffix}").unwrap();
}
None => {
if !optional {
@ -874,25 +874,23 @@ mod tests {
fixtures: &[Fixture],
) {
let result = string_to_regex(path, maybe_options);
assert!(result.is_ok(), "Could not parse path: \"{}\"", path);
assert!(result.is_ok(), "Could not parse path: \"{path}\"");
let (re, _) = result.unwrap();
for (fixture, expected) in fixtures {
let result = re.find(fixture);
assert!(
result.is_ok(),
"Find failure for path \"{}\" and fixture \"{}\"",
path,
fixture
"Find failure for path \"{path}\" and fixture \"{fixture}\""
);
let actual = result.unwrap();
if let Some((text, start, end)) = *expected {
assert!(actual.is_some(), "Match failure for path \"{}\" and fixture \"{}\". Expected Some got None", path, fixture);
assert!(actual.is_some(), "Match failure for path \"{path}\" and fixture \"{fixture}\". Expected Some got None");
let actual = actual.unwrap();
assert_eq!(actual.as_str(), text, "Match failure for path \"{}\" and fixture \"{}\". Expected \"{}\" got \"{}\".", path, fixture, text, actual.as_str());
assert_eq!(actual.start(), start);
assert_eq!(actual.end(), end);
} else {
assert!(actual.is_none(), "Match failure for path \"{}\" and fixture \"{}\". Expected None got {:?}", path, fixture, actual);
assert!(actual.is_none(), "Match failure for path \"{path}\" and fixture \"{fixture}\". Expected None got {actual:?}");
}
}
}

View file

@ -217,10 +217,10 @@ fn get_endpoint_with_match(
Token::Key(k) if k.name == *key => Some(k),
_ => None,
});
url = url
.replace(&format!("${{{}}}", name), &value.to_string(maybe_key, true));
url =
url.replace(&format!("${{{name}}}"), &value.to_string(maybe_key, true));
url = url.replace(
&format!("${{{{{}}}}}", name),
&format!("${{{{{name}}}}}"),
&percent_encoding::percent_encode(
value.to_string(maybe_key, true).as_bytes(),
COMPONENT,
@ -278,8 +278,8 @@ fn replace_variable(
let value = maybe_value.unwrap_or("");
if let StringOrNumber::String(name) = &variable.name {
url_str
.replace(&format!("${{{}}}", name), value)
.replace(&format! {"${{{{{}}}}}", name}, value)
.replace(&format!("${{{name}}}"), value)
.replace(&format! {"${{{{{name}}}}}"}, value)
} else {
url_str
}
@ -723,7 +723,7 @@ impl ModuleRegistry {
}
for (idx, item) in items.into_iter().enumerate() {
let mut label = if let Some(p) = &prefix {
format!("{}{}", p, item)
format!("{p}{item}")
} else {
item.clone()
};
@ -880,7 +880,7 @@ impl ModuleRegistry {
is_incomplete = true;
}
for (idx, item) in items.into_iter().enumerate() {
let path = format!("{}{}", prefix, item);
let path = format!("{prefix}{item}");
let kind = Some(lsp::CompletionItemKind::FOLDER);
let item_specifier = base.join(&path).ok()?;
let full_text = item_specifier.as_str();

View file

@ -188,7 +188,7 @@ impl ReplLanguageServer {
let new_text = if new_text.ends_with('\n') {
new_text.to_string()
} else {
format!("{}\n", new_text)
format!("{new_text}\n")
};
self.document_version += 1;
let current_line_count =

View file

@ -211,7 +211,7 @@ fn new_assets_map() -> Arc<Mutex<AssetsMap>> {
let assets = tsc::LAZILY_LOADED_STATIC_ASSETS
.iter()
.map(|(k, v)| {
let url_str = format!("asset:///{}", k);
let url_str = format!("asset:///{k}");
let specifier = resolve_url(&url_str).unwrap();
let asset = AssetDocument::new(specifier.clone(), v);
(specifier, asset)
@ -384,9 +384,9 @@ fn get_tag_documentation(
let maybe_text = get_tag_body_text(tag, language_server);
if let Some(text) = maybe_text {
if text.contains('\n') {
format!("{} \n{}", label, text)
format!("{label} \n{text}")
} else {
format!("{} - {}", label, text)
format!("{label} - {text}")
}
} else {
label
@ -397,7 +397,7 @@ fn make_codeblock(text: &str) -> String {
if CODEBLOCK_RE.is_match(text) {
text.to_string()
} else {
format!("```\n{}\n```", text)
format!("```\n{text}\n```")
}
}
@ -700,9 +700,9 @@ fn display_parts_to_string(
.unwrap_or_else(|| "".to_string())
});
let link_str = if link.linkcode {
format!("[`{}`]({})", link_text, specifier)
format!("[`{link_text}`]({specifier})")
} else {
format!("[{}]({})", link_text, specifier)
format!("[{link_text}]({specifier})")
};
out.push(link_str);
}
@ -785,8 +785,7 @@ impl QuickInfo {
.join(" \n\n");
if !tags_preview.is_empty() {
parts.push(lsp::MarkedString::from_markdown(format!(
"\n\n{}",
tags_preview
"\n\n{tags_preview}"
)));
}
}
@ -1984,7 +1983,7 @@ impl CompletionEntryDetails {
.map(|tag_info| get_tag_documentation(tag_info, language_server))
.collect::<Vec<String>>()
.join("");
value = format!("{}\n\n{}", value, tag_documentation);
value = format!("{value}\n\n{tag_documentation}");
}
Some(lsp::Documentation::MarkupContent(lsp::MarkupContent {
kind: lsp::MarkupKind::Markdown,
@ -2486,7 +2485,7 @@ impl SignatureHelpItem {
let documentation =
display_parts_to_string(&self.documentation, language_server);
lsp::SignatureInformation {
label: format!("{}{}{}", prefix_text, params_text, suffix_text),
label: format!("{prefix_text}{params_text}{suffix_text}"),
documentation: Some(lsp::Documentation::MarkupContent(
lsp::MarkupContent {
kind: lsp::MarkupKind::Markdown,
@ -2844,7 +2843,7 @@ fn start(
.clone()
.unwrap_or_else(|| Url::parse("cache:///").unwrap());
let init_config = json!({ "debug": debug, "rootUri": root_uri });
let init_src = format!("globalThis.serverInit({});", init_config);
let init_src = format!("globalThis.serverInit({init_config});");
runtime.execute_script(&located_script_name!(), &init_src)?;
Ok(())
@ -3433,7 +3432,7 @@ pub fn request(
(state.performance.clone(), method.to_value(state, id))
};
let mark = performance.mark("request", Some(request_params.clone()));
let request_src = format!("globalThis.serverRequest({});", request_params);
let request_src = format!("globalThis.serverRequest({request_params});");
runtime.execute_script(&located_script_name!(), &request_src)?;
let op_state = runtime.op_state();

View file

@ -104,10 +104,10 @@ impl LspUrlMap {
format!("deno:/asset{}", specifier.path())
} else if specifier.scheme() == "data" {
let data_url = DataUrl::process(specifier.as_str())
.map_err(|e| uri_error(format!("{:?}", e)))?;
.map_err(|e| uri_error(format!("{e:?}")))?;
let mime = data_url.mime_type();
let (media_type, _) =
map_content_type(specifier, Some(&format!("{}", mime)));
map_content_type(specifier, Some(&format!("{mime}")));
let extension = if media_type == MediaType::Unknown {
""
} else {
@ -128,7 +128,7 @@ impl LspUrlMap {
})
.collect();
path.push_str(&parts.join("/"));
format!("deno:/{}", path)
format!("deno:/{path}")
};
let url = Url::parse(&specifier_str)?;
inner.put(specifier.clone(), url.clone());

View file

@ -139,7 +139,7 @@ async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
DenoSubcommand::Test(test_flags) => {
if let Some(ref coverage_dir) = flags.coverage_dir {
std::fs::create_dir_all(coverage_dir)
.with_context(|| format!("Failed creating: {}", coverage_dir))?;
.with_context(|| format!("Failed creating: {coverage_dir}"))?;
// this is set in order to ensure spawned processes use the same
// coverage directory
env::set_var(
@ -206,7 +206,7 @@ fn unwrap_or_exit<T>(result: Result<T, AnyError>) -> T {
match result {
Ok(value) => value,
Err(error) => {
let mut error_string = format!("{:?}", error);
let mut error_string = format!("{error:?}");
let mut error_code = 1;
if let Some(e) = error.downcast_ref::<JsError>() {

View file

@ -122,7 +122,7 @@ impl CliModuleLoader {
)?
}
MediaType::TsBuildInfo | MediaType::Wasm | MediaType::SourceMap => {
panic!("Unexpected media type {} for {}", media_type, found_url)
panic!("Unexpected media type {media_type} for {found_url}")
}
};
@ -136,7 +136,7 @@ impl CliModuleLoader {
})
}
_ => {
let mut msg = format!("Loading unprepared module: {}", specifier);
let mut msg = format!("Loading unprepared module: {specifier}");
if let Some(referrer) = maybe_referrer {
msg = format!("{}, imported from: {}", msg, referrer.as_str());
}

View file

@ -39,7 +39,7 @@ fn napi_cancel_async_work(
/// Frees a previously allocated work object.
#[napi_sym::napi_sym]
fn napi_delete_async_work(_env: &mut Env, work: napi_async_work) -> Result {
let work = Box::from_raw(work);
let work = Box::from_raw(work as *mut AsyncWork);
drop(work);
Ok(())

View file

@ -34,8 +34,7 @@ pub unsafe extern "C" fn napi_fatal_error(
std::str::from_utf8(slice).unwrap()
};
panic!(
"Fatal exception triggered by napi_fatal_error!\nLocation: {:?}\n{}",
location, message
"Fatal exception triggered by napi_fatal_error!\nLocation: {location:?}\n{message}"
);
}
@ -46,10 +45,7 @@ fn napi_fatal_exception(env: *mut Env, value: napi_value) -> Result {
let env: &mut Env = env.as_mut().ok_or(Error::InvalidArg)?;
let value = transmute::<napi_value, v8::Local<v8::Value>>(value);
let error = value.to_rust_string_lossy(&mut env.scope());
panic!(
"Fatal exception triggered by napi_fatal_exception!\n{}",
error
);
panic!("Fatal exception triggered by napi_fatal_exception!\n{error}");
}
#[napi_sym::napi_sym]

View file

@ -76,11 +76,11 @@ pub fn esm_code_with_node_globals(
let global_this_expr = if has_global_this {
global_this_expr
} else {
write!(result, "var globalThis = {};", global_this_expr).unwrap();
write!(result, "var globalThis = {global_this_expr};").unwrap();
"globalThis"
};
for global in globals {
write!(result, "var {0} = {1}.{0};", global, global_this_expr).unwrap();
write!(result, "var {global} = {global_this_expr}.{global};").unwrap();
}
let file_text = text_info.text_str();

View file

@ -65,7 +65,7 @@ impl NodeResolution {
if specifier.starts_with("node:") {
ModuleSpecifier::parse(&specifier).unwrap()
} else {
ModuleSpecifier::parse(&format!("node:{}", specifier)).unwrap()
ModuleSpecifier::parse(&format!("node:{specifier}")).unwrap()
}
}
}
@ -146,8 +146,7 @@ pub fn resolve_builtin_node_module(specifier: &str) -> Result<Url, AnyError> {
}
Err(generic_error(format!(
"Unknown built-in \"node:\" module: {}",
specifier
"Unknown built-in \"node:\" module: {specifier}"
)))
}
@ -235,8 +234,7 @@ pub async fn initialize_binary_command(
Object.defineProperty(process.argv, "0", {{
get: () => binaryName,
}});
}})('{}');"#,
binary_name,
}})('{binary_name}');"#,
);
let value =
@ -333,7 +331,7 @@ pub fn node_resolve_npm_reference(
&reference
.sub_path
.as_ref()
.map(|s| format!("./{}", s))
.map(|s| format!("./{s}"))
.unwrap_or_else(|| ".".to_string()),
&package_folder,
node_module_kind,
@ -343,7 +341,7 @@ pub fn node_resolve_npm_reference(
permissions,
)
.with_context(|| {
format!("Error resolving package config for '{}'", reference)
format!("Error resolving package config for '{reference}'")
})?;
let resolved_path = match maybe_resolved_path {
Some(resolved_path) => resolved_path,
@ -425,7 +423,7 @@ fn resolve_bin_entry_value<'a>(
.map(|o| {
o.keys()
.into_iter()
.map(|k| format!(" * npm:{}/{}", pkg_req, k))
.map(|k| format!(" * npm:{pkg_req}/{k}"))
.collect::<Vec<_>>()
})
.unwrap_or_default();
@ -546,8 +544,7 @@ pub fn url_to_node_resolution(
Ok(NodeResolution::Esm(url))
} else if url_str.ends_with(".ts") {
Err(generic_error(format!(
"TypeScript files are not supported in npm packages: {}",
url
"TypeScript files are not supported in npm packages: {url}"
)))
} else {
Ok(NodeResolution::CommonJs(url))
@ -681,15 +678,13 @@ fn add_export(
// so assign it to a temporary variable that won't have a conflict, then re-export
// it as a string
source.push(format!(
"const __deno_export_{}__ = {};",
temp_var_count, initializer
"const __deno_export_{temp_var_count}__ = {initializer};"
));
source.push(format!(
"export {{ __deno_export_{}__ as \"{}\" }};",
temp_var_count, name
"export {{ __deno_export_{temp_var_count}__ as \"{name}\" }};"
));
} else {
source.push(format!("export const {} = {};", name, initializer));
source.push(format!("export const {name} = {initializer};"));
}
}
@ -838,7 +833,7 @@ pub fn translate_cjs_to_esm(
add_export(
&mut source,
export,
&format!("mod[\"{}\"]", export),
&format!("mod[\"{export}\"]"),
&mut temp_var_count,
);
}
@ -975,7 +970,7 @@ fn parse_specifier(specifier: &str) -> Option<(String, String)> {
fn to_file_path(url: &ModuleSpecifier) -> PathBuf {
url
.to_file_path()
.unwrap_or_else(|_| panic!("Provided URL was not file:// URL: {}", url))
.unwrap_or_else(|_| panic!("Provided URL was not file:// URL: {url}"))
}
fn to_file_path_string(url: &ModuleSpecifier) -> String {

View file

@ -216,7 +216,7 @@ impl ReadonlyNpmCache {
let encoded_name = mixed_case_package_name_encode(name);
// Using the encoded directory may have a collision with an actual package name
// so prefix it with an underscore since npm packages can't start with that
dir.join(format!("_{}", encoded_name))
dir.join(format!("_{encoded_name}"))
} else {
// ensure backslashes are used on windows
for part in name.split('/') {

View file

@ -131,8 +131,7 @@ impl NpmPackageVersionInfo {
let version_req =
NpmVersionReq::parse(&version_req).with_context(|| {
format!(
"error parsing version requirement for dependency: {}@{}",
bare_specifier, version_req
"error parsing version requirement for dependency: {bare_specifier}@{version_req}"
)
})?;
Ok(NpmDependencyEntry {
@ -369,10 +368,7 @@ impl RealNpmRegistryApiInner {
Ok(value) => value,
Err(err) => {
if cfg!(debug_assertions) {
panic!(
"error loading cached npm package info for {}: {:#}",
name, err
);
panic!("error loading cached npm package info for {name}: {err:#}");
} else {
None
}
@ -415,10 +411,7 @@ impl RealNpmRegistryApiInner {
self.save_package_info_to_file_cache_result(name, package_info)
{
if cfg!(debug_assertions) {
panic!(
"error saving cached npm package info for {}: {:#}",
name, err
);
panic!("error saving cached npm package info for {name}: {err:#}");
}
}
}
@ -443,8 +436,7 @@ impl RealNpmRegistryApiInner {
return Err(custom_error(
"NotCached",
format!(
"An npm specifier not found in cache: \"{}\", --cached-only is specified.",
name
"An npm specifier not found in cache: \"{name}\", --cached-only is specified."
)
));
}

View file

@ -112,7 +112,7 @@ impl NpmPackageId {
let (input, version) = parse_version(input)?;
match NpmVersion::parse(version) {
Ok(version) => Ok((input, (name.to_string(), version))),
Err(err) => ParseError::fail(at_version_input, format!("{:#}", err)),
Err(err) => ParseError::fail(at_version_input, format!("{err:#}")),
}
}
@ -173,7 +173,7 @@ impl NpmPackageId {
}
with_failure_handling(parse_id_at_level(0))(id)
.with_context(|| format!("Invalid npm package id '{}'.", id))
.with_context(|| format!("Invalid npm package id '{id}'."))
}
pub fn display(&self) -> String {

View file

@ -247,7 +247,7 @@ impl NpmResolutionSnapshot {
// collect the specifiers to version mappings
for (key, value) in &lockfile.content.npm.specifiers {
let package_req = NpmPackageReq::from_str(key)
.with_context(|| format!("Unable to parse npm specifier: {}", key))?;
.with_context(|| format!("Unable to parse npm specifier: {key}"))?;
let package_id = NpmPackageId::from_serialized(value)?;
package_reqs.insert(package_req, package_id.clone());
verify_ids.insert(package_id.clone());

View file

@ -47,7 +47,7 @@ impl NpmPackageReference {
let parts = specifier.split('/').collect::<Vec<_>>();
let name_part_len = if specifier.starts_with('@') { 2 } else { 1 };
if parts.len() < name_part_len {
return Err(generic_error(format!("Not a valid package: {}", specifier)));
return Err(generic_error(format!("Not a valid package: {specifier}")));
}
let name_parts = &parts[0..name_part_len];
let last_name_part = &name_parts[name_part_len - 1];
@ -81,8 +81,7 @@ impl NpmPackageReference {
if let Some(at_index) = sub_path.rfind('@') {
let (new_sub_path, version) = sub_path.split_at(at_index);
let msg = format!(
"Invalid package specifier 'npm:{}/{}'. Did you mean to write 'npm:{}{}/{}'?",
name, sub_path, name, version, new_sub_path
"Invalid package specifier 'npm:{name}/{sub_path}'. Did you mean to write 'npm:{name}{version}/{new_sub_path}'?"
);
return Err(generic_error(msg));
}
@ -90,8 +89,7 @@ impl NpmPackageReference {
if name.is_empty() {
let msg = format!(
"Invalid npm specifier '{}'. Did not contain a package name.",
original_text
"Invalid npm specifier '{original_text}'. Did not contain a package name."
);
return Err(generic_error(msg));
}
@ -133,7 +131,7 @@ impl std::fmt::Display for NpmPackageReq {
impl NpmPackageReq {
pub fn from_str(text: &str) -> Result<Self, AnyError> {
// probably should do something more targeted in the future
let reference = NpmPackageReference::from_str(&format!("npm:{}", text))?;
let reference = NpmPackageReference::from_str(&format!("npm:{text}"))?;
Ok(reference.req)
}
}
@ -163,7 +161,7 @@ impl NpmVersionMatcher for NpmPackageReq {
self
.version_req
.as_ref()
.map(|v| format!("{}", v))
.map(|v| format!("{v}"))
.unwrap_or_else(|| "non-prerelease".to_string())
}
}

View file

@ -256,14 +256,13 @@ impl NpmPackageResolver {
.iter()
.collect::<HashSet<_>>() // prevent duplicates
.iter()
.map(|p| format!("\"{}\"", p))
.map(|p| format!("\"{p}\""))
.collect::<Vec<_>>()
.join(", ");
return Err(custom_error(
"NoNpm",
format!(
"Following npm specifiers were requested: {}; but --no-npm is specified.",
fmt_reqs
"Following npm specifiers were requested: {fmt_reqs}; but --no-npm is specified."
),
));
}

View file

@ -53,7 +53,7 @@ impl fmt::Display for NpmVersion {
if i > 0 {
write!(f, ".")?;
}
write!(f, "{}", part)?;
write!(f, "{part}")?;
}
}
if !self.build.is_empty() {
@ -62,7 +62,7 @@ impl fmt::Display for NpmVersion {
if i > 0 {
write!(f, ".")?;
}
write!(f, "{}", part)?;
write!(f, "{part}")?;
}
}
Ok(())
@ -143,7 +143,7 @@ impl NpmVersion {
pub fn parse(text: &str) -> Result<Self, AnyError> {
let text = text.trim();
with_failure_handling(parse_npm_version)(text)
.with_context(|| format!("Invalid npm version '{}'.", text))
.with_context(|| format!("Invalid npm version '{text}'."))
}
}
@ -218,7 +218,7 @@ impl NpmVersionReq {
pub fn parse(text: &str) -> Result<Self, AnyError> {
let text = text.trim();
with_failure_handling(parse_npm_version_req)(text)
.with_context(|| format!("Invalid npm version requirement '{}'.", text))
.with_context(|| format!("Invalid npm version requirement '{text}'."))
}
}
@ -523,7 +523,7 @@ fn nr(input: &str) -> ParseResult<u64> {
Err(err) => {
return ParseError::fail(
input,
format!("Error parsing '{}' to u64.\n\n{:#}", result, err),
format!("Error parsing '{result}' to u64.\n\n{err:#}"),
)
}
};
@ -984,9 +984,7 @@ mod tests {
let version = NpmVersion::parse(version_text).unwrap();
assert!(
req.matches(&version),
"Checking {} satisfies {}",
req_text,
version_text
"Checking {req_text} satisfies {version_text}"
);
}
}
@ -1083,9 +1081,7 @@ mod tests {
let version = NpmVersion::parse(version_text).unwrap();
assert!(
!req.matches(&version),
"Checking {} not satisfies {}",
req_text,
version_text
"Checking {req_text} not satisfies {version_text}"
);
}
}

View file

@ -33,7 +33,7 @@ impl std::fmt::Display for SpecifierVersionReq {
impl SpecifierVersionReq {
pub fn parse(text: &str) -> Result<Self, AnyError> {
with_failure_handling(parse_npm_specifier)(text).with_context(|| {
format!("Invalid npm specifier version requirement '{}'.", text)
format!("Invalid npm specifier version requirement '{text}'.")
})
}
@ -143,7 +143,7 @@ fn nr(input: &str) -> ParseResult<u64> {
Err(err) => {
return ParseError::fail(
input,
format!("Error parsing '{}' to u64.\n\n{:#}", result, err),
format!("Error parsing '{result}' to u64.\n\n{err:#}"),
)
}
};

View file

@ -154,12 +154,12 @@ mod test {
verify_tarball_integrity(package, &Vec::new(), "sha512-test")
.unwrap_err()
.to_string(),
format!("Tarball checksum did not match what was provided by npm registry for package@1.0.0.\n\nExpected: test\nActual: {}", actual_checksum),
format!("Tarball checksum did not match what was provided by npm registry for package@1.0.0.\n\nExpected: test\nActual: {actual_checksum}"),
);
assert!(verify_tarball_integrity(
package,
&Vec::new(),
&format!("sha512-{}", actual_checksum)
&format!("sha512-{actual_checksum}")
)
.is_ok());
}

View file

@ -559,7 +559,7 @@ impl ProcState {
permissions,
))
.with_context(|| {
format!("Could not resolve '{}' from '{}'.", specifier, referrer)
format!("Could not resolve '{specifier}' from '{referrer}'.")
});
}
@ -581,7 +581,7 @@ impl ProcState {
{
return Err(custom_error(
"NotSupported",
format!("importing npm specifiers in remote modules requires the --unstable flag (referrer: {})", found_referrer),
format!("importing npm specifiers in remote modules requires the --unstable flag (referrer: {found_referrer})"),
));
}
@ -592,7 +592,7 @@ impl ProcState {
&self.npm_resolver,
permissions,
))
.with_context(|| format!("Could not resolve '{}'.", reference));
.with_context(|| format!("Could not resolve '{reference}'."));
} else {
return Ok(specifier.clone());
}
@ -639,7 +639,7 @@ impl ProcState {
&self.npm_resolver,
permissions,
))
.with_context(|| format!("Could not resolve '{}'.", reference));
.with_context(|| format!("Could not resolve '{reference}'."));
}
}
}

View file

@ -320,9 +320,7 @@ fn get_error_class_name(e: &AnyError) -> &'static str {
panic!(
"Error '{}' contains boxed error of unsupported type:{}",
e,
e.chain()
.map(|e| format!("\n {:?}", e))
.collect::<String>()
e.chain().map(|e| format!("\n {e:?}")).collect::<String>()
);
})
}

View file

@ -97,7 +97,7 @@ fn run_coverage_text(test_name: &str, extension: &str) {
.arg("--quiet")
.arg("--unstable")
.arg(format!("--coverage={}", tempdir.to_str().unwrap()))
.arg(format!("coverage/{}_test.{}", test_name, extension))
.arg(format!("coverage/{test_name}_test.{extension}"))
.stdout(std::process::Stdio::piped())
.stderr(std::process::Stdio::inherit())
.status()
@ -123,13 +123,13 @@ fn run_coverage_text(test_name: &str, extension: &str) {
.to_string();
let expected = fs::read_to_string(
util::testdata_path().join(format!("coverage/{}_expected.out", test_name)),
util::testdata_path().join(format!("coverage/{test_name}_expected.out")),
)
.unwrap();
if !util::wildcard_match(&expected, &actual) {
println!("OUTPUT\n{}\nOUTPUT", actual);
println!("EXPECTED\n{}\nEXPECTED", expected);
println!("OUTPUT\n{actual}\nOUTPUT");
println!("EXPECTED\n{expected}\nEXPECTED");
panic!("pattern match failed");
}
@ -152,13 +152,13 @@ fn run_coverage_text(test_name: &str, extension: &str) {
.to_string();
let expected = fs::read_to_string(
util::testdata_path().join(format!("coverage/{}_expected.lcov", test_name)),
util::testdata_path().join(format!("coverage/{test_name}_expected.lcov")),
)
.unwrap();
if !util::wildcard_match(&expected, &actual) {
println!("OUTPUT\n{}\nOUTPUT", actual);
println!("EXPECTED\n{}\nEXPECTED", expected);
println!("OUTPUT\n{actual}\nOUTPUT");
println!("EXPECTED\n{expected}\nEXPECTED");
panic!("pattern match failed");
}
@ -208,8 +208,8 @@ fn multifile_coverage() {
.unwrap();
if !util::wildcard_match(&expected, &actual) {
println!("OUTPUT\n{}\nOUTPUT", actual);
println!("EXPECTED\n{}\nEXPECTED", expected);
println!("OUTPUT\n{actual}\nOUTPUT");
println!("EXPECTED\n{expected}\nEXPECTED");
panic!("pattern match failed");
}
@ -237,8 +237,8 @@ fn multifile_coverage() {
.unwrap();
if !util::wildcard_match(&expected, &actual) {
println!("OUTPUT\n{}\nOUTPUT", actual);
println!("EXPECTED\n{}\nEXPECTED", expected);
println!("OUTPUT\n{actual}\nOUTPUT");
println!("EXPECTED\n{expected}\nEXPECTED");
panic!("pattern match failed");
}
@ -258,8 +258,7 @@ fn no_snaps_included(test_name: &str, extension: &str) {
.arg("--allow-read")
.arg(format!("--coverage={}", tempdir.to_str().unwrap()))
.arg(format!(
"coverage/no_snaps_included/{}_test.{}",
test_name, extension
"coverage/no_snaps_included/{test_name}_test.{extension}"
))
.stdout(std::process::Stdio::piped())
.stderr(std::process::Stdio::piped())
@ -292,8 +291,8 @@ fn no_snaps_included(test_name: &str, extension: &str) {
.unwrap();
if !util::wildcard_match(&expected, &actual) {
println!("OUTPUT\n{}\nOUTPUT", actual);
println!("EXPECTED\n{}\nEXPECTED", expected);
println!("OUTPUT\n{actual}\nOUTPUT");
println!("EXPECTED\n{expected}\nEXPECTED");
panic!("pattern match failed");
}
@ -339,8 +338,8 @@ fn no_transpiled_lines() {
.unwrap();
if !util::wildcard_match(&expected, &actual) {
println!("OUTPUT\n{}\nOUTPUT", actual);
println!("EXPECTED\n{}\nEXPECTED", expected);
println!("OUTPUT\n{actual}\nOUTPUT");
println!("EXPECTED\n{expected}\nEXPECTED");
panic!("pattern match failed");
}
@ -367,8 +366,8 @@ fn no_transpiled_lines() {
.unwrap();
if !util::wildcard_match(&expected, &actual) {
println!("OUTPUT\n{}\nOUTPUT", actual);
println!("EXPECTED\n{}\nEXPECTED", expected);
println!("OUTPUT\n{actual}\nOUTPUT");
println!("EXPECTED\n{expected}\nEXPECTED");
panic!("pattern match failed");
}

View file

@ -31,8 +31,7 @@ fn fmt_test() {
.current_dir(&testdata_fmt_dir)
.arg("fmt")
.arg(format!(
"--ignore={},{},{}",
badly_formatted_js_str, badly_formatted_md_str, badly_formatted_json_str
"--ignore={badly_formatted_js_str},{badly_formatted_md_str},{badly_formatted_json_str}"
))
.arg("--check")
.arg(badly_formatted_js_str)

View file

@ -103,8 +103,7 @@ impl InspectorTester {
self.child.kill().unwrap();
panic!(
"Inspector test failed with error: {:?}.\nstdout:\n{}\nstderr:\n{}",
err, stdout, stderr
"Inspector test failed with error: {err:?}.\nstdout:\n{stdout}\nstderr:\n{stderr}"
);
}
}
@ -215,7 +214,7 @@ fn inspect_flag_with_unique_port(flag_prefix: &str) -> String {
use std::sync::atomic::Ordering;
static PORT: AtomicU16 = AtomicU16::new(9229);
let port = PORT.fetch_add(1, Ordering::Relaxed);
format!("{}=127.0.0.1:{}", flag_prefix, port)
format!("{flag_prefix}=127.0.0.1:{port}")
}
fn extract_ws_url_from_stderr(
@ -508,7 +507,7 @@ async fn inspector_does_not_hang() {
.await;
tester
.assert_received_messages(
&[&format!(r#"{{"id":{},"result":{{}}}}"#, request_id)],
&[&format!(r#"{{"id":{request_id},"result":{{}}}}"#)],
&[r#"{"method":"Debugger.resumed","params":{}}"#],
)
.await;

View file

@ -103,7 +103,7 @@ pub fn ensure_directory_specifier(
) -> ModuleSpecifier {
let path = specifier.path();
if !path.ends_with('/') {
let new_path = format!("{}/", path);
let new_path = format!("{path}/");
specifier.set_path(&new_path);
}
specifier

View file

@ -673,7 +673,7 @@ fn assign_underscore_error() {
Some(vec![("NO_COLOR".to_owned(), "1".to_owned())]),
false,
);
println!("{}", out);
println!("{out}");
assert_ends_with!(
out,
"Last thrown error is no longer saved to _error.\n1\nUncaught 2\n1\n"

View file

@ -1942,9 +1942,9 @@ mod permissions {
.current_dir(&util::testdata_path())
.arg("run")
.arg("--unstable")
.arg(format!("--allow-{0}", permission))
.arg(format!("--allow-{permission}"))
.arg("run/permission_test.ts")
.arg(format!("{0}Required", permission))
.arg(format!("{permission}Required"))
.spawn()
.unwrap()
.wait()
@ -1959,10 +1959,7 @@ mod permissions {
for permission in &util::PERMISSION_VARIANTS {
let (_, err) = util::run_and_collect_output(
false,
&format!(
"run --unstable run/permission_test.ts {0}Required",
permission
),
&format!("run --unstable run/permission_test.ts {permission}Required"),
None,
None,
false,
@ -2100,7 +2097,7 @@ mod permissions {
let status = util::deno_cmd()
.current_dir(&util::testdata_path())
.arg("run")
.arg(format!("--allow-{0}={1},{2}", permission, test_dir, js_dir))
.arg(format!("--allow-{permission}={test_dir},{js_dir}"))
.arg("run/complex_permissions_test.ts")
.arg(permission)
.arg("run/complex_permissions_test.ts")
@ -2119,7 +2116,7 @@ mod permissions {
let status = util::deno_cmd()
.current_dir(&util::testdata_path())
.arg("run")
.arg(format!("--allow-{0}=.", permission))
.arg(format!("--allow-{permission}=."))
.arg("run/complex_permissions_test.ts")
.arg(permission)
.arg("run/complex_permissions_test.ts")
@ -2138,7 +2135,7 @@ mod permissions {
let status = util::deno_cmd()
.current_dir(&util::testdata_path())
.arg("run")
.arg(format!("--allow-{0}=tls/../", permission))
.arg(format!("--allow-{permission}=tls/../"))
.arg("run/complex_permissions_test.ts")
.arg(permission)
.arg("run/complex_permissions_test.ts")
@ -3251,7 +3248,7 @@ fn basic_auth_tokens() {
assert!(stdout_str.is_empty());
let stderr_str = std::str::from_utf8(&output.stderr).unwrap().trim();
eprintln!("{}", stderr_str);
eprintln!("{stderr_str}");
assert!(stderr_str
.contains("Module not found \"http://127.0.0.1:4554/run/001_hello.js\"."));
@ -3269,7 +3266,7 @@ fn basic_auth_tokens() {
.unwrap();
let stderr_str = std::str::from_utf8(&output.stderr).unwrap().trim();
eprintln!("{}", stderr_str);
eprintln!("{stderr_str}");
assert!(output.status.success());
@ -3354,7 +3351,7 @@ async fn test_resolve_dns() {
.unwrap();
let err = String::from_utf8_lossy(&output.stderr);
let out = String::from_utf8_lossy(&output.stdout);
println!("{}", err);
println!("{err}");
assert!(output.status.success());
assert!(err.starts_with("Check file"));

View file

@ -530,7 +530,7 @@ fn update_existing_config_test() {
}
fn success_text(module_count: &str, dir: &str, has_import_map: bool) -> String {
let mut text = format!("Vendored {} into {} directory.", module_count, dir);
let mut text = format!("Vendored {module_count} into {dir} directory.");
if has_import_map {
let f = format!(
concat!(
@ -544,7 +544,7 @@ fn success_text(module_count: &str, dir: &str, has_import_map: bool) -> String {
dir.to_string()
}
);
write!(text, "{}", f).unwrap();
write!(text, "{f}").unwrap();
}
text
}

View file

@ -74,14 +74,14 @@ fn child_lines(
.lines()
.map(|r| {
let line = r.unwrap();
eprintln!("STDOUT: {}", line);
eprintln!("STDOUT: {line}");
line
});
let stderr_lines = std::io::BufReader::new(child.stderr.take().unwrap())
.lines()
.map(|r| {
let line = r.unwrap();
eprintln!("STDERR: {}", line);
eprintln!("STDERR: {line}");
line
});
(stdout_lines, stderr_lines)

View file

@ -413,7 +413,7 @@ impl CoverageReporter for LcovCoverageReporter {
.ok()
.and_then(|p| p.to_str().map(|p| p.to_string()))
.unwrap_or_else(|| coverage_report.url.to_string());
writeln!(out_writer, "SF:{}", file_path)?;
writeln!(out_writer, "SF:{file_path}")?;
for function in &coverage_report.named_functions {
writeln!(
@ -433,13 +433,13 @@ impl CoverageReporter for LcovCoverageReporter {
}
let functions_found = coverage_report.named_functions.len();
writeln!(out_writer, "FNF:{}", functions_found)?;
writeln!(out_writer, "FNF:{functions_found}")?;
let functions_hit = coverage_report
.named_functions
.iter()
.filter(|f| f.execution_count > 0)
.count();
writeln!(out_writer, "FNH:{}", functions_hit)?;
writeln!(out_writer, "FNH:{functions_hit}")?;
for branch in &coverage_report.branches {
let taken = if let Some(taken) = &branch.taken {
@ -459,10 +459,10 @@ impl CoverageReporter for LcovCoverageReporter {
}
let branches_found = coverage_report.branches.len();
writeln!(out_writer, "BRF:{}", branches_found)?;
writeln!(out_writer, "BRF:{branches_found}")?;
let branches_hit =
coverage_report.branches.iter().filter(|b| b.is_hit).count();
writeln!(out_writer, "BRH:{}", branches_hit)?;
writeln!(out_writer, "BRH:{branches_hit}")?;
for (index, count) in &coverage_report.found_lines {
writeln!(out_writer, "DA:{},{}", index + 1, count)?;
}
@ -472,10 +472,10 @@ impl CoverageReporter for LcovCoverageReporter {
.iter()
.filter(|(_, count)| *count != 0)
.count();
writeln!(out_writer, "LH:{}", lines_hit)?;
writeln!(out_writer, "LH:{lines_hit}")?;
let lines_found = coverage_report.found_lines.len();
writeln!(out_writer, "LF:{}", lines_found)?;
writeln!(out_writer, "LF:{lines_found}")?;
writeln!(out_writer, "end_of_record")?;
Ok(())
@ -664,7 +664,7 @@ pub async fn cover_files(
ps.file_fetcher
.fetch_cached(&module_specifier, 10)
.with_context(|| {
format!("Failed to fetch \"{}\" from cache.", module_specifier)
format!("Failed to fetch \"{module_specifier}\" from cache.")
})?
};
let file = maybe_file.ok_or_else(|| {

View file

@ -69,7 +69,7 @@ pub async fn print_docs(
local: PathBuf::from("./$deno$doc.ts"),
maybe_types: None,
media_type: MediaType::TypeScript,
source: format!("export * from \"{}\";", module_specifier).into(),
source: format!("export * from \"{module_specifier}\";").into(),
specifier: root_specifier.clone(),
maybe_headers: None,
};

View file

@ -183,7 +183,7 @@ fn format_markdown(
dprint_plugin_json::format_text(text, &json_config)
} else {
let fake_filename =
PathBuf::from(format!("deno_fmt_stdin.{}", extension));
PathBuf::from(format!("deno_fmt_stdin.{extension}"));
let mut codeblock_config =
get_resolved_typescript_config(fmt_options);
codeblock_config.line_width = line_width;
@ -287,13 +287,13 @@ async fn check_source_files(
warn!("Error checking: {}", file_path.to_string_lossy());
warn!(
"{}",
format!("{}", e)
format!("{e}")
.split('\n')
.map(|l| {
if l.trim().is_empty() {
String::new()
} else {
format!(" {}", l)
format!(" {l}")
}
})
.collect::<Vec<_>>()
@ -317,8 +317,7 @@ async fn check_source_files(
} else {
let not_formatted_files_str = files_str(not_formatted_files_count);
Err(generic_error(format!(
"Found {} not formatted {} in {}",
not_formatted_files_count, not_formatted_files_str, checked_files_str,
"Found {not_formatted_files_count} not formatted {not_formatted_files_str} in {checked_files_str}",
)))
}
}
@ -369,7 +368,7 @@ async fn format_source_files(
Err(e) => {
let _g = output_lock.lock();
eprintln!("Error formatting: {}", file_path.to_string_lossy());
eprintln!(" {}", e);
eprintln!(" {e}");
}
}
Ok(())
@ -719,7 +718,7 @@ mod test {
&PathBuf::from("mod.ts"),
"1",
&Default::default(),
|_, file_text, _| Ok(Some(format!("1{}", file_text))),
|_, file_text, _| Ok(Some(format!("1{file_text}"))),
)
.unwrap();
}

View file

@ -266,10 +266,7 @@ fn print_tree_node<TWrite: Write>(
writeln!(
writer,
"{} {}",
colors::gray(format!(
"{}{}─{}",
prefix, sibling_connector, child_connector
)),
colors::gray(format!("{prefix}{sibling_connector}{child_connector}")),
child.text
)?;
let child_prefix = format!(

View file

@ -18,7 +18,7 @@ fn create_file(
.write(true)
.create_new(true)
.open(dir.join(filename))
.with_context(|| format!("Failed to create {} file", filename))?;
    .with_context(|| format!("Failed to create {filename} file"))?;
file.write_all(content.as_bytes())?;
Ok(())
}

View file

@ -41,8 +41,7 @@ fn validate_name(exec_name: &str) -> Result<(), AnyError> {
Ok(())
} else {
Err(generic_error(format!(
"Invalid executable name: {}",
exec_name
"Invalid executable name: {exec_name}"
)))
}
}
@ -53,11 +52,8 @@ fn validate_name(exec_name: &str) -> Result<(), AnyError> {
/// A second compatible with git bash / MINGW64
/// Generate batch script to satisfy that.
fn generate_executable_file(shim_data: &ShimData) -> Result<(), AnyError> {
let args: Vec<String> = shim_data
.args
.iter()
.map(|c| format!("\"{}\"", c))
.collect();
let args: Vec<String> =
shim_data.args.iter().map(|c| format!("\"{c}\"")).collect();
let template = format!(
"% generated by deno install %\n@deno {} %*\n",
args
@ -122,7 +118,7 @@ fn get_installer_root() -> Result<PathBuf, io::Error> {
.ok_or_else(|| {
io::Error::new(
io::ErrorKind::NotFound,
format!("${} is not defined", home_env_var),
format!("${home_env_var} is not defined"),
)
})?;
home_path.push(".deno");
@ -201,7 +197,7 @@ pub fn uninstall(name: String, root: Option<PathBuf>) -> Result<(), AnyError> {
}
if !removed {
return Err(generic_error(format!("No installation found for {}", name)));
return Err(generic_error(format!("No installation found for {name}")));
}
// There might be some extra files to delete
@ -339,7 +335,7 @@ fn resolve_shim_data(
Level::Debug => "debug",
Level::Info => "info",
_ => {
return Err(generic_error(format!("invalid log level {}", log_level)))
return Err(generic_error(format!("invalid log level {log_level}")))
}
};
executable_args.push(log_level.to_string());
@ -388,11 +384,11 @@ fn resolve_shim_data(
}
if let Some(inspect) = flags.inspect {
executable_args.push(format!("--inspect={}", inspect));
executable_args.push(format!("--inspect={inspect}"));
}
if let Some(inspect_brk) = flags.inspect_brk {
executable_args.push(format!("--inspect-brk={}", inspect_brk));
executable_args.push(format!("--inspect-brk={inspect_brk}"));
}
if let Some(import_map_path) = &flags.import_map_path {
@ -408,7 +404,7 @@ fn resolve_shim_data(
extra_files.push((
copy_path,
fs::read_to_string(config_path)
.with_context(|| format!("error reading {}", config_path))?,
.with_context(|| format!("error reading {config_path}"))?,
));
} else {
executable_args.push("--no-config".to_string());
@ -1082,13 +1078,11 @@ mod tests {
assert!(file_path.exists());
let mut expected_string = format!(
"--import-map '{}' --no-config 'http://localhost:4545/cat.ts'",
import_map_url
"--import-map '{import_map_url}' --no-config 'http://localhost:4545/cat.ts'"
);
if cfg!(windows) {
expected_string = format!(
"\"--import-map\" \"{}\" \"--no-config\" \"http://localhost:4545/cat.ts\"",
import_map_url
"\"--import-map\" \"{import_map_url}\" \"--no-config\" \"http://localhost:4545/cat.ts\""
);
}

View file

@ -219,7 +219,7 @@ pub fn print_rules_list(json: bool) {
})
.collect();
let json_str = serde_json::to_string_pretty(&json_rules).unwrap();
println!("{}", json_str);
println!("{json_str}");
} else {
// The rules should still be printed even if `--quiet` option is enabled,
// so use `println!` here instead of `info!`.
@ -345,12 +345,12 @@ impl LintReporter for PrettyLintReporter {
)),
);
eprintln!("{}\n", message);
eprintln!("{message}\n");
}
fn visit_error(&mut self, file_path: &str, err: &AnyError) {
eprintln!("Error linting: {}", file_path);
eprintln!(" {}", err);
eprintln!("Error linting: {file_path}");
eprintln!(" {err}");
}
fn close(&mut self, check_count: usize) {
@ -393,8 +393,8 @@ impl LintReporter for CompactLintReporter {
}
fn visit_error(&mut self, file_path: &str, err: &AnyError) {
eprintln!("Error linting: {}", file_path);
eprintln!(" {}", err);
eprintln!("Error linting: {file_path}");
eprintln!(" {err}");
}
fn close(&mut self, check_count: usize) {

View file

@ -277,8 +277,7 @@ fn validate(input: &str) -> ValidationResult {
| (Some(Token::DollarLBrace), Token::RBrace) => {}
(Some(left), _) => {
return ValidationResult::Invalid(Some(format!(
"Mismatched pairs: {:?} is not properly closed",
left
"Mismatched pairs: {left:?} is not properly closed"
)))
}
(None, _) => {
@ -460,7 +459,7 @@ impl ReplEditor {
}
self.errored_on_history_save.store(true, Relaxed);
eprintln!("Unable to save history file: {}", e);
eprintln!("Unable to save history file: {e}");
}
}

View file

@ -112,14 +112,11 @@ pub async fn run(flags: Flags, repl_flags: ReplFlags) -> Result<i32, AnyError> {
.await;
// only output errors
if let EvaluationOutput::Error(error_text) = output {
println!(
"Error in --eval-file file \"{}\": {}",
eval_file, error_text
);
println!("Error in --eval-file file \"{eval_file}\": {error_text}");
}
}
Err(e) => {
println!("Error in --eval-file file \"{}\": {}", eval_file, e);
println!("Error in --eval-file file \"{eval_file}\": {e}");
}
}
}
@ -129,7 +126,7 @@ pub async fn run(flags: Flags, repl_flags: ReplFlags) -> Result<i32, AnyError> {
let output = repl_session.evaluate_line_and_get_output(&eval).await;
// only output errors
if let EvaluationOutput::Error(error_text) = output {
println!("Error in --eval flag: {}", error_text);
println!("Error in --eval flag: {error_text}");
}
}
@ -166,7 +163,7 @@ pub async fn run(flags: Flags, repl_flags: ReplFlags) -> Result<i32, AnyError> {
break;
}
println!("{}", output);
println!("{output}");
}
Err(ReadlineError::Interrupted) => {
if editor.should_exit_on_interrupt() {
@ -180,7 +177,7 @@ pub async fn run(flags: Flags, repl_flags: ReplFlags) -> Result<i32, AnyError> {
break;
}
Err(err) => {
println!("Error: {:?}", err);
println!("Error: {err:?}");
break;
}
}

View file

@ -419,10 +419,7 @@ impl ReplSession {
.text;
let value = self
.evaluate_expression(&format!(
"'use strict'; void 0;\n{}",
transpiled_src
))
.evaluate_expression(&format!("'use strict'; void 0;\n{transpiled_src}"))
.await?;
Ok(TsEvaluateResponse {

View file

@ -94,7 +94,7 @@ async fn get_base_binary(
}
let target = target.unwrap_or_else(|| env!("TARGET").to_string());
let binary_name = format!("deno-{}.zip", target);
let binary_name = format!("deno-{target}.zip");
let binary_path_suffix = if crate::version::is_canary() {
format!("canary/{}/{}", crate::version::GIT_COMMIT_HASH, binary_name)
@ -127,7 +127,7 @@ async fn download_base_binary(
output_directory: &Path,
binary_path_suffix: &str,
) -> Result<(), AnyError> {
let download_url = format!("https://dl.deno.land/{}", binary_path_suffix);
let download_url = format!("https://dl.deno.land/{binary_path_suffix}");
let maybe_bytes = {
let progress_bars = ProgressBar::new(ProgressBarStyle::DownloadBars);
let progress = progress_bars.update(&download_url);
@ -164,7 +164,7 @@ async fn create_standalone_binary(
let ca_data = match ps.options.ca_data() {
Some(CaData::File(ca_file)) => {
Some(fs::read(ca_file).with_context(|| format!("Reading: {}", ca_file))?)
Some(fs::read(ca_file).with_context(|| format!("Reading: {ca_file}"))?)
}
Some(CaData::Bytes(bytes)) => Some(bytes.clone()),
None => None,

View file

@ -56,7 +56,7 @@ pub async fn execute_script(
.map(|a| format!("\"{}\"", a.replace('"', "\\\"").replace('$', "\\$")))
.collect::<Vec<_>>()
.join(" ");
let script = format!("{} {}", script, additional_args);
let script = format!("{script} {additional_args}");
let script = script.trim();
log::info!(
"{} {} {}",
@ -65,7 +65,7 @@ pub async fn execute_script(
script,
);
let seq_list = deno_task_shell::parser::parse(script)
.with_context(|| format!("Error parsing script '{}'.", task_name))?;
.with_context(|| format!("Error parsing script '{task_name}'."))?;
// get the starting env vars (the PWD env var will be set by deno_task_shell)
let mut env_vars = std::env::vars().collect::<HashMap<String, String>>();
@ -81,7 +81,7 @@ pub async fn execute_script(
let exit_code = deno_task_shell::execute(seq_list, env_vars, &cwd).await;
Ok(exit_code)
} else {
eprintln!("Task not found: {}", task_name);
eprintln!("Task not found: {task_name}");
print_available_tasks(tasks_config);
Ok(1)
}

View file

@ -323,7 +323,7 @@ impl PrettyTestReporter {
if url.scheme() == "file" {
if let Some(mut r) = self.cwd.make_relative(&url) {
if !r.starts_with("../") {
r = format!("./{}", r);
r = format!("./{r}");
}
return r;
}
@ -513,7 +513,7 @@ impl PrettyTestReporter {
);
print!(" {} ...", root.name);
for name in ancestor_names {
print!(" {} ...", name);
print!(" {name} ...");
}
print!(" {} ...", description.name);
self.in_new_line = false;
@ -584,7 +584,7 @@ impl PrettyTestReporter {
}
println!("{}\n", colors::white_bold_on_red(" FAILURES "));
for failure_title in failure_titles {
println!("{}", failure_title);
println!("{failure_title}");
}
}
@ -600,7 +600,7 @@ impl PrettyTestReporter {
} else if count == 1 {
" (1 step)".to_string()
} else {
format!(" ({} steps)", count)
format!(" ({count} steps)")
}
};

View file

@ -373,7 +373,7 @@ pub async fn upgrade(
let archive_data = download_package(client, &download_url)
.await
.with_context(|| format!("Failed downloading {}", download_url))?;
.with_context(|| format!("Failed downloading {download_url}"))?;
log::info!("Deno is upgrading to version {}", &install_version);
@ -531,7 +531,7 @@ pub fn unpack_into_dir(
})?
.wait()?
}
ext => panic!("Unsupported archive type: '{}'", ext),
ext => panic!("Unsupported archive type: '{ext}'"),
};
assert!(unpack_status.success());
assert!(exe_path.exists());

View file

@ -204,19 +204,15 @@ fn build_proxy_module_source(
// for simplicity, always include the `export *` statement as it won't error
// even when the module does not contain a named export
writeln!(text, "export * from \"{}\";", relative_specifier).unwrap();
writeln!(text, "export * from \"{relative_specifier}\";").unwrap();
// add a default export if one exists in the module
if let Some(parsed_source) =
parsed_source_cache.get_parsed_source_from_module(module)?
{
if has_default_export(&parsed_source) {
writeln!(
text,
"export {{ default }} from \"{}\";",
relative_specifier
)
.unwrap();
writeln!(text, "export {{ default }} from \"{relative_specifier}\";")
.unwrap();
}
}

View file

@ -322,7 +322,7 @@ fn handle_remote_dep_specifier(
if is_remote_specifier_text(text) {
let base_specifier = mappings.base_specifier(specifier);
if !text.starts_with(base_specifier.as_str()) {
panic!("Expected {} to start with {}", text, base_specifier);
panic!("Expected {text} to start with {base_specifier}");
}
let sub_path = &text[base_specifier.as_str().len()..];

View file

@ -133,9 +133,7 @@ impl Mappings {
self
.mappings
.get(specifier)
.unwrap_or_else(|| {
panic!("Could not find local path for {}", specifier)
})
.unwrap_or_else(|| panic!("Could not find local path for {specifier}"))
.to_path_buf()
}
}
@ -163,7 +161,7 @@ impl Mappings {
.iter()
.find(|s| child_specifier.as_str().starts_with(s.as_str()))
.unwrap_or_else(|| {
panic!("Could not find base specifier for {}", child_specifier)
panic!("Could not find base specifier for {child_specifier}")
})
}

View file

@ -45,7 +45,7 @@ pub fn get_unique_path(
let mut count = 2;
// case insensitive comparison so the output works on case insensitive file systems
while !unique_set.insert(path.to_string_lossy().to_lowercase()) {
path = path_with_stem_suffix(&original_path, &format!("_{}", count));
path = path_with_stem_suffix(&original_path, &format!("_{count}"));
count += 1;
}
path

View file

@ -143,7 +143,7 @@ impl From<i64> for DiagnosticCategory {
1 => DiagnosticCategory::Error,
2 => DiagnosticCategory::Suggestion,
3 => DiagnosticCategory::Message,
_ => panic!("Unknown value: {}", value),
_ => panic!("Unknown value: {value}"),
}
}
}
@ -212,7 +212,7 @@ impl Diagnostic {
};
if !category.is_empty() {
write!(f, "{}[{}]: ", code, category)
write!(f, "{code}[{category}]: ")
} else {
Ok(())
}
@ -375,12 +375,12 @@ impl fmt::Display for Diagnostics {
if i > 0 {
write!(f, "\n\n")?;
}
write!(f, "{}", item)?;
write!(f, "{item}")?;
i += 1;
}
if i > 1 {
write!(f, "\n\nFound {} errors.", i)?;
write!(f, "\n\nFound {i} errors.")?;
}
Ok(())

View file

@ -98,7 +98,7 @@ pub fn get_types_declaration_file_text(unstable: bool) -> String {
lib_names
.into_iter()
.map(|name| {
let asset_url = format!("asset:///lib.{}.d.ts", name);
let asset_url = format!("asset:///lib.{name}.d.ts");
assets.remove(&asset_url).unwrap()
})
.collect::<Vec<_>>()
@ -204,7 +204,7 @@ impl fmt::Display for Stats {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f, "Compilation statistics:")?;
for (key, value) in self.0.clone() {
writeln!(f, " {}: {}", key, value)?;
writeln!(f, " {key}: {value}")?;
}
Ok(())
@ -838,7 +838,7 @@ pub fn exec(request: Request) -> Result<Response, AnyError> {
"rootNames": root_names,
});
let request_str = request_value.to_string();
let exec_source = format!("globalThis.exec({})", request_str);
let exec_source = format!("globalThis.exec({request_str})");
runtime
.execute_script(&located_script_name!(), startup_source)

View file

@ -12,7 +12,7 @@ pub fn gen(v: &[impl AsRef<[u8]>]) -> String {
let out: Vec<String> = digest
.as_ref()
.iter()
.map(|byte| format!("{:02x}", byte))
.map(|byte| format!("{byte:02x}"))
.collect();
out.join("")
}

View file

@ -23,7 +23,7 @@ pub fn human_size(size: f64) -> String {
.unwrap()
* 1_f64;
let unit = units[exponent as usize];
format!("{}{}{}", negative, pretty_bytes, unit)
format!("{negative}{pretty_bytes}{unit}")
}
const BYTES_TO_KIB: u64 = 2u64.pow(10);
@ -41,7 +41,7 @@ pub fn human_download_size(byte_count: u64, total_bytes: u64) -> String {
fn get_in_format(byte_count: u64, conversion: u64, suffix: &str) -> String {
let converted_value = byte_count / conversion;
let decimal = (byte_count % conversion) * 100 / conversion;
format!("{}.{:0>2}{}", converted_value, decimal, suffix)
format!("{converted_value}.{decimal:0>2}{suffix}")
}
}
@ -49,7 +49,7 @@ pub fn human_download_size(byte_count: u64, total_bytes: u64) -> String {
/// represents a human readable version of that time.
pub fn human_elapsed(elapsed: u128) -> String {
if elapsed < 1_000 {
return format!("{}ms", elapsed);
return format!("{elapsed}ms");
}
if elapsed < 1_000 * 60 {
return format!("{}s", elapsed / 1000);
@ -58,7 +58,7 @@ pub fn human_elapsed(elapsed: u128) -> String {
let seconds = elapsed / 1_000;
let minutes = seconds / 60;
let seconds_remainder = seconds % 60;
format!("{}m{}s", minutes, seconds_remainder)
format!("{minutes}m{seconds_remainder}s")
}
pub fn write_to_stdout_ignore_sigpipe(

View file

@ -74,7 +74,7 @@ where
if let Err(err) = result {
let error_string = match err.downcast_ref::<JsError>() {
Some(e) => format_js_error(e),
None => format!("{:?}", err),
None => format!("{err:?}"),
};
eprintln!(
"{}: {}",
@ -130,7 +130,7 @@ pub struct PrintConfig {
fn create_print_after_restart_fn(clear_screen: bool) -> impl Fn() {
move || {
if clear_screen && atty::is(atty::Stream::Stderr) {
eprint!("{}", CLEAR_SCREEN);
eprint!("{CLEAR_SCREEN}");
}
info!(
"{} File change detected! Restarting!",

View file

@ -29,7 +29,7 @@ pub fn atomic_write_file<T: AsRef<[u8]>>(
let rand: String = (0..4)
.map(|_| format!("{:02x}", rand::random::<u8>()))
.collect();
let extension = format!("{}.tmp", rand);
let extension = format!("{rand}.tmp");
let tmp_file = filename.with_extension(extension);
write_file(&tmp_file, data, mode)?;
std::fs::rename(tmp_file, filename)?;
@ -710,13 +710,13 @@ mod tests {
.to_string();
let expected: Vec<ModuleSpecifier> = [
"http://localhost:8080",
&format!("{}/a.ts", root_dir_url),
&format!("{}/b.js", root_dir_url),
&format!("{}/c.tsx", root_dir_url),
&format!("{}/child/README.md", root_dir_url),
&format!("{}/child/e.mjs", root_dir_url),
&format!("{}/child/f.mjsx", root_dir_url),
&format!("{}/d.jsx", root_dir_url),
&format!("{root_dir_url}/a.ts"),
&format!("{root_dir_url}/b.js"),
&format!("{root_dir_url}/c.tsx"),
&format!("{root_dir_url}/child/README.md"),
&format!("{root_dir_url}/child/e.mjs"),
&format!("{root_dir_url}/child/f.mjsx"),
&format!("{root_dir_url}/d.jsx"),
"https://localhost:8080",
]
.iter()
@ -748,9 +748,9 @@ mod tests {
.unwrap();
let expected: Vec<ModuleSpecifier> = [
&format!("{}/child/README.md", root_dir_url),
&format!("{}/child/e.mjs", root_dir_url),
&format!("{}/child/f.mjsx", root_dir_url),
&format!("{root_dir_url}/child/README.md"),
&format!("{root_dir_url}/child/e.mjs"),
&format!("{root_dir_url}/child/f.mjsx"),
]
.iter()
.map(|f| ModuleSpecifier::parse(f).unwrap())

View file

@ -64,8 +64,7 @@ pub fn specifier_to_file_path(
match result {
Ok(path) => Ok(path),
Err(()) => Err(uri_error(format!(
"Invalid file path.\n Specifier: {}",
specifier
"Invalid file path.\n Specifier: {specifier}"
))),
}
}
@ -76,7 +75,7 @@ pub fn ensure_directory_specifier(
) -> ModuleSpecifier {
let path = specifier.path();
if !path.ends_with('/') {
let new_path = format!("{}/", path);
let new_path = format!("{path}/");
specifier.set_path(&new_path);
}
specifier
@ -135,7 +134,7 @@ pub fn relative_specifier(
Some(if text.starts_with("../") || text.starts_with("./") {
text
} else {
format!("./{}", text)
format!("./{text}")
})
}
@ -170,12 +169,12 @@ pub fn path_with_stem_suffix(path: &Path, suffix: &str) -> PathBuf {
ext
))
} else {
path.with_file_name(format!("{}{}.{}", file_stem, suffix, ext))
path.with_file_name(format!("{file_stem}{suffix}.{ext}"))
};
}
}
path.with_file_name(format!("{}{}", file_name, suffix))
path.with_file_name(format!("{file_name}{suffix}"))
} else {
path.with_file_name(suffix)
}
@ -380,9 +379,7 @@ mod test {
assert_eq!(
actual.as_deref(),
expected,
"from: \"{}\" to: \"{}\"",
from_str,
to_str
"from: \"{from_str}\" to: \"{to_str}\""
);
}
}

View file

@ -154,7 +154,7 @@ fn get_elapsed_text(elapsed: Duration) -> String {
let elapsed_secs = elapsed.as_secs();
let seconds = elapsed_secs % 60;
let minutes = elapsed_secs / 60;
format!("[{:0>2}:{:0>2}]", minutes, seconds)
format!("[{minutes:0>2}:{seconds:0>2}]")
}
#[cfg(test)]

View file

@ -39,7 +39,7 @@ pub fn convert_to_utf8<'a>(
.ok_or_else(|| ErrorKind::InvalidData.into()),
None => Err(Error::new(
ErrorKind::InvalidInput,
format!("Unsupported charset: {}", charset),
format!("Unsupported charset: {charset}"),
)),
}
}

View file

@ -36,7 +36,7 @@ pub fn init_v8_flags(v8_flags: &[String], env_v8_flags: Vec<String>) {
.collect::<Vec<_>>();
if !unrecognized_v8_flags.is_empty() {
for f in unrecognized_v8_flags {
eprintln!("error: V8 did not recognize flag '{}'", f);
eprintln!("error: V8 did not recognize flag '{f}'");
}
eprintln!("\nFor a list of V8 flags, use '--v8-flags=--help'");
std::process::exit(1);

View file

@ -791,10 +791,10 @@ mod tests {
let mut worker = create_test_worker();
let result = worker.execute_main_module(&module_specifier).await;
if let Err(err) = result {
eprintln!("execute_mod err {:?}", err);
eprintln!("execute_mod err {err:?}");
}
if let Err(e) = worker.run_event_loop(false).await {
panic!("Future got unexpected error: {:?}", e);
panic!("Future got unexpected error: {e:?}");
}
}
@ -808,10 +808,10 @@ mod tests {
let mut worker = create_test_worker();
let result = worker.execute_main_module(&module_specifier).await;
if let Err(err) = result {
eprintln!("execute_mod err {:?}", err);
eprintln!("execute_mod err {err:?}");
}
if let Err(e) = worker.run_event_loop(false).await {
panic!("Future got unexpected error: {:?}", e);
panic!("Future got unexpected error: {e:?}");
}
}

View file

@ -577,7 +577,7 @@ mod internal {
pub fn add(self, mode: BorrowMode) -> BorrowCount {
match self.try_add(mode) {
Some(value) => value,
None => panic!("Can't add {:?} to {:?}", mode, self),
None => panic!("Can't add {mode:?} to {self:?}"),
}
}
@ -596,7 +596,7 @@ mod internal {
pub fn remove(self, mode: BorrowMode) -> BorrowCount {
match self.try_remove(mode) {
Some(value) => value,
None => panic!("Can't remove {:?} from {:?}", mode, self),
None => panic!("Can't remove {mode:?} from {self:?}"),
}
}
}

View file

@ -613,8 +613,7 @@ pub fn module_resolve_callback<'s>(
}
let msg = format!(
r#"Cannot resolve module "{}" from "{}""#,
specifier_str, referrer_name
r#"Cannot resolve module "{specifier_str}" from "{referrer_name}""#
);
throw_type_error(scope, msg);
None

View file

@ -45,7 +45,7 @@ pub fn range_error(message: impl Into<Cow<'static, str>>) -> Error {
}
pub fn invalid_hostname(hostname: &str) -> Error {
type_error(format!("Invalid hostname: '{}'", hostname))
type_error(format!("Invalid hostname: '{hostname}'"))
}
pub fn uri_error(message: impl Into<Cow<'static, str>>) -> Error {
@ -109,7 +109,7 @@ pub fn to_v8_error<'a>(
let cb = cb.open(tc_scope);
let this = v8::undefined(tc_scope).into();
let class = v8::String::new(tc_scope, get_class(error)).unwrap();
let message = v8::String::new(tc_scope, &format!("{:#}", error)).unwrap();
let message = v8::String::new(tc_scope, &format!("{error:#}")).unwrap();
let mut args = vec![class.into(), message.into()];
if let Some(code) = crate::error_codes::get_error_code(error) {
args.push(v8::String::new(tc_scope, code).unwrap().into());
@ -339,11 +339,11 @@ impl JsError {
let message_prop = e.message.clone().unwrap_or_default();
let exception_message = exception_message.unwrap_or_else(|| {
if !name.is_empty() && !message_prop.is_empty() {
format!("Uncaught {}: {}", name, message_prop)
format!("Uncaught {name}: {message_prop}")
} else if !name.is_empty() {
format!("Uncaught {}", name)
format!("Uncaught {name}")
} else if !message_prop.is_empty() {
format!("Uncaught {}", message_prop)
format!("Uncaught {message_prop}")
} else {
"Uncaught".to_string()
}
@ -509,7 +509,7 @@ fn format_source_loc(
) -> String {
let line_number = line_number;
let column_number = column_number;
format!("{}:{}:{}", file_name, line_number, column_number)
format!("{file_name}:{line_number}:{column_number}")
}
impl Display for JsError {
@ -517,7 +517,7 @@ impl Display for JsError {
if let Some(stack) = &self.stack {
let stack_lines = stack.lines();
if stack_lines.count() > 1 {
return write!(f, "{}", stack);
return write!(f, "{stack}");
}
}
write!(f, "{}", self.exception_message)?;
@ -527,7 +527,7 @@ impl Display for JsError {
(&frame.file_name, frame.line_number, frame.column_number)
{
let source_loc = format_source_loc(f_, l, c);
write!(f, "\n at {}", source_loc)?;
write!(f, "\n at {source_loc}")?;
}
}
Ok(())
@ -568,8 +568,8 @@ pub(crate) fn to_v8_type_error(
/// of `instanceof`. `Value::is_native_error()` also checks for static class
/// inheritance rather than just scanning the prototype chain, which doesn't
/// work with our WebIDL implementation of `DOMException`.
pub(crate) fn is_instance_of_error<'s>(
scope: &mut v8::HandleScope<'s>,
pub(crate) fn is_instance_of_error(
scope: &mut v8::HandleScope,
value: v8::Local<v8::Value>,
) -> bool {
if !value.is_object() {
@ -603,8 +603,8 @@ pub(crate) fn is_instance_of_error<'s>(
/// NOTE: There is currently no way to detect `AggregateError` via `rusty_v8`,
/// as v8 itself doesn't expose `v8__Exception__AggregateError`,
/// and we cannot create bindings for it. This forces us to rely on `name` inference.
pub(crate) fn is_aggregate_error<'s>(
scope: &mut v8::HandleScope<'s>,
pub(crate) fn is_aggregate_error(
scope: &mut v8::HandleScope,
value: v8::Local<v8::Value>,
) -> bool {
let mut maybe_prototype = Some(value);

View file

@ -18,7 +18,7 @@ fn main() {
let output: serde_json::Value =
eval(&mut runtime, code).expect("Eval failed");
println!("Output: {:?}", output);
println!("Output: {output:?}");
let expected_output = serde_json::json!(10);
assert_eq!(expected_output, output);
@ -40,9 +40,9 @@ fn eval(
match deserialized_value {
Ok(value) => Ok(value),
Err(err) => Err(format!("Cannot deserialize value: {:?}", err)),
Err(err) => Err(format!("Cannot deserialize value: {err:?}")),
}
}
Err(err) => Err(format!("Evaling error: {:?}", err)),
Err(err) => Err(format!("Evaling error: {err:?}")),
}
}

View file

@ -13,7 +13,7 @@ fn main() -> Result<(), Error> {
std::process::exit(1);
}
let main_url = &args[1];
println!("Run {}", main_url);
println!("Run {main_url}");
let mut js_runtime = JsRuntime::new(RuntimeOptions {
module_loader: Some(Rc::new(FsModuleLoader)),

View file

@ -65,7 +65,7 @@ fn main() {
#[op]
fn op_schedule_task(state: &mut OpState, i: u8) -> Result<(), Error> {
let tx = state.borrow_mut::<mpsc::UnboundedSender<Task>>();
tx.unbounded_send(Box::new(move || println!("Hello, world! x{}", i)))
tx.unbounded_send(Box::new(move || println!("Hello, world! x{i}")))
.expect("unbounded_send failed");
Ok(())
}

View file

@ -99,7 +99,7 @@ fn main() -> Result<(), Error> {
std::process::exit(1);
}
let main_url = &args[1];
println!("Run {}", main_url);
println!("Run {main_url}");
let mut js_runtime = JsRuntime::new(RuntimeOptions {
module_loader: Some(Rc::new(TypescriptModuleLoader)),

View file

@ -32,17 +32,17 @@ impl Error for ModuleResolutionError {
impl fmt::Display for ModuleResolutionError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
InvalidUrl(ref err) => write!(f, "invalid URL: {}", err),
InvalidUrl(ref err) => write!(f, "invalid URL: {err}"),
InvalidBaseUrl(ref err) => {
write!(f, "invalid base URL for relative import: {}", err)
write!(f, "invalid base URL for relative import: {err}")
}
InvalidPath(ref path) => write!(f, "invalid module path: {:?}", path),
InvalidPath(ref path) => write!(f, "invalid module path: {path:?}"),
ImportPrefixMissing(ref specifier, ref maybe_referrer) => write!(
f,
"Relative import path \"{}\" not prefixed with / or ./ or ../{}",
specifier,
match maybe_referrer {
Some(referrer) => format!(" from \"{}\"", referrer),
Some(referrer) => format!(" from \"{referrer}\""),
None => String::new(),
}
),
@ -425,7 +425,7 @@ mod tests {
]);
// Relative local path.
let expected_url = format!("file://{}/tests/006_url_imports.ts", cwd_str);
let expected_url = format!("file://{cwd_str}/tests/006_url_imports.ts");
tests.extend(vec![
("tests/006_url_imports.ts", expected_url.to_string()),
("./tests/006_url_imports.ts", expected_url.to_string()),

View file

@ -51,7 +51,7 @@ pub(crate) fn validate_import_assertions(
if key == "type" && !SUPPORTED_TYPE_ASSERTIONS.contains(&value.as_str()) {
let message = v8::String::new(
scope,
&format!("\"{}\" is not a valid module type.", value),
&format!("\"{value}\" is not a valid module type."),
)
.unwrap();
let exception = v8::Exception::type_error(scope, message);
@ -318,8 +318,7 @@ impl ModuleLoader for FsModuleLoader {
async move {
let path = module_specifier.to_file_path().map_err(|_| {
generic_error(format!(
"Provided module specifier \"{}\" is not a file URL.",
module_specifier
"Provided module specifier \"{module_specifier}\" is not a file URL."
))
})?;
let module_type = if let Some(extension) = path.extension() {
@ -1483,6 +1482,7 @@ import "/a.js";
let a_id_fut = runtime.load_main_module(&spec, None);
let a_id = futures::executor::block_on(a_id_fut).unwrap();
#[allow(clippy::let_underscore_future)]
let _ = runtime.mod_evaluate(a_id);
futures::executor::block_on(runtime.run_event_loop(false)).unwrap();
let l = loads.lock();
@ -1662,6 +1662,7 @@ import "/a.js";
runtime.instantiate_module(mod_a).unwrap();
assert_eq!(DISPATCH_COUNT.load(Ordering::Relaxed), 0);
#[allow(clippy::let_underscore_future)]
let _ = runtime.mod_evaluate(mod_a);
assert_eq!(DISPATCH_COUNT.load(Ordering::Relaxed), 1);
}
@ -2042,6 +2043,7 @@ import "/a.js";
let result = runtime.load_main_module(&spec, None).await;
assert!(result.is_ok());
let circular1_id = result.unwrap();
#[allow(clippy::let_underscore_future)]
let _ = runtime.mod_evaluate(circular1_id);
runtime.run_event_loop(false).await.unwrap();
@ -2122,6 +2124,7 @@ import "/a.js";
let result = runtime.load_main_module(&spec, None).await;
assert!(result.is_ok());
let redirect1_id = result.unwrap();
#[allow(clippy::let_underscore_future)]
let _ = runtime.mod_evaluate(redirect1_id);
runtime.run_event_loop(false).await.unwrap();
let l = loads.lock();
@ -2280,6 +2283,7 @@ if (import.meta.url != 'file:///main_with_code.js') throw Error();
.boxed_local();
let main_id = futures::executor::block_on(main_id_fut).unwrap();
#[allow(clippy::let_underscore_future)]
let _ = runtime.mod_evaluate(main_id);
futures::executor::block_on(runtime.run_event_loop(false)).unwrap();
@ -2397,6 +2401,7 @@ if (import.meta.url != 'file:///main_with_code.js') throw Error();
.boxed_local();
let main_id = futures::executor::block_on(main_id_fut).unwrap();
#[allow(clippy::let_underscore_future)]
let _ = runtime.mod_evaluate(main_id);
futures::executor::block_on(runtime.run_event_loop(false)).unwrap();
@ -2412,6 +2417,7 @@ if (import.meta.url != 'file:///main_with_code.js') throw Error();
.boxed_local();
let side_id = futures::executor::block_on(side_id_fut).unwrap();
#[allow(clippy::let_underscore_future)]
let _ = runtime.mod_evaluate(side_id);
futures::executor::block_on(runtime.run_event_loop(false)).unwrap();
}
@ -2440,6 +2446,7 @@ if (import.meta.url != 'file:///main_with_code.js') throw Error();
.boxed_local();
let main_id = futures::executor::block_on(main_id_fut).unwrap();
#[allow(clippy::let_underscore_future)]
let _ = runtime.mod_evaluate(main_id);
futures::executor::block_on(runtime.run_event_loop(false)).unwrap();
runtime.snapshot()
@ -2479,6 +2486,7 @@ if (import.meta.url != 'file:///main_with_code.js') throw Error();
.boxed_local();
let main_id = futures::executor::block_on(main_id_fut).unwrap();
#[allow(clippy::let_underscore_future)]
let _ = runtime.mod_evaluate(main_id);
futures::executor::block_on(runtime.run_event_loop(false)).unwrap();
runtime.snapshot()

View file

@ -134,7 +134,7 @@ impl OpError {
pub fn new(get_class: GetErrorClassFn, err: Error) -> Self {
Self {
class_name: (get_class)(&err),
message: format!("{:#}", err),
message: format!("{err:#}"),
code: crate::error_codes::get_error_code(&err),
}
}

View file

@ -471,7 +471,7 @@ fn op_serialize(
if buf.was_detached() {
return Err(custom_error(
"DOMExceptionOperationError",
format!("ArrayBuffer at index {} is already detached", index),
format!("ArrayBuffer at index {index} is already detached"),
));
}
@ -593,8 +593,8 @@ fn op_get_promise_details<'a>(
}
#[op(v8)]
fn op_set_promise_hooks<'a>(
scope: &mut v8::HandleScope<'a>,
fn op_set_promise_hooks(
scope: &mut v8::HandleScope,
init_cb: serde_v8::Value,
before_cb: serde_v8::Value,
after_cb: serde_v8::Value,

Some files were not shown because too many files have changed in this diff Show more