Mirror of https://github.com/denoland/deno.git (synced 2024-11-21 15:04:11 -05:00)
chore: enable clippy unused_async rule (#22834)

Commit ad6b00a2bf (parent c38c14f51f)
38 changed files with 411 additions and 408 deletions
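
Background: clippy's unused_async lint fires on an async fn whose body never awaits anything. This commit either drops the async from such functions (removing the .await at their call sites) or, where the signature has to stay async, adds an explicit #[allow(clippy::unused_async)]. A minimal sketch of what the lint catches, using illustrative names rather than code from this diff:

// Before: flagged by clippy::unused_async, because nothing in the body is awaited.
pub async fn add(a: u32, b: u32) -> u32 {
  a + b
}

// After: a plain synchronous function; callers drop their .await.
pub fn add(a: u32, b: u32) -> u32 {
  a + b
}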
@@ -83,20 +83,17 @@ impl CliFactoryBuilder {
     }
   }
 
-  pub async fn build_from_flags(
-    self,
-    flags: Flags,
-  ) -> Result<CliFactory, AnyError> {
+  pub fn build_from_flags(self, flags: Flags) -> Result<CliFactory, AnyError> {
     Ok(self.build_from_cli_options(Arc::new(CliOptions::from_flags(flags)?)))
   }
 
-  pub async fn build_from_flags_for_watcher(
+  pub fn build_from_flags_for_watcher(
     mut self,
     flags: Flags,
     watcher_communicator: Arc<WatcherCommunicator>,
   ) -> Result<CliFactory, AnyError> {
     self.watcher_communicator = Some(watcher_communicator);
-    self.build_from_flags(flags).await
+    self.build_from_flags(flags)
   }
 
   pub fn build_from_cli_options(self, options: Arc<CliOptions>) -> CliFactory {
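
Caller-side effect of the change above: the builder methods now return their Result directly, so call sites simply drop the .await. A sketch of a hypothetical caller inside the same codebase (not a line from this diff):

// Before: let factory = CliFactoryBuilder::new().build_from_flags(flags).await?;
fn make_factory(flags: Flags) -> Result<CliFactory, AnyError> {
  CliFactoryBuilder::new().build_from_flags(flags)
}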
@@ -190,8 +187,8 @@ pub struct CliFactory {
 }
 
 impl CliFactory {
-  pub async fn from_flags(flags: Flags) -> Result<Self, AnyError> {
-    CliFactoryBuilder::new().build_from_flags(flags).await
+  pub fn from_flags(flags: Flags) -> Result<Self, AnyError> {
+    CliFactoryBuilder::new().build_from_flags(flags)
   }
 
   pub fn from_cli_options(options: Arc<CliOptions>) -> Self {
@@ -415,7 +415,7 @@ pub fn collect_test(
 }
 
 /// Return tsc navigation tree code lenses.
-pub async fn collect_tsc(
+pub fn collect_tsc(
   specifier: &ModuleSpecifier,
   code_lens_settings: &CodeLensSettings,
   line_index: Arc<LineIndex>,
@@ -786,7 +786,7 @@ impl Inner {
     })
   }
 
-  pub async fn update_cache(&mut self) -> Result<(), AnyError> {
+  pub fn update_cache(&mut self) -> Result<(), AnyError> {
     let mark = self.performance.mark("lsp.update_cache");
     self.performance.measure(mark);
     let maybe_cache = &self.config.workspace_settings().cache;
@@ -816,23 +816,17 @@ impl Inner {
       None
     };
     if self.maybe_global_cache_path != maybe_global_cache_path {
-      self
-        .set_new_global_cache_path(maybe_global_cache_path)
-        .await?;
+      self.set_new_global_cache_path(maybe_global_cache_path)?;
     }
     Ok(())
   }
 
-  async fn recreate_http_client_and_dependents(
-    &mut self,
-  ) -> Result<(), AnyError> {
-    self
-      .set_new_global_cache_path(self.maybe_global_cache_path.clone())
-      .await
+  fn recreate_http_client_and_dependents(&mut self) -> Result<(), AnyError> {
+    self.set_new_global_cache_path(self.maybe_global_cache_path.clone())
   }
 
   /// Recreates the http client and all dependent structs.
-  async fn set_new_global_cache_path(
+  fn set_new_global_cache_path(
     &mut self,
     new_cache_path: Option<PathBuf>,
   ) -> Result<(), AnyError> {
@@ -1025,21 +1019,21 @@ impl Inner {
 
   async fn update_registries(&mut self) -> Result<(), AnyError> {
     let mark = self.performance.mark("lsp.update_registries");
-    self.recreate_http_client_and_dependents().await?;
+    self.recreate_http_client_and_dependents()?;
     let workspace_settings = self.config.workspace_settings();
     for (registry, enabled) in workspace_settings.suggest.imports.hosts.iter() {
       if *enabled {
         lsp_log!("Enabling import suggestions for: {}", registry);
         self.module_registries.enable(registry).await?;
       } else {
-        self.module_registries.disable(registry).await?;
+        self.module_registries.disable(registry)?;
       }
     }
     self.performance.measure(mark);
     Ok(())
   }
 
-  async fn update_config_file(&mut self) -> Result<(), AnyError> {
+  fn update_config_file(&mut self) -> Result<(), AnyError> {
     self.config.clear_config_file();
     self.fmt_options = FmtOptions::new_with_base(self.initial_cwd.clone());
     self.lint_options = LintOptions::new_with_base(self.initial_cwd.clone());
@@ -1064,7 +1058,7 @@ impl Inner {
     self.config.set_config_file(config_file);
     self.lint_options = lint_options;
     self.fmt_options = fmt_options;
-    self.recreate_http_client_and_dependents().await?;
+    self.recreate_http_client_and_dependents()?;
     if let Some(config_file) = self.config.maybe_config_file() {
       if let Ok((compiler_options, _)) = config_file.to_compiler_options() {
         if let Some(compiler_options_obj) = compiler_options.as_object() {
@@ -1278,11 +1272,11 @@ impl Inner {
 
     self.update_debug_flag();
     // Check to see if we need to change the cache path
-    if let Err(err) = self.update_cache().await {
+    if let Err(err) = self.update_cache() {
      lsp_warn!("Error updating cache: {:#}", err);
      self.client.show_message(MessageType::WARNING, err);
    }
-    if let Err(err) = self.update_config_file().await {
+    if let Err(err) = self.update_config_file() {
      lsp_warn!("Error updating config file: {:#}", err);
      self.client.show_message(MessageType::WARNING, err);
    }
@@ -1349,11 +1343,11 @@ impl Inner {
     self.refresh_npm_specifiers().await;
   }
 
-  async fn shutdown(&self) -> LspResult<()> {
+  fn shutdown(&self) -> LspResult<()> {
     Ok(())
   }
 
-  async fn did_open(
+  fn did_open(
     &mut self,
     specifier: &ModuleSpecifier,
     params: DidOpenTextDocumentParams,
@ -1475,7 +1469,7 @@ impl Inner {
|
|||
};
|
||||
|
||||
self.update_debug_flag();
|
||||
if let Err(err) = self.update_cache().await {
|
||||
if let Err(err) = self.update_cache() {
|
||||
lsp_warn!("Error updating cache: {:#}", err);
|
||||
self.client.show_message(MessageType::WARNING, err);
|
||||
}
|
||||
|
@ -1483,7 +1477,7 @@ impl Inner {
|
|||
lsp_warn!("Error updating registries: {:#}", err);
|
||||
self.client.show_message(MessageType::WARNING, err);
|
||||
}
|
||||
if let Err(err) = self.update_config_file().await {
|
||||
if let Err(err) = self.update_config_file() {
|
||||
lsp_warn!("Error updating config file: {:#}", err);
|
||||
self.client.show_message(MessageType::WARNING, err);
|
||||
}
|
||||
|
@ -1601,7 +1595,7 @@ impl Inner {
|
|||
files_to_check.insert(url.clone());
|
||||
}
|
||||
// Update config.
|
||||
if let Err(err) = self.update_config_file().await {
|
||||
if let Err(err) = self.update_config_file() {
|
||||
lsp_warn!("Error updating config file: {:#}", err);
|
||||
self.client.show_message(MessageType::WARNING, err);
|
||||
}
|
||||
|
@ -2246,7 +2240,7 @@ impl Inner {
|
|||
)),
|
||||
)
|
||||
.await?;
|
||||
code_action.edit = refactor_edit_info.to_workspace_edit(self).await?;
|
||||
code_action.edit = refactor_edit_info.to_workspace_edit(self)?;
|
||||
code_action
|
||||
} else {
|
||||
// The code action doesn't need to be resolved
|
||||
|
@ -2314,7 +2308,6 @@ impl Inner {
|
|||
line_index,
|
||||
&navigation_tree,
|
||||
)
|
||||
.await
|
||||
.map_err(|err| {
|
||||
error!(
|
||||
"Error getting ts code lenses for \"{:#}\": {:#}",
|
||||
|
@ -2483,7 +2476,7 @@ impl Inner {
|
|||
.await?;
|
||||
|
||||
if let Some(definition) = maybe_definition {
|
||||
let results = definition.to_definition(line_index, self).await;
|
||||
let results = definition.to_definition(line_index, self);
|
||||
self.performance.measure(mark);
|
||||
Ok(results)
|
||||
} else {
|
||||
|
@ -2978,7 +2971,6 @@ impl Inner {
|
|||
let rename_locations = tsc::RenameLocations { locations };
|
||||
let workspace_edits = rename_locations
|
||||
.into_workspace_edit(¶ms.new_name, self)
|
||||
.await
|
||||
.map_err(|err| {
|
||||
error!("Failed to get workspace edits: {:#}", err);
|
||||
LspError::internal_error()
|
||||
|
@ -3426,7 +3418,7 @@ impl tower_lsp::LanguageServer for LanguageServer {
|
|||
|
||||
async fn shutdown(&self) -> LspResult<()> {
|
||||
self.1.cancel();
|
||||
self.0.write().await.shutdown().await
|
||||
self.0.write().await.shutdown()
|
||||
}
|
||||
|
||||
async fn did_open(&self, params: DidOpenTextDocumentParams) {
|
||||
|
@ -3441,7 +3433,7 @@ impl tower_lsp::LanguageServer for LanguageServer {
|
|||
let specifier = inner
|
||||
.url_map
|
||||
.normalize_url(¶ms.text_document.uri, LspUrlKind::File);
|
||||
let document = inner.did_open(&specifier, params).await;
|
||||
let document = inner.did_open(&specifier, params);
|
||||
if document.is_diagnosable() {
|
||||
inner.refresh_npm_specifiers().await;
|
||||
let specifiers = inner.documents.dependents(&specifier);
|
||||
|
|
|
@@ -488,7 +488,7 @@ impl ModuleRegistry {
   }
 
   /// Disable a registry, removing its configuration, if any, from memory.
-  pub async fn disable(&mut self, origin: &str) -> Result<(), AnyError> {
+  pub fn disable(&mut self, origin: &str) -> Result<(), AnyError> {
     let origin = base_url(&Url::parse(origin)?);
     self.origins.remove(&origin);
     Ok(())
@ -213,7 +213,7 @@ impl TestRun {
|
|||
let args = self.get_args();
|
||||
lsp_log!("Executing test run with arguments: {}", args.join(" "));
|
||||
let flags = flags_from_vec(args.into_iter().map(String::from).collect())?;
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let factory = CliFactory::from_flags(flags)?;
|
||||
// Various test files should not share the same permissions in terms of
|
||||
// `PermissionsContainer` - otherwise granting/revoking permissions in one
|
||||
// file would have impact on other files, which is undesirable.
|
||||
|
|
|
@ -2106,7 +2106,7 @@ pub struct RenameLocations {
|
|||
}
|
||||
|
||||
impl RenameLocations {
|
||||
pub async fn into_workspace_edit(
|
||||
pub fn into_workspace_edit(
|
||||
self,
|
||||
new_name: &str,
|
||||
language_server: &language_server::Inner,
|
||||
|
@ -2226,7 +2226,7 @@ impl DefinitionInfoAndBoundSpan {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn to_definition(
|
||||
pub fn to_definition(
|
||||
&self,
|
||||
line_index: Arc<LineIndex>,
|
||||
language_server: &language_server::Inner,
|
||||
|
@ -2609,7 +2609,7 @@ impl RefactorEditInfo {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn to_workspace_edit(
|
||||
pub fn to_workspace_edit(
|
||||
&self,
|
||||
language_server: &language_server::Inner,
|
||||
) -> LspResult<Option<lsp::WorkspaceEdit>> {
|
||||
|
|
cli/main.rs (10 changed lines)

@@ -109,7 +109,7 @@ async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
       tools::run::eval_command(flags, eval_flags).await
     }),
     DenoSubcommand::Cache(cache_flags) => spawn_subcommand(async move {
-      let factory = CliFactory::from_flags(flags).await?;
+      let factory = CliFactory::from_flags(flags)?;
       let module_load_preparer = factory.module_load_preparer().await?;
       let emitter = factory.emitter()?;
       let graph_container = factory.graph_container();
@@ -119,7 +119,7 @@ async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
       emitter.cache_module_emits(&graph_container.graph())
     }),
     DenoSubcommand::Check(check_flags) => spawn_subcommand(async move {
-      let factory = CliFactory::from_flags(flags).await?;
+      let factory = CliFactory::from_flags(flags)?;
       let module_load_preparer = factory.module_load_preparer().await?;
       module_load_preparer
         .load_and_type_check_files(&check_flags.files)
@@ -137,7 +137,11 @@ async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
       )
     }
     DenoSubcommand::Init(init_flags) => {
-      spawn_subcommand(async { tools::init::init_project(init_flags).await })
+      spawn_subcommand(async {
+        // make compiler happy since init_project is sync
+        tokio::task::yield_now().await;
+        tools::init::init_project(init_flags)
+      })
     }
     DenoSubcommand::Info(info_flags) => {
       spawn_subcommand(async { tools::info::info(flags, info_flags).await })
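
Note on the Init branch above: spawn_subcommand takes a future, and with init_project now synchronous the async block would no longer await anything. The added tokio::task::yield_now().await keeps the block genuinely asynchronous, which is all the in-diff comment ("make compiler happy") claims for it. A standalone sketch of the same pattern, with hypothetical names and plain tokio standing in for Deno's helper:

use std::future::Future;

fn do_sync_work() {
  // purely synchronous work, e.g. writing template files
}

// Hypothetical stand-in for Deno's spawn_subcommand helper.
fn spawn_job(fut: impl Future<Output = ()> + Send + 'static) {
  tokio::spawn(fut);
}

#[tokio::main]
async fn main() {
  spawn_job(async {
    // Yield once so the async block still awaits something.
    tokio::task::yield_now().await;
    do_sync_work();
  });
}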
@ -487,8 +487,7 @@ pub async fn run_benchmarks_with_watch(
|
|||
let bench_flags = bench_flags.clone();
|
||||
Ok(async move {
|
||||
let factory = CliFactoryBuilder::new()
|
||||
.build_from_flags_for_watcher(flags, watcher_communicator.clone())
|
||||
.await?;
|
||||
.build_from_flags_for_watcher(flags, watcher_communicator.clone())?;
|
||||
let cli_options = factory.cli_options();
|
||||
let bench_options = cli_options.resolve_bench_options(bench_flags)?;
|
||||
|
||||
|
|
|
@ -35,9 +35,10 @@ pub async fn bundle(
|
|||
move |flags, watcher_communicator, _changed_paths| {
|
||||
let bundle_flags = bundle_flags.clone();
|
||||
Ok(async move {
|
||||
let factory = CliFactoryBuilder::new()
|
||||
.build_from_flags_for_watcher(flags, watcher_communicator.clone())
|
||||
.await?;
|
||||
let factory = CliFactoryBuilder::new().build_from_flags_for_watcher(
|
||||
flags,
|
||||
watcher_communicator.clone(),
|
||||
)?;
|
||||
let cli_options = factory.cli_options();
|
||||
let _ = watcher_communicator.watch_paths(cli_options.watch_paths());
|
||||
bundle_action(factory, &bundle_flags).await?;
|
||||
|
@ -48,7 +49,7 @@ pub async fn bundle(
|
|||
)
|
||||
.await?;
|
||||
} else {
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let factory = CliFactory::from_flags(flags)?;
|
||||
bundle_action(factory, &bundle_flags).await?;
|
||||
}
|
||||
|
||||
|
|
|
@ -22,7 +22,7 @@ pub async fn compile(
|
|||
flags: Flags,
|
||||
compile_flags: CompileFlags,
|
||||
) -> Result<(), AnyError> {
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let factory = CliFactory::from_flags(flags)?;
|
||||
let cli_options = factory.cli_options();
|
||||
let module_graph_creator = factory.module_graph_creator().await?;
|
||||
let parsed_source_cache = factory.parsed_source_cache();
|
||||
|
|
|
@ -462,7 +462,7 @@ pub async fn cover_files(
|
|||
return Err(generic_error("No matching coverage profiles found"));
|
||||
}
|
||||
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let factory = CliFactory::from_flags(flags)?;
|
||||
let npm_resolver = factory.npm_resolver().await?;
|
||||
let file_fetcher = factory.file_fetcher()?;
|
||||
let cli_options = factory.cli_options();
|
||||
|
|
|
@ -17,7 +17,6 @@ use deno_config::glob::PathOrPatternSet;
|
|||
use deno_core::anyhow::bail;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures::FutureExt;
|
||||
use deno_doc as doc;
|
||||
use deno_graph::GraphKind;
|
||||
use deno_graph::ModuleAnalyzer;
|
||||
|
@ -73,7 +72,7 @@ async fn generate_doc_nodes_for_builtin_types(
|
|||
}
|
||||
|
||||
pub async fn doc(flags: Flags, doc_flags: DocFlags) -> Result<(), AnyError> {
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let factory = CliFactory::from_flags(flags)?;
|
||||
let cli_options = factory.cli_options();
|
||||
let module_info_cache = factory.module_info_cache()?;
|
||||
let parsed_source_cache = factory.parsed_source_cache();
|
||||
|
@ -156,8 +155,6 @@ pub async fn doc(flags: Flags, doc_flags: DocFlags) -> Result<(), AnyError> {
|
|||
};
|
||||
|
||||
generate_docs_directory(&doc_nodes_by_url, html_options, deno_ns)
|
||||
.boxed_local()
|
||||
.await
|
||||
} else {
|
||||
let modules_len = doc_nodes_by_url.len();
|
||||
let doc_nodes =
|
||||
|
@ -224,7 +221,7 @@ impl deno_doc::html::HrefResolver for DocResolver {
|
|||
}
|
||||
}
|
||||
|
||||
async fn generate_docs_directory(
|
||||
fn generate_docs_directory(
|
||||
doc_nodes_by_url: &IndexMap<ModuleSpecifier, Vec<doc::DocNode>>,
|
||||
html_options: &DocHtmlFlag,
|
||||
deno_ns: std::collections::HashSet<Vec<String>>,
|
||||
|
|
|
@ -68,7 +68,7 @@ pub async fn format(flags: Flags, fmt_flags: FmtFlags) -> Result<(), AnyError> {
|
|||
move |flags, watcher_communicator, changed_paths| {
|
||||
let fmt_flags = fmt_flags.clone();
|
||||
Ok(async move {
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let factory = CliFactory::from_flags(flags)?;
|
||||
let cli_options = factory.cli_options();
|
||||
let fmt_options = cli_options.resolve_fmt_options(fmt_flags)?;
|
||||
let files =
|
||||
|
@ -113,7 +113,7 @@ pub async fn format(flags: Flags, fmt_flags: FmtFlags) -> Result<(), AnyError> {
|
|||
)
|
||||
.await?;
|
||||
} else {
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let factory = CliFactory::from_flags(flags)?;
|
||||
let cli_options = factory.cli_options();
|
||||
let fmt_options = cli_options.resolve_fmt_options(fmt_flags)?;
|
||||
let files =
|
||||
|
|
|
@ -36,7 +36,7 @@ use crate::npm::ManagedCliNpmResolver;
|
|||
use crate::util::checksum;
|
||||
|
||||
pub async fn info(flags: Flags, info_flags: InfoFlags) -> Result<(), AnyError> {
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let factory = CliFactory::from_flags(flags)?;
|
||||
let cli_options = factory.cli_options();
|
||||
if let Some(specifier) = info_flags.file {
|
||||
let module_graph_builder = factory.module_graph_builder().await?;
|
||||
|
|
|
@ -32,7 +32,7 @@ fn create_file(
|
|||
}
|
||||
}
|
||||
|
||||
pub async fn init_project(init_flags: InitFlags) -> Result<(), AnyError> {
|
||||
pub fn init_project(init_flags: InitFlags) -> Result<(), AnyError> {
|
||||
let cwd =
|
||||
std::env::current_dir().context("Can't read current working directory.")?;
|
||||
let dir = if let Some(dir) = &init_flags.dir {
|
||||
|
|
|
@ -242,8 +242,7 @@ pub async fn install_command(
|
|||
install_flags: InstallFlags,
|
||||
) -> Result<(), AnyError> {
|
||||
// ensure the module is cached
|
||||
CliFactory::from_flags(flags.clone())
|
||||
.await?
|
||||
CliFactory::from_flags(flags.clone())?
|
||||
.module_load_preparer()
|
||||
.await?
|
||||
.load_and_type_check_files(&[install_flags.module_url.clone()])
|
||||
|
|
|
@ -54,7 +54,7 @@ pub async fn kernel(
|
|||
logger::init(Some(log::Level::Debug));
|
||||
}
|
||||
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let factory = CliFactory::from_flags(flags)?;
|
||||
let cli_options = factory.cli_options();
|
||||
let main_module =
|
||||
resolve_url_or_path("./$deno$jupyter.ts", cli_options.initial_cwd())
|
||||
|
|
|
@ -74,7 +74,7 @@ pub async fn lint(flags: Flags, lint_flags: LintFlags) -> Result<(), AnyError> {
|
|||
move |flags, watcher_communicator, changed_paths| {
|
||||
let lint_flags = lint_flags.clone();
|
||||
Ok(async move {
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let factory = CliFactory::from_flags(flags)?;
|
||||
let cli_options = factory.cli_options();
|
||||
let lint_options = cli_options.resolve_lint_options(lint_flags)?;
|
||||
let files = collect_lint_files(lint_options.files.clone()).and_then(
|
||||
|
@ -110,7 +110,7 @@ pub async fn lint(flags: Flags, lint_flags: LintFlags) -> Result<(), AnyError> {
|
|||
)
|
||||
.await?;
|
||||
} else {
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let factory = CliFactory::from_flags(flags)?;
|
||||
let cli_options = factory.cli_options();
|
||||
let is_stdin = lint_flags.is_stdin();
|
||||
let lint_options = cli_options.resolve_lint_options(lint_flags)?;
|
||||
|
|
|
@ -910,7 +910,7 @@ pub async fn publish(
|
|||
flags: Flags,
|
||||
publish_flags: PublishFlags,
|
||||
) -> Result<(), AnyError> {
|
||||
let cli_factory = CliFactory::from_flags(flags).await?;
|
||||
let cli_factory = CliFactory::from_flags(flags)?;
|
||||
|
||||
let auth_method =
|
||||
get_auth_method(publish_flags.token, publish_flags.dry_run)?;
|
||||
|
|
|
@ -26,7 +26,7 @@ use crate::jsr::JsrFetchResolver;
|
|||
use crate::npm::NpmFetchResolver;
|
||||
|
||||
pub async fn add(flags: Flags, add_flags: AddFlags) -> Result<(), AnyError> {
|
||||
let cli_factory = CliFactory::from_flags(flags.clone()).await?;
|
||||
let cli_factory = CliFactory::from_flags(flags.clone())?;
|
||||
let cli_options = cli_factory.cli_options();
|
||||
|
||||
let Some(config_file) = cli_options.maybe_config_file() else {
|
||||
|
|
|
@ -152,7 +152,7 @@ async fn read_eval_file(
|
|||
}
|
||||
|
||||
pub async fn run(flags: Flags, repl_flags: ReplFlags) -> Result<i32, AnyError> {
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let factory = CliFactory::from_flags(flags)?;
|
||||
let cli_options = factory.cli_options();
|
||||
let main_module = cli_options.resolve_main_module()?;
|
||||
let permissions = PermissionsContainer::new(Permissions::from_options(
|
||||
|
|
|
@ -200,7 +200,7 @@ impl ReplSession {
|
|||
test_event_receiver: TestEventReceiver,
|
||||
) -> Result<Self, AnyError> {
|
||||
let language_server = ReplLanguageServer::new_initialized().await?;
|
||||
let mut session = worker.create_inspector_session().await;
|
||||
let mut session = worker.create_inspector_session();
|
||||
|
||||
worker
|
||||
.js_runtime
|
||||
|
|
|
@ -39,7 +39,7 @@ To grant permissions, set them before the script argument. For example:
|
|||
|
||||
// TODO(bartlomieju): actually I think it will also fail if there's an import
|
||||
// map specified and bare specifier is used on the command line
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let factory = CliFactory::from_flags(flags)?;
|
||||
let deno_dir = factory.deno_dir()?;
|
||||
let http_client = factory.http_client();
|
||||
let cli_options = factory.cli_options();
|
||||
|
@ -76,7 +76,7 @@ To grant permissions, set them before the script argument. For example:
|
|||
}
|
||||
|
||||
pub async fn run_from_stdin(flags: Flags) -> Result<i32, AnyError> {
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let factory = CliFactory::from_flags(flags)?;
|
||||
let cli_options = factory.cli_options();
|
||||
let main_module = cli_options.resolve_main_module()?;
|
||||
|
||||
|
@ -121,8 +121,7 @@ async fn run_with_watch(
|
|||
move |flags, watcher_communicator, _changed_paths| {
|
||||
Ok(async move {
|
||||
let factory = CliFactoryBuilder::new()
|
||||
.build_from_flags_for_watcher(flags, watcher_communicator.clone())
|
||||
.await?;
|
||||
.build_from_flags_for_watcher(flags, watcher_communicator.clone())?;
|
||||
let cli_options = factory.cli_options();
|
||||
let main_module = cli_options.resolve_main_module()?;
|
||||
|
||||
|
@ -158,7 +157,7 @@ pub async fn eval_command(
|
|||
flags: Flags,
|
||||
eval_flags: EvalFlags,
|
||||
) -> Result<i32, AnyError> {
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let factory = CliFactory::from_flags(flags)?;
|
||||
let cli_options = factory.cli_options();
|
||||
let file_fetcher = factory.file_fetcher()?;
|
||||
let main_module = cli_options.resolve_main_module()?;
|
||||
|
|
|
@ -32,7 +32,7 @@ pub async fn execute_script(
|
|||
flags: Flags,
|
||||
task_flags: TaskFlags,
|
||||
) -> Result<i32, AnyError> {
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let factory = CliFactory::from_flags(flags)?;
|
||||
let cli_options = factory.cli_options();
|
||||
let tasks_config = cli_options.resolve_tasks_config()?;
|
||||
let maybe_package_json = cli_options.maybe_package_json();
|
||||
|
|
|
@ -1479,7 +1479,7 @@ pub async fn run_tests(
|
|||
flags: Flags,
|
||||
test_flags: TestFlags,
|
||||
) -> Result<(), AnyError> {
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let factory = CliFactory::from_flags(flags)?;
|
||||
let cli_options = factory.cli_options();
|
||||
let test_options = cli_options.resolve_test_options(test_flags)?;
|
||||
let file_fetcher = factory.file_fetcher()?;
|
||||
|
@ -1577,8 +1577,7 @@ pub async fn run_tests_with_watch(
|
|||
let test_flags = test_flags.clone();
|
||||
Ok(async move {
|
||||
let factory = CliFactoryBuilder::new()
|
||||
.build_from_flags_for_watcher(flags, watcher_communicator.clone())
|
||||
.await?;
|
||||
.build_from_flags_for_watcher(flags, watcher_communicator.clone())?;
|
||||
let cli_options = factory.cli_options();
|
||||
let test_options = cli_options.resolve_test_options(test_flags)?;
|
||||
|
||||
|
|
|
@ -376,7 +376,7 @@ pub async fn upgrade(
|
|||
flags: Flags,
|
||||
upgrade_flags: UpgradeFlags,
|
||||
) -> Result<(), AnyError> {
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let factory = CliFactory::from_flags(flags)?;
|
||||
let client = factory.http_client();
|
||||
let current_exe_path = std::env::current_exe()?;
|
||||
let output_exe_path =
|
||||
|
|
|
@ -355,7 +355,7 @@ impl CliMainWorker {
|
|||
return Ok(None);
|
||||
};
|
||||
|
||||
let session = self.worker.create_inspector_session().await;
|
||||
let session = self.worker.create_inspector_session();
|
||||
|
||||
let mut hmr_runner = setup_hmr_runner(session);
|
||||
|
||||
|
@ -379,7 +379,7 @@ impl CliMainWorker {
|
|||
return Ok(None);
|
||||
};
|
||||
|
||||
let session = self.worker.create_inspector_session().await;
|
||||
let session = self.worker.create_inspector_session();
|
||||
let mut coverage_collector = create_coverage_collector(session);
|
||||
self
|
||||
.worker
|
||||
|
|
|
@ -198,104 +198,106 @@ pub async fn op_crypto_sign_key(
|
|||
#[serde] args: SignArg,
|
||||
#[buffer] zero_copy: JsBuffer,
|
||||
) -> Result<ToJsBuffer, AnyError> {
|
||||
let data = &*zero_copy;
|
||||
let algorithm = args.algorithm;
|
||||
deno_core::unsync::spawn_blocking(move || {
|
||||
let data = &*zero_copy;
|
||||
let algorithm = args.algorithm;
|
||||
|
||||
let signature = match algorithm {
|
||||
Algorithm::RsassaPkcs1v15 => {
|
||||
use rsa::pkcs1v15::SigningKey;
|
||||
let private_key = RsaPrivateKey::from_pkcs1_der(&args.key.data)?;
|
||||
match args
|
||||
.hash
|
||||
.ok_or_else(|| type_error("Missing argument hash".to_string()))?
|
||||
{
|
||||
CryptoHash::Sha1 => {
|
||||
let signing_key = SigningKey::<Sha1>::new(private_key);
|
||||
signing_key.sign(data)
|
||||
}
|
||||
CryptoHash::Sha256 => {
|
||||
let signing_key = SigningKey::<Sha256>::new(private_key);
|
||||
signing_key.sign(data)
|
||||
}
|
||||
CryptoHash::Sha384 => {
|
||||
let signing_key = SigningKey::<Sha384>::new(private_key);
|
||||
signing_key.sign(data)
|
||||
}
|
||||
CryptoHash::Sha512 => {
|
||||
let signing_key = SigningKey::<Sha512>::new(private_key);
|
||||
signing_key.sign(data)
|
||||
let signature = match algorithm {
|
||||
Algorithm::RsassaPkcs1v15 => {
|
||||
use rsa::pkcs1v15::SigningKey;
|
||||
let private_key = RsaPrivateKey::from_pkcs1_der(&args.key.data)?;
|
||||
match args
|
||||
.hash
|
||||
.ok_or_else(|| type_error("Missing argument hash".to_string()))?
|
||||
{
|
||||
CryptoHash::Sha1 => {
|
||||
let signing_key = SigningKey::<Sha1>::new(private_key);
|
||||
signing_key.sign(data)
|
||||
}
|
||||
CryptoHash::Sha256 => {
|
||||
let signing_key = SigningKey::<Sha256>::new(private_key);
|
||||
signing_key.sign(data)
|
||||
}
|
||||
CryptoHash::Sha384 => {
|
||||
let signing_key = SigningKey::<Sha384>::new(private_key);
|
||||
signing_key.sign(data)
|
||||
}
|
||||
CryptoHash::Sha512 => {
|
||||
let signing_key = SigningKey::<Sha512>::new(private_key);
|
||||
signing_key.sign(data)
|
||||
}
|
||||
}
|
||||
.to_vec()
|
||||
}
|
||||
.to_vec()
|
||||
}
|
||||
Algorithm::RsaPss => {
|
||||
let private_key = RsaPrivateKey::from_pkcs1_der(&args.key.data)?;
|
||||
Algorithm::RsaPss => {
|
||||
let private_key = RsaPrivateKey::from_pkcs1_der(&args.key.data)?;
|
||||
|
||||
let salt_len = args
|
||||
.salt_length
|
||||
.ok_or_else(|| type_error("Missing argument saltLength".to_string()))?
|
||||
as usize;
|
||||
let salt_len = args.salt_length.ok_or_else(|| {
|
||||
type_error("Missing argument saltLength".to_string())
|
||||
})? as usize;
|
||||
|
||||
let mut rng = OsRng;
|
||||
match args
|
||||
.hash
|
||||
.ok_or_else(|| type_error("Missing argument hash".to_string()))?
|
||||
{
|
||||
CryptoHash::Sha1 => {
|
||||
let signing_key = Pss::new_with_salt::<Sha1>(salt_len);
|
||||
let hashed = Sha1::digest(data);
|
||||
signing_key.sign(Some(&mut rng), &private_key, &hashed)?
|
||||
}
|
||||
CryptoHash::Sha256 => {
|
||||
let signing_key = Pss::new_with_salt::<Sha256>(salt_len);
|
||||
let hashed = Sha256::digest(data);
|
||||
signing_key.sign(Some(&mut rng), &private_key, &hashed)?
|
||||
}
|
||||
CryptoHash::Sha384 => {
|
||||
let signing_key = Pss::new_with_salt::<Sha384>(salt_len);
|
||||
let hashed = Sha384::digest(data);
|
||||
signing_key.sign(Some(&mut rng), &private_key, &hashed)?
|
||||
}
|
||||
CryptoHash::Sha512 => {
|
||||
let signing_key = Pss::new_with_salt::<Sha512>(salt_len);
|
||||
let hashed = Sha512::digest(data);
|
||||
signing_key.sign(Some(&mut rng), &private_key, &hashed)?
|
||||
let mut rng = OsRng;
|
||||
match args
|
||||
.hash
|
||||
.ok_or_else(|| type_error("Missing argument hash".to_string()))?
|
||||
{
|
||||
CryptoHash::Sha1 => {
|
||||
let signing_key = Pss::new_with_salt::<Sha1>(salt_len);
|
||||
let hashed = Sha1::digest(data);
|
||||
signing_key.sign(Some(&mut rng), &private_key, &hashed)?
|
||||
}
|
||||
CryptoHash::Sha256 => {
|
||||
let signing_key = Pss::new_with_salt::<Sha256>(salt_len);
|
||||
let hashed = Sha256::digest(data);
|
||||
signing_key.sign(Some(&mut rng), &private_key, &hashed)?
|
||||
}
|
||||
CryptoHash::Sha384 => {
|
||||
let signing_key = Pss::new_with_salt::<Sha384>(salt_len);
|
||||
let hashed = Sha384::digest(data);
|
||||
signing_key.sign(Some(&mut rng), &private_key, &hashed)?
|
||||
}
|
||||
CryptoHash::Sha512 => {
|
||||
let signing_key = Pss::new_with_salt::<Sha512>(salt_len);
|
||||
let hashed = Sha512::digest(data);
|
||||
signing_key.sign(Some(&mut rng), &private_key, &hashed)?
|
||||
}
|
||||
}
|
||||
.to_vec()
|
||||
}
|
||||
.to_vec()
|
||||
}
|
||||
Algorithm::Ecdsa => {
|
||||
let curve: &EcdsaSigningAlgorithm =
|
||||
args.named_curve.ok_or_else(not_supported)?.into();
|
||||
Algorithm::Ecdsa => {
|
||||
let curve: &EcdsaSigningAlgorithm =
|
||||
args.named_curve.ok_or_else(not_supported)?.into();
|
||||
|
||||
let rng = RingRand::SystemRandom::new();
|
||||
let key_pair = EcdsaKeyPair::from_pkcs8(curve, &args.key.data, &rng)?;
|
||||
// We only support P256-SHA256 & P384-SHA384. These are recommended signature pairs.
|
||||
// https://briansmith.org/rustdoc/ring/signature/index.html#statics
|
||||
if let Some(hash) = args.hash {
|
||||
match hash {
|
||||
CryptoHash::Sha256 | CryptoHash::Sha384 => (),
|
||||
_ => return Err(type_error("Unsupported algorithm")),
|
||||
}
|
||||
};
|
||||
let rng = RingRand::SystemRandom::new();
|
||||
let key_pair = EcdsaKeyPair::from_pkcs8(curve, &args.key.data, &rng)?;
|
||||
// We only support P256-SHA256 & P384-SHA384. These are recommended signature pairs.
|
||||
// https://briansmith.org/rustdoc/ring/signature/index.html#statics
|
||||
if let Some(hash) = args.hash {
|
||||
match hash {
|
||||
CryptoHash::Sha256 | CryptoHash::Sha384 => (),
|
||||
_ => return Err(type_error("Unsupported algorithm")),
|
||||
}
|
||||
};
|
||||
|
||||
let signature = key_pair.sign(&rng, data)?;
|
||||
let signature = key_pair.sign(&rng, data)?;
|
||||
|
||||
// Signature data as buffer.
|
||||
signature.as_ref().to_vec()
|
||||
}
|
||||
Algorithm::Hmac => {
|
||||
let hash: HmacAlgorithm = args.hash.ok_or_else(not_supported)?.into();
|
||||
// Signature data as buffer.
|
||||
signature.as_ref().to_vec()
|
||||
}
|
||||
Algorithm::Hmac => {
|
||||
let hash: HmacAlgorithm = args.hash.ok_or_else(not_supported)?.into();
|
||||
|
||||
let key = HmacKey::new(hash, &args.key.data);
|
||||
let key = HmacKey::new(hash, &args.key.data);
|
||||
|
||||
let signature = ring::hmac::sign(&key, data);
|
||||
signature.as_ref().to_vec()
|
||||
}
|
||||
_ => return Err(type_error("Unsupported algorithm".to_string())),
|
||||
};
|
||||
let signature = ring::hmac::sign(&key, data);
|
||||
signature.as_ref().to_vec()
|
||||
}
|
||||
_ => return Err(type_error("Unsupported algorithm".to_string())),
|
||||
};
|
||||
|
||||
Ok(signature.into())
|
||||
Ok(signature.into())
|
||||
})
|
||||
.await?
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
|
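
The crypto hunk above does more than drop async from the body: the whole RSA/ECDSA/HMAC signing path is wrapped in deno_core::unsync::spawn_blocking(move || { ... }).await?, so the CPU-bound work runs on a blocking thread instead of the async one (hence the extra level of indentation on every line). A minimal sketch of that wrapping pattern using plain tokio; the helper names here are illustrative, not Deno's API:

use tokio::task::spawn_blocking;

// Hypothetical CPU-bound helper standing in for the signing code.
fn expensive_sign(data: Vec<u8>) -> Result<Vec<u8>, String> {
  Ok(data.iter().rev().copied().collect())
}

// The op keeps an async signature, but the heavy work happens off-thread.
async fn op_sign(data: Vec<u8>) -> Result<Vec<u8>, String> {
  spawn_blocking(move || expensive_sign(data))
    .await
    .map_err(|e| e.to_string())?
}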
@ -314,106 +316,108 @@ pub async fn op_crypto_verify_key(
|
|||
#[serde] args: VerifyArg,
|
||||
#[buffer] zero_copy: JsBuffer,
|
||||
) -> Result<bool, AnyError> {
|
||||
let data = &*zero_copy;
|
||||
let algorithm = args.algorithm;
|
||||
deno_core::unsync::spawn_blocking(move || {
|
||||
let data = &*zero_copy;
|
||||
let algorithm = args.algorithm;
|
||||
|
||||
let verification = match algorithm {
|
||||
Algorithm::RsassaPkcs1v15 => {
|
||||
use rsa::pkcs1v15::Signature;
|
||||
use rsa::pkcs1v15::VerifyingKey;
|
||||
let public_key = read_rsa_public_key(args.key)?;
|
||||
let signature: Signature = args.signature.as_ref().try_into()?;
|
||||
match args
|
||||
.hash
|
||||
.ok_or_else(|| type_error("Missing argument hash".to_string()))?
|
||||
{
|
||||
CryptoHash::Sha1 => {
|
||||
let verifying_key = VerifyingKey::<Sha1>::new(public_key);
|
||||
verifying_key.verify(data, &signature).is_ok()
|
||||
}
|
||||
CryptoHash::Sha256 => {
|
||||
let verifying_key = VerifyingKey::<Sha256>::new(public_key);
|
||||
verifying_key.verify(data, &signature).is_ok()
|
||||
}
|
||||
CryptoHash::Sha384 => {
|
||||
let verifying_key = VerifyingKey::<Sha384>::new(public_key);
|
||||
verifying_key.verify(data, &signature).is_ok()
|
||||
}
|
||||
CryptoHash::Sha512 => {
|
||||
let verifying_key = VerifyingKey::<Sha512>::new(public_key);
|
||||
verifying_key.verify(data, &signature).is_ok()
|
||||
let verification = match algorithm {
|
||||
Algorithm::RsassaPkcs1v15 => {
|
||||
use rsa::pkcs1v15::Signature;
|
||||
use rsa::pkcs1v15::VerifyingKey;
|
||||
let public_key = read_rsa_public_key(args.key)?;
|
||||
let signature: Signature = args.signature.as_ref().try_into()?;
|
||||
match args
|
||||
.hash
|
||||
.ok_or_else(|| type_error("Missing argument hash".to_string()))?
|
||||
{
|
||||
CryptoHash::Sha1 => {
|
||||
let verifying_key = VerifyingKey::<Sha1>::new(public_key);
|
||||
verifying_key.verify(data, &signature).is_ok()
|
||||
}
|
||||
CryptoHash::Sha256 => {
|
||||
let verifying_key = VerifyingKey::<Sha256>::new(public_key);
|
||||
verifying_key.verify(data, &signature).is_ok()
|
||||
}
|
||||
CryptoHash::Sha384 => {
|
||||
let verifying_key = VerifyingKey::<Sha384>::new(public_key);
|
||||
verifying_key.verify(data, &signature).is_ok()
|
||||
}
|
||||
CryptoHash::Sha512 => {
|
||||
let verifying_key = VerifyingKey::<Sha512>::new(public_key);
|
||||
verifying_key.verify(data, &signature).is_ok()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Algorithm::RsaPss => {
|
||||
let public_key = read_rsa_public_key(args.key)?;
|
||||
let signature = args.signature.as_ref();
|
||||
Algorithm::RsaPss => {
|
||||
let public_key = read_rsa_public_key(args.key)?;
|
||||
let signature = args.signature.as_ref();
|
||||
|
||||
let salt_len = args
|
||||
.salt_length
|
||||
.ok_or_else(|| type_error("Missing argument saltLength".to_string()))?
|
||||
as usize;
|
||||
let salt_len = args.salt_length.ok_or_else(|| {
|
||||
type_error("Missing argument saltLength".to_string())
|
||||
})? as usize;
|
||||
|
||||
match args
|
||||
.hash
|
||||
.ok_or_else(|| type_error("Missing argument hash".to_string()))?
|
||||
{
|
||||
CryptoHash::Sha1 => {
|
||||
let pss = Pss::new_with_salt::<Sha1>(salt_len);
|
||||
let hashed = Sha1::digest(data);
|
||||
pss.verify(&public_key, &hashed, signature).is_ok()
|
||||
}
|
||||
CryptoHash::Sha256 => {
|
||||
let pss = Pss::new_with_salt::<Sha256>(salt_len);
|
||||
let hashed = Sha256::digest(data);
|
||||
pss.verify(&public_key, &hashed, signature).is_ok()
|
||||
}
|
||||
CryptoHash::Sha384 => {
|
||||
let pss = Pss::new_with_salt::<Sha384>(salt_len);
|
||||
let hashed = Sha384::digest(data);
|
||||
pss.verify(&public_key, &hashed, signature).is_ok()
|
||||
}
|
||||
CryptoHash::Sha512 => {
|
||||
let pss = Pss::new_with_salt::<Sha512>(salt_len);
|
||||
let hashed = Sha512::digest(data);
|
||||
pss.verify(&public_key, &hashed, signature).is_ok()
|
||||
match args
|
||||
.hash
|
||||
.ok_or_else(|| type_error("Missing argument hash".to_string()))?
|
||||
{
|
||||
CryptoHash::Sha1 => {
|
||||
let pss = Pss::new_with_salt::<Sha1>(salt_len);
|
||||
let hashed = Sha1::digest(data);
|
||||
pss.verify(&public_key, &hashed, signature).is_ok()
|
||||
}
|
||||
CryptoHash::Sha256 => {
|
||||
let pss = Pss::new_with_salt::<Sha256>(salt_len);
|
||||
let hashed = Sha256::digest(data);
|
||||
pss.verify(&public_key, &hashed, signature).is_ok()
|
||||
}
|
||||
CryptoHash::Sha384 => {
|
||||
let pss = Pss::new_with_salt::<Sha384>(salt_len);
|
||||
let hashed = Sha384::digest(data);
|
||||
pss.verify(&public_key, &hashed, signature).is_ok()
|
||||
}
|
||||
CryptoHash::Sha512 => {
|
||||
let pss = Pss::new_with_salt::<Sha512>(salt_len);
|
||||
let hashed = Sha512::digest(data);
|
||||
pss.verify(&public_key, &hashed, signature).is_ok()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Algorithm::Hmac => {
|
||||
let hash: HmacAlgorithm = args.hash.ok_or_else(not_supported)?.into();
|
||||
let key = HmacKey::new(hash, &args.key.data);
|
||||
ring::hmac::verify(&key, data, &args.signature).is_ok()
|
||||
}
|
||||
Algorithm::Ecdsa => {
|
||||
let signing_alg: &EcdsaSigningAlgorithm =
|
||||
args.named_curve.ok_or_else(not_supported)?.into();
|
||||
let verify_alg: &EcdsaVerificationAlgorithm =
|
||||
args.named_curve.ok_or_else(not_supported)?.into();
|
||||
Algorithm::Hmac => {
|
||||
let hash: HmacAlgorithm = args.hash.ok_or_else(not_supported)?.into();
|
||||
let key = HmacKey::new(hash, &args.key.data);
|
||||
ring::hmac::verify(&key, data, &args.signature).is_ok()
|
||||
}
|
||||
Algorithm::Ecdsa => {
|
||||
let signing_alg: &EcdsaSigningAlgorithm =
|
||||
args.named_curve.ok_or_else(not_supported)?.into();
|
||||
let verify_alg: &EcdsaVerificationAlgorithm =
|
||||
args.named_curve.ok_or_else(not_supported)?.into();
|
||||
|
||||
let private_key;
|
||||
let private_key;
|
||||
|
||||
let public_key_bytes = match args.key.r#type {
|
||||
KeyType::Private => {
|
||||
let rng = RingRand::SystemRandom::new();
|
||||
private_key =
|
||||
EcdsaKeyPair::from_pkcs8(signing_alg, &args.key.data, &rng)?;
|
||||
let public_key_bytes = match args.key.r#type {
|
||||
KeyType::Private => {
|
||||
let rng = RingRand::SystemRandom::new();
|
||||
private_key =
|
||||
EcdsaKeyPair::from_pkcs8(signing_alg, &args.key.data, &rng)?;
|
||||
|
||||
private_key.public_key().as_ref()
|
||||
}
|
||||
KeyType::Public => &*args.key.data,
|
||||
_ => return Err(type_error("Invalid Key format".to_string())),
|
||||
};
|
||||
private_key.public_key().as_ref()
|
||||
}
|
||||
KeyType::Public => &*args.key.data,
|
||||
_ => return Err(type_error("Invalid Key format".to_string())),
|
||||
};
|
||||
|
||||
let public_key =
|
||||
ring::signature::UnparsedPublicKey::new(verify_alg, public_key_bytes);
|
||||
let public_key =
|
||||
ring::signature::UnparsedPublicKey::new(verify_alg, public_key_bytes);
|
||||
|
||||
public_key.verify(data, &args.signature).is_ok()
|
||||
}
|
||||
_ => return Err(type_error("Unsupported algorithm".to_string())),
|
||||
};
|
||||
public_key.verify(data, &args.signature).is_ok()
|
||||
}
|
||||
_ => return Err(type_error("Unsupported algorithm".to_string())),
|
||||
};
|
||||
|
||||
Ok(verification)
|
||||
Ok(verification)
|
||||
})
|
||||
.await?
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
|
@ -437,153 +441,160 @@ pub async fn op_crypto_derive_bits(
|
|||
#[serde] args: DeriveKeyArg,
|
||||
#[buffer] zero_copy: Option<JsBuffer>,
|
||||
) -> Result<ToJsBuffer, AnyError> {
|
||||
let algorithm = args.algorithm;
|
||||
match algorithm {
|
||||
Algorithm::Pbkdf2 => {
|
||||
let zero_copy = zero_copy.ok_or_else(not_supported)?;
|
||||
let salt = &*zero_copy;
|
||||
// The caller must validate these cases.
|
||||
assert!(args.length > 0);
|
||||
assert!(args.length % 8 == 0);
|
||||
deno_core::unsync::spawn_blocking(move || {
|
||||
let algorithm = args.algorithm;
|
||||
match algorithm {
|
||||
Algorithm::Pbkdf2 => {
|
||||
let zero_copy = zero_copy.ok_or_else(not_supported)?;
|
||||
let salt = &*zero_copy;
|
||||
// The caller must validate these cases.
|
||||
assert!(args.length > 0);
|
||||
assert!(args.length % 8 == 0);
|
||||
|
||||
let algorithm = match args.hash.ok_or_else(not_supported)? {
|
||||
CryptoHash::Sha1 => pbkdf2::PBKDF2_HMAC_SHA1,
|
||||
CryptoHash::Sha256 => pbkdf2::PBKDF2_HMAC_SHA256,
|
||||
CryptoHash::Sha384 => pbkdf2::PBKDF2_HMAC_SHA384,
|
||||
CryptoHash::Sha512 => pbkdf2::PBKDF2_HMAC_SHA512,
|
||||
};
|
||||
let algorithm = match args.hash.ok_or_else(not_supported)? {
|
||||
CryptoHash::Sha1 => pbkdf2::PBKDF2_HMAC_SHA1,
|
||||
CryptoHash::Sha256 => pbkdf2::PBKDF2_HMAC_SHA256,
|
||||
CryptoHash::Sha384 => pbkdf2::PBKDF2_HMAC_SHA384,
|
||||
CryptoHash::Sha512 => pbkdf2::PBKDF2_HMAC_SHA512,
|
||||
};
|
||||
|
||||
// This will never panic. We have already checked length earlier.
|
||||
let iterations =
|
||||
NonZeroU32::new(args.iterations.ok_or_else(not_supported)?).unwrap();
|
||||
let secret = args.key.data;
|
||||
let mut out = vec![0; args.length / 8];
|
||||
pbkdf2::derive(algorithm, iterations, salt, &secret, &mut out);
|
||||
Ok(out.into())
|
||||
}
|
||||
Algorithm::Ecdh => {
|
||||
let named_curve = args
|
||||
.named_curve
|
||||
.ok_or_else(|| type_error("Missing argument namedCurve".to_string()))?;
|
||||
// This will never panic. We have already checked length earlier.
|
||||
let iterations =
|
||||
NonZeroU32::new(args.iterations.ok_or_else(not_supported)?).unwrap();
|
||||
let secret = args.key.data;
|
||||
let mut out = vec![0; args.length / 8];
|
||||
pbkdf2::derive(algorithm, iterations, salt, &secret, &mut out);
|
||||
Ok(out.into())
|
||||
}
|
||||
Algorithm::Ecdh => {
|
||||
let named_curve = args.named_curve.ok_or_else(|| {
|
||||
type_error("Missing argument namedCurve".to_string())
|
||||
})?;
|
||||
|
||||
let public_key = args
|
||||
.public_key
|
||||
.ok_or_else(|| type_error("Missing argument publicKey"))?;
|
||||
let public_key = args
|
||||
.public_key
|
||||
.ok_or_else(|| type_error("Missing argument publicKey"))?;
|
||||
|
||||
match named_curve {
|
||||
CryptoNamedCurve::P256 => {
|
||||
let secret_key = p256::SecretKey::from_pkcs8_der(&args.key.data)
|
||||
.map_err(|_| type_error("Unexpected error decoding private key"))?;
|
||||
match named_curve {
|
||||
CryptoNamedCurve::P256 => {
|
||||
let secret_key = p256::SecretKey::from_pkcs8_der(&args.key.data)
|
||||
.map_err(|_| {
|
||||
type_error("Unexpected error decoding private key")
|
||||
})?;
|
||||
|
||||
let public_key = match public_key.r#type {
|
||||
KeyType::Private => {
|
||||
p256::SecretKey::from_pkcs8_der(&public_key.data)
|
||||
.map_err(|_| {
|
||||
type_error("Unexpected error decoding private key")
|
||||
})?
|
||||
.public_key()
|
||||
}
|
||||
KeyType::Public => {
|
||||
let point = p256::EncodedPoint::from_bytes(public_key.data)
|
||||
.map_err(|_| {
|
||||
type_error("Unexpected error decoding private key")
|
||||
})?;
|
||||
|
||||
let pk = p256::PublicKey::from_encoded_point(&point);
|
||||
// pk is a constant time Option.
|
||||
if pk.is_some().into() {
|
||||
pk.unwrap()
|
||||
} else {
|
||||
return Err(type_error(
|
||||
"Unexpected error decoding private key",
|
||||
));
|
||||
let public_key = match public_key.r#type {
|
||||
KeyType::Private => {
|
||||
p256::SecretKey::from_pkcs8_der(&public_key.data)
|
||||
.map_err(|_| {
|
||||
type_error("Unexpected error decoding private key")
|
||||
})?
|
||||
.public_key()
|
||||
}
|
||||
}
|
||||
_ => unreachable!(),
|
||||
};
|
||||
KeyType::Public => {
|
||||
let point = p256::EncodedPoint::from_bytes(public_key.data)
|
||||
.map_err(|_| {
|
||||
type_error("Unexpected error decoding private key")
|
||||
})?;
|
||||
|
||||
let shared_secret = p256::elliptic_curve::ecdh::diffie_hellman(
|
||||
secret_key.to_nonzero_scalar(),
|
||||
public_key.as_affine(),
|
||||
);
|
||||
|
||||
// raw serialized x-coordinate of the computed point
|
||||
Ok(shared_secret.raw_secret_bytes().to_vec().into())
|
||||
}
|
||||
CryptoNamedCurve::P384 => {
|
||||
let secret_key = p384::SecretKey::from_pkcs8_der(&args.key.data)
|
||||
.map_err(|_| type_error("Unexpected error decoding private key"))?;
|
||||
|
||||
let public_key = match public_key.r#type {
|
||||
KeyType::Private => {
|
||||
p384::SecretKey::from_pkcs8_der(&public_key.data)
|
||||
.map_err(|_| {
|
||||
type_error("Unexpected error decoding private key")
|
||||
})?
|
||||
.public_key()
|
||||
}
|
||||
KeyType::Public => {
|
||||
let point = p384::EncodedPoint::from_bytes(public_key.data)
|
||||
.map_err(|_| {
|
||||
type_error("Unexpected error decoding private key")
|
||||
})?;
|
||||
|
||||
let pk = p384::PublicKey::from_encoded_point(&point);
|
||||
// pk is a constant time Option.
|
||||
if pk.is_some().into() {
|
||||
pk.unwrap()
|
||||
} else {
|
||||
return Err(type_error(
|
||||
"Unexpected error decoding private key",
|
||||
));
|
||||
let pk = p256::PublicKey::from_encoded_point(&point);
|
||||
// pk is a constant time Option.
|
||||
if pk.is_some().into() {
|
||||
pk.unwrap()
|
||||
} else {
|
||||
return Err(type_error(
|
||||
"Unexpected error decoding private key",
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => unreachable!(),
|
||||
};
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
||||
let shared_secret = p384::elliptic_curve::ecdh::diffie_hellman(
|
||||
secret_key.to_nonzero_scalar(),
|
||||
public_key.as_affine(),
|
||||
);
|
||||
let shared_secret = p256::elliptic_curve::ecdh::diffie_hellman(
|
||||
secret_key.to_nonzero_scalar(),
|
||||
public_key.as_affine(),
|
||||
);
|
||||
|
||||
// raw serialized x-coordinate of the computed point
|
||||
Ok(shared_secret.raw_secret_bytes().to_vec().into())
|
||||
// raw serialized x-coordinate of the computed point
|
||||
Ok(shared_secret.raw_secret_bytes().to_vec().into())
|
||||
}
|
||||
CryptoNamedCurve::P384 => {
|
||||
let secret_key = p384::SecretKey::from_pkcs8_der(&args.key.data)
|
||||
.map_err(|_| {
|
||||
type_error("Unexpected error decoding private key")
|
||||
})?;
|
||||
|
||||
let public_key = match public_key.r#type {
|
||||
KeyType::Private => {
|
||||
p384::SecretKey::from_pkcs8_der(&public_key.data)
|
||||
.map_err(|_| {
|
||||
type_error("Unexpected error decoding private key")
|
||||
})?
|
||||
.public_key()
|
||||
}
|
||||
KeyType::Public => {
|
||||
let point = p384::EncodedPoint::from_bytes(public_key.data)
|
||||
.map_err(|_| {
|
||||
type_error("Unexpected error decoding private key")
|
||||
})?;
|
||||
|
||||
let pk = p384::PublicKey::from_encoded_point(&point);
|
||||
// pk is a constant time Option.
|
||||
if pk.is_some().into() {
|
||||
pk.unwrap()
|
||||
} else {
|
||||
return Err(type_error(
|
||||
"Unexpected error decoding private key",
|
||||
));
|
||||
}
|
||||
}
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
||||
let shared_secret = p384::elliptic_curve::ecdh::diffie_hellman(
|
||||
secret_key.to_nonzero_scalar(),
|
||||
public_key.as_affine(),
|
||||
);
|
||||
|
||||
// raw serialized x-coordinate of the computed point
|
||||
Ok(shared_secret.raw_secret_bytes().to_vec().into())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Algorithm::Hkdf => {
|
||||
let zero_copy = zero_copy.ok_or_else(not_supported)?;
|
||||
let salt = &*zero_copy;
|
||||
let algorithm = match args.hash.ok_or_else(not_supported)? {
|
||||
CryptoHash::Sha1 => hkdf::HKDF_SHA1_FOR_LEGACY_USE_ONLY,
|
||||
CryptoHash::Sha256 => hkdf::HKDF_SHA256,
|
||||
CryptoHash::Sha384 => hkdf::HKDF_SHA384,
|
||||
CryptoHash::Sha512 => hkdf::HKDF_SHA512,
|
||||
};
|
||||
Algorithm::Hkdf => {
|
||||
let zero_copy = zero_copy.ok_or_else(not_supported)?;
|
||||
let salt = &*zero_copy;
|
||||
let algorithm = match args.hash.ok_or_else(not_supported)? {
|
||||
CryptoHash::Sha1 => hkdf::HKDF_SHA1_FOR_LEGACY_USE_ONLY,
|
||||
CryptoHash::Sha256 => hkdf::HKDF_SHA256,
|
||||
CryptoHash::Sha384 => hkdf::HKDF_SHA384,
|
||||
CryptoHash::Sha512 => hkdf::HKDF_SHA512,
|
||||
};
|
||||
|
||||
let info = args
|
||||
.info
|
||||
.ok_or_else(|| type_error("Missing argument info".to_string()))?;
|
||||
// IKM
|
||||
let secret = args.key.data;
|
||||
// L
|
||||
let length = args.length / 8;
|
||||
let info = args
|
||||
.info
|
||||
.ok_or_else(|| type_error("Missing argument info".to_string()))?;
|
||||
// IKM
|
||||
let secret = args.key.data;
|
||||
// L
|
||||
let length = args.length / 8;
|
||||
|
||||
let salt = hkdf::Salt::new(algorithm, salt);
|
||||
let prk = salt.extract(&secret);
|
||||
let info = &[&*info];
|
||||
let okm = prk.expand(info, HkdfOutput(length)).map_err(|_e| {
|
||||
custom_error(
|
||||
"DOMExceptionOperationError",
|
||||
"The length provided for HKDF is too large",
|
||||
)
|
||||
})?;
|
||||
let mut r = vec![0u8; length];
|
||||
okm.fill(&mut r)?;
|
||||
Ok(r.into())
|
||||
let salt = hkdf::Salt::new(algorithm, salt);
|
||||
let prk = salt.extract(&secret);
|
||||
let info = &[&*info];
|
||||
let okm = prk.expand(info, HkdfOutput(length)).map_err(|_e| {
|
||||
custom_error(
|
||||
"DOMExceptionOperationError",
|
||||
"The length provided for HKDF is too large",
|
||||
)
|
||||
})?;
|
||||
let mut r = vec![0u8; length];
|
||||
okm.fill(&mut r)?;
|
||||
Ok(r.into())
|
||||
}
|
||||
_ => Err(type_error("Unsupported algorithm".to_string())),
|
||||
}
|
||||
_ => Err(type_error("Unsupported algorithm".to_string())),
|
||||
}
|
||||
})
|
||||
.await?
|
||||
}
|
||||
|
||||
fn read_rsa_public_key(key_data: KeyData) -> Result<RsaPublicKey, AnyError> {
|
||||
|
|
|
@ -166,6 +166,7 @@ function listenTls({
|
|||
return new TlsListener(rid, localAddr);
|
||||
}
|
||||
|
||||
// deno-lint-ignore require-await
|
||||
async function startTls(
|
||||
conn,
|
||||
{
|
||||
|
@ -174,7 +175,7 @@ async function startTls(
|
|||
alpnProtocols = undefined,
|
||||
} = {},
|
||||
) {
|
||||
const { 0: rid, 1: localAddr, 2: remoteAddr } = await op_tls_start({
|
||||
const { 0: rid, 1: localAddr, 2: remoteAddr } = op_tls_start({
|
||||
rid: conn[internalRidSymbol],
|
||||
hostname,
|
||||
caCerts,
|
||||
|
|
|
@@ -152,6 +152,7 @@ impl UnixStreamResource {
   fn write(self: Rc<Self>, _data: &[u8]) -> AsyncResult<usize> {
     unreachable!()
   }
+  #[allow(clippy::unused_async)]
   pub async fn shutdown(self: Rc<Self>) -> Result<(), AnyError> {
     unreachable!()
   }
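
Where a function has to keep its async fn signature even though the body never awaits (as with UnixStreamResource::shutdown above, presumably because its callers and the surrounding resource interface expect a future), the commit opts for an explicit allow instead of changing the API. A small self-contained sketch of that choice, with an illustrative function rather than Deno's:

// The async signature is kept for API compatibility, so the lint is
// silenced for this one function instead of being "fixed".
#[allow(clippy::unused_async)]
pub async fn shutdown_noop() -> Result<(), std::io::Error> {
  // Nothing to flush for this particular resource.
  Ok(())
}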
@@ -159,9 +159,9 @@ pub struct StartTlsArgs {
   alpn_protocols: Option<Vec<String>>,
 }
 
-#[op2(async)]
+#[op2]
 #[serde]
-pub async fn op_tls_start<NP>(
+pub fn op_tls_start<NP>(
   state: Rc<RefCell<OpState>>,
   #[serde] args: StartTlsArgs,
 ) -> Result<(ResourceId, IpAddr, IpAddr), AnyError>
@@ -408,6 +408,7 @@ mod impl_ {
   use deno_core::RcRef;
   use std::rc::Rc;
 
+  #[allow(clippy::unused_async)]
   #[cfg(unix)]
   pub async fn pair() -> (Rc<IpcJsonStreamResource>, tokio::net::UnixStream) {
     let (a, b) = tokio::net::UnixStream::pair().unwrap();
@@ -43,5 +43,6 @@
   buf[1] = subsec_nanos;
 }
 
+#[allow(clippy::unused_async)]
 #[op2(async(lazy), fast)]
 pub async fn op_defer() {}
@ -324,6 +324,7 @@ class GPU {
|
|||
/**
|
||||
* @param {GPURequestAdapterOptions} options
|
||||
*/
|
||||
// deno-lint-ignore require-await
|
||||
async requestAdapter(options = {}) {
|
||||
webidl.assertBranded(this, GPUPrototype);
|
||||
options = webidl.converters.GPURequestAdapterOptions(
|
||||
|
@ -332,7 +333,7 @@ class GPU {
|
|||
"Argument 1",
|
||||
);
|
||||
|
||||
const { err, ...data } = await op_webgpu_request_adapter(
|
||||
const { err, ...data } = op_webgpu_request_adapter(
|
||||
options.powerPreference,
|
||||
options.forceFallbackAdapter,
|
||||
);
|
||||
|
@ -411,6 +412,7 @@ class GPUAdapter {
|
|||
* @param {GPUDeviceDescriptor} descriptor
|
||||
* @returns {Promise<GPUDevice>}
|
||||
*/
|
||||
// deno-lint-ignore require-await
|
||||
async requestDevice(descriptor = {}) {
|
||||
webidl.assertBranded(this, GPUAdapterPrototype);
|
||||
const prefix = "Failed to execute 'requestDevice' on 'GPUAdapter'";
|
||||
|
@ -431,7 +433,7 @@ class GPUAdapter {
|
|||
}
|
||||
}
|
||||
|
||||
const { rid, features, limits } = await op_webgpu_request_device(
|
||||
const { rid, features, limits } = op_webgpu_request_device(
|
||||
this[_adapter].rid,
|
||||
descriptor.label,
|
||||
requiredFeatures,
|
||||
|
@ -455,7 +457,7 @@ class GPUAdapter {
|
|||
* @param {string[]} unmaskHints
|
||||
* @returns {Promise<GPUAdapterInfo>}
|
||||
*/
|
||||
async requestAdapterInfo(unmaskHints = []) {
|
||||
requestAdapterInfo(unmaskHints = []) {
|
||||
webidl.assertBranded(this, GPUAdapterPrototype);
|
||||
const prefix = "Failed to execute 'requestAdapterInfo' on 'GPUAdapter'";
|
||||
unmaskHints = webidl.converters["sequence<DOMString>"](
|
||||
|
@ -469,9 +471,7 @@ class GPUAdapter {
|
|||
architecture,
|
||||
device,
|
||||
description,
|
||||
} = await op_webgpu_request_adapter_info(
|
||||
this[_adapter].rid,
|
||||
);
|
||||
} = op_webgpu_request_adapter_info(this[_adapter].rid);
|
||||
|
||||
const adapterInfo = webidl.createBranded(GPUAdapterInfo);
|
||||
adapterInfo[_vendor] = ArrayPrototypeIncludes(unmaskHints, "vendor")
|
||||
|
@ -484,7 +484,7 @@ class GPUAdapter {
|
|||
: "";
|
||||
adapterInfo[_description] =
|
||||
ArrayPrototypeIncludes(unmaskHints, "description") ? description : "";
|
||||
return adapterInfo;
|
||||
return PromiseResolve(adapterInfo);
|
||||
}
|
||||
|
||||
[SymbolFor("Deno.privateCustomInspect")](inspect, inspectOptions) {
|
||||
|
|
|
@ -381,9 +381,9 @@ pub struct GpuAdapterDevice {
|
|||
is_software: bool,
|
||||
}
|
||||
|
||||
#[op2(async)]
|
||||
#[op2]
|
||||
#[serde]
|
||||
pub async fn op_webgpu_request_adapter(
|
||||
pub fn op_webgpu_request_adapter(
|
||||
state: Rc<RefCell<OpState>>,
|
||||
#[serde] power_preference: Option<wgpu_types::PowerPreference>,
|
||||
force_fallback_adapter: bool,
|
||||
|
@ -645,9 +645,9 @@ impl From<GpuRequiredFeatures> for wgpu_types::Features {
|
|||
}
|
||||
}
|
||||
|
||||
#[op2(async)]
|
||||
#[op2]
|
||||
#[serde]
|
||||
pub async fn op_webgpu_request_device(
|
||||
pub fn op_webgpu_request_device(
|
||||
state: Rc<RefCell<OpState>>,
|
||||
#[smi] adapter_rid: ResourceId,
|
||||
#[string] label: String,
|
||||
|
@ -702,9 +702,9 @@ pub struct GPUAdapterInfo {
|
|||
description: String,
|
||||
}
|
||||
|
||||
#[op2(async)]
|
||||
#[op2]
|
||||
#[serde]
|
||||
pub async fn op_webgpu_request_adapter_info(
|
||||
pub fn op_webgpu_request_adapter_info(
|
||||
state: Rc<RefCell<OpState>>,
|
||||
#[smi] adapter_rid: ResourceId,
|
||||
) -> Result<GPUAdapterInfo, AnyError> {
|
||||
|
|
|
@@ -662,7 +662,7 @@ impl MainWorker {
 
   /// Create new inspector session. This function panics if Worker
   /// was not configured to create inspector.
-  pub async fn create_inspector_session(&mut self) -> LocalInspectorSession {
+  pub fn create_inspector_session(&mut self) -> LocalInspectorSession {
     self.js_runtime.maybe_init_inspector();
     self.js_runtime.inspector().borrow().create_local_session()
   }
@@ -47,15 +47,14 @@ pub async fn get_tls_listener_stream(
   let cert_file = "tls/localhost.crt";
   let key_file = "tls/localhost.key";
   let ca_cert_file = "tls/RootCA.pem";
-  let tls_config = get_tls_config(cert_file, key_file, ca_cert_file, http)
-    .await
-    .unwrap();
+  let tls_config =
+    get_tls_config(cert_file, key_file, ca_cert_file, http).unwrap();
 
   let tcp = get_tcp_listener_stream(name, port).await;
   get_tls_listener_stream_from_tcp(tls_config, tcp)
 }
 
-pub async fn get_tls_config(
+pub fn get_tls_config(
   cert: &str,
   key: &str,
   ca: &str,
@@ -156,6 +156,8 @@ async function clippy() {
       "--",
       "-D",
       "warnings",
+      "--deny",
+      "clippy::unused_async",
     ],
     stdout: "inherit",
     stderr: "inherit",
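
The lint is enforced in CI through the cargo clippy invocation driven by this script: the argument list above now ends with "-- -D warnings --deny clippy::unused_async". If you would rather turn it on in code than on the command line, the crate-level attribute below is the usual equivalent (shown only as an illustration; this commit does not add it):

// At the top of a crate root (lib.rs / main.rs):
#![deny(clippy::unused_async)]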