Mirror of https://github.com/denoland/deno.git, synced 2025-01-07 22:58:24 -05:00
perf(ext/node): improve createHash performance (#18033)
```
> deno run -A ../test.mjs
cpu: unknown
runtime: deno 1.31.1 (aarch64-apple-darwin)

benchmark      time (avg)             (min … max)       p75       p99      p995
------------------------------------------------- -----------------------------
               2.22 µs/iter     (2.2 µs … 2.28 µs)   2.22 µs   2.28 µs   2.28 µs

> target/release/deno run -A test.mjs
cpu: unknown
runtime: deno 1.31.1 (aarch64-apple-darwin)

benchmark      time (avg)             (min … max)       p75       p99      p995
------------------------------------------------- -----------------------------
              864.9 ns/iter   (825.05 ns … 1.22 µs) 864.93 ns   1.22 µs   1.22 µs
```
parent
ce7a23b86d
commit
38b4411cae
5 changed files with 70 additions and 27 deletions
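The benchmark script (`test.mjs`) is not part of the commit, so the following is only a plausible sketch of what it may have measured, assuming the mitata benchmark library (whose output format matches the table above) and the `node:crypto` polyfill this commit touches; the benchmark name, algorithm, and input string are hypothetical.

```ts
// Hypothetical reconstruction of test.mjs -- not the actual script from the PR.
// Assumes npm:mitata for timing and node:crypto for the polyfilled createHash.
import { bench, run } from "npm:mitata";
import { createHash } from "node:crypto";

const data = "hello world"; // hypothetical input

bench("createHash('sha256').update(str).digest('hex')", () => {
  // Exercises the ops touched by this commit: op_node_create_hash,
  // op_node_hash_update_str and op_node_hash_digest_hex.
  createHash("sha256").update(data).digest("hex");
});

await run();
```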
Cargo.lock (generated, 7 lines changed)

```diff
@@ -1228,6 +1228,7 @@ version = "0.28.0"
 dependencies = [
  "deno_core",
  "digest 0.10.6",
+ "hex",
  "idna 0.3.0",
  "indexmap",
  "md-5",
@@ -2241,6 +2242,12 @@ dependencies = [
  "libc",
 ]
 
+[[package]]
+name = "hex"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
+
 [[package]]
 name = "hexf-parse"
 version = "0.2.1"
```
The extension's Cargo.toml gains a `hex` dependency:

```diff
@@ -16,6 +16,7 @@ path = "lib.rs"
 [dependencies]
 deno_core.workspace = true
 digest = { version = "0.10.5", features = ["core-api", "std"] }
+hex = "0.4.3"
 idna = "0.3.0"
 indexmap.workspace = true
 md-5 = "0.10.5"
```
The create and update ops are converted to fast ops that report failure through sentinel values instead of `Result`, a dedicated string-update op is added, and a hex-digest op is introduced:

```diff
@@ -17,23 +17,38 @@ use rsa::RsaPublicKey;
 
 mod digest;
 
-#[op]
-pub fn op_node_create_hash(
-  state: &mut OpState,
-  algorithm: String,
-) -> Result<ResourceId, AnyError> {
-  Ok(state.resource_table.add(digest::Context::new(&algorithm)?))
+#[op(fast)]
+pub fn op_node_create_hash(state: &mut OpState, algorithm: &str) -> u32 {
+  state
+    .resource_table
+    .add(match digest::Context::new(algorithm) {
+      Ok(context) => context,
+      Err(_) => return 0,
+    })
 }
 
-#[op]
-pub fn op_node_hash_update(
-  state: &mut OpState,
-  rid: ResourceId,
-  data: &[u8],
-) -> Result<(), AnyError> {
-  let context = state.resource_table.get::<digest::Context>(rid)?;
+#[op(fast)]
+pub fn op_node_hash_update(state: &mut OpState, rid: u32, data: &[u8]) -> bool {
+  let context = match state.resource_table.get::<digest::Context>(rid) {
+    Ok(context) => context,
+    _ => return false,
+  };
   context.update(data);
-  Ok(())
+  true
+}
+
+#[op(fast)]
+pub fn op_node_hash_update_str(
+  state: &mut OpState,
+  rid: u32,
+  data: &str,
+) -> bool {
+  let context = match state.resource_table.get::<digest::Context>(rid) {
+    Ok(context) => context,
+    _ => return false,
+  };
+  context.update(data.as_bytes());
+  true
 }
 
 #[op]
@@ -47,6 +62,18 @@ pub fn op_node_hash_digest(
   Ok(context.digest()?.into())
 }
 
+#[op]
+pub fn op_node_hash_digest_hex(
+  state: &mut OpState,
+  rid: ResourceId,
+) -> Result<String, AnyError> {
+  let context = state.resource_table.take::<digest::Context>(rid)?;
+  let context = Rc::try_unwrap(context)
+    .map_err(|_| type_error("Hash context is already in use"))?;
+  let digest = context.digest()?;
+  Ok(hex::encode(digest))
+}
+
 #[op]
 pub fn op_node_hash_clone(
   state: &mut OpState,
```
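Because V8 fast calls cannot yet return a `Result` (see the V8 issue referenced in the TODO below), the fast ops signal failure with sentinel values, `0` for a failed create and `false` for a failed update, and the JS side turns those back into exceptions. A rough sketch of that calling pattern, mirroring the `hash.ts` changes further down (the `globalThis.__bootstrap.core` binding and op names are taken from the diff; everything else is illustrative):

```ts
// Sketch of how the polyfill is expected to call the new fast ops; this only
// runs inside Deno's internal runtime context where __bootstrap is defined.
const { ops } = (globalThis as any).__bootstrap.core;

// op_node_create_hash now returns 0 instead of throwing for an unknown algorithm.
const rid: number = ops.op_node_create_hash("sha256");
if (rid === 0) {
  throw new TypeError("Unknown hash algorithm: sha256");
}

// The update ops return false instead of throwing when the resource is gone.
function unwrapErr(ok: boolean): void {
  if (!ok) throw new Error("Context is not initialized");
}
unwrapErr(ops.op_node_hash_update_str(rid, "hello world"));

// The hex digest op is a regular (non-fast) op, so it can still return a
// Result on the Rust side and throw directly here.
const hexDigest: string = ops.op_node_hash_digest_hex(rid);
```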
The new ops are registered in `init_polyfill()`:

```diff
@@ -328,7 +328,9 @@ pub fn init_polyfill() -> Extension {
     .ops(vec![
       crypto::op_node_create_hash::decl(),
       crypto::op_node_hash_update::decl(),
+      crypto::op_node_hash_update_str::decl(),
       crypto::op_node_hash_digest::decl(),
+      crypto::op_node_hash_digest_hex::decl(),
       crypto::op_node_hash_clone::decl(),
       crypto::op_node_private_encrypt::decl(),
       crypto::op_node_private_decrypt::decl(),
```
Finally, the `Hash` polyfill calls the string-update and hex-digest paths directly instead of encoding in JS:

```diff
@@ -1,13 +1,9 @@
 // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
 // Copyright Joyent, Inc. and Node.js contributors. All rights reserved. MIT license.
 
-import {
-  TextDecoder,
-  TextEncoder,
-} from "internal:deno_web/08_text_encoding.js";
+import { TextEncoder } from "internal:deno_web/08_text_encoding.js";
 import { Buffer } from "internal:deno_node/buffer.ts";
 import { Transform } from "internal:deno_node/stream.ts";
-import { encode as encodeToHex } from "internal:deno_node/internal/crypto/_hex.ts";
 import {
   forgivingBase64Encode as encodeToBase64,
   forgivingBase64UrlEncode as encodeToBase64Url,
@@ -26,6 +22,14 @@ import { notImplemented } from "internal:deno_node/_utils.ts";
 
 const { ops } = globalThis.__bootstrap.core;
 
+// TODO(@littledivy): Use Result<T, E> instead of boolean when
+// https://bugs.chromium.org/p/v8/issues/detail?id=13600 is fixed.
+function unwrapErr(ok: boolean) {
+  if (!ok) {
+    throw new Error("Context is not initialized");
+  }
+}
+
 const coerceToBytes = (data: string | BufferSource): Uint8Array => {
   if (data instanceof Uint8Array) {
     return data;
@@ -71,6 +75,9 @@ export class Hash extends Transform {
       this.#context = ops.op_node_create_hash(
         algorithm,
       );
+      if (this.#context === 0) {
+        throw new TypeError(`Unknown hash algorithm: ${algorithm}`);
+      }
     } else {
       this.#context = algorithm;
     }
@@ -86,16 +93,12 @@ export class Hash extends Transform {
    * Updates the hash content with the given data.
    */
   update(data: string | ArrayBuffer, _encoding?: string): this {
-    let bytes;
     if (typeof data === "string") {
-      data = new TextEncoder().encode(data);
-      bytes = coerceToBytes(data);
+      unwrapErr(ops.op_node_hash_update_str(this.#context, data));
     } else {
-      bytes = coerceToBytes(data);
+      unwrapErr(ops.op_node_hash_update(this.#context, coerceToBytes(data)));
     }
 
-    ops.op_node_hash_update(this.#context, bytes);
-
     return this;
   }
 
@@ -107,14 +110,17 @@ export class Hash extends Transform {
    * Supported encodings are currently 'hex', 'binary', 'base64', 'base64url'.
    */
   digest(encoding?: string): Buffer | string {
+    if (encoding === "hex") {
+      return ops.op_node_hash_digest_hex(this.#context);
+    }
+
     const digest = ops.op_node_hash_digest(this.#context);
     if (encoding === undefined) {
       return Buffer.from(digest);
     }
 
+    // TODO(@littedivy): Fast paths for below encodings.
     switch (encoding) {
-      case "hex":
-        return new TextDecoder().decode(encodeToHex(new Uint8Array(digest)));
       case "binary":
         return String.fromCharCode(...digest);
       case "base64":
```
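For context, a minimal user-facing sketch of the code path this commit speeds up, assuming a Deno build with `node:` specifier support (as in 1.31); the algorithm and input string are arbitrary:

```ts
// Minimal usage sketch: a string update plus a hex digest now routes through
// the new op_node_hash_update_str and op_node_hash_digest_hex ops instead of
// encoding to bytes and hex-encoding in JS.
import { createHash } from "node:crypto";

const digest = createHash("sha256")
  .update("hello world") // string input -> op_node_hash_update_str
  .digest("hex"); // hex encoding -> op_node_hash_digest_hex

console.log(digest); // "b94d27b9934d3e08a52e52d7da7dabfa…"
```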