// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
// Copyright Joyent, Inc. and Node.js contributors. All rights reserved. MIT license.

import { TextEncoder } from "internal:deno_web/08_text_encoding.js";
import { Buffer } from "internal:deno_node/buffer.ts";
import { Transform } from "internal:deno_node/stream.ts";
import {
  forgivingBase64Encode as encodeToBase64,
  forgivingBase64UrlEncode as encodeToBase64Url,
} from "internal:deno_web/00_infra.js";
import type { TransformOptions } from "internal:deno_node/_stream.d.ts";
import { validateString } from "internal:deno_node/internal/validators.mjs";
import type {
  BinaryToTextEncoding,
  Encoding,
} from "internal:deno_node/internal/crypto/types.ts";
import {
  KeyObject,
  prepareSecretKey,
} from "internal:deno_node/internal/crypto/keys.ts";
import { notImplemented } from "internal:deno_node/_utils.ts";

const { ops } = globalThis.__bootstrap.core;

// TODO(@littledivy): Use Result<T, E> instead of boolean when
// https://bugs.chromium.org/p/v8/issues/detail?id=13600 is fixed.
function unwrapErr(ok: boolean) {
  if (!ok) {
    throw new Error("Context is not initialized");
  }
}

const coerceToBytes = (data: string | BufferSource): Uint8Array => {
  if (data instanceof Uint8Array) {
    return data;
  } else if (typeof data === "string") {
    // This assumes UTF-8, which may not be correct.
    return new TextEncoder().encode(data);
  } else if (ArrayBuffer.isView(data)) {
    return new Uint8Array(data.buffer, data.byteOffset, data.byteLength);
  } else if (data instanceof ArrayBuffer) {
    return new Uint8Array(data);
  } else {
    throw new TypeError("expected data to be string | BufferSource");
  }
};

/**
 * The Hash class is a utility for creating hash digests of data. It can be
 * used in one of two ways:
 *
 * - As a stream that is both readable and writable, where data is written to
 *   produce a computed hash digest on the readable side, or
 * - Using the hash.update() and hash.digest() methods to produce the computed
 *   hash.
 *
 * The crypto.createHash() method is used to create Hash instances. Hash
 * objects are not to be created directly using the new keyword.
 *
 * See the usage sketch after this class.
 */
export class Hash extends Transform {
  #context: number;

  constructor(
    algorithm: string | number,
    _opts?: TransformOptions,
  ) {
    super({
      transform(chunk: string, _encoding: string, callback: () => void) {
        ops.op_node_hash_update(context, coerceToBytes(chunk));
        callback();
      },
      flush(callback: () => void) {
        // Push the final digest as a Buffer when the writable side ends,
        // mirroring digest() below.
        this.push(Buffer.from(ops.op_node_hash_digest(context)));
        callback();
      },
    });

    if (typeof algorithm === "string") {
      this.#context = ops.op_node_create_hash(algorithm);
      if (this.#context === 0) {
        throw new TypeError(`Unknown hash algorithm: ${algorithm}`);
      }
    } else {
      this.#context = algorithm;
    }

    // Captured by the transform/flush callbacks above; they only run after
    // the constructor has finished, so the binding is initialized by then.
    const context = this.#context;
  }

  copy(): Hash {
    return new Hash(ops.op_node_clone_hash(this.#context));
  }

  /**
   * Updates the hash content with the given data.
   */
  update(data: string | ArrayBuffer, _encoding?: string): this {
    if (typeof data === "string") {
      unwrapErr(ops.op_node_hash_update_str(this.#context, data));
    } else {
      unwrapErr(ops.op_node_hash_update(this.#context, coerceToBytes(data)));
    }

    return this;
  }

  /**
   * Calculates the digest of all of the data.
   *
   * If encoding is provided a string will be returned; otherwise a Buffer is
   * returned.
   *
   * Supported encodings are currently 'hex', 'binary', 'base64', 'base64url'.
   */
  digest(encoding?: string): Buffer | string {
    if (encoding === "hex") {
      return ops.op_node_hash_digest_hex(this.#context);
    }

    const digest = ops.op_node_hash_digest(this.#context);
    if (encoding === undefined) {
      return Buffer.from(digest);
    }

    // TODO(@littledivy): Fast paths for below encodings.
    switch (encoding) {
      case "binary":
        return String.fromCharCode(...digest);
      case "base64":
        return encodeToBase64(digest);
      case "base64url":
        return encodeToBase64Url(digest);
      case "buffer":
        return Buffer.from(digest);
      default:
        return Buffer.from(digest).toString(encoding);
    }
  }
}
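
// Usage sketch for the two modes described in the Hash JSDoc above; this is
// illustration only (comments, not executed) and a sketch rather than a
// definitive reference. It assumes Hash is reached through createHash()
// (defined below) and, for the stream mode, a Node-style readable stream
// named `input` (hypothetical).
//
//   // One-shot mode: update() then digest().
//   const hash = createHash("sha256");
//   hash.update("hello ");
//   hash.update("world");
//   const hex = hash.digest("hex"); // string (hex-encoded digest)
//   const buf = createHash("sha256").update("hello world").digest(); // Buffer
//
//   // Stream mode: pipe data through the Transform; the digest is pushed on
//   // the readable side once the input ends.
//   input.pipe(createHash("sha256")).on("data", (digest) => {
//     console.log(digest);
//   });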

export function Hmac(
  hmac: string,
  key: string | ArrayBuffer | KeyObject,
  options?: TransformOptions,
): Hmac {
  return new HmacImpl(hmac, key, options);
}

type Hmac = HmacImpl;

class HmacImpl extends Transform {
  #ipad: Uint8Array;
  #opad: Uint8Array;
  #ZEROES = Buffer.alloc(128);
  #algorithm: string;
  #hash: Hash;

  constructor(
    hmac: string,
    key: string | ArrayBuffer | KeyObject,
    options?: TransformOptions,
  ) {
    super({
      transform(chunk: string, encoding: string, callback: () => void) {
        // deno-lint-ignore no-explicit-any
        self.update(coerceToBytes(chunk), encoding as any);
        callback();
      },
      flush(callback: () => void) {
        this.push(self.digest());
        callback();
      },
    });
    // deno-lint-ignore no-this-alias
    const self = this;
    if (key instanceof KeyObject) {
      notImplemented("Hmac: KeyObject key is not implemented");
    }

    validateString(hmac, "hmac");
    const u8Key = prepareSecretKey(key, options?.encoding) as Buffer;

    const alg = hmac.toLowerCase();
    this.#hash = new Hash(alg, options);
    this.#algorithm = alg;
    const blockSize = (alg === "sha512" || alg === "sha384") ? 128 : 64;
    const keySize = u8Key.length;

    let bufKey: Buffer;

    // HMAC key schedule: keys longer than the block size are hashed down
    // first, shorter keys are zero-padded to the block size. (Out-of-range
    // reads of a shortened bufKey act as zero bytes in the XOR below.)
    if (keySize > blockSize) {
      bufKey = this.#hash.update(u8Key).digest() as Buffer;
    } else {
      bufKey = Buffer.concat([u8Key, this.#ZEROES], blockSize);
    }

    this.#ipad = Buffer.allocUnsafe(blockSize);
    this.#opad = Buffer.allocUnsafe(blockSize);

    // Inner/outer pads as defined by HMAC (RFC 2104): key XOR 0x36 / 0x5c.
    for (let i = 0; i < blockSize; i++) {
      this.#ipad[i] = bufKey[i] ^ 0x36;
      this.#opad[i] = bufKey[i] ^ 0x5C;
    }

    // Start the inner hash over (key ^ ipad); message data is appended via
    // update().
    this.#hash = new Hash(alg);
    this.#hash.update(this.#ipad);
  }

  digest(): Buffer;
  digest(encoding: BinaryToTextEncoding): string;
  digest(encoding?: BinaryToTextEncoding): Buffer | string {
    const result = this.#hash.digest();

    // Outer hash: H((key ^ opad) || inner digest).
    return new Hash(this.#algorithm).update(this.#opad).update(result).digest(
      encoding,
    );
  }

  update(data: string | ArrayBuffer, inputEncoding?: Encoding): this {
    this.#hash.update(data, inputEncoding);
    return this;
  }
}

Hmac.prototype = HmacImpl.prototype;

/**
 * Creates and returns a Hash object that can be used to generate hash digests
 * using the given `algorithm`. Optional `options` argument controls stream
 * behavior.
 */
export function createHash(algorithm: string, opts?: TransformOptions) {
  return new Hash(algorithm, opts);
}
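
// Sketch of the exported API above, for illustration only (comments, not
// executed). `Hmac` is callable without `new` (it returns a `HmacImpl`);
// the key and inputs below are hypothetical example values. Note that the
// Hash constructor currently accepts but does not use its `_opts` argument.
//
//   const hex = createHash("sha256").update("abc").digest("hex"); // string
//   const buf = createHash("sha256").update("abc").digest();      // Buffer
//
//   const tag = Hmac("sha256", "secret-key").update("payload").digest("hex");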

export default {
  Hash,
  Hmac,
  createHash,
};