Mirror of https://github.com/denoland/deno.git, synced 2024-11-21 15:04:11 -05:00
Revert "perf(ext/node): improve Buffer
from string performance" (#24851)
parent e24aa6bbec
commit 7495bcbf77
7 changed files with 33 additions and 171 deletions
@@ -9,10 +9,7 @@ const {
  TypedArrayPrototypeSlice,
  TypedArrayPrototypeSubarray,
  TypedArrayPrototypeGetByteLength,
  TypedArrayPrototypeGetByteOffset,
  DataViewPrototypeGetBuffer,
  DataViewPrototypeGetByteLength,
  DataViewPrototypeGetByteOffset,
  TypedArrayPrototypeGetBuffer,
} = primordials;
const { isTypedArray, isDataView, close } = core;
@@ -41,17 +38,9 @@ const toU8 = (input) => {
  }

  if (isTypedArray(input)) {
    return new Uint8Array(
      TypedArrayPrototypeGetBuffer(input),
      TypedArrayPrototypeGetByteOffset(input),
      TypedArrayPrototypeGetByteLength(input),
    );
    return new Uint8Array(TypedArrayPrototypeGetBuffer(input));
  } else if (isDataView(input)) {
    return new Uint8Array(
      DataViewPrototypeGetBuffer(input),
      DataViewPrototypeGetByteOffset(input),
      DataViewPrototypeGetByteLength(input),
    );
    return new Uint8Array(DataViewPrototypeGetBuffer(input));
  }

  return input;
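The two `toU8` bodies above differ in whether the view's offset and length are honored when wrapping its backing buffer. A minimal standalone check of what each form yields for a subarray view (plain TypeScript, independent of the polyfill):

// `view` covers bytes 2..4 of an 8-byte backing buffer.
const backing = new Uint8Array([0, 1, 2, 3, 4, 5, 6, 7]);
const view = backing.subarray(2, 5); // byteOffset = 2, byteLength = 3

// Offset-aware conversion: exactly the 3 bytes the caller handed in.
const exact = new Uint8Array(view.buffer, view.byteOffset, view.byteLength);
console.log(exact); // Uint8Array(3) [ 2, 3, 4 ]

// Buffer-only conversion: silently widens to the whole backing store.
const widened = new Uint8Array(view.buffer);
console.log(widened.length); // 8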
@@ -542,7 +542,7 @@ export class OutgoingMessage extends Stream {
    if (data instanceof Buffer) {
      data = new Uint8Array(data.buffer, data.byteOffset, data.byteLength);
    }
    if (data.byteLength > 0) {
    if (data.buffer.byteLength > 0) {
      this._bodyWriter.write(data).then(() => {
        callback?.();
        this.emit("drain");
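The guard change above swaps `data.byteLength` (the view's own length) for `data.buffer.byteLength` (the backing allocation's length). For an empty view carved out of a shared buffer the two disagree; a small illustration using only typed arrays:

const pool = new Uint8Array(16);   // stands in for a shared backing allocation
const chunk = pool.subarray(4, 4); // an empty view into it

console.log(chunk.byteLength);        // 0  -> nothing to write
console.log(chunk.buffer.byteLength); // 16 -> looks non-empty if you check the backing buffer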
@@ -19,7 +19,6 @@ import {
  asciiToBytes,
  base64ToBytes,
  base64UrlToBytes,
  base64Write,
  bytesToAscii,
  bytesToUtf16le,
  hexToBytes,
@@ -43,15 +42,6 @@ import { Blob } from "ext:deno_web/09_file.js";

export { atob, Blob, btoa };

class FastBuffer extends Uint8Array {
  // Using an explicit constructor here is necessary to avoid relying on
  // `Array.prototype[Symbol.iterator]`, which can be mutated by users.
  // eslint-disable-next-line no-useless-constructor
  constructor(bufferOrLength, byteOffset, length) {
    super(bufferOrLength, byteOffset, length);
  }
}

const utf8Encoder = new TextEncoder();

// Temporary buffers to convert numbers.
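The deleted `FastBuffer` is a `Uint8Array` subclass whose explicit constructor, per the comment above, avoids relying on the user-mutable `Array.prototype[Symbol.iterator]`. An illustrative subclass of the same shape (the name `PooledBytes` is made up here, not part of the polyfill):

// Construct a view over a pooled ArrayBuffer while staying a plain Uint8Array subclass.
class PooledBytes extends Uint8Array {
  // eslint-disable-next-line no-useless-constructor
  constructor(buffer: ArrayBufferLike, byteOffset?: number, length?: number) {
    super(buffer, byteOffset, length);
  }
}

const pool = new ArrayBuffer(32);
const slice = new PooledBytes(pool, 8, 4); // 4-byte view starting at offset 8
console.log(slice.byteOffset, slice.byteLength); // 8 4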
@@ -82,9 +72,6 @@ export const constants = {
  MAX_STRING_LENGTH: kStringMaxLength,
};

FastBuffer.prototype.constructor = Buffer;
Buffer.prototype = FastBuffer.prototype;

Object.defineProperty(Buffer.prototype, "parent", {
  enumerable: true,
  get: function () {
@@ -111,7 +98,9 @@ function createBuffer(length) {
      'The value "' + length + '" is invalid for option "size"',
    );
  }
  return new FastBuffer(length);
  const buf = new Uint8Array(length);
  Object.setPrototypeOf(buf, Buffer.prototype);
  return buf;
}

export function Buffer(arg, encodingOrOffset, length) {
@@ -128,32 +117,7 @@ export function Buffer(arg, encodingOrOffset, length) {
  return _from(arg, encodingOrOffset, length);
}

Object.defineProperty(Buffer, Symbol.species, {
  __proto__: null,
  enumerable: false,
  configurable: true,
  get() {
    return FastBuffer;
  },
});

Buffer.poolSize = 8 * 1024;
let poolSize, poolOffset, allocPool;

function createPool() {
  poolSize = Buffer.poolSize;
  allocPool = new Uint8Array(poolSize).buffer;
  poolOffset = 0;
}
createPool();

function alignPool() {
  // Ensure aligned slices
  if (poolOffset & 0x7) {
    poolOffset |= 0x7;
    poolOffset++;
  }
}
Buffer.poolSize = 8192;

function _from(value, encodingOrOffset, length) {
  if (typeof value === "string") {
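`alignPool` in the removed pool code rounds `poolOffset` up to the next multiple of 8 with bit operations. The same arithmetic as a tiny standalone check:

// Round an offset up to the next multiple of 8, as alignPool does.
function alignTo8(offset: number): number {
  if (offset & 0x7) { // low three bits set -> not 8-byte aligned
    offset |= 0x7;    // jump to ...111
    offset++;         // then to the next multiple of 8
  }
  return offset;
}

console.log(alignTo8(0), alignTo8(1), alignTo8(8), alignTo8(13)); // 0 8 8 16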
@@ -240,28 +204,6 @@ Buffer.allocUnsafeSlow = function allocUnsafeSlow(size) {
  return _allocUnsafe(size);
};

function fromStringFast(string, ops) {
  const length = ops.byteLength(string);
  if (length >= (Buffer.poolSize >>> 1)) {
    const data = ops.create(string);
    Object.setPrototypeOf(data, Buffer.prototype);
    return data;
  }

  if (length > (poolSize - poolOffset)) {
    createPool();
  }
  let b = new FastBuffer(allocPool, poolOffset, length);
  const actual = ops.write(b, string, 0, length);
  if (actual != length) {
    // byteLength() may overestimate. That's a rare case, though.
    b = new FastBuffer(allocPool, poolOffset, actual);
  }
  poolOffset += actual;
  alignPool();
  return b;
}

function fromString(string, encoding) {
  if (typeof encoding !== "string" || encoding === "") {
    encoding = "utf8";
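The deleted `fromStringFast` takes the pooled path only when the estimated byte length is under half the pool size, and re-views the slice if the encoder wrote fewer bytes than estimated. A condensed sketch of that control flow with a hypothetical UTF-8 `ops` object (the names and the worst-case estimate below are illustrative, not the polyfill's):

const POOL_SIZE = 8 * 1024;
let pool = new ArrayBuffer(POOL_SIZE);
let poolOffset = 0;

const utf8Ops = {
  byteLength: (s: string) => s.length * 3, // worst-case estimate, may overshoot
  write: (target: Uint8Array, s: string) =>
    new TextEncoder().encodeInto(s, target).written,
};

function fromStringPooled(s: string): Uint8Array {
  const estimate = utf8Ops.byteLength(s);
  if (estimate >= POOL_SIZE >>> 1) {
    // Large strings get their own allocation instead of monopolizing the pool.
    const own = new Uint8Array(estimate);
    return own.subarray(0, utf8Ops.write(own, s));
  }
  if (estimate > POOL_SIZE - poolOffset) {
    pool = new ArrayBuffer(POOL_SIZE); // start a fresh pool
    poolOffset = 0;
  }
  let view = new Uint8Array(pool, poolOffset, estimate);
  const actual = utf8Ops.write(view, s);
  if (actual !== estimate) view = new Uint8Array(pool, poolOffset, actual);
  poolOffset += actual; // (8-byte alignment step omitted for brevity)
  return view;
}

console.log(fromStringPooled("héllo").byteLength); // 6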
@@ -269,15 +211,19 @@ function fromString(string, encoding) {
  if (!Buffer.isEncoding(encoding)) {
    throw new codes.ERR_UNKNOWN_ENCODING(encoding);
  }
  const ops = getEncodingOps(encoding);
  if (ops === undefined) {
    throw new codes.ERR_UNKNOWN_ENCODING(encoding);
  const length = byteLength(string, encoding) | 0;
  let buf = createBuffer(length);
  const actual = buf.write(string, encoding);
  if (actual !== length) {
    buf = buf.slice(0, actual);
  }
  return fromStringFast(string, ops);
  return buf;
}

function fromArrayLike(obj) {
  return new FastBuffer(obj);
  const buf = new Uint8Array(obj);
  Object.setPrototypeOf(buf, Buffer.prototype);
  return buf;
}

function fromObject(obj) {
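The restored `fromString` path sizes a buffer with `byteLength`, writes into it, and slices down if the write came up short. The observable result through the public API (usage example, assuming the standard node:buffer module):

import { Buffer } from "node:buffer";

const s = "héllo";
console.log(Buffer.byteLength(s, "utf8")); // 6
const buf = Buffer.from(s, "utf8");
console.log(buf.length);                   // 6
console.log(buf.toString("utf8"));         // héllo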
@@ -314,7 +260,7 @@ Object.setPrototypeOf(SlowBuffer.prototype, Uint8Array.prototype);
Object.setPrototypeOf(SlowBuffer, Uint8Array);

Buffer.isBuffer = function isBuffer(b) {
  return b instanceof Buffer;
  return b != null && b._isBuffer === true && b !== Buffer.prototype;
};

Buffer.compare = function compare(a, b) {
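The two `isBuffer` bodies test different things: a prototype-chain check versus the legacy `_isBuffer` marker. A quick contrast (the marker object is a stand-in built for illustration):

import { Buffer } from "node:buffer";

const real = Buffer.from([1, 2, 3]);
const impostor = { _isBuffer: true }; // carries only the duck-typed marker

console.log(real instanceof Buffer);     // true
console.log(impostor instanceof Buffer); // false
// The marker-based body above would accept `impostor`; the instanceof body does not.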
@@ -718,12 +664,12 @@ Buffer.prototype.base64Slice = function base64Slice(
  }
};

Buffer.prototype.base64Write = function base64Write_(
Buffer.prototype.base64Write = function base64Write(
  string,
  offset,
  length,
) {
  return base64Write(string, this, offset, length);
  return blitBuffer(base64ToBytes(string), this, offset, length);
};

Buffer.prototype.base64urlSlice = function base64urlSlice(
@@ -791,8 +737,8 @@ Buffer.prototype.ucs2Write = function ucs2Write(string, offset, length) {
  );
};

Buffer.prototype.utf8Slice = function utf8Slice(offset, length) {
  return _utf8Slice(this, offset, length);
Buffer.prototype.utf8Slice = function utf8Slice(string, offset, length) {
  return _utf8Slice(this, string, offset, length);
};

Buffer.prototype.utf8Write = function utf8Write(string, offset, length) {
@@ -885,7 +831,9 @@ function fromArrayBuffer(obj, byteOffset, length) {
    }
  }

  return new FastBuffer(obj, byteOffset, length);
  const buffer = new Uint8Array(obj, byteOffset, length);
  Object.setPrototypeOf(buffer, Buffer.prototype);
  return buffer;
}

function _base64Slice(buf, start, end) {
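Whichever branch of `fromArrayBuffer` is active, the result is a view that shares memory with the source ArrayBuffer rather than a copy, which is easy to confirm from the public API:

import { Buffer } from "node:buffer";

const ab = new ArrayBuffer(8);
const buf = Buffer.from(ab, 2, 4); // view over bytes 2..5, no copy

new Uint8Array(ab)[2] = 0xff;      // mutate through the raw buffer
console.log(buf[0]);               // 255 -> the Buffer sees the change
console.log(buf.byteOffset, buf.length); // 2 4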
@@ -2157,7 +2105,6 @@ export const encodingOps = {
        dir,
      ),
    slice: (buf, start, end) => buf.asciiSlice(start, end),
    create: (string) => asciiToBytes(string),
    write: (buf, string, offset, len) => buf.asciiWrite(string, offset, len),
  },
  base64: {
@@ -2172,7 +2119,6 @@ export const encodingOps = {
        encodingsMap.base64,
        dir,
      ),
    create: (string) => base64ToBytes(string),
    slice: (buf, start, end) => buf.base64Slice(start, end),
    write: (buf, string, offset, len) => buf.base64Write(string, offset, len),
  },
@@ -2188,7 +2134,6 @@ export const encodingOps = {
        encodingsMap.base64url,
        dir,
      ),
    create: (string) => base64UrlToBytes(string),
    slice: (buf, start, end) => buf.base64urlSlice(start, end),
    write: (buf, string, offset, len) =>
      buf.base64urlWrite(string, offset, len),
@@ -2205,7 +2150,6 @@ export const encodingOps = {
        encodingsMap.hex,
        dir,
      ),
    create: (string) => hexToBytes(string),
    slice: (buf, start, end) => buf.hexSlice(start, end),
    write: (buf, string, offset, len) => buf.hexWrite(string, offset, len),
  },
@@ -2221,7 +2165,6 @@ export const encodingOps = {
        encodingsMap.latin1,
        dir,
      ),
    create: (string) => asciiToBytes(string),
    slice: (buf, start, end) => buf.latin1Slice(start, end),
    write: (buf, string, offset, len) => buf.latin1Write(string, offset, len),
  },
@@ -2237,7 +2180,6 @@ export const encodingOps = {
        encodingsMap.utf16le,
        dir,
      ),
    create: (string) => utf16leToBytes(string),
    slice: (buf, start, end) => buf.ucs2Slice(start, end),
    write: (buf, string, offset, len) => buf.ucs2Write(string, offset, len),
  },
@@ -2253,7 +2195,6 @@ export const encodingOps = {
        encodingsMap.utf8,
        dir,
      ),
    create: (string) => utf8Encoder.encode(string),
    slice: (buf, start, end) => buf.utf8Slice(start, end),
    write: (buf, string, offset, len) => buf.utf8Write(string, offset, len),
  },
@@ -2269,7 +2210,6 @@ export const encodingOps = {
        encodingsMap.utf16le,
        dir,
      ),
    create: (string) => utf16leToBytes(string),
    slice: (buf, start, end) => buf.ucs2Slice(start, end),
    write: (buf, string, offset, len) => buf.ucs2Write(string, offset, len),
  },
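The `encodingOps` entries above form a dispatch table: one record per encoding exposing `byteLength`, `create`, `slice`, and `write` style hooks. A stripped-down sketch of that shape with only a UTF-8 entry wired up (hypothetical helpers, not the polyfill's own):

type EncodingOps = {
  byteLength: (s: string) => number;
  create: (s: string) => Uint8Array;
  write: (buf: Uint8Array, s: string, offset: number, len: number) => number;
};

const encoder = new TextEncoder();

const ops: Record<string, EncodingOps> = {
  utf8: {
    byteLength: (s) => encoder.encode(s).byteLength,
    create: (s) => encoder.encode(s),
    write: (buf, s, offset, len) =>
      encoder.encodeInto(s, buf.subarray(offset, offset + len)).written,
  },
};

const target = new Uint8Array(16);
const written = ops.utf8.write(target, "hi✓", 4, 8);
console.log(written, target.subarray(4, 4 + written)); // 5 bytes: "h", "i", plus a 3-byte check mark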
@@ -7,7 +7,6 @@ import {
  forgivingBase64Decode,
  forgivingBase64UrlDecode,
} from "ext:deno_web/00_infra.js";
import { op_base64_write } from "ext:core/ops";

export function asciiToBytes(str: string) {
  const length = str.length;
@@ -28,22 +27,6 @@ export function base64ToBytes(str: string) {
  }
}

export function base64Write(
  str: string,
  buffer: Uint8Array,
  offset: number = 0,
  length?: number,
): number {
  length = length ?? buffer.byteLength - offset;
  try {
    return op_base64_write(str, buffer, offset, length);
  } catch {
    str = base64clean(str);
    str = str.replaceAll("-", "+").replaceAll("_", "/");
    return op_base64_write(str, buffer, offset, length);
  }
}

const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;
function base64clean(str: string) {
  // Node takes equal signs as end of the Base64 encoding
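The removed `base64Write` decodes a base64 string and writes the result into an existing buffer at an offset, clamping to the space available. Without the fast op, the same effect can be sketched with standard APIs (illustrative only; no forgiving or URL-safe handling):

// Decode with atob, then copy at most `length` bytes starting at `offset`.
function base64WriteSketch(
  str: string,
  buffer: Uint8Array,
  offset = 0,
  length = buffer.byteLength - offset,
): number {
  const decoded = Uint8Array.from(atob(str), (c) => c.charCodeAt(0));
  const n = Math.min(length, decoded.byteLength);
  buffer.set(decoded.subarray(0, n), offset);
  return n;
}

const out = new Uint8Array(8);
console.log(base64WriteSketch("aGVsbG8=", out, 2)); // 5
console.log(out); // 0 0 104 101 108 108 111 0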
@@ -39,15 +39,10 @@ const {
  Symbol,
  MathMin,
  DataViewPrototypeGetBuffer,
  DataViewPrototypeGetByteLength,
  DataViewPrototypeGetByteOffset,
  ObjectPrototypeIsPrototypeOf,
  String,
  TypedArrayPrototypeGetBuffer,
  TypedArrayPrototypeGetByteLength,
  TypedArrayPrototypeGetByteOffset,
  StringPrototypeToLowerCase,
  Uint8Array,
} = primordials;
const { isTypedArray } = core;

@@ -88,21 +83,11 @@ function normalizeBuffer(buf: Buffer) {
  }
  if (isBufferType(buf)) {
    return buf;
  } else if (isTypedArray(buf)) {
    return Buffer.from(
      new Uint8Array(
        TypedArrayPrototypeGetBuffer(buf),
        TypedArrayPrototypeGetByteOffset(buf),
        TypedArrayPrototypeGetByteLength(buf),
      ),
    );
  } else {
    return Buffer.from(
      new Uint8Array(
        DataViewPrototypeGetBuffer(buf),
        DataViewPrototypeGetByteOffset(buf),
        DataViewPrototypeGetByteLength(buf),
      ),
      isTypedArray(buf)
        ? TypedArrayPrototypeGetBuffer(buf)
        : DataViewPrototypeGetBuffer(buf),
    );
  }
}
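The two `normalizeBuffer` variants hand `Buffer.from` different inputs: a bounded `Uint8Array` view, which `Buffer.from` copies, versus the whole backing `ArrayBuffer`, which it wraps without copying and without the view's bounds. The difference is visible with public APIs alone:

import { Buffer } from "node:buffer";

const backing = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8]);
const view = backing.subarray(2, 6);          // 4 bytes at offset 2

const fromView = Buffer.from(view);           // copies exactly those 4 bytes
const fromBacking = Buffer.from(view.buffer); // wraps all 8, offset is lost

console.log(fromView.length, fromBacking.length); // 4 8
backing[2] = 99;
console.log(fromView[0], fromBacking[2]);          // 3 99 (copy vs shared memory)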
@@ -16,6 +16,7 @@ use deno_core::ByteString;
use deno_core::OpState;
use deno_core::Resource;
use deno_core::ResourceId;
use deno_core::ToJsBuffer;
use deno_core::U16String;

use encoding_rs::CoderResult;
@@ -61,7 +62,6 @@ deno_core::extension!(deno_web,
  parameters = [P: TimersPermission],
  ops = [
    op_base64_decode,
    op_base64_write,
    op_base64_encode,
    op_base64_atob,
    op_base64_btoa,
@@ -130,43 +130,12 @@ deno_core::extension!(deno_web,
);

#[op2]
#[buffer]
fn op_base64_decode(#[string] input: String) -> Result<Vec<u8>, AnyError> {
#[serde]
fn op_base64_decode(#[string] input: String) -> Result<ToJsBuffer, AnyError> {
  let mut s = input.into_bytes();
  let decoded_len = forgiving_base64_decode_inplace(&mut s)?;
  s.truncate(decoded_len);
  Ok(s)
}

#[op2(fast)]
#[smi]
fn op_base64_write(
  #[string] input: String,
  #[buffer] buffer: &mut [u8],
  #[smi] start: u32,
  #[smi] max_len: u32,
) -> Result<u32, AnyError> {
  let tsb_len = buffer.len() as u32;

  if start > tsb_len {
    return Err(type_error("Offset is out of bounds"));
  }

  let max_len = std::cmp::min(max_len, tsb_len - start) as usize;
  let start = start as usize;

  if max_len == 0 {
    return Ok(0);
  }

  let mut s = input.into_bytes();
  let decoded_len = forgiving_base64_decode_inplace(&mut s)?;

  let max_len = std::cmp::min(max_len, decoded_len);

  buffer[start..start + max_len].copy_from_slice(&s[..max_len]);

  Ok(max_len as u32)
  Ok(s.into())
}

#[op2]
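Most of the removed `op_base64_write` is bounds bookkeeping: reject an out-of-range start, clamp the writable span to both the target and the decoded data, and report how many bytes were copied. The same checks mirrored in TypeScript as a sketch (not the op itself, with the decoding step left out):

function clampedCopy(decoded: Uint8Array, target: Uint8Array, start: number): number {
  if (start > target.byteLength) {
    throw new TypeError("Offset is out of bounds");
  }
  const maxLen = Math.min(target.byteLength - start, decoded.byteLength);
  if (maxLen === 0) return 0;
  target.set(decoded.subarray(0, maxLen), start);
  return maxLen;
}

const target = new Uint8Array(4);
console.log(clampedCopy(new Uint8Array([9, 9, 9, 9, 9, 9]), target, 2)); // 2
console.log(target); // 0 0 9 9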
@@ -123,9 +123,7 @@ Deno.test(

Deno.test("should work with dataview", () => {
  const buf = Buffer.from("hello world");
  const compressed = brotliCompressSync(
    new DataView(buf.buffer, buf.byteOffset, buf.byteLength),
  );
  const compressed = brotliCompressSync(new DataView(buf.buffer));
  const decompressed = brotliDecompressSync(compressed);
  assertEquals(decompressed.toString(), "hello world");
});
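This test change (and the gzip one below) turns on the same offset question as `toU8`: for a Buffer that is a slice of a larger allocation, `new DataView(buf.buffer)` spans the whole allocation, while passing `byteOffset` and `byteLength` limits it to the slice. A quick check using an explicit slice rather than relying on internal pooling:

import { Buffer } from "node:buffer";

const big = Buffer.alloc(32);
const slice = big.subarray(8, 19); // 11 bytes, like "hello world"

const whole = new DataView(slice.buffer);
const bounded = new DataView(slice.buffer, slice.byteOffset, slice.byteLength);

console.log(whole.byteLength);   // 32 -> includes bytes outside the slice
console.log(bounded.byteLength); // 11 -> exactly the slice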
@@ -143,9 +141,7 @@ Deno.test(
  "zlib compression with dataview",
  () => {
    const buf = Buffer.from("hello world");
    const compressed = gzipSync(
      new DataView(buf.buffer, buf.byteOffset, buf.byteLength),
    );
    const compressed = gzipSync(new DataView(buf.buffer));
    const decompressed = unzipSync(compressed);
    assertEquals(decompressed.toString(), "hello world");
  },