// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.

// @ts-check
/// <reference no-default-lib="true" />
/// <reference path="../../core/lib.deno_core.d.ts" />
/// <reference path="../../core/internal.d.ts" />
/// <reference path="../webidl/internal.d.ts" />
/// <reference path="../web/internal.d.ts" />
/// <reference path="../web/lib.deno_web.d.ts" />
/// <reference path="./internal.d.ts" />
/// <reference lib="esnext" />

import { core, primordials } from "ext:core/mod.js";
const ops = core.ops;
import * as webidl from "ext:deno_webidl/00_webidl.js";
import { ReadableStream } from "ext:deno_web/06_streams.js";
import { URL } from "ext:deno_url/00_url.js";
const {
  ArrayBufferPrototype,
  ArrayBufferPrototypeSlice,
  ArrayBufferPrototypeGetByteLength,
  ArrayBufferIsView,
  ArrayPrototypePush,
  AsyncGeneratorPrototypeNext,
  DataViewPrototypeGetBuffer,
  DataViewPrototypeGetByteLength,
  DataViewPrototypeGetByteOffset,
  Date,
  DatePrototypeGetTime,
  MathMax,
  MathMin,
  ObjectPrototypeIsPrototypeOf,
  RegExpPrototypeTest,
  // TODO(lucacasonato): add SharedArrayBuffer to primordials
  // SharedArrayBufferPrototype
  SafeFinalizationRegistry,
  SafeRegExp,
  StringPrototypeCharAt,
  StringPrototypeToLowerCase,
  StringPrototypeSlice,
  Symbol,
  SymbolFor,
  TypedArrayPrototypeSet,
  TypedArrayPrototypeGetBuffer,
  TypedArrayPrototypeGetByteLength,
  TypedArrayPrototypeGetByteOffset,
  TypedArrayPrototypeGetSymbolToStringTag,
  TypeError,
  Uint8Array,
} = primordials;
import { createFilteredInspectProxy } from "ext:deno_console/01_console.js";
const {
  op_blob_read_part,
} = core.ensureFastOps();

// TODO(lucacasonato): this needs to not be hardcoded and instead depend on
// host os.
const isWindows = false;

/**
 * @param {string} input
 * @param {number} position
 * @returns {{result: string, position: number}}
 */
function collectCodepointsNotCRLF(input, position) {
  // See https://w3c.github.io/FileAPI/#convert-line-endings-to-native and
  // https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points
  const start = position;
  for (
    let c = StringPrototypeCharAt(input, position);
    position < input.length && !(c === "\r" || c === "\n");
    c = StringPrototypeCharAt(input, ++position)
  );
  return { result: StringPrototypeSlice(input, start, position), position };
}

/**
 * @param {string} s
 * @returns {string}
 */
function convertLineEndingsToNative(s) {
  const nativeLineEnding = isWindows ? "\r\n" : "\n";

  let { result, position } = collectCodepointsNotCRLF(s, 0);

  while (position < s.length) {
    const codePoint = StringPrototypeCharAt(s, position);
    if (codePoint === "\r") {
      result += nativeLineEnding;
      position++;
      if (
        position < s.length && StringPrototypeCharAt(s, position) === "\n"
      ) {
        position++;
      }
    } else if (codePoint === "\n") {
      position++;
      result += nativeLineEnding;
    }
    const { result: token, position: newPosition } = collectCodepointsNotCRLF(
      s,
      position,
    );
    position = newPosition;
    result += token;
  }

  return result;
}
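
// Illustrative behavior of the helper above, assuming a non-Windows host
// (isWindows === false, so the native line ending is "\n"):
//
//   convertLineEndingsToNative("a\r\nb\rc\nd");  // => "a\nb\nc\nd"
//
// On Windows the same input would instead be joined with "\r\n".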

/** @param {(BlobReference | Blob)[]} parts */
async function* toIterator(parts) {
  for (let i = 0; i < parts.length; ++i) {
    // deno-lint-ignore prefer-primordials
    yield* parts[i].stream();
  }
}

/** @typedef {BufferSource | Blob | string} BlobPart */

/**
 * @param {BlobPart[]} parts
 * @param {string} endings
 * @returns {{ parts: (BlobReference|Blob)[], size: number }}
 */
function processBlobParts(parts, endings) {
  /** @type {(BlobReference|Blob)[]} */
  const processedParts = [];
  let size = 0;
  for (let i = 0; i < parts.length; ++i) {
    const element = parts[i];
    if (ObjectPrototypeIsPrototypeOf(ArrayBufferPrototype, element)) {
      const chunk = new Uint8Array(ArrayBufferPrototypeSlice(element, 0));
      ArrayPrototypePush(processedParts, BlobReference.fromUint8Array(chunk));
      size += ArrayBufferPrototypeGetByteLength(element);
    } else if (ArrayBufferIsView(element)) {
      if (TypedArrayPrototypeGetSymbolToStringTag(element) !== undefined) {
        // TypedArray
        const chunk = new Uint8Array(
          TypedArrayPrototypeGetBuffer(/** @type {Uint8Array} */ (element)),
          TypedArrayPrototypeGetByteOffset(/** @type {Uint8Array} */ (element)),
          TypedArrayPrototypeGetByteLength(/** @type {Uint8Array} */ (element)),
        );
        size += TypedArrayPrototypeGetByteLength(
          /** @type {Uint8Array} */ (element),
        );
        ArrayPrototypePush(processedParts, BlobReference.fromUint8Array(chunk));
      } else {
        // DataView
        const chunk = new Uint8Array(
          DataViewPrototypeGetBuffer(/** @type {DataView} */ (element)),
          DataViewPrototypeGetByteOffset(/** @type {DataView} */ (element)),
          DataViewPrototypeGetByteLength(/** @type {DataView} */ (element)),
        );
        size += DataViewPrototypeGetByteLength(
          /** @type {DataView} */ (element),
        );
        ArrayPrototypePush(processedParts, BlobReference.fromUint8Array(chunk));
      }
    } else if (ObjectPrototypeIsPrototypeOf(BlobPrototype, element)) {
      ArrayPrototypePush(processedParts, element);
      size += element.size;
    } else if (typeof element === "string") {
      const chunk = core.encode(
        endings == "native" ? convertLineEndingsToNative(element) : element,
      );
      size += TypedArrayPrototypeGetByteLength(chunk);
      ArrayPrototypePush(processedParts, BlobReference.fromUint8Array(chunk));
    } else {
      throw new TypeError("Unreachable code (invalid element type)");
    }
  }
  return { parts: processedParts, size };
}
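
// Illustrative sketch of the conversion above, for a small mixed input:
//
//   processBlobParts(["ab", new Uint8Array([1, 2, 3])], "transparent");
//   // => { parts: [BlobReference, BlobReference], size: 5 }
//
// The string is encoded into its own 2-byte part, the typed array becomes a
// 3-byte part, and any Blob elements would be reused as parts without copying.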

const NORMALIZE_PATTERN = new SafeRegExp(/^[\x20-\x7E]*$/);

/**
 * @param {string} str
 * @returns {string}
 */
function normalizeType(str) {
  let normalizedType = str;
  if (!RegExpPrototypeTest(NORMALIZE_PATTERN, str)) {
    normalizedType = "";
  }
  return StringPrototypeToLowerCase(normalizedType);
}
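
// Examples of the normalization above: types are lower-cased, and any type
// containing a character outside printable ASCII is replaced by "":
//
//   normalizeType("Text/HTML");        // => "text/html"
//   normalizeType("text/plain\u00e9"); // => ""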

/**
 * Get all parts of a blob (including nested blobs) as a flat array of
 * blob part ids.
 * @param {Blob} blob
 * @param {string[]} bag
 * @returns {string[]}
 */
function getParts(blob, bag = []) {
  const parts = blob[_parts];
  for (let i = 0; i < parts.length; ++i) {
    const part = parts[i];
    if (ObjectPrototypeIsPrototypeOf(BlobPrototype, part)) {
      getParts(part, bag);
    } else {
      ArrayPrototypePush(bag, part._id);
    }
  }
  return bag;
}
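
// Illustrative: getParts flattens nested blobs into the ids of their leaf
// BlobReference parts, in order. The ids themselves are opaque strings
// assigned by the ops layer:
//
//   const inner = new Blob(["abc"]);
//   getParts(new Blob(["xy", inner]));
//   // => [<id of the "xy" part>, <id of the "abc" part>]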

const _type = Symbol("Type");
const _size = Symbol("Size");
const _parts = Symbol("Parts");

class Blob {
  [_type] = "";
  [_size] = 0;
  [_parts];

  /**
   * @param {BlobPart[]} blobParts
   * @param {BlobPropertyBag} options
   */
  constructor(blobParts = [], options = {}) {
    const prefix = "Failed to construct 'Blob'";
    blobParts = webidl.converters["sequence<BlobPart>"](
      blobParts,
      prefix,
      "Argument 1",
    );
    options = webidl.converters["BlobPropertyBag"](
      options,
      prefix,
      "Argument 2",
    );

    this[webidl.brand] = webidl.brand;

    const { parts, size } = processBlobParts(
      blobParts,
      options.endings,
    );

    this[_parts] = parts;
    this[_size] = size;
    this[_type] = normalizeType(options.type);
  }

  /** @returns {number} */
  get size() {
    webidl.assertBranded(this, BlobPrototype);
    return this[_size];
  }

  /** @returns {string} */
  get type() {
    webidl.assertBranded(this, BlobPrototype);
    return this[_type];
  }

  /**
   * @param {number} [start]
   * @param {number} [end]
   * @param {string} [contentType]
   * @returns {Blob}
   */
  slice(start = undefined, end = undefined, contentType = undefined) {
    webidl.assertBranded(this, BlobPrototype);
    const prefix = "Failed to execute 'slice' on 'Blob'";
    if (start !== undefined) {
      start = webidl.converters["long long"](start, prefix, "Argument 1", {
        clamp: true,
      });
    }
    if (end !== undefined) {
      end = webidl.converters["long long"](end, prefix, "Argument 2", {
        clamp: true,
      });
    }
    if (contentType !== undefined) {
      contentType = webidl.converters["DOMString"](
        contentType,
        prefix,
        "Argument 3",
      );
    }

    // deno-lint-ignore no-this-alias
    const O = this;
    /** @type {number} */
    let relativeStart;
    if (start === undefined) {
      relativeStart = 0;
    } else {
      if (start < 0) {
        relativeStart = MathMax(O.size + start, 0);
      } else {
        relativeStart = MathMin(start, O.size);
      }
    }
    /** @type {number} */
    let relativeEnd;
    if (end === undefined) {
      relativeEnd = O.size;
    } else {
      if (end < 0) {
        relativeEnd = MathMax(O.size + end, 0);
      } else {
        relativeEnd = MathMin(end, O.size);
      }
    }

    const span = MathMax(relativeEnd - relativeStart, 0);
    const blobParts = [];
    let added = 0;

    const parts = this[_parts];
    for (let i = 0; i < parts.length; ++i) {
      const part = parts[i];
      // Don't add the overflow to the new blobParts.
      if (added >= span) {
        // It may be possible to remove the `added` variable and rely on
        // `relativeEnd` alone.
        break;
      }
      const size = part.size;
      if (relativeStart && size <= relativeStart) {
        // Skip the beginning and adjust the relative start & end positions
        // as we skip the unwanted parts.
        relativeStart -= size;
        relativeEnd -= size;
      } else {
        // deno-lint-ignore prefer-primordials
        const chunk = part.slice(
          relativeStart,
          MathMin(part.size, relativeEnd),
        );
        added += chunk.size;
        relativeEnd -= part.size;
        ArrayPrototypePush(blobParts, chunk);
        relativeStart = 0; // All subsequent parts should start at 0.
      }
    }

    /** @type {string} */
    let relativeContentType;
    if (contentType === undefined) {
      relativeContentType = "";
    } else {
      relativeContentType = normalizeType(contentType);
    }

    const blob = new Blob([], { type: relativeContentType });
    blob[_parts] = blobParts;
    blob[_size] = span;
    return blob;
  }
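
  // Worked example of the index arithmetic above: for a 10-byte blob,
  // `blob.slice(-4, -1)` resolves to relativeStart = MathMax(10 - 4, 0) = 6
  // and relativeEnd = MathMax(10 - 1, 0) = 9, so span = 3 and the result is a
  // 3-byte blob whose parts are slices of the original parts.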

  /**
   * @returns {ReadableStream<Uint8Array>}
   */
  stream() {
    webidl.assertBranded(this, BlobPrototype);
    const partIterator = toIterator(this[_parts]);
    const stream = new ReadableStream({
      type: "bytes",
      /** @param {ReadableByteStreamController} controller */
      async pull(controller) {
        while (true) {
          const { value, done } = await AsyncGeneratorPrototypeNext(
            partIterator,
          );
          if (done) {
            controller.close();
            controller.byobRequest?.respond(0);
            return;
          }
          if (TypedArrayPrototypeGetByteLength(value) > 0) {
            return controller.enqueue(value);
          }
        }
      },
    });
    return stream;
  }
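
  // The returned byte stream can be consumed like any other ReadableStream,
  // e.g. (illustrative only):
  //
  //   for await (const chunk of blob.stream()) {
  //     // chunk is a Uint8Array holding a portion of the blob's bytes
  //   }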

  /**
   * @returns {Promise<string>}
   */
  async text() {
    webidl.assertBranded(this, BlobPrototype);
    const buffer = await this.#u8Array(this.size);
    return core.decode(buffer);
  }

  async #u8Array(size) {
    const bytes = new Uint8Array(size);
    const partIterator = toIterator(this[_parts]);
    let offset = 0;
    while (true) {
      const { value, done } = await AsyncGeneratorPrototypeNext(
        partIterator,
      );
      if (done) break;
      const byteLength = TypedArrayPrototypeGetByteLength(value);
      if (byteLength > 0) {
        TypedArrayPrototypeSet(bytes, value, offset);
        offset += byteLength;
      }
    }
    return bytes;
  }

  /**
   * @returns {Promise<ArrayBuffer>}
   */
  async arrayBuffer() {
    webidl.assertBranded(this, BlobPrototype);
    const buf = await this.#u8Array(this.size);
    return TypedArrayPrototypeGetBuffer(buf);
  }

  [SymbolFor("Deno.privateCustomInspect")](inspect, inspectOptions) {
    return inspect(
      createFilteredInspectProxy({
        object: this,
        evaluate: ObjectPrototypeIsPrototypeOf(BlobPrototype, this),
        keys: [
          "size",
          "type",
        ],
      }),
      inspectOptions,
    );
  }
}
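
// Typical usage of the class above (illustrative only):
//
//   const blob = new Blob(["hello ", "world"], { type: "text/plain" });
//   blob.size;          // 11
//   blob.type;          // "text/plain"
//   await blob.text();  // "hello world"
//
// Passing `endings: "native"` in the options bag runs
// convertLineEndingsToNative() on string parts before they are encoded.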

webidl.configureInterface(Blob);
const BlobPrototype = Blob.prototype;

webidl.converters["Blob"] = webidl.createInterfaceConverter(
  "Blob",
  Blob.prototype,
);
webidl.converters["BlobPart"] = (V, prefix, context, opts) => {
  // Union for ((ArrayBuffer or ArrayBufferView) or Blob or USVString)
  if (typeof V == "object") {
    if (ObjectPrototypeIsPrototypeOf(BlobPrototype, V)) {
      return webidl.converters["Blob"](V, prefix, context, opts);
    }
    if (
      ObjectPrototypeIsPrototypeOf(ArrayBufferPrototype, V) ||
      // deno-lint-ignore prefer-primordials
      ObjectPrototypeIsPrototypeOf(SharedArrayBuffer.prototype, V)
    ) {
      return webidl.converters["ArrayBuffer"](V, prefix, context, opts);
    }
    if (ArrayBufferIsView(V)) {
      return webidl.converters["ArrayBufferView"](V, prefix, context, opts);
    }
  }
  // BlobPart is passed to processBlobParts after conversion, which calls core.encode()
  // on the string.
  // core.encode() is equivalent to USVString normalization.
  return webidl.converters["DOMString"](V, prefix, context, opts);
};
webidl.converters["sequence<BlobPart>"] = webidl.createSequenceConverter(
  webidl.converters["BlobPart"],
);
webidl.converters["EndingType"] = webidl.createEnumConverter("EndingType", [
  "transparent",
  "native",
]);
const blobPropertyBagDictionary = [
  {
    key: "type",
    converter: webidl.converters["DOMString"],
    defaultValue: "",
  },
  {
    key: "endings",
    converter: webidl.converters["EndingType"],
    defaultValue: "transparent",
  },
];
webidl.converters["BlobPropertyBag"] = webidl.createDictionaryConverter(
  "BlobPropertyBag",
  blobPropertyBagDictionary,
);

const _Name = Symbol("[[Name]]");
const _LastModified = Symbol("[[LastModified]]");

class File extends Blob {
  /** @type {string} */
  [_Name];
  /** @type {number} */
  [_LastModified];

  /**
   * @param {BlobPart[]} fileBits
   * @param {string} fileName
   * @param {FilePropertyBag} options
   */
  constructor(fileBits, fileName, options = {}) {
    const prefix = "Failed to construct 'File'";
    webidl.requiredArguments(arguments.length, 2, prefix);

    fileBits = webidl.converters["sequence<BlobPart>"](
      fileBits,
      prefix,
      "Argument 1",
    );
    fileName = webidl.converters["USVString"](fileName, prefix, "Argument 2");
    options = webidl.converters["FilePropertyBag"](
      options,
      prefix,
      "Argument 3",
    );

    super(fileBits, options);

    /** @type {string} */
    this[_Name] = fileName;
    if (options.lastModified === undefined) {
      /** @type {number} */
      this[_LastModified] = DatePrototypeGetTime(new Date());
    } else {
      /** @type {number} */
      this[_LastModified] = options.lastModified;
    }
  }

  /** @returns {string} */
  get name() {
    webidl.assertBranded(this, FilePrototype);
    return this[_Name];
  }

  /** @returns {number} */
  get lastModified() {
    webidl.assertBranded(this, FilePrototype);
    return this[_LastModified];
  }

  [SymbolFor("Deno.privateCustomInspect")](inspect, inspectOptions) {
    return inspect(
      createFilteredInspectProxy({
        object: this,
        evaluate: ObjectPrototypeIsPrototypeOf(FilePrototype, this),
        keys: [
          "name",
          "size",
          "type",
        ],
      }),
      inspectOptions,
    );
  }
}
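
// Typical usage of the class above (illustrative only):
//
//   const file = new File(["export {};"], "mod.ts", {
//     type: "application/typescript",
//     lastModified: 1700000000000,
//   });
//   file.name;          // "mod.ts"
//   file.lastModified;  // 1700000000000
//
// When `lastModified` is omitted it defaults to the current time.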

webidl.configureInterface(File);
const FilePrototype = File.prototype;

webidl.converters["FilePropertyBag"] = webidl.createDictionaryConverter(
  "FilePropertyBag",
  blobPropertyBagDictionary,
  [
    {
      key: "lastModified",
      converter: webidl.converters["long long"],
    },
  ],
);

// A finalization registry to deallocate a blob part when its JS reference is
// garbage collected.
const registry = new SafeFinalizationRegistry((uuid) => {
  ops.op_blob_remove_part(uuid);
});

// TODO(lucacasonato): get a better stream from Rust in BlobReference#stream

/**
 * An opaque reference to a blob part in Rust. This could be backed by a file,
 * in-memory storage, or something else.
 */
class BlobReference {
  /**
   * Don't use directly. Use `BlobReference.fromUint8Array`.
   * @param {string} id
   * @param {number} size
   */
  constructor(id, size) {
    this._id = id;
    this.size = size;
    registry.register(this, id);
  }

  /**
   * Create a new blob part from a Uint8Array.
   *
   * @param {Uint8Array} data
   * @returns {BlobReference}
   */
  static fromUint8Array(data) {
    const id = ops.op_blob_create_part(data);
    return new BlobReference(id, TypedArrayPrototypeGetByteLength(data));
  }

  /**
   * Create a new BlobReference by slicing this BlobReference. This is a
   * copy-free operation; the sliced reference will still reference the
   * original underlying bytes.
   *
   * @param {number} start
   * @param {number} end
   * @returns {BlobReference}
   */
  slice(start, end) {
    const size = end - start;
    const id = ops.op_blob_slice_part(this._id, {
      start,
      len: size,
    });
    return new BlobReference(id, size);
  }

  /**
   * Read the entire contents of the reference blob.
   * @returns {AsyncGenerator<Uint8Array>}
   */
  async *stream() {
    yield op_blob_read_part(this._id);

    // let position = 0;
    // const end = this.size;
    // while (position !== end) {
    //   const size = MathMin(end - position, 65536);
    //   const chunk = this.slice(position, position + size);
    //   position += chunk.size;
    //   yield op_blob_read_part(chunk._id);
    // }
  }
}

/**
 * Construct a new Blob object from an object URL.
 *
 * This new object will not duplicate data in memory with the original Blob
 * object from which this URL was created or with other Blob objects created
 * from the same URL, but they will be different objects.
 *
 * The object returned from this function will not be a File object, even if
 * the original object from which the object URL was constructed was one. This
 * means that the `name` and `lastModified` properties are lost.
 *
 * @param {string} url
 * @returns {Blob | null}
 */
function blobFromObjectUrl(url) {
  const blobData = ops.op_blob_from_object_url(url);
  if (blobData === null) {
    return null;
  }

  /** @type {BlobReference[]} */
  const parts = [];
  let totalSize = 0;

  for (let i = 0; i < blobData.parts.length; ++i) {
    const { uuid, size } = blobData.parts[i];
    ArrayPrototypePush(parts, new BlobReference(uuid, size));
    totalSize += size;
  }

  const blob = webidl.createBranded(Blob);
  blob[_type] = blobData.media_type;
  blob[_size] = totalSize;
  blob[_parts] = parts;
  return blob;
}
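
// Illustrative round trip through the function above: resolving an object URL
// yields a plain Blob that shares the underlying Rust blob parts with the
// original, but is a distinct object and never a File:
//
//   const url = URL.createObjectURL(new File(["abc"], "a.txt"));
//   const copy = blobFromObjectUrl(url);  // Blob with size 3; name/lastModified are gone
//
// URLs that do not resolve to a registered blob yield null.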

/**
 * @param {Blob} blob
 * @returns {string}
 */
function createObjectURL(blob) {
  const prefix = "Failed to execute 'createObjectURL' on 'URL'";
  webidl.requiredArguments(arguments.length, 1, prefix);
  blob = webidl.converters["Blob"](blob, prefix, "Argument 1");

  return ops.op_blob_create_object_url(blob.type, getParts(blob));
}

/**
 * @param {string} url
 * @returns {void}
 */
function revokeObjectURL(url) {
  const prefix = "Failed to execute 'revokeObjectURL' on 'URL'";
  webidl.requiredArguments(arguments.length, 1, prefix);
  url = webidl.converters["DOMString"](url, prefix, "Argument 1");

  ops.op_blob_revoke_object_url(url);
}

URL.createObjectURL = createObjectURL;
URL.revokeObjectURL = revokeObjectURL;
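
// Object URL lifecycle, as wired up above (illustrative only):
//
//   const blob = new Blob(["abc"], { type: "text/plain" });
//   const url = URL.createObjectURL(blob);  // an opaque "blob:" URL
//   // ... use the URL, e.g. with fetch() ...
//   URL.revokeObjectURL(url);               // removes the URL registration
//
// Revoking only drops the URL -> blob mapping; the Blob object itself remains
// usable, and its parts are freed once it is garbage collected.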

export {
  Blob,
  blobFromObjectUrl,
  BlobPrototype,
  File,
  FilePrototype,
  getParts,
};