2021-01-30 13:22:24 -05:00
|
|
|
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
|
|
|
|
|
|
|
|
// @ts-check
|
|
|
|
/// <reference no-default-lib="true" />
|
|
|
|
/// <reference path="../../core/lib.deno_core.d.ts" />
|
2021-07-06 10:20:21 -04:00
|
|
|
/// <reference path="../../core/internal.d.ts" />
|
2021-04-07 19:23:26 -04:00
|
|
|
/// <reference path="../webidl/internal.d.ts" />
|
2021-01-30 13:22:24 -05:00
|
|
|
/// <reference path="../web/internal.d.ts" />
|
|
|
|
/// <reference path="../web/lib.deno_web.d.ts" />
|
|
|
|
/// <reference path="./internal.d.ts" />
|
|
|
|
/// <reference lib="esnext" />
|
2021-02-04 17:18:32 -05:00
|
|
|
"use strict";
|
2021-01-30 13:22:24 -05:00
|
|
|
|
|
|
|
((window) => {
|
2021-06-05 17:10:07 -04:00
|
|
|
const core = window.Deno.core;
const webidl = window.__bootstrap.webidl;
// Intrinsic references captured from the bootstrap primordials object, so
// these internals do not go through (possibly user-modified) globals.
const {
  ArrayBuffer,
  ArrayBufferPrototypeSlice,
  ArrayBufferIsView,
  ArrayPrototypePush,
  Date,
  DatePrototypeGetTime,
  MathMax,
  MathMin,
  RegExpPrototypeTest,
  StringPrototypeCharAt,
  StringPrototypeToLowerCase,
  StringPrototypeSlice,
  Symbol,
  SymbolFor,
  TypedArrayPrototypeSet,
  TypeError,
  Uint8Array,
} = window.__bootstrap.primordials;
const consoleInternal = window.__bootstrap.console;

// TODO(lucacasonato): this needs to not be hardcoded and instead depend on
// host os.
const isWindows = false;
|
|
|
|
|
|
|
|
/**
 * Collect the run of code points starting at `position` that contains no CR
 * or LF, per the Infra "collect a sequence of code points" algorithm.
 * See https://w3c.github.io/FileAPI/#convert-line-endings-to-native and
 * https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points
 * @param {string} input
 * @param {number} position
 * @returns {{result: string, position: number}} the collected run and the
 * index of the terminating CR/LF (or end of string).
 */
function collectCodepointsNotCRLF(input, position) {
  const start = position;
  while (position < input.length) {
    const ch = StringPrototypeCharAt(input, position);
    if (ch === "\r" || ch === "\n") break;
    position++;
  }
  return { result: StringPrototypeSlice(input, start, position), position };
}
|
|
|
|
|
|
|
|
/**
 * Replace every line ending (CR, LF, or CRLF) in `s` with the platform's
 * native line ending (`\r\n` on Windows, `\n` elsewhere), per
 * https://w3c.github.io/FileAPI/#convert-line-endings-to-native.
 * @param {string} s
 * @returns {string}
 */
function convertLineEndingsToNative(s) {
  const eol = isWindows ? "\r\n" : "\n";

  const first = collectCodepointsNotCRLF(s, 0);
  let output = first.result;
  let index = first.position;

  while (index < s.length) {
    const ch = StringPrototypeCharAt(s, index);
    if (ch === "\r") {
      output += eol;
      index++;
      // A CR directly followed by LF is a single CRLF ending: consume the LF.
      if (
        index < s.length && StringPrototypeCharAt(s, index) === "\n"
      ) {
        index++;
      }
    } else if (ch === "\n") {
      index++;
      output += eol;
    }
    // Append the next run of non-line-ending characters.
    const next = collectCodepointsNotCRLF(s, index);
    index = next.position;
    output += next.result;
  }

  return output;
}
|
|
|
|
|
2021-07-05 09:34:37 -04:00
|
|
|
/**
 * Lazily chain the byte streams of all given parts into one async iterator,
 * yielding each part's chunks in order. `yield*` is used so that `.throw`
 * and `.return` delegate to the part currently being streamed.
 * @param {(BlobReference | Blob)[]} parts
 */
async function* toIterator(parts) {
  for (const part of parts) {
    yield* part.stream();
  }
}
|
|
|
|
|
|
|
|
/** @typedef {BufferSource | Blob | string} BlobPart */
|
|
|
|
|
2021-04-28 10:08:51 -04:00
|
|
|
/**
 * Normalize constructor inputs into the internal part list and compute the
 * blob's total byte size.
 *
 * ArrayBuffers are copied (so later mutation of the source cannot change the
 * blob); typed-array views are wrapped without copying here and handed to
 * `BlobReference.fromUint8Array`; nested Blobs are kept as-is; strings are
 * UTF-8 encoded, optionally converting line endings first.
 *
 * @param {BlobPart[]} parts
 * @param {string} endings - "transparent" (keep line endings) or "native"
 * @returns {{ parts: (BlobReference|Blob)[], size: number }}
 */
function processBlobParts(parts, endings) {
  /** @type {(BlobReference|Blob)[]} */
  const processedParts = [];
  let size = 0;
  for (const element of parts) {
    if (element instanceof ArrayBuffer) {
      // Slice(0) makes a copy of the buffer's bytes.
      const chunk = new Uint8Array(ArrayBufferPrototypeSlice(element, 0));
      ArrayPrototypePush(processedParts, BlobReference.fromUint8Array(chunk));
      size += element.byteLength;
    } else if (ArrayBufferIsView(element)) {
      // View over the original buffer, honoring the view's offset/length.
      const chunk = new Uint8Array(
        element.buffer,
        element.byteOffset,
        element.byteLength,
      );
      size += element.byteLength;
      ArrayPrototypePush(processedParts, BlobReference.fromUint8Array(chunk));
    } else if (element instanceof Blob) {
      ArrayPrototypePush(processedParts, element);
      size += element.size;
    } else if (typeof element === "string") {
      const chunk = core.encode(
        // Fixed: strict equality (endings is always a string from the
        // "EndingType" enum converter).
        endings === "native" ? convertLineEndingsToNative(element) : element,
      );
      size += chunk.byteLength;
      ArrayPrototypePush(processedParts, BlobReference.fromUint8Array(chunk));
    } else {
      // The sequence<BlobPart> webidl converter should have rejected
      // anything else before this point.
      throw new TypeError("Unreachable code (invalid element type)");
    }
  }
  return { parts: processedParts, size };
}
|
|
|
|
|
|
|
|
/**
 * Normalize a MIME type string: lowercase it, or collapse it to "" when it
 * contains any character outside printable ASCII (0x20-0x7E).
 * @param {string} str
 * @returns {string}
 */
function normalizeType(str) {
  const isPrintableAscii = RegExpPrototypeTest(/^[\x20-\x7E]*$/, str);
  return StringPrototypeToLowerCase(isPrintableAscii ? str : "");
}
|
|
|
|
|
2021-07-05 09:34:37 -04:00
|
|
|
/**
 * Flatten a (possibly nested) blob's parts into an array of blob part ids.
 * @param {Blob} blob
 * @param {string[]} bag accumulator; also returned
 * @returns {string[]}
 */
function getParts(blob, bag = []) {
  const parts = blob[_parts];
  for (let i = 0; i < parts.length; i++) {
    const part = parts[i];
    if (part instanceof Blob) {
      // Nested blob: recurse into its own parts.
      getParts(part, bag);
    } else {
      ArrayPrototypePush(bag, part._id);
    }
  }
  return bag;
}
|
2021-04-14 16:49:16 -04:00
|
|
|
|
fix: a `Request` whose URL is a revoked blob URL should still fetch (#11947)
In the spec, a URL record has an associated "blob URL entry", which for
`blob:` URLs is populated during parsing to contain a reference to the
`Blob` object that backs that object URL. It is this blob URL entry that
the `fetch` API uses to resolve an object URL.
Therefore, since the `Request` constructor parses URL inputs, it will
have an associated blob URL entry which will be used when fetching, even
if the object URL has been revoked since the construction of the
`Request` object. (The `Request` constructor takes the URL as a string
and parses it, so the object URL must be live at the time it is called.)
This PR adds a new `blobFromObjectUrl` JS function (backed by a new
`op_blob_from_object_url` op) that, if the URL is a valid object URL,
returns a new `Blob` object whose parts are references to the same Rust
`BlobPart`s used by the original `Blob` object. It uses this function to
add a new `blobUrlEntry` field to inner requests, which will be `null`
or such a `Blob`, and then uses `Blob.prototype.stream()` as the
response's body. As a result of this, the `blob:` URL resolution from
`op_fetch` is now useless, and has been removed.
2021-09-08 05:29:21 -04:00
|
|
|
// Symbol-keyed slots backing Blob's internal state.
const _type = Symbol("Type");
const _size = Symbol("Size");
const _parts = Symbol("Parts");

class Blob {
  // Normalized MIME type ("" when the given type was not printable ASCII).
  [_type] = "";
  // Total size in bytes across all parts.
  [_size] = 0;
  // Array of BlobReference / Blob entries making up this blob's data.
  [_parts];

  /**
   * @param {BlobPart[]} blobParts
   * @param {BlobPropertyBag} options
   */
  constructor(blobParts = [], options = {}) {
    const prefix = "Failed to construct 'Blob'";
    blobParts = webidl.converters["sequence<BlobPart>"](blobParts, {
      context: "Argument 1",
      prefix,
    });
    options = webidl.converters["BlobPropertyBag"](options, {
      context: "Argument 2",
      prefix,
    });

    this[webidl.brand] = webidl.brand;

    const { parts, size } = processBlobParts(
      blobParts,
      options.endings,
    );

    this[_parts] = parts;
    this[_size] = size;
    this[_type] = normalizeType(options.type);
  }

  /** @returns {number} total size in bytes */
  get size() {
    webidl.assertBranded(this, Blob);
    return this[_size];
  }

  /** @returns {string} normalized MIME type */
  get type() {
    webidl.assertBranded(this, Blob);
    return this[_type];
  }

  /**
   * Return a new Blob over the byte range [start, end), without copying the
   * underlying data.
   * @param {number} [start] may be negative (relative to the end)
   * @param {number} [end] may be negative (relative to the end)
   * @param {string} [contentType]
   * @returns {Blob}
   */
  slice(start = undefined, end = undefined, contentType = undefined) {
    webidl.assertBranded(this, Blob);
    const prefix = "Failed to execute 'slice' on 'Blob'";
    if (start !== undefined) {
      start = webidl.converters["long long"](start, {
        clamp: true,
        context: "Argument 1",
        prefix,
      });
    }
    if (end !== undefined) {
      end = webidl.converters["long long"](end, {
        clamp: true,
        context: "Argument 2",
        prefix,
      });
    }
    if (contentType !== undefined) {
      contentType = webidl.converters["DOMString"](contentType, {
        context: "Argument 3",
        prefix,
      });
    }

    // deno-lint-ignore no-this-alias
    const O = this;
    // Clamp negative (end-relative) and out-of-range offsets into
    // [0, O.size], per the FileAPI "slice blob" algorithm.
    /** @type {number} */
    let relativeStart;
    if (start === undefined) {
      relativeStart = 0;
    } else {
      if (start < 0) {
        relativeStart = MathMax(O.size + start, 0);
      } else {
        relativeStart = MathMin(start, O.size);
      }
    }
    /** @type {number} */
    let relativeEnd;
    if (end === undefined) {
      relativeEnd = O.size;
    } else {
      if (end < 0) {
        relativeEnd = MathMax(O.size + end, 0);
      } else {
        relativeEnd = MathMin(end, O.size);
      }
    }

    // Number of bytes the resulting blob will contain.
    const span = MathMax(relativeEnd - relativeStart, 0);
    const blobParts = [];
    let added = 0;

    // Walk the parts, skipping those entirely before the range, slicing the
    // first overlapping part, and stopping once `span` bytes are gathered.
    for (const part of this[_parts]) {
      // don't add the overflow to new blobParts
      if (added >= span) {
        // Could maybe be possible to remove variable `added`
        // and only use relativeEnd?
        break;
      }
      const size = part.size;
      if (relativeStart && size <= relativeStart) {
        // Skip the beginning and change the relative
        // start & end position as we skip the unwanted parts
        relativeStart -= size;
        relativeEnd -= size;
      } else {
        const chunk = part.slice(
          relativeStart,
          MathMin(part.size, relativeEnd),
        );
        added += chunk.size;
        relativeEnd -= part.size;
        ArrayPrototypePush(blobParts, chunk);
        relativeStart = 0; // All next sequential parts should start at 0
      }
    }

    /** @type {string} */
    let relativeContentType;
    if (contentType === undefined) {
      relativeContentType = "";
    } else {
      relativeContentType = normalizeType(contentType);
    }

    // Build an empty blob with the right type, then install the computed
    // sub-parts and size directly into its internal slots.
    const blob = new Blob([], { type: relativeContentType });
    blob[_parts] = blobParts;
    blob[_size] = span;
    return blob;
  }

  /**
   * @returns {ReadableStream<Uint8Array>} a byte stream over all parts
   */
  stream() {
    webidl.assertBranded(this, Blob);
    const partIterator = toIterator(this[_parts]);
    const stream = new ReadableStream({
      type: "bytes",
      /** @param {ReadableByteStreamController} controller */
      async pull(controller) {
        // Loop until a non-empty chunk is found (zero-length chunks are
        // never enqueued) or the iterator is exhausted.
        while (true) {
          const { value, done } = await partIterator.next();
          if (done) return controller.close();
          if (value.byteLength > 0) {
            return controller.enqueue(value);
          }
        }
      },
    });
    return stream;
  }

  /**
   * Read the whole blob decoded as UTF-8 text.
   * @returns {Promise<string>}
   */
  async text() {
    webidl.assertBranded(this, Blob);
    const buffer = await this.arrayBuffer();
    return core.decode(new Uint8Array(buffer));
  }

  /**
   * Read the whole blob into a single ArrayBuffer.
   * @returns {Promise<ArrayBuffer>}
   */
  async arrayBuffer() {
    webidl.assertBranded(this, Blob);
    const stream = this.stream();
    const bytes = new Uint8Array(this.size);
    let offset = 0;
    for await (const chunk of stream) {
      TypedArrayPrototypeSet(bytes, chunk, offset);
      offset += chunk.byteLength;
    }
    return bytes.buffer;
  }

  // Only expose `size` and `type` in console output.
  [SymbolFor("Deno.customInspect")](inspect) {
    return inspect(consoleInternal.createFilteredInspectProxy({
      object: this,
      evaluate: this instanceof Blob,
      keys: [
        "size",
        "type",
      ],
    }));
  }
}
|
|
|
|
|
2021-06-07 04:04:10 -04:00
|
|
|
webidl.configurePrototype(Blob);

webidl.converters["Blob"] = webidl.createInterfaceConverter("Blob", Blob);
webidl.converters["BlobPart"] = (V, opts) => {
  // Union for ((ArrayBuffer or ArrayBufferView) or Blob or USVString)
  if (typeof V == "object") {
    if (V instanceof Blob) {
      return webidl.converters["Blob"](V, opts);
    }
    if (V instanceof ArrayBuffer || V instanceof SharedArrayBuffer) {
      return webidl.converters["ArrayBuffer"](V, opts);
    }
    if (ArrayBufferIsView(V)) {
      return webidl.converters["ArrayBufferView"](V, opts);
    }
  }
  // Anything that is not one of the object branches is stringified.
  return webidl.converters["USVString"](V, opts);
};
webidl.converters["sequence<BlobPart>"] = webidl.createSequenceConverter(
  webidl.converters["BlobPart"],
);
webidl.converters["EndingType"] = webidl.createEnumConverter("EndingType", [
  "transparent",
  "native",
]);
// Dictionary members shared by BlobPropertyBag and FilePropertyBag.
const blobPropertyBagDictionary = [
  {
    key: "type",
    converter: webidl.converters["DOMString"],
    defaultValue: "",
  },
  {
    key: "endings",
    converter: webidl.converters["EndingType"],
    defaultValue: "transparent",
  },
];
webidl.converters["BlobPropertyBag"] = webidl.createDictionaryConverter(
  "BlobPropertyBag",
  blobPropertyBagDictionary,
);
|
|
|
|
|
2021-02-04 09:05:36 -05:00
|
|
|
// Symbol-keyed slots backing File's spec-internal state.
const _Name = Symbol("[[Name]]");
const _LastModified = Symbol("[[LastModified]]");

class File extends Blob {
  /** @type {string} */
  [_Name];
  /** @type {number} */
  [_LastModified];

  /**
   * @param {BlobPart[]} fileBits
   * @param {string} fileName
   * @param {FilePropertyBag} options
   */
  constructor(fileBits, fileName, options = {}) {
    const prefix = "Failed to construct 'File'";
    webidl.requiredArguments(arguments.length, 2, { prefix });

    fileBits = webidl.converters["sequence<BlobPart>"](fileBits, {
      context: "Argument 1",
      prefix,
    });
    fileName = webidl.converters["USVString"](fileName, {
      context: "Argument 2",
      prefix,
    });
    options = webidl.converters["FilePropertyBag"](options, {
      context: "Argument 3",
      prefix,
    });

    super(fileBits, options);

    this[_Name] = fileName;
    // Only a strictly-undefined lastModified falls back to the current
    // time; any other value (including null/0) is kept as given.
    this[_LastModified] = options.lastModified === undefined
      ? DatePrototypeGetTime(new Date())
      : options.lastModified;
  }

  /** @returns {string} the file name given at construction */
  get name() {
    webidl.assertBranded(this, File);
    return this[_Name];
  }

  /** @returns {number} last-modified time in ms since the Unix epoch */
  get lastModified() {
    webidl.assertBranded(this, File);
    return this[_LastModified];
  }
}
|
|
|
|
|
2021-06-07 04:04:10 -04:00
|
|
|
webidl.configurePrototype(File);

// FilePropertyBag extends BlobPropertyBag with an optional lastModified.
webidl.converters["FilePropertyBag"] = webidl.createDictionaryConverter(
  "FilePropertyBag",
  blobPropertyBagDictionary,
  [
    {
      key: "lastModified",
      converter: webidl.converters["long long"],
    },
  ],
);
|
|
|
|
|
2021-07-05 09:34:37 -04:00
|
|
|
// A finalization registry to deallocate a blob part when its JS reference is
// garbage collected.
const registry = new FinalizationRegistry((uuid) => {
  core.opSync("op_blob_remove_part", uuid);
});

// TODO(lucacasonato): get a better stream from Rust in BlobReference#stream

/**
 * An opaque reference to a blob part in Rust. This could be backed by a file,
 * in memory storage, or something else.
 */
class BlobReference {
  /**
   * Don't use directly. Use `BlobReference.fromUint8Array`.
   * @param {string} id
   * @param {number} size
   */
  constructor(id, size) {
    this._id = id;
    this.size = size;
    // Tie the Rust-side part's lifetime to this JS wrapper: once this object
    // is collected, the registry callback frees the part.
    registry.register(this, id);
  }

  /**
   * Create a new blob part from a Uint8Array.
   *
   * @param {Uint8Array} data
   * @returns {BlobReference}
   */
  static fromUint8Array(data) {
    const id = core.opSync("op_blob_create_part", data);
    return new BlobReference(id, data.byteLength);
  }

  /**
   * Create a new BlobReference by slicing this BlobReference. This is a copy
   * free operation - the sliced reference will still reference the original
   * underlying bytes.
   *
   * @param {number} start
   * @param {number} end
   * @returns {BlobReference}
   */
  slice(start, end) {
    const len = end - start;
    const id = core.opSync("op_blob_slice_part", this._id, {
      start,
      len,
    });
    return new BlobReference(id, len);
  }

  /**
   * Read the entire contents of the referenced blob part.
   * @returns {AsyncGenerator<Uint8Array>}
   */
  async *stream() {
    // Reads the whole part in one op call. A chunked variant (slicing 64 KiB
    // pieces and reading them one by one via `op_blob_read_part`) was
    // sketched here but is intentionally disabled; see the TODO above.
    yield core.opAsync("op_blob_read_part", this._id);
  }
}
|
|
|
|
|
fix: a `Request` whose URL is a revoked blob URL should still fetch (#11947)
In the spec, a URL record has an associated "blob URL entry", which for
`blob:` URLs is populated during parsing to contain a reference to the
`Blob` object that backs that object URL. It is this blob URL entry that
the `fetch` API uses to resolve an object URL.
Therefore, since the `Request` constructor parses URL inputs, it will
have an associated blob URL entry which will be used when fetching, even
if the object URL has been revoked since the construction of the
`Request` object. (The `Request` constructor takes the URL as a string
and parses it, so the object URL must be live at the time it is called.)
This PR adds a new `blobFromObjectUrl` JS function (backed by a new
`op_blob_from_object_url` op) that, if the URL is a valid object URL,
returns a new `Blob` object whose parts are references to the same Rust
`BlobPart`s used by the original `Blob` object. It uses this function to
add a new `blobUrlEntry` field to inner requests, which will be `null`
or such a `Blob`, and then uses `Blob.prototype.stream()` as the
response's body. As a result of this, the `blob:` URL resolution from
`op_fetch` is now useless, and has been removed.
2021-09-08 05:29:21 -04:00
|
|
|
/**
 * Construct a new Blob object from an object URL.
 *
 * This new object will not duplicate data in memory with the original Blob
 * object from which this URL was created or with other Blob objects created
 * from the same URL, but they will be different objects.
 *
 * The object returned from this function will not be a File object, even if
 * the original object from which the object URL was constructed was one. This
 * means that the `name` and `lastModified` properties are lost.
 *
 * @param {string} url
 * @returns {Blob | null} null when the URL is not a live object URL
 */
function blobFromObjectUrl(url) {
  const blobData = core.opSync("op_blob_from_object_url", url);
  if (blobData === null) {
    return null;
  }

  // Wrap each Rust-side part in a fresh reference and tally the total size.
  /** @type {BlobReference[]} */
  const references = [];
  let totalSize = 0;
  for (const { uuid, size } of blobData.parts) {
    ArrayPrototypePush(references, new BlobReference(uuid, size));
    totalSize += size;
  }

  // Create a branded Blob without running the constructor, then fill in its
  // internal slots directly.
  const blob = webidl.createBranded(Blob);
  blob[_type] = blobData.media_type;
  blob[_size] = totalSize;
  blob[_parts] = references;
  return blob;
}
|
|
|
|
|
2021-02-04 09:05:36 -05:00
|
|
|
// Expose the file API implementation to the rest of the bootstrap process.
window.__bootstrap.file = {
  blobFromObjectUrl,
  getParts,
  Blob,
  File,
};
|
|
|
|
})(this);
|