BREAKING CHANGE: change order of args in Deno.copy() (#4885)
parent 833539fcaf · commit 0cb1bb98cc
13 changed files with 18 additions and 19 deletions
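For call sites, the migration is a mechanical argument swap: `Deno.copy()` now takes the source first and the destination second. A minimal before/after sketch (the file name is illustrative, not from this commit):

    const file = await Deno.open("fixture.json");
    // before this commit: await Deno.copy(Deno.stdout, file);  // (dst, src)
    // after: source first, destination second
    const n = await Deno.copy(file, Deno.stdout);
    file.close();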
@@ -70,8 +70,7 @@ export interface ReadWriteCloser extends Reader, Writer, Closer {}
 // https://golang.org/pkg/io/#ReadWriteSeeker
 export interface ReadWriteSeeker extends Reader, Writer, Seeker {}
 
-// https://golang.org/pkg/io/#Copy
-export async function copy(dst: Writer, src: Reader): Promise<number> {
+export async function copy(src: Reader, dst: Writer): Promise<number> {
   let n = 0;
   const b = new Uint8Array(DEFAULT_BUFFER_SIZE);
   let gotEOF = false;
8 cli/js/lib.deno.ns.d.ts (vendored)
@@ -565,16 +565,16 @@ declare namespace Deno {
    *
    * const source = await Deno.open("my_file.txt");
    * const buffer = new Deno.Buffer()
-   * const bytesCopied1 = await Deno.copy(Deno.stdout, source);
-   * const bytesCopied2 = await Deno.copy(buffer, source);
+   * const bytesCopied1 = await Deno.copy(source, Deno.stdout);
+   * const bytesCopied2 = await Deno.copy(source, buffer);
    *
    * Because `copy()` is defined to read from `src` until `EOF`, it does not
    * treat an `EOF` from `read()` as an error to be reported.
    *
-   * @param dst The destination to copy to
    * @param src The source to copy from
+   * @param dst The destination to copy to
    */
-  export function copy(dst: Writer, src: Reader): Promise<number>;
+  export function copy(src: Reader, dst: Writer): Promise<number>;
 
   /** Turns a Reader, `r`, into an async iterator.
    *
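The doc comment above pins down the contract: `copy()` reads `src` until `EOF`, returns the total bytes copied, and does not surface that `EOF` as an error. A small sketch of that behavior under the new argument order (buffer names are illustrative):

    const empty = new Deno.Buffer(); // a reader that is already at EOF
    const sink = new Deno.Buffer();
    // EOF is the normal stop condition, not an exception:
    const copied = await Deno.copy(empty, sink);
    console.log(copied); // 0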
@@ -18,7 +18,7 @@ unitTest({ perms: { read: true } }, async function filesCopyToStdout(): Promise<
   const filename = "cli/tests/fixture.json";
   const file = await Deno.open(filename);
   assert(file.rid > 2);
-  const bytesWritten = await Deno.copy(Deno.stdout, file);
+  const bytesWritten = await Deno.copy(file, Deno.stdout);
   const fileSize = Deno.statSync(filename).size;
   assertEquals(bytesWritten, fileSize);
   console.log("bytes written", bytesWritten);
@@ -4,7 +4,7 @@ async function main(): Promise<void> {
   for (let i = 1; i < args.length; i++) {
     const filename = args[i];
     const file = await open(filename);
-    await copy(stdout, file);
+    await copy(file, stdout);
   }
 }
@@ -470,7 +470,7 @@ export class Untar {
     while (rest > 0) {
       await this.reader.readFull(this.block);
       const arr = rest < recordSize ? this.block.subarray(0, rest) : this.block;
-      await Deno.copy(writer, new Deno.Buffer(arr));
+      await Deno.copy(new Deno.Buffer(arr), writer);
       rest -= recordSize;
     }
@@ -30,8 +30,8 @@ Deno.test(async function createTarArchive(): Promise<void> {
   await tar.append("dir/tar.ts", { filePath });
 
   // write tar data to a buffer
-  const writer = new Deno.Buffer(),
-    wrote = await Deno.copy(writer, tar.getReader());
+  const writer = new Deno.Buffer();
+  const wrote = await Deno.copy(tar.getReader(), writer);
 
   /**
    * 3072 = 512 (header) + 512 (content) + 512 (header) + 512 (content)
@@ -2,6 +2,6 @@
 const filenames = Deno.args;
 for (const filename of filenames) {
   const file = await Deno.open(filename);
-  await Deno.copy(Deno.stdout, file);
+  await Deno.copy(file, Deno.stdout);
   file.close();
 }
@@ -3,7 +3,7 @@ const url_ = Deno.args[0];
 const res = await fetch(url_);
 
 // TODO(ry) Re-enable streaming in this example.
-// Originally we did: await Deno.copy(Deno.stdout, res.body);
+// Originally we did: await Deno.copy(res.body, Deno.stdout);
 // But maybe more JS-y would be: res.pipeTo(Deno.stdout);
 
 const body = new Uint8Array(await res.arrayBuffer());
@@ -273,7 +273,7 @@ export async function writeResponse(
     const contentLength = headers.get("content-length");
     assert(contentLength != null);
     const bodyLength = parseInt(contentLength);
-    const n = await Deno.copy(writer, r.body);
+    const n = await Deno.copy(r.body, writer);
     assert(n === bodyLength);
   } else {
     await writeChunkedBody(writer, r.body);
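The `writeResponse` hunk above depends on `copy()` returning the total number of bytes written, which is what makes the `content-length` assertion valid after the swap. A standalone sketch of the same check (the buffers stand in for `r.body` and the connection writer):

    const body = new Deno.Buffer(new TextEncoder().encode("hello").buffer);
    const out = new Deno.Buffer();
    const n = await Deno.copy(body, out); // src first, dst second
    if (n !== 5) throw new Error("body length mismatch"); // 5 = declared length of "hello"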
@@ -33,6 +33,6 @@ test(async function ioMultiReader(): Promise<void> {
   const n = await copyN(w, r, 4);
   assertEquals(n, 4);
   assertEquals(w.toString(), "abcd");
-  await copy(w, r);
+  await copy(r, w);
   assertEquals(w.toString(), "abcdef");
 });
@@ -9,6 +9,6 @@ test(async function ioStringWriter(): Promise<void> {
   const r = new StringReader("0123456789");
   await copyN(w, r, 4);
   assertEquals(w.toString(), "base0123");
-  await copy(w, r);
+  await copy(r, w);
   assertEquals(w.toString(), "base0123456789");
 });
@@ -308,7 +308,7 @@ export class MultipartReader {
       }
       // file
       let formFile: FormFile | undefined;
-      const n = await copy(buf, p);
+      const n = await copy(p, buf);
       const contentType = p.headers.get("content-type");
       assert(contentType != null, "content-type must be set");
       if (n > maxMemory) {
@@ -573,7 +573,7 @@ export class MultipartWriter {
     file: Reader
   ): Promise<void> {
     const f = await this.createFormFile(field, filename);
-    await copy(f, file);
+    await copy(file, f);
   }
 
   private flush(): Promise<void> {
@@ -208,7 +208,7 @@ test({
     assert(file.tempfile != null);
     const f = await open(file.tempfile);
     const w = new StringWriter();
-    await copy(w, f);
+    await copy(f, w);
     const json = JSON.parse(w.toString());
     assertEquals(json["compilerOptions"]["target"], "es2018");
     f.close();