mirror of
https://github.com/denoland/deno.git
synced 2024-11-29 16:30:56 -05:00
f5e46c9bf2
This looks like a massive PR, but it's only a move from cli/tests -> tests, and updates of relative paths for files. This is the first step towards aggregating all of the integration test files under tests/, which will lead to a set of integration tests that can run without the CLI binary being built. While we could leave these tests under `cli`, it would require us to keep a more complex directory structure for the various test runners. In addition, we have a lot of complexity to ignore various test files in the `cli` project itself (cargo publish exclusion rules, autotests = false, etc). And finally, the `tests/` folder will eventually house the `test_ffi`, `test_napi` and other testing code, reducing the size of the root repo directory. For easier review, the extremely large and noisy "move" is in the first commit (with no changes -- just a move), while the remainder of the changes to actual files is in the second commit.
54 lines
1.7 KiB
JavaScript
54 lines
1.7 KiB
JavaScript
// deno-fmt-ignore-file
|
||
// deno-lint-ignore-file
|
||
|
||
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
|
||
// Taken from Node 18.12.1
|
||
// This file is automatically generated by `tools/node_compat/setup.ts`. Do not modify this file manually.
|
||
|
||
'use strict';
|
||
const common = require('../common');
|
||
const fixtures = require('../common/fixtures');
|
||
const assert = require('assert');
|
||
const fs = require('fs');
|
||
|
||
// Test that concurrent file read streams don’t interfere with each other’s
|
||
// contents, and that the chunks generated by the reads only retain a
|
||
// 'reasonable' amount of memory.
|
||
|
||
// Refs: https://github.com/nodejs/node/issues/21967
|
||
|
||
const filename = fixtures.path('loop.js'); // Some small non-homogeneous file.
|
||
const content = fs.readFileSync(filename);
|
||
|
||
const N = 2000;
|
||
let started = 0;
|
||
let done = 0;
|
||
|
||
const arrayBuffers = new Set();
|
||
|
||
function startRead() {
|
||
++started;
|
||
const chunks = [];
|
||
fs.createReadStream(filename)
|
||
.on('data', (chunk) => {
|
||
chunks.push(chunk);
|
||
arrayBuffers.add(chunk.buffer);
|
||
})
|
||
.on('end', common.mustCall(() => {
|
||
if (started < N)
|
||
startRead();
|
||
assert.deepStrictEqual(Buffer.concat(chunks), content);
|
||
if (++done === N) {
|
||
const retainedMemory =
|
||
[...arrayBuffers].map((ab) => ab.byteLength).reduce((a, b) => a + b);
|
||
assert(retainedMemory / (N * content.length) <= 3,
|
||
`Retaining ${retainedMemory} bytes in ABs for ${N} ` +
|
||
`chunks of size ${content.length}`);
|
||
}
|
||
}));
|
||
}
|
||
|
||
// Don’t start the reads all at once – that way we would have to allocate
|
||
// a large amount of memory upfront.
|
||
for (let i = 0; i < 6; ++i)
|
||
startRead();
|