1
0
Fork 0
mirror of https://github.com/denoland/deno.git synced 2024-12-22 07:14:47 -05:00
denoland-deno/tests/node_compat/test/parallel/test-stream-readable-unshift.js
Matt Mastracci f5e46c9bf2
chore: move cli/tests/ -> tests/ (#22369)
This looks like a massive PR, but it's only a move from cli/tests ->
tests, and updates of relative paths for files.

This is the first step towards aggregating all of the integration test
files under tests/, which will lead to a set of integration tests that
can run without the CLI binary being built.

While we could leave these tests under `cli`, it would require us to
keep a more complex directory structure for the various test runners. In
addition, we have a lot of complexity to ignore various test files in
the `cli` project itself (cargo publish exclusion rules, autotests =
false, etc).

And finally, the `tests/` folder will eventually house the `test_ffi`,
`test_napi` and other testing code, reducing the size of the root repo
directory.

For easier review, the extremely large and noisy "move" is in the first
commit (with no changes -- just a move), while the remainder of the
changes to actual files is in the second commit.
2024-02-10 20:22:13 +00:00

177 lines
4.2 KiB
JavaScript

// deno-fmt-ignore-file
// deno-lint-ignore-file
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
// Taken from Node 18.12.1
// This file is automatically generated by `tools/node_compat/setup.ts`. Do not modify this file manually.
'use strict';
const common = require('../common');
const assert = require('assert');
const { Readable } = require('stream');
{
  // Unshifting a plain string must store it internally as a Buffer.
  const source = new Readable({ read() {} });
  const text = 'abc';
  const onData = common.mustCall((chunk) => {
    assert(Buffer.isBuffer(chunk));
    assert.strictEqual(chunk.toString('utf8'), text);
  }, 1);
  source.on('data', onData);
  source.unshift(text);
}
{
  // Unshifted data must be delivered ahead of previously pushed data.
  const source = new Readable({ read() {} });
  const front = 'front';
  const back = 'back';
  const order = [front, back];
  source.on('data', common.mustCall((chunk) => {
    assert.strictEqual(chunk.toString('utf8'), order.shift());
  }, 2));
  source.push(back);
  source.unshift(front);
}
{
  // A string unshifted with an explicit encoding must round-trip intact.
  const source = new Readable({ read() {} });
  const enc = 'base64';
  const encoded = Buffer.from('abc').toString(enc);
  source.on('data', common.mustCall((chunk) => {
    assert.strictEqual(chunk.toString(enc), encoded);
  }, 1));
  source.unshift(encoded, enc);
}
{
  const streamEncoding = 'base64';

  // Unshift the same payload under several chunk encodings and verify that
  // each one ends up re-encoded into the stream's own encoding (or kept
  // verbatim when the two encodings already match).
  function verifyUnshiftEncodings(stream) {
    const chunkEncodings = ['utf8', 'binary', 'hex', 'base64'];
    const pending = [];
    stream.on('data', common.mustCall((chunk) => {
      // Chunks arrive in reverse unshift order, hence pop().
      const { encoding, string } = pending.pop();
      assert.strictEqual(chunk.toString(encoding), string);
    }, chunkEncodings.length));
    for (const chunkEncoding of chunkEncodings) {
      const payload = 'abc';
      // Matching encodings are stored as-is; otherwise the string is
      // transcoded into the stream encoding before being buffered.
      const want = chunkEncoding === streamEncoding ?
        payload :
        Buffer.from(payload, chunkEncoding).toString(streamEncoding);
      pending.push({ encoding: chunkEncoding, string: want });
      stream.unshift(payload, chunkEncoding);
    }
  }
  // Exercise both ways of establishing the stream encoding.
  const viaSetter = new Readable({ read() {} });
  viaSetter.setEncoding(streamEncoding);
  verifyUnshiftEncodings(viaSetter);
  const viaOption = new Readable({ read() {}, encoding: streamEncoding });
  verifyUnshiftEncodings(viaOption);
}
{
  // push() and unshift() must agree: once a stream encoding is set, every
  // chunk is emitted as a string in that encoding.
  const encoding = 'base64';
  function expectEncodedChunks(stream) {
    const payload = 'abc';
    stream.on('data', common.mustCall((chunk) => {
      assert.strictEqual(chunk, Buffer.from(payload).toString(encoding));
    }, 2));
    stream.push(payload);
    stream.unshift(payload);
  }
  // Exercise both ways of establishing the stream encoding.
  const viaSetter = new Readable({ read() {} });
  viaSetter.setEncoding(encoding);
  expectEncodedChunks(viaSetter);
  const viaOption = new Readable({ read() {}, encoding });
  expectEncodedChunks(viaOption);
}
{
  // In object mode, unshift must pass arbitrary values through untouched.
  const source = new Readable({ objectMode: true, read() {} });
  const values = ['a', 1, {}, []];
  source.on('data', common.mustCall((chunk) => {
    // 'data' fires asynchronously, after the unshift loop below has
    // finished; values come back in reverse unshift order, hence pop().
    assert.strictEqual(chunk, values.pop());
  }, values.length));
  for (const value of values) {
    source.unshift(value);
  }
}
{
// Regression test: unshifting while draining via 'readable' must complete
// without throwing.
// Should not throw: https://github.com/nodejs/node/issues/27192
const highWaterMark = 50;
// Readable that serves a fixed backlog of stringified numbers, sized at
// twice the high-water mark so _read keeps pushing across the hwm boundary.
class ArrayReader extends Readable {
constructor(opt) {
super({ highWaterMark });
// The error happened only when pushing above hwm
this.buffer = new Array(highWaterMark * 2).fill(0).map(String);
}
_read(size) {
while (this.buffer.length) {
const chunk = this.buffer.shift();
if (!this.buffer.length) {
// Last chunk: emit it, then signal end-of-stream.
this.push(chunk);
this.push(null);
return true;
}
// Stop pushing once the internal buffer reports back-pressure.
if (!this.push(chunk))
return;
}
}
}
// Drain one chunk synchronously, then unshift a chunk mid-drain and attach
// a 'data' listener (switching to flowing mode) — the sequence from the
// linked issue that previously threw.
function onRead() {
while (null !== (stream.read())) {
// Remove the 'readable' listener before unshifting
stream.removeListener('readable', onRead);
stream.unshift('a');
stream.on('data', (chunk) => {
console.log(chunk.length);
});
break;
}
}
const stream = new ArrayReader();
stream.once('readable', common.mustCall(onRead));
// The stream must still reach a clean 'end' after the mid-drain unshift.
stream.on('end', common.mustCall());
}