// deno-fmt-ignore-file
// deno-lint-ignore-file
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
// Taken from Node 18.12.1
// This file is automatically generated by "node/_tools/setup.ts". Do not modify this file manually
'use strict';
const common = require('../common');
const assert = require('assert');
const { Readable, Duplex, pipeline } = require('stream');

// Test that the callback for pipeline() is called even when the ._destroy()
// method of the stream places an .end() request to itself that does not
// get processed before the destruction of the stream (i.e. the 'close' event).
// Refs: https://github.com/nodejs/node/issues/24456

const readable = new Readable({
  read: common.mustCall(() => {})
});

const duplex = new Duplex({
  write(chunk, enc, cb) {
    // Simulate messages queueing up by never invoking cb().
  },
  read() {},
  destroy(err, cb) {
    // Call end() from inside the destroy() method, like HTTP/2 streams
    // do at the time of writing.
    this.end();
    cb(err);
  }
});

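// The duplex is destroyed before its queued end() request is processed,
// so this listener must never be invoked.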
duplex.on('finished', common.mustNotCall());

pipeline(readable, duplex, common.mustCall((err) => {
  assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE');
}));

// Write one chunk of data, and destroy the stream later.
// That should trigger the pipeline destruction.
readable.push('foo');
setImmediate(() => {
  readable.destroy();
});