Skip to content
Closed
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Next Next commit
stream: do not swallow errors with async iterators and pipeline
Before this patch, pipeline() could swallow errors by pre-emptively
producing an ERR_STREAM_PREMATURE_CLOSE error that was not really
helpful to the user.

Co-Authored-By: Robert Nagy <ronagy@icloud.com>
  • Loading branch information
mcollina and ronag committed Mar 9, 2020
commit e4c3f55b6d52c6b58643d7594215a08549ebe674
43 changes: 26 additions & 17 deletions lib/internal/streams/pipeline.js
Original file line number Diff line number Diff line change
Expand Up @@ -25,10 +25,10 @@ let EE;
let PassThrough;
let createReadableStreamAsyncIterator;

function destroyer(stream, reading, writing, final, callback) {
function destroyer(stream, reading, writing, callback) {
const _destroy = once((err) => {
const readable = stream.readable || isRequest(stream);
if (err || !final || !readable) {
Comment thread
ronag marked this conversation as resolved.
if (err || !readable) {
destroyImpl.destroyer(stream, err);
}
callback(err);
Expand Down Expand Up @@ -123,6 +123,7 @@ async function pump(iterable, writable, finish) {
if (!EE) {
EE = require('events');
}
let error;
try {
for await (const chunk of iterable) {
if (!writable.write(chunk)) {
Expand All @@ -132,7 +133,9 @@ async function pump(iterable, writable, finish) {
}
writable.end();
} catch (err) {
finish(err);
error = err;
} finally {
finish(error);
}
}

Expand All @@ -149,36 +152,37 @@ function pipeline(...streams) {
let value;
const destroys = [];

function finish(err, final) {
if (!error && err) {
let finishCount = 0;

function finish(err) {
const final = --finishCount === 0;

if (err && (!error || error.code === 'ERR_STREAM_PREMATURE_CLOSE')) {
error = err;
}

if (error || final) {
for (const destroy of destroys) {
destroy(error);
}
if (!error && !final) {
return;
}

while (destroys.length) {
destroys.shift()(error);
}

if (final) {
callback(error, value);
}
}

function wrap(stream, reading, writing, final) {
destroys.push(destroyer(stream, reading, writing, final, (err) => {
finish(err, final);
}));
}

let ret;
for (let i = 0; i < streams.length; i++) {
const stream = streams[i];
const reading = i < streams.length - 1;
const writing = i > 0;

if (isStream(stream)) {
wrap(stream, reading, writing, !reading);
finishCount++;
destroys.push(destroyer(stream, reading, writing, finish));
}

if (i === 0) {
Expand Down Expand Up @@ -224,20 +228,25 @@ function pipeline(...streams) {
pt.destroy(err);
});
} else if (isIterable(ret, true)) {
finishCount++;
pump(ret, pt, finish);
} else {
throw new ERR_INVALID_RETURN_VALUE(
'AsyncIterable or Promise', 'destination', ret);
}

ret = pt;
wrap(ret, false, true, true);

finishCount++;
destroys.push(destroyer(ret, false, true, finish));
}
} else if (isStream(stream)) {
if (isReadable(ret)) {
ret.pipe(stream);
} else {
ret = makeAsyncIterable(ret);

finishCount++;
pump(ret, stream, finish);
}
ret = stream;
Expand Down
27 changes: 27 additions & 0 deletions test/parallel/test-stream-pipeline.js
Original file line number Diff line number Diff line change
Expand Up @@ -984,3 +984,30 @@ const { promisify } = require('util');
}));
src.end();
}

{
  // Regression test: an error thrown inside the first async-generator
  // transform must surface unmodified in the final pipeline callback.
  // Since the failing generator throws on the very first chunk, nothing
  // may reach the second generator or the destination writable.
  let collected = '';

  const source = new Readable({
    read() {
      setImmediate(() => {
        source.push('hello');
      });
    }
  });

  // The destination must never receive a write.
  const sink = new Writable({
    write: common.mustNotCall()
  });

  const failing = async function*(stream) {
    /* eslint no-unused-vars: off */
    for await (const chunk of stream) {
      throw new Error('kaboom');
    }
  };

  const accumulate = async function*(input) {
    for await (const chunk of input) {
      collected += chunk;
    }
  };

  pipeline(source, failing, accumulate, sink, common.mustCall((err) => {
    // The original 'kaboom' error, not ERR_STREAM_PREMATURE_CLOSE,
    // must reach the callback, and no data may have been forwarded.
    assert.strictEqual(err.message, 'kaboom');
    assert.strictEqual(collected, '');
  }));
}