Skip to content

Commit

Permalink
stream: make from read one at a time
Browse files Browse the repository at this point in the history
Currently from will eagerly buffer up items,
which means that errors are also eagerly
encountered, and items which are buffered when
an error occurs will be discarded. This is
inconsistent with how generators work.

Fixes: nodejs#29428
  • Loading branch information
ronag committed May 1, 2020
1 parent 0413acc commit 0aabadc
Show file tree
Hide file tree
Showing 2 changed files with 27 additions and 1 deletion.
2 changes: 2 additions & 0 deletions lib/internal/streams/from.js
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,8 @@ function from(Readable, iterable, opts) {

const readable = new Readable({
objectMode: true,
highWaterMark: 1,
// TODO(ronag): What options should be allowed?
...opts
});

Expand Down
26 changes: 25 additions & 1 deletion test/parallel/test-readable-from.js
Original file line number Diff line number Diff line change
Expand Up @@ -159,6 +159,29 @@ async function asTransformStream() {
}
}

// Verifies that a rejection produced mid-iteration by the source async
// generator is delivered to the consumer only after every previously
// yielded item has been read, and that the original rejection reason is
// forwarded untouched.
async function endWithError() {
  async function* generate() {
    yield 1;
    yield 2;
    // Rejecting with a plain string (not an Error) on purpose: the
    // identity check below proves the reason is propagated as-is.
    yield Promise.reject('Boum');
  }

  const stream = Readable.from(generate());

  const expected = [1, 2];

  // Capture the error and assert outside the try/catch so a stream
  // that wrongly completes fails with a clear message instead of the
  // sentinel Error being compared against 'Boum'.
  let caught;
  try {
    for await (const chunk of stream) {
      strictEqual(chunk, expected.shift());
    }
  } catch (err) {
    caught = err;
  }
  strictEqual(expected.length, 0);
  strictEqual(caught, 'Boum');
}


Promise.all([
toReadableBasicSupport(),
toReadableSyncIterator(),
Expand All @@ -168,5 +191,6 @@ Promise.all([
toReadableOnData(),
toReadableOnDataNonObject(),
destroysTheStreamWhenThrowing(),
asTransformStream()
asTransformStream(),
endWithError()
]).then(mustCall());

0 comments on commit 0aabadc

Please sign in to comment.