Commit 6641fda

Linkgoron authored and targos committed

stream: add iterator helper find

Continue iterator-helpers work by adding `find` to readable streams.

PR-URL: #41849
Reviewed-By: Benjamin Gruenbaum <[email protected]>
Reviewed-By: James M Snell <[email protected]>
Reviewed-By: Matteo Collina <[email protected]>

1 parent 3ef9f8d · commit 6641fda

7 files changed: +280 -153 lines changed
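At a glance, the helper added by this commit resolves with the first chunk for which the callback returns (or resolves to) a truthy value. A minimal usage sketch, based only on the documentation added below:

```mjs
import { Readable } from 'stream';

// find() resolves with the first matching chunk, or undefined if none matches.
const found = await Readable.from([1, 2, 3, 4]).find((x) => x > 2);
console.log(found); // 3
```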

doc/api/stream.md (+70 -19)
@@ -1696,7 +1696,8 @@ added: v16.14.0
 
 > Stability: 1 - Experimental
 
-* `fn` {Function|AsyncFunction} a function to map over every item in the stream.
+* `fn` {Function|AsyncFunction} a function to map over every chunk in the
+  stream.
   * `data` {any} a chunk of data from the stream.
   * `options` {Object}
     * `signal` {AbortSignal} aborted if the stream is destroyed allowing to
@@ -1709,16 +1710,16 @@ added: v16.14.0
 * Returns: {Readable} a stream mapped with the function `fn`.
 
 This method allows mapping over the stream. The `fn` function will be called
-for every item in the stream. If the `fn` function returns a promise - that
+for every chunk in the stream. If the `fn` function returns a promise - that
 promise will be `await`ed before being passed to the result stream.
 
 ```mjs
 import { Readable } from 'stream';
 import { Resolver } from 'dns/promises';
 
 // With a synchronous mapper.
-for await (const item of Readable.from([1, 2, 3, 4]).map((x) => x * 2)) {
-  console.log(item); // 2, 4, 6, 8
+for await (const chunk of Readable.from([1, 2, 3, 4]).map((x) => x * 2)) {
+  console.log(chunk); // 2, 4, 6, 8
 }
 // With an asynchronous mapper, making at most 2 queries at a time.
 const resolver = new Resolver();
@@ -1740,7 +1741,7 @@ added: v16.14.0
 
 > Stability: 1 - Experimental
 
-* `fn` {Function|AsyncFunction} a function to filter items from stream.
+* `fn` {Function|AsyncFunction} a function to filter chunks from the stream.
   * `data` {any} a chunk of data from the stream.
   * `options` {Object}
     * `signal` {AbortSignal} aborted if the stream is destroyed allowing to
@@ -1752,8 +1753,8 @@ added: v16.14.0
     aborted.
 * Returns: {Readable} a stream filtered with the predicate `fn`.
 
-This method allows filtering the stream. For each item in the stream the `fn`
-function will be called and if it returns a truthy value, the item will be
+This method allows filtering the stream. For each chunk in the stream the `fn`
+function will be called and if it returns a truthy value, the chunk will be
 passed to the result stream. If the `fn` function returns a promise - that
 promise will be `await`ed.
 
@@ -1762,8 +1763,8 @@ import { Readable } from 'stream';
 import { Resolver } from 'dns/promises';
 
 // With a synchronous predicate.
-for await (const item of Readable.from([1, 2, 3, 4]).filter((x) => x > 2)) {
-  console.log(item); // 3, 4
+for await (const chunk of Readable.from([1, 2, 3, 4]).filter((x) => x > 2)) {
+  console.log(chunk); // 3, 4
 }
 // With an asynchronous predicate, making at most 2 queries at a time.
 const resolver = new Resolver();
@@ -1789,7 +1790,7 @@ added: v16.15.0
 
 > Stability: 1 - Experimental
 
-* `fn` {Function|AsyncFunction} a function to call on each item of the stream.
+* `fn` {Function|AsyncFunction} a function to call on each chunk of the stream.
   * `data` {any} a chunk of data from the stream.
   * `options` {Object}
     * `signal` {AbortSignal} aborted if the stream is destroyed allowing to
@@ -1801,12 +1802,12 @@ added: v16.15.0
     aborted.
 * Returns: {Promise} a promise for when the stream has finished.
 
-This method allows iterating a stream. For each item in the stream the
+This method allows iterating a stream. For each chunk in the stream the
 `fn` function will be called. If the `fn` function returns a promise - that
 promise will be `await`ed.
 
 This method is different from `for await...of` loops in that it can optionally
-process items concurrently. In addition, a `forEach` iteration can only be
+process chunks concurrently. In addition, a `forEach` iteration can only be
 stopped by having passed a `signal` option and aborting the related
 `AbortController` while `for await...of` can be stopped with `break` or
 `return`. In either case the stream will be destroyed.
@@ -1820,8 +1821,8 @@ import { Readable } from 'stream';
 import { Resolver } from 'dns/promises';
 
 // With a synchronous predicate.
-for await (const item of Readable.from([1, 2, 3, 4]).filter((x) => x > 2)) {
-  console.log(item); // 3, 4
+for await (const chunk of Readable.from([1, 2, 3, 4]).filter((x) => x > 2)) {
+  console.log(chunk); // 3, 4
 }
 // With an asynchronous predicate, making at most 2 queries at a time.
 const resolver = new Resolver();
@@ -1886,7 +1887,7 @@ added: v16.15.0
 
 > Stability: 1 - Experimental
 
-* `fn` {Function|AsyncFunction} a function to call on each item of the stream.
+* `fn` {Function|AsyncFunction} a function to call on each chunk of the stream.
   * `data` {any} a chunk of data from the stream.
   * `options` {Object}
     * `signal` {AbortSignal} aborted if the stream is destroyed allowing to
@@ -1927,6 +1928,56 @@ console.log(anyBigFile); // `true` if any file in the list is bigger than 1MB
 console.log('done'); // Stream has finished
 ```
 
+##### `readable.find(fn[, options])`
+
+<!-- YAML
+added: REPLACEME
+-->
+
+> Stability: 1 - Experimental
+
+* `fn` {Function|AsyncFunction} a function to call on each chunk of the stream.
+  * `data` {any} a chunk of data from the stream.
+  * `options` {Object}
+    * `signal` {AbortSignal} aborted if the stream is destroyed allowing to
+      abort the `fn` call early.
+* `options` {Object}
+  * `concurrency` {number} the maximum concurrent invocation of `fn` to call
+    on the stream at once. **Default:** `1`.
+  * `signal` {AbortSignal} allows destroying the stream if the signal is
+    aborted.
+* Returns: {Promise} a promise evaluating to the first chunk for which `fn`
+  evaluated with a truthy value, or `undefined` if no element was found.
+
+This method is similar to `Array.prototype.find` and calls `fn` on each chunk
+in the stream to find a chunk with a truthy value for `fn`. Once an `fn` call's
+awaited return value is truthy, the stream is destroyed and the promise is
+fulfilled with the value for which `fn` returned a truthy value. If all of the
+`fn` calls on the chunks return a falsy value, the promise is fulfilled with
+`undefined`.
+
+```mjs
+import { Readable } from 'stream';
+import { stat } from 'fs/promises';
+
+// With a synchronous predicate.
+await Readable.from([1, 2, 3, 4]).find((x) => x > 2); // 3
+await Readable.from([1, 2, 3, 4]).find((x) => x > 0); // 1
+await Readable.from([1, 2, 3, 4]).find((x) => x > 10); // undefined
+
+// With an asynchronous predicate, making at most 2 file checks at a time.
+const foundBigFile = await Readable.from([
+  'file1',
+  'file2',
+  'file3',
+]).find(async (fileName) => {
+  const stats = await stat(fileName);
+  return stats.size > 1024 * 1024;
+}, { concurrency: 2 });
+console.log(foundBigFile); // File name of large file, if any file in the list is bigger than 1MB
+console.log('done'); // Stream has finished
+```
+
 ##### `readable.every(fn[, options])`
 
 <!-- YAML
@@ -1935,7 +1986,7 @@ added: v16.15.0
 
 > Stability: 1 - Experimental
 
-* `fn` {Function|AsyncFunction} a function to call on each item of the stream.
+* `fn` {Function|AsyncFunction} a function to call on each chunk of the stream.
   * `data` {any} a chunk of data from the stream.
   * `options` {Object}
     * `signal` {AbortSignal} aborted if the stream is destroyed allowing to
@@ -1985,7 +2036,7 @@ added: v16.15.0
 > Stability: 1 - Experimental
 
 * `fn` {Function|AsyncGeneratorFunction|AsyncFunction} a function to map over
-  every item in the stream.
+  every chunk in the stream.
   * `data` {any} a chunk of data from the stream.
   * `options` {Object}
     * `signal` {AbortSignal} aborted if the stream is destroyed allowing to
@@ -2009,8 +2060,8 @@ import { Readable } from 'stream';
 import { createReadStream } from 'fs';
 
 // With a synchronous mapper.
-for await (const item of Readable.from([1, 2, 3, 4]).flatMap((x) => [x, x])) {
-  console.log(item); // 1, 1, 2, 2, 3, 3, 4, 4
+for await (const chunk of Readable.from([1, 2, 3, 4]).flatMap((x) => [x, x])) {
+  console.log(chunk); // 1, 1, 2, 2, 3, 3, 4, 4
 }
 // With an asynchronous mapper, combine the contents of 4 files
 const concatResult = Readable.from([
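The new `readable.find()` sits next to the existing `readable.some()` helper documented just above it; going only by the documented behaviour, the difference is the resolved value (a boolean versus the matching chunk itself), sketched here for comparison:

```mjs
import { Readable } from 'stream';

// some() resolves to a boolean as soon as a chunk matches the predicate...
console.log(await Readable.from([1, 2, 3, 4]).some((x) => x > 2)); // true

// ...while find() resolves to the matching chunk, or undefined.
console.log(await Readable.from([1, 2, 3, 4]).find((x) => x > 2)); // 3
console.log(await Readable.from([1, 2, 3, 4]).find((x) => x > 10)); // undefined
```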

lib/internal/streams/operators.js (+14 -28)
@@ -186,31 +186,9 @@ function asIndexedPairs(options = undefined) {
 }
 
 async function some(fn, options) {
-  if (options != null && typeof options !== 'object') {
-    throw new ERR_INVALID_ARG_TYPE('options', ['Object']);
-  }
-  if (options?.signal != null) {
-    validateAbortSignal(options.signal, 'options.signal');
-  }
-
-  // https://tc39.es/proposal-iterator-helpers/#sec-iteratorprototype.some
-  // Note that some does short circuit but also closes the iterator if it does
-  const ac = new AbortController();
-  if (options?.signal) {
-    if (options.signal.aborted) {
-      ac.abort();
-    }
-    options.signal.addEventListener('abort', () => ac.abort(), {
-      [kWeakHandler]: this,
-      once: true,
-    });
-  }
-  const mapped = this.map(fn, { ...options, signal: ac.signal });
-  for await (const result of mapped) {
-    if (result) {
-      ac.abort();
-      return true;
-    }
+  // eslint-disable-next-line no-unused-vars
+  for await (const unused of filter.call(this, fn, options)) {
+    return true;
   }
   return false;
 }
@@ -226,6 +204,13 @@ async function every(fn, options) {
   }, options));
 }
 
+async function find(fn, options) {
+  for await (const result of filter.call(this, fn, options)) {
+    return result;
+  }
+  return undefined;
+}
+
 async function forEach(fn, options) {
   if (typeof fn !== 'function') {
     throw new ERR_INVALID_ARG_TYPE(
@@ -236,7 +221,7 @@ async function forEach(fn, options) {
     return kEmpty;
   }
   // eslint-disable-next-line no-unused-vars
-  for await (const unused of this.map(forEachFn, options));
+  for await (const unused of map.call(this, forEachFn, options));
 }
 
 function filter(fn, options) {
@@ -250,7 +235,7 @@ function filter(fn, options) {
     }
     return kEmpty;
   }
-  return this.map(filterFn, options);
+  return map.call(this, filterFn, options);
 }
 
 // Specific to provide better error to reduce since the argument is only
@@ -329,7 +314,7 @@ async function toArray(options) {
 }
 
 function flatMap(fn, options) {
-  const values = this.map(fn, options);
+  const values = map.call(this, fn, options);
   return async function* flatMap() {
     for await (const val of values) {
       yield* val;
@@ -415,4 +400,5 @@ module.exports.promiseReturningOperators = {
   reduce,
   toArray,
   some,
+  find,
 };
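The refactor above builds both `some` and the new `find` on top of the internal `filter` helper: `find` resolves with the first value the filtered iterator yields, while `some` only reports whether it yields anything at all. A standalone sketch of that pattern using plain async generators (illustrative stand-ins, not the Node.js internals themselves):

```mjs
// Hypothetical helpers mirroring the composition used in operators.js.
async function* filterIt(iterable, fn) {
  for await (const value of iterable) {
    if (await fn(value)) yield value;
  }
}

async function findIt(iterable, fn) {
  // Like find(): resolve with the first value the filtered iterator yields.
  for await (const result of filterIt(iterable, fn)) return result;
  return undefined;
}

async function someIt(iterable, fn) {
  // Like the simplified some(): true as soon as anything is yielded.
  // eslint-disable-next-line no-unused-vars
  for await (const unused of filterIt(iterable, fn)) return true;
  return false;
}

console.log(await findIt([1, 2, 3, 4], (x) => x > 2)); // 3
console.log(await someIt([1, 2, 3, 4], (x) => x > 10)); // false
```

Switching from `this.map(...)` to `map.call(this, ...)` (and likewise for `filter`) also means the operators no longer go through the stream's own, possibly overridden, `map` property, which is what the test changes below assert.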

test/parallel/test-stream-filter.js (+8)
@@ -98,3 +98,11 @@ const { setTimeout } = require('timers/promises');
   const stream = Readable.from([1, 2, 3, 4, 5]).filter((x) => true);
   assert.strictEqual(stream.readable, true);
 }
+{
+  const stream = Readable.from([1, 2, 3, 4, 5]);
+  Object.defineProperty(stream, 'map', {
+    value: common.mustNotCall(() => {}),
+  });
+  // Check that map isn't getting called.
+  stream.filter(() => true);
+}
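This test (and the matching ones for `flatMap` and `forEach` below) replaces the stream's public `map` property with `common.mustNotCall()` from Node's test common module, which returns a function that fails the test if it is ever invoked; the assertion is therefore that the operators go through the internal `map` rather than the overridable public method. A rough stand-in for what `mustNotCall` does, as an illustration only and not the actual helper:

```mjs
import assert from 'assert';

// Simplified stand-in: invoking the returned function fails the test.
function mustNotCall(msg = 'function should not have been called') {
  return () => assert.fail(msg);
}
```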

test/parallel/test-stream-flatMap.js (+8)
@@ -121,3 +121,11 @@ function oneTo5() {
   const stream = oneTo5().flatMap((x) => x);
   assert.strictEqual(stream.readable, true);
 }
+{
+  const stream = oneTo5();
+  Object.defineProperty(stream, 'map', {
+    value: common.mustNotCall(() => {}),
+  });
+  // Check that map isn't getting called.
+  stream.flatMap(() => true);
+}

test/parallel/test-stream-forEach.js (+8)
@@ -129,3 +129,11 @@ const { once } = require('events');
   const stream = Readable.from([1, 2, 3, 4, 5]).forEach((_) => true);
   assert.strictEqual(typeof stream.then, 'function');
 }
+{
+  const stream = Readable.from([1, 2, 3, 4, 5]);
+  Object.defineProperty(stream, 'map', {
+    value: common.mustNotCall(() => {}),
+  });
+  // Check that map isn't getting called.
+  stream.forEach(() => true);
+}
