What is the recommended approach in JavaScript for passing around generators that include filtering + mapping logic?
Somehow, JavaScript generators are missing such fundamental things as filter and map methods, similar to arrays, which would let you create a generator that includes that logic without having to run the iteration first.
My head-on approach was to implement custom functions that apply the logic:
// Yields only the values of generator g for which cb returns a truthy result:
function* filter(g, cb) {
    let a;
    do {
        a = g.next();
        if (!a.done && cb(a.value)) {
            yield a.value;
        }
    } while (!a.done);
    return a.value;
}

// Yields each value of generator g transformed by cb:
function* map(g, cb) {
    let a;
    do {
        a = g.next();
        if (!a.done) {
            yield cb(a.value);
        }
    } while (!a.done);
    return a.value;
}
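For example, composing the two helpers above already requires nesting the calls (numbers() below is a hypothetical generator, added only for illustration):

function* numbers() {
    yield -1;
    yield 2;
    yield 3;
}

// Each additional operation wraps the previous one:
const gen = map(filter(numbers(), a => a > 0), b => ({ value: b }));

console.log([...gen]); //=> [ { value: 2 }, { value: 3 } ]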
But this creates a callback hell. I want to simply chain a generator, like a regular array:
// create a filtered & re-mapped generator, without running it:
const gen = myGenerator().filter(a => a > 0).map(b => ({value: b}));
// pass generator into a function that will run it:
processGenerator(gen);
Is there a way to extend generators to automatically have access to such basic functions?
As an extra, if somebody wants to weigh in on why such fundamental things aren't part of the generator implementation, that'll be awesome! I would think that filtering and mapping are the two most essential things one needs for sequences.
UPDATE
This all ended with me writing my own iter-ops library :)
asked Jul 3, 2021 by vitaly-t; edited Dec 22, 2022

Comments:
- Looks like there is a proposal github.com/tc39/proposal-iterator-helpers, currently on stage 2. – skyboyer, Jul 3, 2021
- There are also links to some npm packages that might implement some of the desired functionality. – skyboyer, Jul 3, 2021
- @skyboyer That looks interesting. I wonder though, if there is any polyfill to use it today, and not years from now :) – vitaly-t, Jul 3, 2021
- Does this answer your question? How to extend the Generator class? – Patrik Valkovič, Jul 3, 2021
- The following question is dealing with the same issue: stackoverflow.com/questions/47534156/… – Patrik Valkovič, Jul 3, 2021
4 Answers

An alternative to your solution would be to use the for...of loop instead of a do...while.
I would also prefer the filter and map functions to consume and produce a generator function, like below:
function filter(gen, predicate) {
    return function* () {
        for (let e of gen()) {
            if (predicate(e)) {
                yield e;
            }
        }
    };
}

function map(gen, fn) {
    return function* () {
        for (let e of gen()) {
            yield fn(e);
        }
    };
}
// Wraps a generator function into an object with chainable filter/map,
// plus call() to obtain a fresh iterator:
function generatorWrapper(gen) {
    return {
        call: () => gen(),
        filter: predicate => generatorWrapper(filter(gen, predicate)),
        map: fn => generatorWrapper(map(gen, fn))
    };
}
function* generator() {
    yield 1;
    yield 2;
    yield 3;
}

const it = generatorWrapper(generator)
    .filter(x => x > 1)
    .map(x => x * 2)
    .call();

for (let e of it) {
    console.log(e);
}
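One thing worth noting about this design (a small follow-up sketch, not part of the original code): because the wrapper stores the generator function rather than a live iterator, every .call() starts a fresh iteration, so the same chained pipeline can be consumed more than once.

const wrapped = generatorWrapper(generator)
    .filter(x => x > 1)
    .map(x => x * 2);

// Each .call() invokes the underlying generator function again,
// producing an independent iterator:
console.log([...wrapped.call()]); //=> [ 4, 6 ]
console.log([...wrapped.call()]); //=> [ 4, 6 ] (re-run from the start)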
I may have figured out a proper solution to this... I created a class Iterable, which builds on this answer:
class Iterable {
    constructor(generator) {
        this[Symbol.iterator] = generator;
    }

    static extend(generator, cc) {
        // cc - optional calling context, when the generator is a class method;
        return function () {
            return new Iterable(generator.bind(cc ?? this, ...arguments));
        };
    }
}

Iterable.prototype.filter = function (predicate) {
    let iterable = this;
    return new Iterable(function* () {
        for (let value of iterable) {
            if (predicate(value)) {
                yield value;
            }
        }
    });
};

Iterable.prototype.map = function (cb) {
    let iterable = this;
    return new Iterable(function* () {
        for (let value of iterable) {
            yield cb(value);
        }
    });
};
Now we can take an existing generator function, like this:
function* test(value1, value2) {
    yield value1;
    yield value2;
}
and turn it into an extended iterator:
const extTest = Iterable.extend(test);
and then use it in place of the original generator:
const i = extTest(111, 222).filter(f => f > 0).map(m => ({value: m}));
This now works correctly:
const values = [...i];
//=> [ { value: 111 }, { value: 222 } ]
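To illustrate the optional cc argument of Iterable.extend, here is a hypothetical example (the Counter class below is not part of the original code), where the generator is an instance method that needs its calling context preserved:

class Counter {
    constructor(limit) {
        this.limit = limit;
    }
    // A generator method that reads state from its instance:
    * values() {
        for (let i = 1; i <= this.limit; i++) {
            yield i;
        }
    }
}

const counter = new Counter(3);
// Bind the generator method to its instance via the cc parameter:
const extValues = Iterable.extend(counter.values, counter);

const odds = [...extValues().filter(n => n % 2 === 1).map(n => n * 10)];
//=> [ 10, 30 ]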
UPDATE
In the end, I wrote my own iter-ops library.
How about a pipelining function that will take the original iterable and yield values through pipelined decorators?
const pipe = function* (iterable, decorators) {
    // First build the pipeline by iterating over the decorators
    // and applying them in sequence.
    for (const decorator of decorators) {
        iterable = decorator(iterable);
    }
    // Then yield the values of the composed iterable.
    for (const value of iterable) {
        yield value;
    }
};
const filter = predicate =>
    function* (iterable) {
        for (const value of iterable) {
            if (predicate(value)) {
                yield value;
            }
        }
    };

const map = cb =>
    function* (iterable) {
        for (const value of iterable) {
            yield cb(value);
        }
    };

const mergeMap = cb =>
    function* (iterable) {
        for (const value of iterable) {
            for (const mapped of cb(value)) {
                yield mapped;
            }
        }
    };

const take = n =>
    function* (iterable) {
        for (const value of iterable) {
            if (!n--) {
                break;
            }
            yield value;
        }
    };
function* test(value1, value2) {
    yield value1;
    yield value2;
}

function* infinite() {
    for (;;) yield Math.random();
}

for (const value of pipe(test(111, 222), [
    filter(f => f > 0),
    map(m => ({ value: m }))
])) {
    console.log(value);
}

for (const value of pipe(infinite(), [
    take(5),
    mergeMap(v => [v, { timesTwo: v * 2 }])
])) {
    console.log(value);
}
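Since pipe is itself a generator function, its result is just another iterable, so pipelines can be composed with one another. A small illustrative addition:

// The output of one pipe can be fed into another:
const positives = pipe(test(111, 222), [filter(f => f > 0)]);

for (const value of pipe(positives, [map(m => ({ value: m }))])) {
    console.log(value); //=> { value: 111 }, then { value: 222 }
}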
Since ECMAScript 2025 introduced iterator helper methods, the code you wished had worked now actually works out of the box:
// Your code:
const gen = myGenerator().filter(a => a > 0).map(b => ({ value: b }));
processGenerator(gen);

// Some functions that the above code is calling:
function* myGenerator() { // A generator for the demo
    for (let i = 1; i < 20; i = -i * 2) yield i;
}
function processGenerator(gen) { // A consumer of the generator
    for (const obj of gen) console.log(obj);
}
The filter and map methods are methods of iterator helper objects, i.e. iterators that inherit from Iterator. Note that generators return such iterators, as do all native JS methods that return iterators, such as Array.prototype.values, Map.prototype.entries, ...
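For example, assuming a runtime that implements the ES2025 iterator helpers (e.g. a recent V8-based engine), the same chaining works on an array iterator obtained via Array.prototype.values:

// Requires a runtime with ES2025 iterator helpers.
const result = [10, -5, 20, -1, 30]
    .values()                 // an array iterator, inheriting from Iterator.prototype
    .filter(n => n > 0)       // lazy - nothing runs yet
    .map(n => ({ value: n }))
    .toArray();               // consumes the iterator

console.log(result); //=> [ { value: 10 }, { value: 20 }, { value: 30 } ]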