higher-order generators
You can choose to manipulate the generator functions themselves
const Generator =
{
map: (f,g) => function* (...args)
{
for (const x of g (...args))
yield f (x)
},
filter: (f,g) => function* (...args)
{
for (const x of g (...args))
if (f (x))
yield x
}
}
// some functions !
const square = x =>
x * x
const isEven = x =>
(x & 1) === 0
// a generator !
const range = function* (x = 0, y = 1)
{
while (x < y)
yield x++
}
// higher-order generator !
for (const x of Generator.map (square, Generator.filter (isEven, range)) (0,10))
console.log('evens squared', x)
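// logs:
// evens squared 0
// evens squared 4
// evens squared 16
// evens squared 36
// evens squared 64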
higher-order iterators
Or you can choose to manipulate iterators
const Iterator =
{
map: (f, it) => function* ()
{
for (const x of it)
yield f (x)
} (),
filter: (f, it) => function* ()
{
for (const x of it)
if (f (x))
yield x
} ()
}
// some functions !
const square = x =>
x * x
const isEven = x =>
(x & 1) === 0
// a generator !
const range = function* (x = 0, y = 1)
{
while (x < y)
yield x++
}
// higher-order iterators !
for (const x of Iterator.map (square, Iterator.filter (isEven, range (0, 10))))
console.log('evens squared', x)
recommendation
In most cases, I think it's more practical to manipulate the iterator because of its well-defined (albeit kludgy) interface. It allows you to do something like
Iterator.map (square, Iterator.filter (isEven, [10,11,12,13]))
Whereas the other approach is
Generator.map (square, Generator.filter (isEven, Array.from)) ([10,11,12,13])
Both have their use cases, but I find the former much nicer than the latter.
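To make the comparison concrete, here's a quick sketch that runs both shapes on the same input. It assumes the Generator and Iterator modules and the square and isEven helpers defined above are all in scope
console.log (Array.from (Iterator.map (square, Iterator.filter (isEven, [10,11,12,13]))))
// [ 100, 144 ]
console.log (Array.from (Generator.map (square, Generator.filter (isEven, Array.from)) ([10,11,12,13])))
// [ 100, 144 ]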
persistent iterators
JavaScript's stateful iterators annoy me – each subsequent call to .next
alters the internal state irreversibly.
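For example, using the range generator from above
const it = range (0, 4)
console.log (Array.from (it)) // [ 0, 1, 2, 3 ]
console.log (Array.from (it)) // [] – already exhausted, there's no way to rewind it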
But! There's nothing stopping you from making your own iterators, though, and then creating an adapter that plugs them into JavaScript's stack-safe generator mechanism.
If this interests you, you might like some of the other accompanying examples found here: Loop to a filesystem structure in my object to get all the files
Being able to reuse a persistent iterator isn't the only gain: with this implementation, subsequent reads are even faster than the first because of memoisation – score: JavaScript 0, Persistent Iterators 2.
// -------------------------------------------------------------------
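// Memo wraps a thunk so it runs at most once – the result is cached and returned on later calls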
const Memo = (f, memo) => () =>
memo === undefined
? (memo = f (), memo)
: memo
// -------------------------------------------------------------------
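// persistent iterator nodes – a Yield holds a value and a memoised link to the
// next node, a Return marks the end of the sequence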
const Yield = (value, next = Return) =>
({ done: false, value, next: Memo (next) })
const Return = value =>
({ done: true, value })
// -------------------------------------------------------------------
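// higher-order persistent iterators – each builds a new chain on top of an existing one without mutating it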
const MappedIterator = (f, it = Return ()) =>
it.done
? Return ()
: Yield (f (it.value), () => MappedIterator (f, it.next ()))
const FilteredIterator = (f, it = Return ()) =>
it.done
? Return ()
: f (it.value)
? Yield (it.value, () => FilteredIterator (f, it.next ()))
: FilteredIterator (f, it.next ())
// -------------------------------------------------------------------
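// adapter – drives a persistent iterator through a native generator so it can be used with for...of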
const Generator = function* (it = Return ())
{
while (it.done === false)
(yield it.value, it = it.next ())
return it.value
}
// -------------------------------------------------------------------
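// Range counts from x up to (but not including) y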
const Range = (x = 0, y = 1) =>
x < y
? Yield (x, () => Range (x + 1, y))
: Return ()
const square = x =>
x * x
const isEven = x =>
(x & 1) === 0
// -------------------------------------------------------------------
for (const x of Generator (MappedIterator (square, FilteredIterator (isEven, Range (0,10)))))
console.log ('evens squared', x)
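And the payoff, assuming everything above is in scope: the same persistent iterator can be read any number of times, and every read after the first walks the memoised chain instead of recomputing it
const evensSquared = MappedIterator (square, FilteredIterator (isEven, Range (0, 10)))
console.log (Array.from (Generator (evensSquared))) // [ 0, 4, 16, 36, 64 ]
console.log (Array.from (Generator (evensSquared))) // [ 0, 4, 16, 36, 64 ] – second read hits the memoised nodes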