Question
I have been reading about map, reduce and filter a lot because of how much they are used in React and FP in general. If we write something like:
let myArr = [1, 2, 3, 4, 5, 6, 7, 8, 9];
let sumOfDoubleOfOddNumbers = myArr.filter(num => num % 2)
    .map(num => num * 2)
    .reduce((acc, currVal) => acc + currVal, 0);
3 different loops are run.
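Adding logging to each callback makes the three passes visible: every filter call runs before the first map call, and every map call before the first reduce call. For example:
const demoSum = [1, 2, 3, 4, 5]
    .filter(num => { console.log("filter: " + num); return num % 2; })
    .map(num => { console.log("map: " + num); return num * 2; })
    .reduce((acc, currVal) => { console.log("reduce: " + currVal); return acc + currVal; }, 0);
// logs filter: 1 through filter: 5 first, then map: 1, map: 3, map: 5, then reduce: 2, reduce: 6, reduce: 10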
I've read about Java 8 streams as well and know that they use what is called a monad, i.e. the computations are stored first and then performed only once, in a single iteration. For example,
Stream.of("d2", "a2", "b1", "b3", "c")
.map(s -> {
System.out.println("map: " + s);
return s.toUpperCase();
})
.filter(s -> {
System.out.println("filter: " + s);
return s.startsWith("A");
})
.forEach(s -> System.out.println("forEach: " + s));
// map: d2
// filter: D2
// map: a2
// filter: A2
// forEach: A2
// map: b1
// filter: B1
// map: b3
// filter: B3
// map: c
// filter: C
PS: Java code is taken from: http://winterbe.com/posts/2014/07/31/java8-stream-tutorial-examples/
There are many other languages that use this same method. Is there a way to do it the same way in JS as well?
Answer 1:
This is an exact clone of your Java code. Unlike Bergi's solution, it does not need to modify any global prototypes.
class Stream {
    constructor(iter) {
        this.iter = iter;
    }

    // make the stream itself iterable by delegating to the wrapped iterator
    * [Symbol.iterator]() {
        yield* this.iter;
    }

    static of(...args) {
        return new this(function* () {
            yield* args;
        }());
    }

    // wrap a generator around the current stream to build the next lazy stage
    _chain(next) {
        return new this.constructor(next.call(this));
    }

    map(fn) {
        return this._chain(function* () {
            for (let a of this)
                yield fn(a);
        });
    }

    filter(fn) {
        return this._chain(function* () {
            for (let a of this)
                if (fn(a))
                    yield a;
        });
    }

    // terminal operation: pulls values through the whole pipeline
    forEach(fn) {
        for (let a of this)
            fn(a);
    }
}
Stream.of("d2", "a2", "b1", "b3", "c")
.map(s => {
console.log("map: " + s);
return s.toUpperCase();
})
.filter(s => {
console.log("filter: " + s);
return s.startsWith("A");
})
.forEach(s => console.log('forEach', s));
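This should produce the same interleaved output as the Java version, since each value is pulled through map and filter before the next one is requested:
// map: d2
// filter: D2
// map: a2
// filter: A2
// forEach: A2
// map: b1
// filter: B1
// map: b3
// filter: B3
// map: c
// filter: C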
Actually, the chaining functionality could be decoupled from specific iterators to provide a generic framework:
// polyfill, remove me later on
Array.prototype.values = Array.prototype.values || function* () { yield* this };

class Iter {
    constructor(iter) { this.iter = iter }
    * [Symbol.iterator]() { yield* this.iter }
    static of(...args) { return this.from(args) }
    static from(args) { return new this(args.values()) }
    _(gen) { return new this.constructor(gen.call(this)) }
}
Now, you can throw arbitrary generators into this, both predefined and ad-hoc ones, for example:
let map = fn => function* () {
    for (let a of this)
        yield fn(a);
};

let filter = fn => function* () {
    for (let a of this)
        if (fn(a))
            yield a;
};

let it = Iter.of("d2", "a2", "b1", "b3", "c", "a000")
    ._(map(s => s.toUpperCase()))
    ._(filter(s => s.startsWith("A")))
    ._(function* () {
        for (let x of [...this].sort())
            yield x;
    });

console.log([...it]);
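For reference, the final ad-hoc stage sorts the surviving values, so this should log ["A000", "A2"].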
Answer 2:
You can achieve this using piping. I don't know whether this makes it too complicated, but by composing the callbacks into a pipe you can run the whole chain on each element inside a single Array.reduce pass.
const stream = ["d2", "a2", "b1", "b3", "c"];
const _pipe = (a, b) => (arg) => b(a(arg));
const pipe = (...ops) => ops.reduce(_pipe);
const _map = (value) => (console.log(`map: ${value}`), value.toUpperCase());
const _filter = (value) => (console.log(`filter: ${value}`),
value.startsWith("A") ? value : undefined);
const _forEach = (value) => value ? (console.log(`forEach: ${value}`), value) : undefined;
const mapFilterEach = pipe(_map,_filter,_forEach);
const result = stream.reduce((sum, element) => {
const value = mapFilterEach(element);
if(value) sum.push(value);
return sum;
}, []);
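For the sample array, only "a2" survives the filter:
console.log(result); // ["A2"]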
I took the pipe function from here
Here is the same pipe-reduce attached to Array.prototype, with an example, if you want to use it for more dynamic purposes:
Array.prototype.pipeReduce = function (...pipes) {
    const _pipe = (a, b) => (arg) => b(a(arg));
    const pipe = (...ops) => ops.reduce(_pipe);
    const reducePipes = pipe(...pipes);
    return this.reduce((sum, element) => {
        const value = reducePipes(element);
        if (value) sum.push(value);
        return sum;
    }, []);
};
const stream = ["d2", "a2", "b1", "b3", "c"];
const reduced = stream.pipeReduce((mapValue) => {
console.log(`map: ${mapValue}`);
return mapValue.toUpperCase();
}, (filterValue) => {
console.log(`filter: ${filterValue}`);
return filterValue.startsWith("A") ? filterValue : undefined;
}, (forEachValue) => {
if(forEachValue){
console.log(`forEach: ${forEachValue}`);
return forEachValue;
}
return undefined;
});
console.log(reduced); //["A2"]
Answer 3:
"what is called a monad, ie, the computations are stored first"
Um, no, that's not what Monad means.
"Is there a way to do it the same way in JS as well?"
Yes, you can use iterators. Check this implementation or that one (and for the monad methods, here).
const myArr = [1, 2, 3, 4, 5, 6, 7, 8, 9];
const sumOfDoubleOfOddNumbers = myArr.values() // get iterator
    .filter(num => num % 2)
    .map(num => num * 2)
    .reduce((acc, currVal) => acc + currVal, 0);
console.log(sumOfDoubleOfOddNumbers);

["d2", "a2", "b1", "b3", "c"].values()
    .map(s => {
        console.log("map: " + s);
        return s.toUpperCase();
    })
    .filter(s => {
        console.log("filter: " + s);
        return s.startsWith("A");
    })
    .forEach(s => console.log("forEach: " + s));
The iterator methods used above can be defined on the built-in iterator prototype like this:
var IteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()));

IteratorPrototype.map = function* (f) {
    for (var x of this)
        yield f(x);
};
IteratorPrototype.filter = function* (p) {
    for (var x of this)
        if (p(x))
            yield x;
};
IteratorPrototype.reduce = function (f, acc) {
    for (var x of this)
        acc = f(acc, x);
    return acc;
};
IteratorPrototype.forEach = function (f) {
    for (var x of this)
        f(x);
};

Array.prototype.values = Array.prototype[Symbol.iterator];
In production code, you should probably use static functions instead of putting custom methods on the built-in iterator prototype.
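A minimal sketch of that static-function style (the helper names here are illustrative, not from the answer above):
// hypothetical standalone helpers that accept any iterable
function* mapIter(iterable, f) {
    for (const x of iterable)
        yield f(x);
}
function* filterIter(iterable, p) {
    for (const x of iterable)
        if (p(x))
            yield x;
}
function reduceIter(iterable, f, acc) {
    for (const x of iterable)
        acc = f(acc, x);
    return acc;
}

const nums = [1, 2, 3, 4, 5, 6, 7, 8, 9];
const sum = reduceIter(
    mapIter(filterIter(nums, n => n % 2), n => n * 2),
    (acc, n) => acc + n,
    0
);
console.log(sum); // 50; still a single pass, and no global prototype is touched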
Answer 4:
Array.prototype.map and Array.prototype.filter create new arrays from the previous one. Array.prototype.reduce applies a function against an accumulator and each element of the array (from left to right) to reduce it to a single value.
Therefore, none of them allows lazy evaluation.
You can at least avoid the extra passes by collapsing your multiple loops into one:
const array = [1, 2, 3, 4, 5, 6, 7, 8, 9];
const result = array.reduce((acc, x) => x % 2 ? acc + x * 2 : acc, 0);
console.log(result);
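For this array the result is 50: the odd numbers 1, 3, 5, 7 and 9 are doubled and summed in a single pass.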
Another way is to handle lazy evaluation yourself in a custom object, as follows. The next snippet is an example redefining filter and map:
const array = [1, 2, 3, 4, 5, 6, 7, 8, 9];

// convert to a lazy structure...
const results = toLazy(array)
    .filter(x => {
        console.log('filter', x);
        return x % 2 !== 0;
    })
    .map(x => {
        console.log('map', x);
        return x * 2;
    });

// check logs for `filter` and `map` callbacks
console.log(results.run()); // -> [2, 6, 10, 14, 18]
function toLazy(array) {
    const lazy = {};
    let callbacks = [];

    // filter/map only record their callback; nothing runs yet
    function addCallback(type, callback) {
        callbacks.push({ type, callback });
        return lazy;
    }

    lazy.filter = addCallback.bind(null, 'filter');
    lazy.map = addCallback.bind(null, 'map');

    // run() makes a single pass, applying the recorded callbacks to each element
    lazy.run = function () {
        const results = [];
        for (var i = 0; i < array.length; i += 1) {
            const item = array[i];
            for (var { callback, type } of callbacks) {
                if (type === 'filter') {
                    if (!callback(item, i)) {
                        break;
                    }
                } else if (type === 'map') {
                    results.push(callback(item, i));
                }
            }
        }
        return results;
    };

    return lazy;
}
However, you can also check libraries like lazy.js, which provide a lazy engine under the hood using iterators.
Source: https://stackoverflow.com/questions/49221528/how-to-reduce-iterations-when-chaining-map-reduce-filter