I am doing some computations inside a double forEach loop, something like this:
array.forEach(function(element){
Object.keys(element).forEach
Take a look at async.js, and especially its control flow statements, such as each
whilst
and until
.
Using async.js you can get what you want to have.
In your actual situation what you want is the each
function (which has formerly been known as forEach
), respectively the eachSeries
function which does not run the single iterations in parallel, but in serial (see the documentation for eachSeries for details).
To provide an example:
// Visit the primes strictly in order: eachSeries starts the next
// iteration only after the previous one has invoked its callback.
const primes = [2, 3, 5, 7, 11];
async.eachSeries(primes, (prime, done) => {
  console.log(prime);
  done(); // Alternatively: done(new Error());
}, (err) => {
  if (err) {
    throw err;
  }
  console.log('Well done :-)!');
});
This will iterate over the array of prime numbers and print them in the correct order, one after each other, and finally print out Well done :-)!
.
// Accumulates each async result, keyed by its input value.
// NOTE(review): object keys are stringified, so non-string members of
// objectArray may collide (two plain objects both become
// "[object Object]") — confirm objectArray holds strings/ids.
const dataObject = {};

// No `new Promise` wrapper is needed: yourFunction already returns a
// promise, and mapping straight to it also propagates rejections — the
// original wrapper never called reject(), so a failing yourFunction left
// Promise.all pending forever.
const allDone = Promise.all(
  objectArray.map((object) =>
    yourFunction(object, anotherParameter).then((returnValue) => {
      dataObject[object] = returnValue;
    })
  )
).then(() => dataObject); // resolves with the fully-populated map
HIGH PERFORMANCE SOLUTION:
There might be a case when you want to process the array members asynchronously/in parallel, but need a function to be called after all members of forEach have been processed. Example:
const async = require('async');

// Every element is handled in parallel; the final handler fires only
// after each iteration has reported completion via its callback.
async.forEach(array, (elementOfArray, done) => {
  // process each element of array in parallel
  // ..
  // ..
  // ..
  done(); // notify that this iteration is complete
}, (err) => {
  if (err) {
    throw err;
  }
  console.log("processing all elements completed");
});
Hence, this way you can perform non-blocking, CPU-intensive operations.
NOTE: When you use eachSeries on huge arrays, the many iterative callbacks might overflow the stack.
Read here: https://github.com/caolan/async#control-flow
You can wrap your callbacks in a count-down closure:
// Count every pending async callback. One wrapped callback is created per
// key of every element, so the total is the sum of all key counts — the
// original used array.length, which undercounts whenever elements have
// more (or fewer) than one key and the trigger would fire early or never.
let pending = array.reduce(
  (total, element) => total + Object.keys(element).length,
  0
);

/**
 * Runs the original callback, then fires someFunctionHere() exactly once,
 * after the last outstanding callback has completed.
 *
 * @param {Function} callback - The original per-operation callback.
 * @param {*} callbackArgs - Argument(s) forwarded to that callback.
 */
function countdownWrapper(callback, callbackArgs) {
  callback(callbackArgs);
  if (--pending === 0) {
    someFunctionHere();
  }
}

array.forEach((element) => {
  Object.keys(element).forEach((key) => {
    // bind(null, callback) pre-fills the first ARGUMENT. The original
    // bind(callback) only set `this` and left the `callback` parameter
    // undefined, so callback(callbackArgs) would crash at call time.
    const wrappedCallback = countdownWrapper.bind(null, callback);
    /* some complex computations with asynchronous WRAPPED callbacks */
  });
});
If the call-backs have different number of arguments, you can do a little surgery on arguments
instead of using an explicit callbackArgs
parameter.
EDIT Your edit clarifies that you want to start each complex computation after the previous calculation completes its callback. This can also be easily arranged through closures:
// Stub: the async computation for one element; invokes `callback` when done.
function complexOp(key, callback) { /* . . . */ }
// Stub: the per-iteration callback the caller originally wanted to run.
// NOTE(review): `(...)` here is pseudocode for "any parameters" — it is
// not valid JavaScript as written.
function originalCallback(...) { /* . . . */ }
// Stub: runs once, after the final iteration has completed.
function doSomethingElse() { /* . . . */ }
// IIFE builds a stateful iterator closure: each time an async complexOp
// finishes, the iterator first forwards the results to the original
// callback, then either kicks off the next element or, when the array is
// exhausted, runs the completion handler. Iterations therefore run in
// strict series.
var iteratorCallback = (function (body, callback, completion) {
var i = 0, len = array.length; // `i` indexes the element currently in flight
return function iterator() {
callback.apply(null, arguments); // forward whatever args the async op produced
if (++i < len) {
body(array[i], iterator); // chain: the next computation reuses this same iterator
} else {
completion(); // every element has been processed
}
};
}(complexOp, originalCallback, doSomethingElse));
// kick everything off:
// NOTE(review): assumes the array is non-empty — array[0] is undefined
// (and doSomethingElse still fires after the first callback) otherwise.
complexOp(array[0], iteratorCallback);
For browsers which support Promise (or using a polyfill) / Node.js, I've implemented a sequential version of forEach with callbacks myself, so I'll just share it here..
The callback must return a promise..
/**
 * Applies an async callback to each array element strictly in sequence:
 * the next iteration starts only after the previous callback's result
 * has settled.
 *
 * Generalization over the original recursive-.then version: the callback
 * may return a plain (non-promise) value as well — `await` tolerates
 * non-thenables, whereas calling `.then()` on one threw a TypeError.
 *
 * @param {Array} array - Items to iterate over, in order.
 * @param {Function} callback - Invoked with each element; may return a promise.
 * @returns {Promise<void>} Resolves after the last element is processed,
 *   or rejects with the first error thrown or rejected by the callback.
 */
async function forEachSync(array, callback) {
  for (const element of array) {
    // `await` serializes the loop and also converts synchronous throws
    // from the callback into a rejection of the returned promise.
    await callback(element);
  }
}