I am trying to execute the following array of functions (a mix of sync and async) in sequential order by implementing a function runCallbacksInSequence, so I can avoid callback hell. I need to implement my own function to understand how callbacks work, so no Async.js.
Here is what I have so far. runCallbacksInSequence works well until it receives the same callback call, cb(null, 'one'), a second time. Ideally, if a function invokes its callback more than once, the extra invocations should be ignored rather than executed again.
If you have any ideas on how I can implement this, let me know - no promises and no async/await.
function first(cb) {
  setTimeout(function() {
    console.log('first()');
    cb(null, 'one');
    cb(null, 'one'); // the duplicate call is not supposed to be executed by runCallbacksInSequence
  }, 0);
}

function last(cb) {
  console.log('last()');
  cb(null, 'lastCall');
}
function runCallbacksInSequence(fns, cb) {
  fns.reduce(
    (r, f) => {
      return k => {
        return r(() => {
          return f((e, x) => {
            return e ? cb(e) : k(x);
          });
        });
      };
    },
    k => {
      return k();
    }
  )(r => {
    return cb(null, r);
  });
}
const fns = [first, last];

runCallbacksInSequence(fns, function(err, results) {
  if (err) return console.log('error: ' + err.message);
  console.log(results);
});
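One way to get the "ignore repeated invocations" behaviour is to guard each callback so that only its first call has any effect. This is only a sketch of mine, not part of the question's code; the callOnce helper below is hypothetical:

function callOnce(fn) {
  let called = false;
  return function(...args) {
    if (called) return; // every call after the first is ignored
    called = true;
    return fn(...args);
  };
}

Inside runCallbacksInSequence, each fn would then receive a guarded callback, e.g. f(callOnce((e, x) => (e ? cb(e) : k(x)))), so the second cb(null, 'one') in first() would simply be dropped.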
I'm trying to translate every element of the array wordList and push the results into a new list, translatedWordList, but because translate() is asynchronous the new list is still empty by the time I pass it to the callback or return it.
function translateWordList(wordList, callback) {
  var translatedWordList = [];
  wordList.forEach((word) => {
    translate(word, (translation) => {
      translatedWordList.push(translation);
    });
  });
  callback(translatedWordList);
}
I tried to solve this by delaying the callback with a while loop that spins until the lengths of translatedWordList and wordList match, but then translate does not log anything at all.
function translateWordList(wordList, callback) {
  var translatedWordList = [];
  wordList.forEach((word) => {
    translate(word, (translation) => {
      translatedWordList.push(translation);
      console.log(translation)
      counter++;
    });
  });
  while (translateWordList.length < wordList.length) {}
  callback(translatedWordList);
}
Instead of using Array#forEach, use a for...of loop and make your function async so you can use the await keyword.
async function translateWordList(wordList, callback) {
  var translatedWordList = [];
  for (const word of wordList) {
    translatedWordList.push(await translate(word));
  }
  callback(translatedWordList);
}
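Assuming translate(word) returns a promise that resolves to the translated string, a caller would then use it like this (just a usage sketch, not from the original answer):

translateWordList(['hello', 'world'], (translatedWordList) => {
  console.log(translatedWordList);
});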
If your translate function does not return a promise and only uses a callback, then you can promisify that function.
function translatePromise(word) {
  return new Promise((resolve, reject) => {
    translate(word, (translation, err) => {
      // assuming your callback signals an error in the second parameter
      if (err) {
        reject(err);
        return;
      }
      resolve(translation);
    });
  });
}
Then replace the translate(word) call in the first example with translatePromise(word).
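With that in place, the loop from the first example becomes (same hypothetical translatePromise as above):

async function translateWordList(wordList, callback) {
  var translatedWordList = [];
  for (const word of wordList) {
    translatedWordList.push(await translatePromise(word));
  }
  callback(translatedWordList);
}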
However, if you don't want to work with async/await you could do something like this.
function translateWordList(wordList, callback) {
  Promise.all(wordList.map((word) => {
    return new Promise((resolve) => {
      translate(word, resolve);
    });
  }))
  .then(callback);
}
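If you want to stay entirely with callbacks (no promises at all), a common alternative is to count the completed translations and fire the callback once every word has come back. This is a sketch of mine, assuming translate(word, cb) calls cb with just the translation, as in the question's own code:

function translateWordList(wordList, callback) {
  var translatedWordList = [];
  var remaining = wordList.length;
  if (remaining === 0) return callback(translatedWordList);
  wordList.forEach((word, index) => {
    translate(word, (translation) => {
      translatedWordList[index] = translation; // keep results in input order
      remaining--;
      if (remaining === 0) callback(translatedWordList); // fires exactly once, after the last translation
    });
  });
}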
I'm playing around with promises and callbacks and wonder what the correct way is to write a function that returns a promise if no callback is passed.
My result looks like this, but I'm not sure whether it is correct (that is, whether it is an anti-pattern):
const mySuperFunction = function mySuperFunction(data, cb) {
  let wrapper = new Promise((resolve, reject) => {
    setTimeout(() => {
      if (Math.random() >= 0.5) {
        resolve(Date.now());
      } else {
        reject(new Error("Not today..."));
      }
    }, 100);
  });

  if (cb) {
    wrapper.then((result) => {
      cb(null, result);
    }, (error) => {
      cb(error);
    });
  } else {
    return wrapper;
  }
};

mySuperFunction().then((time) => {
  console.log(time)
}).catch((err) => {
  console.log(err);
});

mySuperFunction(null, (err, time) => {
  console.log(err, time)
});
It's simple: create a function and wrap the "work" code in a promise. If no callback is passed to my function, I return the wrapped promise. If a callback is passed, I wrap/call it from .then(...) and .catch(...).
Is this OK, or am I missing some special case where this doesn't work?
Your code works, but it adds unnecessary overhead: a promise consumes more memory than a plain callback and is slower.
I would write something like this:
const { promisify } = require('util')

function mySuperFunctionCallback (data, cb) {
  setTimeout(() => {
    if (Math.random() >= 0.5) {
      cb(null, Date.now()) // error-first callback: no error, then the result
    } else {
      cb(new Error('Not today...'))
    }
  }, 100)
}

const mySuperFunctionAsync = promisify(mySuperFunctionCallback)

function mySuperFunction (data, cb) {
  if (cb) {
    mySuperFunctionCallback(data, cb)
  } else {
    return mySuperFunctionAsync(data)
  }
}
mySuperFunction().then((time) => {
  console.log(time)
}).catch((err) => {
  console.log(err)
})

mySuperFunction(null, (err, time) => {
  console.log(err, time)
})
I am trying to build an array of async functions by iterating over an array of 'titles' and later passing that array to async.waterfall.
According to the docs:
var asyncFunction = [
  function(callback) {
    asyncFunc(1, function() {
      callback(null);
    });
  },
  // page 2
  function(data, callback) {
    asyncFunc(2, function() {
      callback(null, data);
    });
  }
]
The first function takes the callback as its only parameter, while the second and all subsequent functions take data as the first parameter and the callback as the second.
My question is: how do I create the function conditionally, based on the index of the map iteration?
Below is a non-working code example, just to give you an idea of what I'm trying to accomplish.
Thanks
const asyncFuncs = ['a', 'b', 'c'].map((letter, index) => {
  const args = index === 0 ? [callback] : [data, callback]
  return function(...args) {
    asyncFunc(2, function() {
      callback(null, data);
    });
  }
})

async.waterfall(asyncFuncs, (error, result) => {})
Sorry if I misunderstood. I guess you want to choose the waterfall functions dynamically based on your data.
var yourData = ['a', 'b', 'c'];

async.map(yourData, yourAssignFunction, function (err, result) {
  if (!err) {
    console.log('Success: ' + result);
  } else {
    console.log('Error: ' + err);
  }
});

function yourAssignFunction(item, callback) {
  if (item === 'a') { // your conditions check here
    // use waterfall 1
    waterfallFunction1(item, callback);
  } else {
    // use waterfall 2
    waterfallFunction2(item, callback);
  }
}
function waterfallFunction1(item, callback) {
  async.waterfall([
    function(cb) {
      console.log(' -> task1: ', item);
      cb(null, item);
    },
    function(response, cb) {
      console.log(' -> task2: ', item);
      cb(null, item);
    }
  ], callback)
}
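If the goal really is to build the waterfall array by index, as the question asks, one possible sketch (my own, not part of the answer above, and assuming the asyncFunc helper from the question exists) is to give only the first generated function the (callback) shape and every later one the (data, callback) shape:

const titles = ['a', 'b', 'c']; // stand-in for the real titles array

const asyncFuncs = titles.map((title, index) => {
  if (index === 0) {
    // first waterfall task: receives only the callback
    return function (callback) {
      asyncFunc(index, function () {
        callback(null, title);
      });
    };
  }
  // every later task: receives the previous result plus the callback
  return function (data, callback) {
    asyncFunc(index, function () {
      callback(null, data);
    });
  };
});

async.waterfall(asyncFuncs, (error, result) => {
  if (error) return console.log('error:', error);
  console.log('result:', result);
});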
I have the following code:
function someHelper(someList) {
  return someList.map((item) => {
    return (next) => someService.firstCall(paramForFirst, (err, result) => {
      if (err) next(err);
      else someService.secondCall(paramForSecond, result, next);
    });
  });
}

module.exports = {
  doSomething(param, callback) {
    async.parallel(someHelper(someList), callback);
  }
};
How can I convert the inner function returned by someHelper to use async.waterfall?
Here is the solution I have: pass the next callback from async.parallel into async.waterfall as its final callback, and list the steps in the waterfall's task array. By definition, waterfall calls that final callback with either the result or an error.
function someHelper(someList) {
  return someList.map((item) => {
    return (next) => {
      async.waterfall([
        async.apply(someService.firstCall, paramForFirst),
        async.apply(someService.secondCall, paramForSecond)
      ], next);
    };
  });
}
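For readers unfamiliar with async.apply: it pre-fills the leading arguments of a callback-style function, so in this waterfall the two forms below are equivalent (a sketch reusing the question's hypothetical someService):

// with async.apply
async.apply(someService.secondCall, paramForSecond)

// is the same as writing the wrapper by hand
function (result, callback) {
  someService.secondCall(paramForSecond, result, callback);
}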
I am putting together an example that shows how a simple synchronous Node.js program can be transformed into an asynchronous version that uses async/await. There should be several intermediate steps, starting with a normal callback-based version, followed by one that uses two callbacks, one for the normal (resolve) case and another for the error (reject) case, which would then lead to promises.
The job of each version is to create an empty folder copy (which might already exist and might contain files) and to copy all files (called file1.txt and file2.txt) from the folder orig into it. If an error occurs anywhere, it should be caught explicitly, printed to the console, and the program should not continue any further.
The version with normal error-first callbacks works just fine, but I ran into an issue with the split-callback version: it only copies file2.txt, not file1.txt.
Here is the code I use for transforming the fs-functions:
const fs = require('fs');

fs.exists = function(path, callback) {
  fs.stat(path, (err, stats) => {
    if (err) {
      callback(null, false);
    } else {
      callback(null, true);
    }
  });
};

function splitCallback(f) {
  return (...params) => {
    reject = params[params.length - 2];
    resolve = params[params.length - 1];
    params = params.slice(0, params.length - 2);
    f(...params, (err, data) => {
      if (err) {
        reject(err);
      } else {
        resolve(data);
      }
    });
  };
}

const sfs = {};
const functionNames = ['exists', 'readdir', 'unlink', 'mkdir', 'readFile', 'writeFile'];
for (const functionName of functionNames) {
  sfs[functionName] = splitCallback(fs[functionName].bind(fs));
}
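After this transformation, each sfs function takes the original arguments followed by two callbacks: first the error (reject) handler, then the success (resolve) handler. Restating the call shape used below as a tiny sketch:

// read a file with split callbacks: sfs.readFile(path, onError, onData)
sfs.readFile('orig/file1.txt',
  err => console.error(err),   // reject / error callback
  data => console.log(data));  // resolve / success callback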
And this is the actual example using those functions:
function handleError(err) {
  console.error(err);
}

function initCopyDirectory(callback) {
  sfs.exists('copy', handleError, exists => {
    if (exists) {
      sfs.readdir('copy', handleError, filenames => {
        let fileCount = filenames.length;
        if (fileCount === 0) {
          callback();
        }
        for (const filename of filenames) {
          sfs.unlink(`copy/${filename}`, handleError, () => {
            fileCount--;
            if (fileCount === 0) {
              callback();
            }
          });
        }
      });
    } else {
      sfs.mkdir('copy', handleError, () => callback);
    }
  });
}

function copyFiles() {
  // sfs.readdir('orig', handleError, filenames => {
  //   for (const filename of filenames) {
  //     console.log(filename);
  //     sfs.readFile(`orig/${filename}`, handleError, data => {
  //       console.log('reading', filename);
  //       sfs.writeFile(`copy/${filename}`, data, handleError, () => {
  //         console.log('writing', filename);
  //       });
  //     });
  //   }
  // });
  sfs.readdir('orig', handleError, filenames => {
    for (const filename of filenames) {
      fs.readFile(`orig/${filename}`, (err, data) => {
        if (err) {
          handleError(err);
        } else {
          sfs.writeFile(`copy/${filename}`, data, handleError, () => {});
        }
      });
    }
  });
}

function main() {
  initCopyDirectory(copyFiles);
}

main();
As written here it works properly (using Node 7.4.0 on Windows), but when I swap the comments in the copyFiles function (thereby switching to the sfs.readFile version), only one file is copied and I get the following output:
file1.txt
file2.txt
reading file2.txt
writing file2.txt
writing file2.txt
What is the problem?
Try this instead of the commented code:
for (const filename of filenames) {
  (function(filename) {
    console.log(filename);
    sfs.readFile(`orig/${filename}`, handleError, data => {
      console.log('reading', filename);
      sfs.writeFile(`copy/${filename}`, data, handleError, () => {
        console.log('writing', filename);
      });
    });
  })(filename);
}
The problem is that you are running asynchronous functions inside a for loop and expecting them to behave synchronously. By the time sfs.writeFile is called (after sfs.readFile has finished), the for loop has long since finished executing, so you are left with only the last filename, file2. By wrapping the body of the for loop in a closure you preserve the proper values.
Here is a simpler example:
for (var i = 0; i < 10; i++) {
  setTimeout(function() {
    console.log(i)
  }, 100)
}
will print 10 ten times, because by the time the timeouts fire (after 0.1 seconds) the for loop is already done, whereas the following code prints the numbers 0 through 9 because the original values are preserved by the closure (try it yourself):
for (var i = 0; i < 10; i++) {
  (function(i) {
    setTimeout(function() {
      console.log(i)
    }, 100)
  })(i)
}
The issue was that I forgot to put const in front of the variable declarations in splitCallback. This made them global variables that kept being overwritten. Activating strict mode would have thrown an error instead. Here is the correct code:
function splitCallback(f) {
  return (...params) => {
    const input = params.slice(0, params.length - 2);
    const [reject, resolve] = params.slice(params.length - 2);
    f(...input, (err, ...output) => {
      if (err) {
        reject(err);
      } else {
        resolve(...output);
      }
    });
  };
}
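To illustrate the strict-mode remark (a minimal example of my own, not part of the answer): in strict mode, assigning to an undeclared variable throws a ReferenceError, which would have exposed the bug immediately.

'use strict';

function splitCallbackBuggy(f) {
  return (...params) => {
    // without const/let this is an assignment to an undeclared name;
    // in strict mode it throws: ReferenceError: reject is not defined
    reject = params[params.length - 2];
  };
}

splitCallbackBuggy(() => {})('a', () => {}, () => {}); // throws in strict mode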