I am putting together an example that shows how a simple synchronous Node.js program can be transformed into an asynchronous version that uses async/await. There should be several intermediate steps: first a normal callback-based version, then one that uses two callbacks per call, one for the normal (resolve) case and another for the error (reject) case, which would then lead to promises.
The job of each version is to create an empty folder named copy (which might already exist and might contain files) and to copy all files (called file1.txt and file2.txt) from the folder orig into it. If an error occurs anywhere, it should be explicitly caught, printed to the console, and the program should not continue any further.
The version with normal error-first callbacks works just fine, but I ran into an issue with the split-callback version. It only copies file2.txt, but not file1.txt.
Here is the code I use for transforming the fs functions:
const fs = require('fs');

// Give the deprecated fs.exists an error-first callback signature so it can be wrapped like the other fs functions.
fs.exists = function (path, callback) {
  fs.stat(path, (err, stats) => {
    if (err) {
      callback(null, false);
    } else {
      callback(null, true);
    }
  });
};
function splitCallback(f) {
  return (...params) => {
    reject = params[params.length - 2];
    resolve = params[params.length - 1];
    params = params.slice(0, params.length - 2);
    f(...params, (err, data) => {
      if (err) {
        reject(err);
      } else {
        resolve(data);
      }
    });
  };
}
const sfs = {};
const functionNames = ['exists', 'readdir', 'unlink', 'mkdir', 'readFile', 'writeFile'];
for (const functionName of functionNames) {
  sfs[functionName] = splitCallback(fs[functionName].bind(fs));
}
And this is the actual example using those functions:
function handleError(err) {
  console.error(err);
}

function initCopyDirectory(callback) {
  sfs.exists('copy', handleError, exists => {
    if (exists) {
      sfs.readdir('copy', handleError, filenames => {
        let fileCount = filenames.length;
        if (fileCount === 0) {
          callback();
        }
        for (const filename of filenames) {
          sfs.unlink(`copy/${filename}`, handleError, () => {
            fileCount--;
            if (fileCount === 0) {
              callback();
            }
          });
        }
      });
    } else {
      sfs.mkdir('copy', handleError, () => callback());
    }
  });
}
function copyFiles() {
  // sfs.readdir('orig', handleError, filenames => {
  //   for (const filename of filenames) {
  //     console.log(filename);
  //     sfs.readFile(`orig/${filename}`, handleError, data => {
  //       console.log('reading', filename);
  //       sfs.writeFile(`copy/${filename}`, data, handleError, () => {
  //         console.log('writing', filename);
  //       });
  //     });
  //   }
  // });
  sfs.readdir('orig', handleError, filenames => {
    for (const filename of filenames) {
      fs.readFile(`orig/${filename}`, (err, data) => {
        if (err) {
          handleError(err);
        } else {
          sfs.writeFile(`copy/${filename}`, data, handleError, () => {});
        }
      });
    }
  });
}
function main() {
  initCopyDirectory(copyFiles);
}

main();
As it is written here it works properly (using Node version 7.4.0 for Windows), but when I swap the comments in the copyFiles function (thereby changing readFile) only one file is copied and I get the following output:
file1.txt
file2.txt
reading file2.txt
writing file2.txt
writing file2.txt
What is the problem?
Try this instead of the commented code:
for (const filename of filenames) {
  (function (filename) {
    console.log(filename);
    sfs.readFile(`orig/${filename}`, handleError, data => {
      console.log('reading', filename);
      sfs.writeFile(`copy/${filename}`, data, handleError, () => {
        console.log('writing', filename);
      });
    });
  })(filename);
}
The problem is that you are running asynchronous functions inside a for loop and expecting them to behave synchronously. By the time sfs.writeFile is called (after sfs.readFile has completed) the for loop has long since finished executing, so you are left with only the last filename, file2.txt. By wrapping everything inside the for loop in a closure you preserve the proper value for each iteration.
Here is a simpler example:
for (var i = 0; i < 10; i++) {
  setTimeout(function () {
    console.log(i);
  }, 100);
}
will print 10 ten times, because by the time the timeouts execute (after 0.1 seconds) the for loop is already done, whereas the following code will print the numbers 0 through 9 because the original values are preserved by the closures. (Try it yourself.)
for (var i = 0; i < 10; i++) {
  (function (i) {
    setTimeout(function () {
      console.log(i);
    }, 100);
  })(i);
}
The issue was that I forgot to put const in front of the variable declarations in splitCallback. This made them global variables that kept being overwritten. Activating strict mode would have thrown an error instead. Here is the correct code:
function splitCallback(f) {
  return (...params) => {
    const input = params.slice(0, params.length - 2);
    const [reject, resolve] = params.slice(params.length - 2);
    f(...input, (err, ...output) => {
      if (err) {
        reject(err);
      } else {
        resolve(...output);
      }
    });
  };
}
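From here the final step to promises mentioned at the top is small. A sketch (assuming the corrected splitCallback above) that wraps each split-callback function in a Promise:

const pfs = {};
for (const functionName of functionNames) {
  // new Promise hands us (resolve, reject); the split-callback convention expects reject first
  pfs[functionName] = (...params) =>
    new Promise((resolve, reject) => sfs[functionName](...params, reject, resolve));
}

// hypothetical usage: pfs.readdir('orig').then(filenames => console.log(filenames));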
Related
This is my code, which works fine if I run it locally using a local AWS account, but it doesn't work from my dev environment. The S3.getObject API doesn't get executed and the code prints the next log, skipping the getObject call:
const unzipFromS3 = (key) => {
  return new Promise(async (resolve, reject) => {
    log.info("inside unzipfroms3");
    var zlib = require('zlib');
    // let fileName = _.replace(key, 'Root/', '');
    let options = {
      'Bucket': config.bucketName,
      'Key': "Root/" + key,
    };
    log.info("Key:", options);
    await s3.getObject(options).on('error', error => {
      log.error(error);
    }).promise().then((res) => {
      yauzl.fromBuffer(res.body, { lazyEntries: true }, function (err, zipfile) {
        log.info("Inside Yauzl");
        if (err) throw err;
        zipfile.readEntry();
        zipfile.on("entry", function (entry) {
          if (/\/$/.test(entry.fileName)) {
            zipfile.readEntry();
          } else {
            zipfile.openReadStream(entry, function (err, readStream) {
              if (err) throw err;
              // readStream.pipe(fs.createWriteStream(`result/${entry.fileName}`));
              readStream.pipe(uploadFromStream(s3));
              function uploadFromStream(s3) {
                log.info("Inside uploadFromStream");
                var pass = new Stream.PassThrough();
                let options = {
                  'Bucket': config.bucketName,
                  'Key': entry.fileName,
                };
                var params = { ...options, Body: pass };
                s3.upload(params, function (err, data) {
                  log.error(err, data);
                });
                return pass;
              }
              readStream.on("end", function () {
                zipfile.readEntry();
              });
            });
          }
        });
      });
    });
  });
};
In order to use await, i.e. the promise-based version of S3.getObject(), you must add the promise() method to your method call, as explained in the Using JavaScript Promises chapter of the AWS SDK developer guide. Moreover, there is also a Using async/await chapter that you can look into.
In your case, the code can be modified to something like:
await s3.getObject(options).promise()
  .then((res) => {
    yauzl.fromBuffer(/* more code */);
  });
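For completeness, a minimal async/await sketch of the same call that avoids the Promise-constructor wrapper (error handling simplified; config, log and the yauzl processing are assumed to be as in the question):

const unzipFromS3 = async (key) => {
  const options = { Bucket: config.bucketName, Key: 'Root/' + key };
  try {
    const res = await s3.getObject(options).promise();
    // note: the SDK returns the object contents in res.Body (capital B)
    return res.Body;
  } catch (error) {
    log.error(error);
    throw error;
  }
};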
I am creating a simple Node.js function that converts a PDF to an image > crops the image > merges them back with ImageMagick.
And this is the complete code I am using:
var os = require('os');
var fs = require('fs');
var path = require('path');
var gs = require('node-gs');
var sharp = require('sharp');
var areaMap = require('./areaMap');
const { performance } = require('perf_hooks');
var spawn = require('child_process').spawnSync;

var pExcep = 'someException';
var gsPath = 'Ghostscript/gs26';
var src = path.join(os.tmpdir(), '/');
var Files = {
  file1: path.join(src, 'out1.jpeg'),
  file2: path.join(src, 'out2.jpeg'),
  OutImg: path.join(src, 'out.jpeg')
};
var crop = function (s, sFile) {
  return new Promise((res, rej) => {
    s = areaMap[s];
    sharp(Files.OutImg).extract(s)
      .toFile(sFile)
      .then(() => res())
      .catch((err) => rej(err));
  });
};
var getBaseCard = function (s) {
  if (RegExp('^([0-9]{8})$').test(s)) { return 'SOMETHINGHERE'; } else { return 'inception'; }
  // This can be done on client side.
};

var GetCardType = function (base, sInfo) {
  return new Promise((res, rej) => {
    if (base === 'SOMETHINGHERE') {
      if (sInfo.includes('SOMETHINGHERE2')) {
        if (sInfo.includes(pExcep)) {
          res('PA_S_');
        } else {
          res('PA_S2');
        }
      } else {
        res('PA_ST');
      }
    } else {
      res('SA_');
    }
  });
};
var PdfToText = function (file, pass) {
  return new Promise((res, rej) => {
    gs()
      .batch().safer().nopause().res(2)
      .option('-dDEVICEWIDTHPOINTS=20')
      .option('-dDEVICEHEIGHTPOINTS=20')
      .option('-dFIXEDMEDIA')
      .option('-sPDFPassword=' + pass)
      .device('txtwrite').output('-').input(file)
      .executablePath(gsPath)
      .exec((err, stdout, stderr) => {
        if (!err) {
          res(stdout);
        } else {
          console.log(stdout);
          console.log(err);
          console.log(stderr);
          rej(err); // reject so a failure cannot leave the promise pending forever
        }
      });
  });
};
var getBaseImage = function (file, pass, quality) {
  return new Promise((res, rej) => {
    gs()
      .batch().nopause().safer().res(300 * quality)
      .option('-dTextAlphaBits=4')
      .option('-dGraphicsAlphaBits=4')
      .option('-sPDFPassword=' + pass)
      .executablePath(gsPath).device('jpeg').output(Files.OutImg).input(file)
      .exec((err, stdout, stderr) => {
        if (!err) { res(); } else { rej(stdout); }
      });
  });
};
exports.processCard = function (file, password, quality) {
  return new Promise((resolve, reject) => {
    getBaseImage(file, password, quality) // Convert PDF to image
      .then(() => {
        PdfToText(file, password) // Extract text from PDF
          .then((res) => {
            GetCardType(getBaseCard(password), res) // Finally get PDF type
              .then((ct) => {
                // Crop image here using sharp
                Promise.all([
                  crop(ct + 'A_' + quality, Files.file1),
                  crop(ct + 'B_' + quality, Files.file2)])
                  .then(() => {
                    // Merge the two images above into one using ImageMagick convert
                    spawn('convert', [Files.file1, Files.file2, '+append', 'files/out1.jpg']);
                    fs.unlinkSync(Files.OutImg); // Unlink tmp files
                    fs.unlinkSync(Files.file1);
                    fs.unlinkSync(Files.file2);
                    resolve(); // Finally resolve
                  }).catch((err) => reject(err));
              }).catch((err) => reject(err));
          }).catch((err) => reject(err));
      }).catch((err) => reject(err));
  });
};
And now these are the problems I am facing:
1. ImageMagick isn't creating the output file.
2. fs.unlinkSync throws ENOENT: no such file or directory, unlink '/tmp/out1.jpeg' on average every second execution.
3. Using the above code increases execution time. For example: getBaseImage should complete in 600ms but it takes 1400ms using the above code.
About speed in general, it (the complete function, not just getBaseImage) should finish in 1100-1500ms(*) on average, but the time taken is ~2500ms.
(*) The 1100-1500ms time is achievable by using function chaining, but that is hard to read and maintain for me.
I am going to use this function in Firebase Functions.
How do I properly chain these functions?
EDIT
exports.processCard = function (file, password, quality) {
  return new Promise((resolve, reject) => {
    console.log(performance.now());
    getBaseImage(file, password, quality) // Convert PDF to image
      .then(() => { return PdfToText(file, password); })
      .then((res) => { return GetCardType(getBaseCard(password), res); })
      .then((ct) => {
        return Promise.all([
          crop(ct + 'A_' + quality, Files.file1),
          crop(ct + 'B_' + quality, Files.file2)]);
      })
      .then(() => {
        spawn('convert', [Files.file1, Files.file2, '+append', 'files/out1.jpg']);
        fs.unlinkSync(Files.OutImg); // Unlink tmp files
        fs.unlinkSync(Files.file1);
        fs.unlinkSync(Files.file2);
        resolve();
      })
      .catch((err) => { console.log(err); });
  });
};
Using the above pattern didn't solve my issues here.
There's a good chance this weirdness is caused by using the file system. If I understand it correctly, the fs in cloud functions is in-memory, so when you write to it, read from it, and remove from it, you're using more and less OS memory. That can get weird if a function is called repeatedly and reuses the loaded module.
One thing to try, to keep the state clean for each invocation, is to put everything (including the requires) inside the scope of the handler. That way you instantiate everything freshly on each invocation.
Finally, you don't seem to be waiting for the spawned convert command to run; you'll need to wait for it to complete:
const convertProc = spawn('convert', [Files.file1, Files.file2, '+append', 'files/out1.jpg']);
convertProc.on('close', function () {
  fs.unlinkSync(Files.OutImg); // Unlink tmp folders
  fs.unlinkSync(Files.file1);
  fs.unlinkSync(Files.file2);
  resolve();
});
convertProc.on('error', function (error) {
  reject(error);
});
Then you wait for it to complete before you resolve.
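A sketch of that idea wrapped in a Promise (note this assumes the asynchronous spawn from child_process, not the spawnSync the question imports):

const { spawn } = require('child_process');

function convertAsync(args) {
  return new Promise((resolve, reject) => {
    const proc = spawn('convert', args);
    proc.on('error', reject); // e.g. the convert binary is not found
    proc.on('close', code => {
      if (code === 0) resolve();
      else reject(new Error('convert exited with code ' + code));
    });
  });
}

// usage: convertAsync([Files.file1, Files.file2, '+append', 'files/out1.jpg']).then(() => { /* unlink and resolve */ });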
So I have a method, which I want to call multiple times in a loop. This is the function:
function PageSpeedCall(callback) {
  var pagespeedCall = `https://www.googleapis.com/pagespeedonline/v4/runPagespeed?url=https://${websites[0]}&strategy=mobile&key=${keys.pageSpeed}`;
  // second call
  var results = '';
  https.get(pagespeedCall, resource => {
    resource.setEncoding('utf8');
    resource.on('data', data => {
      results += data;
    });
    resource.on('end', () => {
      callback(null, results);
    });
    resource.on('error', err => {
      callback(err);
    });
  });
  // callback(null, );
}
As you can see this is an async function that calls the PageSpeed API. It then gets the response thanks to the callback and renders it in the view. Now how do I get this to work in a for/while loop? For example:
function PageSpeedCall(websites, i, callback) {
  var pagespeedCall = `https://www.googleapis.com/pagespeedonline/v4/runPagespeed?url=https://${websites[i]}&strategy=mobile&key=${keys.pageSpeed}`;
  // second call
  var results = '';
  https.get(pagespeedCall, resource => {
    resource.setEncoding('utf8');
    resource.on('data', data => {
      results += data;
    });
    resource.on('end', () => {
      callback(null, results);
    });
    resource.on('error', err => {
      callback(err);
    });
  });
  // callback(null, );
}

var websites = ['google.com', 'facebook.com', 'stackoverflow.com'];

for (let i = 0; i < websites.length; i++) {
  PageSpeedCall(websites, i);
}
I want to get a report for each of these sites. The length of the array will change depending on what the user does.
I am using async.parallel to call the functions like this:
let freeReportCalls = [PageSpeedCall, MozCall, AlexaCall];

async.parallel(freeReportCalls, (err, results) => {
  if (err) {
    console.log(err);
  } else {
    res.render('reports/report', {
      title: 'Report',
      // bw: JSON.parse(results[0]),
      ps: JSON.parse(results[0]),
      moz: JSON.parse(results[1]),
      // pst: results[0],
      // mozt: results[1],
      // bw: results[1],
      al: JSON.parse(results[2]),
      user: req.user,
    });
  }
});
I tried to use promise chaining, but for some reason I cannot put it together in my head. This is my attempt.
return Promise.all([PageSpeedCall, MozCall, AlexaCall]).then(([ps, mz, al]) => {
  if (awaiting != null)
    var areAwaiting = true;
  res.render('admin/', {
    title: 'Report',
    // bw: JSON.parse(results[0]),
    ps: JSON.parse(results[0]),
    moz: JSON.parse(results[1]),
    // pst: results[0],
    // mozt: results[1],
    // bw: results[1],
    al: JSON.parse(results[2]),
    user: req.user,
  });
}).catch(e => {
  console.error(e)
});
I tried doing this:
return Promise.all([for (let i = 0; i < websites.length; i++) { PageSpeedCall(websites, i) }, MozCall, AlexaCall])
  .then(([ps, mz, al]) => {
    if (awaiting != null)
      var areAwaiting = true;
    res.render('admin/', {
      title: 'Report',
      // bw: JSON.parse(results[0]),
      ps: JSON.parse(results[0]),
      moz: JSON.parse(results[1]),
      // pst: results[0],
      // mozt: results[1],
      // bw: results[1],
      al: JSON.parse(results[2]),
      user: req.user,
    });
  }).catch(e => {
    console.error(e)
  });
But node just said it's stupid.
And this would work if I didn't want to pass the websites and the iterator into the functions. Any idea how to solve this?
To recap. So far the functions work for single websites. I'd like them to work for an array of websites.
I'm basically not sure how to call them, and how to return the responses.
It's much easier if you use fetch and async/await:
const fetch = require('node-fetch');

async function PageSpeedCall(website) {
  const pagespeedCall = `https://www.googleapis.com/pagespeedonline/v4/runPagespeed?url=https://${website}&strategy=mobile&key=${keys.pageSpeed}`;
  const result = await fetch(pagespeedCall);
  return await result.json();
}

async function callAllSites(websites) {
  const results = [];
  for (const website of websites) {
    results.push(await PageSpeedCall(website));
  }
  return results;
}

callAllSites(['google.com', 'facebook.com', 'stackoverflow.com'])
  .then(results => console.log(results))
  .catch(error => console.error(error));
Which is even better with a Promise.all:
async function callAllSites(websites) {
  return await Promise.all(websites.map(website => PageSpeedCall(website)));
}
Starting with Node 7.6.0 you can use native async/await without a flag:
async function PageSpeedCall(website) {
  var pagespeedCall = `https://www.googleapis.com/pagespeedonline/v4/runPagespeed?url=https://${website}&strategy=mobile&key=${keys.pageSpeed}`;
  return await promisify(pagespeedCall);
}

async function getResults() {
  const websites = ['google.com', 'facebook.com', 'stackoverflow.com'];
  // the map callback must be async for await to be valid inside it;
  // Promise.all collapses the resulting array of promises into one promise
  return Promise.all(websites.map(async website => {
    try {
      return await PageSpeedCall(website);
    } catch (ex) {
      // handle exception
    }
  }));
}
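A hypothetical call site for the above:

getResults().then(results => {
  console.log(results); // one PageSpeed response per website, in order
});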
Here is a helper that turns Node's http "callback" style into a promise-returning function:
function promisify(url) {
  // return new pending promise
  return new Promise((resolve, reject) => {
    // select http or https module, depending on requested url
    const lib = url.startsWith('https') ? require('https') : require('http');
    const request = lib.get(url, (response) => {
      // handle http errors
      if (response.statusCode < 200 || response.statusCode > 299) {
        reject(new Error('Failed to load page, status code: ' + response.statusCode));
      }
      // temporary data holder
      const body = [];
      // on every content chunk, push it to the data array
      response.on('data', (chunk) => body.push(chunk));
      // we are done, resolve promise with those joined chunks
      response.on('end', () => resolve(body.join('')));
    });
    // handle connection errors of the request
    request.on('error', (err) => reject(err));
  });
}
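Used on its own, the helper could be exercised like this (hypothetical URL):

promisify('https://www.google.com')
  .then(body => console.log(body.length))
  .catch(err => console.error(err));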
Make PageSpeedCall a promise and push that promise to an array as many times as you need, e.g. myArray.push(PageSpeedCall(foo)), then myArray.push(PageSpeedCall(foo2)) and so on. Then you Promise.all the array.
If subsequent async calls require the result of a prior async call, that is what .then is for.
Promise.all()
Promise.all([promise1, promise2, promise3]).then(function (values) {
  console.log(values);
});
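Applied to the functions in the question, that pattern would look roughly like this (assuming PageSpeedCall returns a promise, as in the previous answers):

const myArray = [];
for (const website of websites) {
  // one pending promise per site; nothing is awaited yet
  myArray.push(PageSpeedCall(website));
}
Promise.all(myArray).then(results => {
  console.log(results); // resolved values, in the same order as websites
});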
I'm trying to write an asynchronous function to create some user directories with Node.js.
I would like the callback to be executed with a response containing the status of the operations for later processing. But the object is not being logged from within the for loop and the second mkdir. Also, the subdirectory name is logged as the same thing each time, even though all the directories are created correctly?
I have been looking for a while at how to solve this. I think it's due to closures and needs an IIFE? I am totally lost now on how to solve it. Can anyone help point me in the right direction please?
Here is my code:
const fs = require('fs');
const path = require('path');

var showSettings = {
  "userDirectories": ["shows", "uploads", "backups", "logs"],
  "showsFolder": "shows"
};

var Files = function () {};

Files.prototype.makeUserDirectories = (username, callback) => {
  let directory = (path.join(__dirname, "../users", username));
  let response = {};
  fs.mkdir(directory, err => {
    if (err) {
      response.status = "ERROR";
      response.error = err;
      console.log('failed to create directory', err);
    } else {
      console.log(`creating directory ${directory} succeeded`);
      let subdirectory = "";
      for (let i = 0; i < showSettings.userDirectories.length; i++) {
        subdirectory = (path.join(__dirname, "../users", username, showSettings.userDirectories[i]));
        fs.mkdir(subdirectory, err => {
          if (err) {
            response.status = "ERROR";
            response.error = err;
            console.log('error creating subdirectory', err);
          } else {
            response.status = "OK";
            console.log(`creating directory ${subdirectory} succeeded`);
          }
        });
      }
      console.log(response);
    }
    if (callback && typeof (callback) === "function") {
      console.log(response);
      callback(response);
    }
  });
};

var testFiles = new Files();

testFiles.makeUserDirectories("mr.test#somedomain.com", function (data) {
  console.log("in callback function");
  console.log(data);
});
My problem is that the response object returned to the callback is empty.
I think it's something to do with the for loop and an IIFE, but I am not entirely sure how to do this, or whether there is a better way to achieve what I am trying to do?
Many thanks!
Your issue is that you're trying to execute your callback before your asynchronous operations have completed. Asynchronous operations can be very complicated, and there are many libraries to make things simpler, many based on the concept of Promises. These are objects that allow you to chain multiple operations together, at the cost of some overhead. I would highly recommend using Promises to produce a more intuitive function:
const fs = require('fs');
const path = require('path');

var showSettings = {
  "userDirectories": ["shows", "uploads", "backups", "logs"],
  "showsFolder": "shows"
};

var Files = function () {};

function mkdir(path) {
  return new Promise((resolve, reject) => {
    fs.mkdir(path, (err) => {
      if (err) {
        reject(err);
      } else {
        resolve("OK");
      }
    });
  });
}

Files.prototype.makeUserDirectories = (username, callback) => {
  let directory = (path.join(__dirname, "../users", username));
  return mkdir(directory).then(() => {
    console.log(`creating directory ${directory} succeeded`);
    let subdirectory = "";
    const operations = [];
    for (let i = 0; i < showSettings.userDirectories.length; i++) {
      subdirectory = (path.join(__dirname, "../users", username, showSettings.userDirectories[i]));
      operations.push(mkdir(subdirectory));
    }
    return Promise.all(operations);
  }).then((status) => { // this will not be called until all operations complete
    console.log({status});
    if (callback && typeof (callback) === "function") {
      callback({status});
    }
  }).catch((error) => { // this will be called if an error is encountered at any point
    callback({status: 'ERROR', error});
  });
};

var testFiles = new Files();

testFiles.makeUserDirectories("mr.test#somedomain.com", function (data) {
  console.log("in callback function");
  console.log(data);
});
EDIT: Updated with a cleaner implementation.
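As an aside, on Node 8 and later the hand-rolled mkdir wrapper could be replaced with util.promisify; a sketch (note the resolved value differs slightly, since fs.mkdir resolves with undefined rather than "OK"):

const util = require('util');
const mkdirPromise = util.promisify(fs.mkdir);
// mkdirPromise(directory) now returns a Promise, like the wrapper above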
In an attempt to grasp Q.js, I'd like to convert the following code, which uses async.series, to Q.js. Basically I create a folder if it doesn't exist (using mkdirp), move a file into a backup folder and save a file into a main folder.
var async = require('async');
var fs = require('fs');
var path = require('path');

var sessiondId = new Date().getTime() % 2 == 0 ? new Date().getTime().toString() : '_1234';
var backupFolder = path.join(__dirname, sessiondId);
var backupFullPath = path.join(backupFolder, 'a.txt');
var fullPath = path.join(__dirname, 'main', 'a.txt');
var mkdirp = require('mkdirp');

async.series({
  createOrSkip: function (callback) {
    mkdirp(backupFolder, function (err, dir) {
      if (err) {
        callback(err, null);
      } else {
        callback(null, {created: !!dir, folderAt: backupFolder});
      }
    });
  },
  move: function (callback) {
    fs.rename(fullPath, backupFullPath, function (err) {
      if (err) {
        callback(err, null);
      } else {
        callback(null, {backupAt: backupFullPath});
      }
    });
  },
  write: function (callback) {
    fs.writeFile(fullPath, 'abc', function (err) {
      if (err) {
        callback(err, null);
      } else {
        callback(null, {saveAt: fullPath});
      }
    });
  }
}, function (err, result) {
  console.log(result);
});
Actually I don't know where to start. Thanks for your help.
R.
The key is to convert the node.js functions to return promises using Q.denodeify before you start; this means the header of your file should look like:
var Q = require('q')
var fs = require('fs');
var path = require('path');
var sessiondId = new Date().getTime() % 2 == 0 ? new Date().getTime().toString() : '_1234';
var backupFolder = path.join(__dirname,sessiondId);
var backupFullPath = path.join(backupFolder,'a.txt');
var fullPath = path.join(__dirname,'main','a.txt');
var mkdirp = Q.denodeify(require('mkdirp'));
var rename = Q.denodeify(fs.rename);
var writeFile = Q.denodeify(fs.writeFile);
That change wouldn't be needed if node.js natively supported promises.
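Conceptually, Q.denodeify does something like the following; a simplified sketch using a native Promise (the real Q.denodeify returns a Q promise and handles some edge cases):

function denodeify(f) {
  return function (...args) {
    return new Promise(function (resolve, reject) {
      // append a node-style callback and translate it into resolve/reject
      f(...args, function (err, result) {
        if (err) reject(err);
        else resolve(result);
      });
    });
  };
}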
Option 1
// createOrSkip
mkdirp(backupFolder)
  .then(function (dir) {
    // move
    return rename(fullPath, backupFullPath);
  })
  .then(function () {
    // write
    return writeFile(fullPath, 'abc');
  })
  .done(function () {
    console.log('operation complete');
  });
I don't think it gets much simpler than that. Like @Bergi said though, it's more similar to "waterfall". If you want the exact behavior of series (but with promises) you'll have to use something like Option 2 or Option 3.
Option 2
You could write out the code manually to save the results. I usually find that, although this requires a little extra writing, it's by far the easiest to read:
var result = {};

mkdirp(backupFolder)
  .then(function (dir) {
    result.createOrSkip = {created: !!dir, folderAt: backupFolder};
    return rename(fullPath, backupFullPath);
  })
  .then(function () {
    result.move = {backupAt: backupFullPath};
    return writeFile(fullPath, 'abc');
  })
  .then(function () {
    result.write = {saveAt: fullPath};
    return result;
  })
  .done(function (result) {
    console.log(result);
  });
Option 3
If you find yourself using this sort of code all the time, you could write a very simple series helper (I've never found the need to do this personally):
function promiseSeries(series) {
  var ready = Q(null);
  var result = {};
  Object.keys(series)
    .forEach(function (key) {
      ready = ready.then(function () {
        return series[key]();
      }).then(function (res) {
        result[key] = res;
      });
    });
  return ready.then(function () {
    return result;
  });
}

promiseSeries({
  createOrSkip: function () {
    return mkdirp(backupFolder).then(function (dir) {
      return {created: !!dir, folderAt: backupFolder};
    });
  },
  move: function () {
    return rename(fullPath, backupFullPath)
      .thenResolve({backupAt: backupFullPath});
  },
  write: function () {
    return writeFile(fullPath, 'abc')
      .thenResolve({saveAt: fullPath});
  }
}).done(function (result) {
  console.log(result);
});
I'd say once you've written the helper, the code is a lot clearer for promises than with all the error handling cruft required to work with callbacks. I'd say it's clearer still when you either write it by hand or don't keep track of all those intermediate results.
Summing Up
You may or may not think these examples are clearer than the async.series version. Consider how well you might know that function though. It's actually doing something pretty complex in a very opaque manner. I initially assumed that only the last result would be returned (a la waterfall) and had to look it up in the documentation of async. I almost never have to look something up in the documentation of a Promise library.
Make each of your functions return a promise. Construct them with a Deferred:
function createOrSkip(folder) {
  var deferred = Q.defer();
  mkdirp(folder, function (err, dir) {
    if (err) {
      deferred.reject(err);
    } else {
      deferred.resolve({created: !!dir, folderAt: backupFolder});
    }
  });
  return deferred.promise;
}
However, there are helper functions for node-style callbacks so that you don't need to check for the err yourself every time. With Q.nfcall it becomes:
function createOrSkip(folder) {
  return Q.nfcall(mkdirp, folder).then(function transform(dir) {
    return {created: !!dir, folderAt: backupFolder};
  });
}
The transform function will map the result (dir) to the object you expect.
If you have done this for all your functions, you can chain them with then:
createOrSkip(backupfolder).then(function (createResult) {
  return move(fullPath, backupFullPath);
}).then(function (moveResult) {
  return write(fullPath, 'abc');
}).then(function (writeResult) {
  console.log("I'm done");
}, function (err) {
  console.error("Something has failed:", err);
});
Notice that this works like async's waterfall, not series, i.e. the intermediate results will be lost. To achieve that, you would need to nest them:
createOrSkip(backupfolder).then(function (createResult) {
  return move(fullPath, backupFullPath).then(function (moveResult) {
    return write(fullPath, 'abc').then(function (writeResult) {
      return {
        createOrSkip: createResult,
        move: moveResult,
        write: writeResult
      };
    });
  });
}).then(function (res) {
  console.log(res);
}, function (err) {
  console.error("Something has failed:", err);
});