Node.js handling successive asynchronous operations - javascript

I'm trying to write an asynchronous function to create some user directories with Node.js.
I would like the callback to be executed with a response object containing the status of the operations for later processing. But the object is not being populated when it is logged from within the for loop and the second mkdir. Also, the subdirectory name is logged as the same value each time, even though all the directories are created correctly.
I have been looking for a while at how to solve this. I think it's due to closures and needs an IIFE, but I'm totally lost on how to solve it. Can anyone point me in the right direction, please?
Here is my code:
const fs = require('fs');
const path = require('path');
var showSettings = {
"userDirectories": ["shows", "uploads", "backups", "logs"],
"showsFolder": "shows"
};
var Files = function() {};
Files.prototype.makeUserDirectories = (username, callback) => {
let directory = (path.join(__dirname, "../users", username));
let response = {};
fs.mkdir(directory, err => {
if (err) {
response.status = "ERROR";
response.error = err;
console.log('failed to create directory', err);
} else {
console.log(`creating directory ${directory} succeeded`);
let subdirectory = "";
for (let i = 0; i < showSettings.userDirectories.length; i++) {
subdirectory = (path.join(__dirname, "../users", username, showSettings.userDirectories[i]));
fs.mkdir(subdirectory, err => {
if (err) {
response.status = "ERROR";
response.error = err;
console.log('error creating subdirectory', err);
} else {
response.status = "OK";
console.log(`creating directory ${subdirectory} succeeded`);
};
});
}
console.log(response);
}
if (callback && typeof(callback) === "function") {
console.log(response);
callback(response);
}
});
};
var testFiles = new Files();
testFiles.makeUserDirectories("mr.test@somedomain.com", function(data) {
console.log("in callback function");
console.log(data);
});
My problem is that the response object returned to the callback is empty.
I think it's something to do with the for loop and an IIFE, but I am not entirely sure how to do this or whether there is a better way to achieve what I am trying to do.
Many thanks!

Your issue is that you're trying to execute your callback before your asynchronous operations have completed. Asynchronous operations can be very complicated, and there are many libraries to make things simpler, many based on the concept of Promises. These are objects that allow you to chain multiple operations together, with the cost of more overhead. I would highly recommend using Promises to produce a more intuitive function:
const fs = require('fs');
const path = require('path');
var showSettings = {
"userDirectories": ["shows", "uploads", "backups", "logs"],
"showsFolder": "shows"
};
var Files = function() {};
function mkdir(path) {
return new Promise((resolve, reject) => {
fs.mkdir(path, (err) => {
if(err) {
reject(err);
} else {
resolve("OK");
}
})
});
}
Files.prototype.makeUserDirectories = (username, callback) => {
let directory = (path.join(__dirname, "../users", username));
return mkdir(directory).then(() => {
console.log(`creating directory ${directory} succeeded`);
let subdirectory = "";
const operations = [];
for (let i = 0; i < showSettings.userDirectories.length; i++) {
subdirectory = (path.join(__dirname, "../users", username, showSettings.userDirectories[i]));
operations.push(mkdir(subdirectory));
}
return Promise.all(operations);
}).then((status) => { // this will not be called until all operations complete
console.log({status});
if (callback && typeof(callback) === "function") {
callback({status});
}
}).catch((error) => { // this will be called if an error is encountered at any point
callback({status: 'ERROR', error});
})
};
var testFiles = new Files();
testFiles.makeUserDirectories("mr.test@somedomain.com", function(data) {
console.log("in callback function");
console.log(data);
});
EDIT: Updated with a cleaner implementation.
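As a side note (not from the original answer): on Node 8 or newer, the hand-written mkdir wrapper above can be produced with util.promisify instead. A minimal sketch, with a hypothetical target path:
const util = require('util');
const fs = require('fs');

// Equivalent to the mkdir() helper defined above, except it resolves
// with undefined rather than the string "OK".
const mkdir = util.promisify(fs.mkdir);

mkdir('/tmp/example-dir') // hypothetical path
  .then(() => console.log('created'))
  .catch((err) => console.error('failed to create directory', err));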

Related

Async/await in a for loop in NodeJS not blocking the loop execution

I know that an old-school for loop works in the traditional way: it waits for the await to finish before moving on.
But in my use case, I need to read a file from local disk or S3 and process it line by line, and for each line I need to call an external API.
Generally I use await inside the loop because everything runs inside a Lambda and I don't want to exhaust its memory by running it all in parallel.
Here I am reading the file using stream.on(), and in order to use await inside the handler I need to mark it async, like so:
stream.on('data',async () =>{
while(data=stream.read()!==null){
console.log('line');
const requests = getRequests(); // sync code,no pblms
for(let i=0;i<requests.length;i++){
const result = await apiCall(requests[i]);
console.log('result from api')
const finalResult = await anotherapiCall(result.data);
}
}
});
This works, but the order in which the lines are processed is not guaranteed. I need them processed strictly in order. Any help?
Complete Code
async function processSOIFileLocal (options, params) {
console.log('Process SOI file');
const readStream = byline.createStream(fs.createReadStream(key));
readStream.setEncoding('utf8');
const pattern = /^UHL\s|^UTL\s/;
const regExp = new RegExp(pattern);
readStream.on('readable', () => {
let line;
while (null !== (line = readStream.read())) {
if (!regExp.test(line.toString())) {
totalRecordsCount++;
dataObject = soiParser(line);
const { id } = dataObject;
const XMLRequests = createLoSTRequestXML(
options,
{ mapping: event.mapping, row: dataObject }
);
console.log('Read line');
console.log(id);
try {
for (let i = 0;i < XMLRequests.length;i++) {
totalRequestsCount++;
console.log('Sending request');
const response = await sendLoSTRequest(
options,
{ data: XMLRequests[i],
url: LOST_URL }
);
console.log("got response");
const responseObj = await xml2js.
parseStringPromise(response.data);
if (Object.keys(responseObj).indexOf('errors') !== -1) {
fs.writeFileSync(`${ERR_DIR}/${generateKey()}-${id}.xml`, response.data);
failedRequestsCount++;
} else {
successRequestsCount++;
console.log('Response from the Lost Server');
console.log(response.data);
}
}
} catch (err) {
console.log(err);
}
}
}
})
.on('end', () => {
console.log('file processed');
console.log(`
************************************************
Total Records Processed:${totalRecordsCount}
Total Requests Sent: ${totalRequestsCount}
Success Requests: ${successRequestsCount}
Failed Requests: ${failedRequestsCount}
************************************************
`);
});
}
async function sendLoSTRequest (options, params) {
const { axios } = options;
const { url, data } = params;
if (url) {
return axios.post(url, data);
// eslint-disable-next-line no-else-return
} else {
console.log('URL is not found');
return null;
}
}
The code needs to flow like so:
read a line
process the line and transform it into an array of two members
for every member, call the API and do stuff
once the line is complete, look for the next line, all done in order
UPDATE: I've got a workaround, but the stream's 'end' handler fires without waiting for the queued processing to finish.
async function processSOIFileLocal (options, params) {
console.log('Process SOI file');
const { ERR_DIR, fs, xml2js, LOST_URL, byline, event } = options;
const { key } = params;
const responseObject = {};
let totalRecordsCount = 0;
let totalRequestsCount = 0;
let failedRequestsCount = 0;
let successRequestsCount = 0;
let dataObject = {};
const queue = (() => {
let q = Promise.resolve();
return fn => (q = q.then(fn));
})();
const readStream = byline.createStream(fs.createReadStream(key));
readStream.setEncoding('utf8');
const pattern = /^UHL\s|^UTL\s/;
const regExp = new RegExp(pattern);
readStream.on('readable', () => {
let line;
while (null !== (line = readStream.read())) {
if (!regExp.test(line.toString())) {
totalRecordsCount++;
dataObject = soiParser(line);
const { id } = dataObject;
const XMLRequests = createLoSTRequestXML(
options,
{ mapping: event.mapping, row: dataObject }
);
// eslint-disable-next-line no-loop-func
queue(async () => {
try {
for (let i = 0;i < XMLRequests.length;i++) {
console.log('Sending request');
console.log(id);
totalRequestsCount++;
const response = await sendLoSTRequest(
options,
{ data: XMLRequests[i],
url: LOST_URL }
);
console.log('got response');
const responseObj = await xml2js.
parseStringPromise(response.data);
if (Object.keys(responseObj).indexOf('errors') !== -1) {
// console.log('Response have the error:');
// await handleError(options, { err: responseObj, id });
failedRequestsCount++;
fs.writeFileSync(`${ERR_DIR}/${generateKey()}-${id}.xml`, response.data);
} else {
console.log('Response from the Lost Server');
console.log(response.data);
successRequestsCount++;
}
}
} catch (err) {
console.log(err);
}
});
}
}
})
.on('end', () => {
console.log('file processed');
console.log(`
************************************************
Total Records Processed:${totalRecordsCount}
Total Requests Sent: ${totalRequestsCount}
Success Requests: ${successRequestsCount}
Failed Requests: ${failedRequestsCount}
************************************************
`);
Object.assign(responseObject, {
failedRequestsCount,
successRequestsCount,
totalRecordsCount,
totalRequestsCount
});
});
}
Thank You
The sample code at the top of your question could be rewritten like this:
const queue = (() => {
let q = Promise.resolve();
return (fn) => (q = q.then(fn));
})();
stream.on('data', async() => {
while ((data = stream.read()) !== null) {
console.log('line');
const requests = getRequests(); // sync code,no pblms
queue(async () => {
for (let i = 0; i < requests.length; i++) {
const result = await apiCall(requests[i]);
console.log('result from api');
const finalResult = await anotherapiCall(result.data);
}
});
}
});
Hopefully the same pattern will be useful for the complete code.
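One detail the updated code in the question runs into is that the 'end' handler fires before the queued requests have finished. Since queue() chains every callback onto the same promise, one option (a sketch, not from the original answer; it reuses readStream and the counters from the question's update) is to queue the summary itself so it only runs after all pending work completes:
readStream.on('end', () => {
  // queue() appends to the same promise chain, so this block runs
  // only after every previously queued block has finished.
  queue(async () => {
    console.log('file processed');
    console.log(`Total Records Processed: ${totalRecordsCount}`);
    console.log(`Total Requests Sent: ${totalRequestsCount}`);
  });
});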
If anyone wants a solution for processing the file sequentially, i.e. reading it line by line and executing some async call per line, it is worth looking at Node's built-in stream Transform. There we can write a transform function and only invoke its callback once the async work has finished, which should help anyone facing this issue; a sketch follows below.
Through2 is a small npm library that can also be used for the same purpose.
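As a rough illustration of that suggestion (not from the original answer): the sketch below pairs the byline splitter used in the question with a Transform whose callback is only called after the async work for the current line resolves, so lines are handled strictly in order. processLine and the input file name are hypothetical placeholders.
const fs = require('fs');
const { Transform } = require('stream');
const byline = require('byline'); // line-splitting stream, as used in the question

// Hypothetical async handler for one line; stands in for the API calls.
async function processLine(line) {
  console.log('processed', line.toString());
}

const lineProcessor = new Transform({
  transform(line, encoding, done) {
    // The next line is not delivered until done() is called,
    // so each line finishes before the next one starts.
    processLine(line)
      .then(() => done())
      .catch(done);
  }
});

byline.createStream(fs.createReadStream('input.txt')) // hypothetical file
  .pipe(lineProcessor)
  .on('finish', () => console.log('file processed'));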

Node FTP download files from one server and upload to another server

I've been looking around for a while now and this is the only resource I've found on the internet related to my problem. I'm trying to download files from one FTP server and then upload them to another FTP server, one by one, using promises and without having to save the files locally during the process.
First I'm calling client.list() recursively from the ftp module to get an array of the file paths I need to download from the source FTP server. This works fine.
getRecursively(client, path) {
var _this = this;
let downloadList = [];
let paths = [];
let promise = new Promise((resolve, reject) => {
client.list(path, function(err, list) {
async function loop() {
for (var i = 0; i < list.length; i++) {
if (list[i].type == 'd') {
let _list = await _this.getRecursively(client, path + '/' + list[i].name)
downloadList = downloadList.concat(_list);
} else {
if ( list[i].name.match(/\.(jpg|jpeg)$/i) ) {
downloadList.push({path: path, name: list[i].name});
}
}
}
console.log("One complete");
resolve(downloadList);
}
loop();
})
})
return promise;
}
Next, I'm looping through the list of file paths and sending off promises which are throttled using the es6-promise-pool module; right now the concurrency limit is set to 10.
This is what each promise looks like:
getAndInsert(file) {
let _this = this;
let promise = new Promise((resolve, reject) => {
let c = new Client();
c.on('ready', () => {
let d = new Client();
d.on('ready', () => {
c.get(file.path + '/' + file.name, function(err, stream) {
if (err) {console.log(err); console.log("FILE NAME: " + file.name)}
d.put(stream.pipe(passThrough()), '/images/' + file.name, function() {
_this.uploadCount += 1;
_this.uploadedImages.push(file.name)
console.log(_this.uploadCount + '/' + _this._list.length + " uploaded.")
c.end();
d.end();
resolve(true);
});
});
})
d.on('error', (err) => {
if (err) console.log(err);
_this.onCompleteCallback();
})
d.connect(destinationFTP);
})
c.on('error', (err) => {
if (err) console.log(err);
_this.onCompleteCallback();
})
c.connect(sourceFTP);
})
return promise;
}
Each promise makes its own connection to the source and destination FTP servers. I'm also using the stream module's Transform object when I call d.put(stream.pipe(passThrough())). Here is that function:
const passThrough = () => {
var passthrough = new Transform();
passthrough._transform = function(data, encoding, done) {
this.push(data);
done();
};
return passthrough;
}
Finally, here is the main code that fires off the promises.
*buildPromises(list) {
for (let i = 0; i < list.length; i++) {
yield this.getAndInsert(list[i]);
}
}
let iterator = _this.buildPromises(list);
var pool = new PromisePool(iterator, 10);
pool.start()
.then(function(){
console.log("Finished")
}).catch((err) => {
console.log(err);
console.log("error processing pool promise");
})
This will go through and build the list just fine; however, when I send the promises off, I'm getting the following error:
Error: write after end
at writeAfterEnd (_stream_writable.js:236:12)
at Transform.Writable.write (_stream_writable.js:287:5)
at Socket.ondata (_stream_readable.js:639:20)
at emitOne (events.js:116:13)
at Socket.emit (events.js:211:7)
at Socket.Readable.read (_stream_readable.js:475:10)
at flow (_stream_readable.js:846:34)
at Transform.<anonymous> (_stream_readable.js:707:7)
at emitNone (events.js:106:13)
It might make it through about 5 (sometimes more) and then error out, but it fails pretty consistently. I've also noticed I sometimes get similar errors saying 'The file is already in use', even though each file I'm uploading has a unique name. Any help is appreciated, and if you need any more info I'll do my best to provide it. Thank you.
So I found a solution. In my getAndInsert() function I was doing:
c.get(file.path + '/' + file.name, function(err, stream) {
if (err) {console.log(err); console.log("FILE NAME: " + file.name)}
d.put(stream.pipe(passThrough()), '/images/' + file.name, function() {
_this.uploadCount += 1;
_this.uploadedImages.push(file.name)
console.log(_this.uploadCount + '/' + _this._list.length + " uploaded.")
c.end();
d.end();
resolve(true);
});
});
The problem resided in stream.pipe(passThrough()). It seemed as if I was trying to write after the stream had already ended. This is what solved my problem:
let chunks = [];
stream.on('data', (chunk) => {
chunks.push(chunk);
})
stream.on('end', () => {
d.put(Buffer.concat(chunks), '/images/' + file.name, function() {
_this.uploadCount += 1;
_this.uploadedImages.push(file.name)
console.log(_this.uploadCount + '/' + _this._list.length + " uploaded.")
c.end();
d.end();
resolve(true);
});
})
When new data is available from the stream, push it onto an array called chunks. When the stream has finished, call .put and pass in Buffer.concat(chunks).
Hope this helps anyone going through a similar problem.
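For reference, the same buffering idea can be factored into a small promise-returning helper (a sketch based on the code above, not part of the original answer):
const streamToBuffer = (stream) =>
  new Promise((resolve, reject) => {
    const chunks = [];
    stream.on('data', (chunk) => chunks.push(chunk));
    stream.on('end', () => resolve(Buffer.concat(chunks)));
    stream.on('error', reject);
  });

// Usage inside getAndInsert(), with the same c/d clients and file object:
// streamToBuffer(stream).then((buffer) => {
//   d.put(buffer, '/images/' + file.name, () => { c.end(); d.end(); resolve(true); });
// });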

Jest TypeError: function not found

I am trying to run a test with jest, except it cannot find my function. I am exporting multiple functions from one file using module.exports = {}.
Here is my test file:
const queries = ('../config/queries');
test('Check Activation Code. -- checkActivationCode(test@gmail.com, 0123456789) ', () => {
let email = 'test%40gmail.com';
let code = 1234567890;
let result = "";
queries.checkActivationCode(email, code, function(error, result) {
if (error) result = false;
else result = true;
});
expect(result).toBe(true);
});
My file structure is as follows: /config/queries/index.js. I am able to access the functions from my other files just fine.
Here is my queries/index.js file:
module.exports = {
checkActivationCode: function(email, code, callback) {
pool.getConnection(function(error, connection) {
connection.query('SELECT code FROM confirmation WHERE email = ?', [email.toLowerCase()], function(error, results) {
if (error) callback(error);
try {
if (results[0].code === code) {
callback(null, true);
} else {
callback(null, false);
}
} catch (e) {
callback(null, false);
}
});
connection.release();
});
}
}
I forgot to require my queries.
const queries = ('../config/queries');
to
const queries = require('../config/queries');
Simple typo. Thanks @BenceGedai for catching that.
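As a side note (not part of the original answer): even with the require fixed, the expect() in the test above runs before the asynchronous callback fires. A minimal sketch of one way to wait for the callback, using Jest's done parameter and the same names from the question:
const queries = require('../config/queries');

test('Check Activation Code', (done) => {
  const email = 'test@gmail.com';
  const code = 1234567890;
  queries.checkActivationCode(email, code, (error, result) => {
    // Assert inside the callback, then tell Jest the test is finished.
    expect(error).toBeNull();
    expect(result).toBe(true);
    done();
  });
});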

Splitting a node-style callback into two callbacks

I am putting together an example that shows how a simple synchronous Node.js program can be transformed into an asynchronous version that uses async/await. There are several intermediate steps, starting with a normal callback-based version, followed by one that uses two callbacks, one for the normal (resolve) case and another for the error (reject) case, which then leads on to promises.
The job of each version is to create an empty folder called copy (which might already exist and might contain files) and to copy all files (called file1.txt and file2.txt) from the folder orig into it. If an error occurs anywhere, it should be explicitly caught and printed to the console, and the program should not continue any further.
The version with normal error-first callbacks works just fine, but I ran into an issue with the split-callback version. It only copies file2.txt, but not file1.txt.
Here is the code I use for transforming the fs-functions:
const fs = require('fs');
fs.exists = function(path, callback) {
fs.stat(path, (err, stats) => {
if (err) {
callback(null, false);
} else {
callback(null, true);
}
});
};
function splitCallback(f) {
return (...params) => {
reject = params[params.length - 2];
resolve = params[params.length - 1];
params = params.slice(0, params.length - 2);
f(...params, (err, data) => {
if (err) {
reject(err);
} else {
resolve(data);
}
});
};
}
const sfs = {};
const functionNames = ['exists', 'readdir', 'unlink', 'mkdir', 'readFile', 'writeFile'];
for (const functionName of functionNames) {
sfs[functionName] = splitCallback(fs[functionName].bind(fs));
}
And this is the actual example using those functions:
function handleError(err) {
console.error(err);
}
function initCopyDirectory(callback) {
sfs.exists('copy', handleError, exists => {
if (exists) {
sfs.readdir('copy', handleError, filenames => {
let fileCount = filenames.length;
if (fileCount === 0) {
callback();
}
for (const filename of filenames) {
sfs.unlink(`copy/${filename}`, handleError, () => {
fileCount--;
if (fileCount === 0) {
callback();
}
});
}
});
} else {
sfs.mkdir('copy', handleError, () => callback);
}
});
}
function copyFiles() {
// sfs.readdir('orig', handleError, filenames => {
// for (const filename of filenames) {
// console.log(filename);
// sfs.readFile(`orig/${filename}`, handleError, data => {
// console.log('reading', filename);
// sfs.writeFile(`copy/${filename}`, data, handleError, () => {
// console.log('writing', filename);
// });
// });
// }
// });
sfs.readdir('orig', handleError, filenames => {
for (const filename of filenames) {
fs.readFile(`orig/${filename}`, (err, data) => {
if (err) {
handleError(err);
} else {
sfs.writeFile(`copy/${filename}`, data, handleError, () => {});
}
});
}
});
}
function main() {
initCopyDirectory(copyFiles);
}
main();
As it is written here it works properly (using Node version 7.4.0 for Windows), but when I swap the comments in the copyFiles function (thereby switching from fs.readFile to sfs.readFile) only one file is copied and I get the following output:
file1.txt
file2.txt
reading file2.txt
writing file2.txt
writing file2.txt
What is the problem?
Try this instead of the commented code:
for (const filename of filenames) {
(function(filename){
console.log(filename);
sfs.readFile(`orig/${filename}`, handleError, data => {
console.log('reading', filename);
sfs.writeFile(`copy/${filename}`, data, handleError, () => {
console.log('writing', filename);
});
});
})(filename)
}
The problem is that you are running asynchronous functions inside a for loop and expecting them to behave synchronously. By the time sfs.writeFile is called (after sfs.readFile completes) the for loop has long since finished executing, so you are left with only the last filename, file2.txt. By wrapping everything inside the for loop in a closure you preserve the proper values.
Here is a simpler example:
for (var i = 0; i < 10 ; i++) {
setTimeout(function(){
console.log(i)
}, 100)
}
will print 10 ten times, because by the time the timeout executes (0.1 seconds later) the for loop is already done, whereas the following code will print the numbers 0 through 9 because the original values are preserved by the closure. (Try it yourself.)
for (var i = 0; i < 10 ; i++) {
(function(i){
setTimeout(function(){
console.log(i)
}, 100)
})(i)
}
The issue was that I forgot to put const in front of the variable declarations in splitCallback. This made them global variables that kept being overridden. Activating strict mode would have thrown an error instead. Here is the correct code:
function splitCallback(f) {
return (...params) => {
const input = params.slice(0, params.length - 2);
const [reject, resolve] = params.slice(params.length - 2);
f(...input, (err, ...output) => {
if (err) {
reject(err);
} else {
resolve(...output);
}
});
};
}

Q.js: How can I rewrite an async series flow in Q.js?

In an attempt to grasp Q.js, I'd like to convert the following code, which uses async.series, to Q.js. Basically I create a folder if it doesn't exist (using mkdirp), move a file into a backup folder, and save a file into a main folder.
var async = require('async');
var fs = require('fs');
var path = require('path');
var sessiondId = new Date().getTime() % 2 == 0 ? new Date().getTime().toString() : '_1234';
var backupFolder = path.join(__dirname,sessiondId);
var backupFullPath = path.join(backupFolder,'a.txt');
var fullPath = path.join(__dirname,'main','a.txt');
var mkdirp = require('mkdirp');
async.series({
createOrSkip: function(callback) {
mkdirp(backupFolder, function (err, dir) {
if(err) {
callback(err, null);
} else {
callback(null, {created: !!dir, folderAt: backupFolder});
}
});
},
move: function(callback) {
fs.rename(fullPath, backupFullPath, function(err) {
if(err) {
callback(err, null);
} else {
callback(null, {backupAt: backupFullPath});
}
});
},
write: function(callback) {
fs.writeFile(fullPath, 'abc', function(err) {
if (err) {
callback(err, null);
} else {
callback(null, {saveAt: fullPath});
}
});
}
}, function(err, result) {
console.log(result);
});
Actually I don't know where to start. Thanks for your help.
R.
The key is to convert the Node.js functions to return promises using Q.denodeify before you start. This means the header of your file should look like this:
var Q = require('q')
var fs = require('fs');
var path = require('path');
var sessiondId = new Date().getTime() % 2 == 0 ? new Date().getTime().toString() : '_1234';
var backupFolder = path.join(__dirname,sessiondId);
var backupFullPath = path.join(backupFolder,'a.txt');
var fullPath = path.join(__dirname,'main','a.txt');
var mkdirp = Q.denodeify(require('mkdirp'));
var rename = Q.denodeify(fs.rename);
var writeFile = Q.denodeify(fs.writeFile);
That change wouldn't be needed if node.js natively supported promises.
Option 1
// createOrSkip
mkdirp(backupFolder)
.then(function (dir) {
// move
return rename(fullPath, backupFullPath);
})
.then(function () {
// write
return writeFile(fullPath, 'abc');
})
.done(function () {
console.log('operation complete')
});
I don't think it gets much simpler than that. Like @Bergi said though, it's more similar to "waterfall". If you want the exact behavior of series (but with promises) you'll have to use something like Option 2 or Option 3.
Option 2
You could write out the code manually to save the results. I usually find that, although this requires a little extra writing, it's by far the easiest to read:
var result = {}
mkdirp(backupFolder)
.then(function (dir) {
result.createOrSkip = {created: !!dir, folderAt: backupFolder};
return rename(fullPath, backupFullPath);
})
.then(function () {
result.move = {backupAt: backupFullPath};
return writeFile(fullPath, 'abc');
})
.then(function () {
result.write = {saveAt: fullPath};
return result;
})
.done(function (result) {
console.log(result);
});
Option 3
If you find yourself using this sort of code all the time, you could write a very simple series helper (I've never found the need to do this personally):
function promiseSeries(series) {
var ready = Q(null);
var result = {};
Object.keys(series)
.forEach(function (key) {
ready = ready.then(function () {
return series[key]();
}).then(function (res) {
result[key] = res;
});
});
return ready.then(function () {
return result;
});
}
promiseSeries({
createOrSkip: function () {
return mkdirp(backupFolder).then(function (dir) {
return {created: !!dir, folderAt: backupFolder};
});
},
move: function () {
return rename(fullPath, backupFullPath)
.thenResolve({backupAt: backupFullPath});
},
write: function () {
return writeFile(fullPath, 'abc')
.thenResolve({saveAt: fullPath});
}
}).done(function (result) {
console.log(result);
});
I'd say once you've written the helper, the code is a lot clearer for promises than with all the error handling cruft required to work with callbacks. I'd say it's clearer still when you either write it by hand or don't keep track of all those intermediate results.
Summing Up
You may or may not think these examples are clearer than the async.series version. Consider how well you might know that function, though. It's actually doing something pretty complex in a very opaque manner. I initially assumed that only the last result would be returned (à la waterfall) and had to look it up in the Async documentation. I almost never have to look something up in the documentation of a Promise library.
Make each of your functions return a promise. Construct them with a Deferred:
function createOrSkip(folder) {
var deferred = Q.defer();
mkdirp(folder, function (err, dir) {
if(err) {
deferred.reject(err);
} else {
deferred.resolve({created: !!dir, folderAt: backupFolder});
}
});
return deferred.promise;
}
However, there are helper functions for node-style callbacks so that you don't need to check for the err yourself every time. With Q.nfcall it becomes:
function createOrSkip(folder) {
return Q.nfcall(mkdirp, folder).then(function transform(dir) {
return {created: !!dir, folderAt: backupFolder};
});
}
The transform function will map the result (dir) to the object you expect.
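The answer doesn't spell out the corresponding move and write wrappers used in the chain below; a minimal sketch of what they might look like with Q.nfcall (the names are assumed so the chain reads through; fs and the path variables come from the question):
function move(from, to) {
  return Q.nfcall(fs.rename, from, to).then(function () {
    return {backupAt: to};
  });
}

function write(file, contents) {
  return Q.nfcall(fs.writeFile, file, contents).then(function () {
    return {saveAt: file};
  });
}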
If you have done this for all your functions, you can chain them with then:
createOrSkip(backupfolder).then(function(createResult) {
return move(fullPath, backupFullPath);
}).then(function(moveResult) {
return write(fullPath, 'abc');
}).then(function(writeResult) {
console.log("I'm done");
}, function(err) {
console.error("Something has failed:", err);
});
Notice that this works like async's waterfall, not series, i.e. the intermediate results will be lost. To keep them, you would need to nest the calls:
createOrSkip(backupfolder).then(function(createResult) {
return move(fullPath, backupFullPath).then(function(moveResult) {
return write(fullPath, 'abc').then(function(writeResult) {
return {
createOrSkip: createResult,
move: moveResult,
write: writeResult
};
});
});
}).then(function(res){
console.log(res);
}, function(err) {
console.error("Something has failed:", err);
});
