I am trying to query my database several times and construct an object that stores each response from the database in its own field. Here is my code:
router.post('/search', (req, res) => {
  var collection = db.get().collection('styles');
  var data = [];
  collection.distinct('make.name', (err, docs) => {
    data.push({ 'make': docs });
  });
  collection.distinct('model', function (err, docs) {
    data.push({ 'model': docs });
  });
  res.send(data);
});
Since Node.js/Express is asynchronous, this isn't working as I would like. How can I restructure this endpoint so that it makes several database calls (to the same collection) and returns a single object containing all the results?
There's more than one way to do it:
Nested callbacks
Without promises you could nest the callbacks:
router.post('/search', (req, res) => {
  var collection = db.get().collection('styles');
  var data = [];
  collection.distinct('make.name', (err, docs) => {
    if (err) {
      // ALWAYS HANDLE ERRORS!
    }
    data.push({ 'make': docs });
    collection.distinct('model', function (err, docs) {
      if (err) {
        // ALWAYS HANDLE ERRORS!
      }
      data.push({ 'model': docs });
      res.send(data);
    });
  });
});
This would be the easiest way, but note that it is not efficient: the second query doesn't start until the first one finishes, even though the two could run in parallel, as in the sketch below.
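For completeness, here is a minimal sketch of running the two calls in parallel with plain callbacks by counting completions yourself; this is exactly the bookkeeping that the approaches below handle for you:

router.post('/search', (req, res) => {
  var collection = db.get().collection('styles');
  var data = [];
  var pending = 2;
  var failed = false;

  function done(err) {
    if (failed) return; // an earlier call already errored and responded
    if (err) {
      failed = true;
      return res.status(500).send(err);
    }
    // note: the order of the pushed results is not guaranteed
    if (--pending === 0) res.send(data);
  }

  collection.distinct('make.name', (err, docs) => {
    if (!err) data.push({ 'make': docs });
    done(err);
  });
  collection.distinct('model', (err, docs) => {
    if (!err) data.push({ 'model': docs });
    done(err);
  });
});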
The async module
You can use the async module:
var async = require('async'); // the async utility module: npm install async

router.post('/search', (req, res) => {
  var collection = db.get().collection('styles');
  var data = [];
  async.parallel({
    make: cb => collection.distinct('make.name', cb),
    model: cb => collection.distinct('model', cb),
  }, (err, responses) => {
    if (err) {
      // ALWAYS HANDLE ERRORS!
    }
    data.push({ 'make': responses.make });
    data.push({ 'model': responses.model });
    res.send(data);
  });
});
See: https://caolan.github.io/async/docs.html#parallel
But this may still not be the most convenient method.
ES2017 async/await
The most flexible way of doing that if you have 30 calls to make would be to:
Use functions that return promises instead of functions that take callbacks
Use async/await if you can or at least generator based coroutines
Await on promises (or yield promises) when the logic needs to run in sequence
Use Promise.all() for anything that can be done in parallel
With async/await your code could look like this:
// in sequence:
var make = await collection.distinct('make.name');
var model = await collection.distinct('model');
// use 'make' and 'model'
Or:
// in parallel:
var array = await Promise.all([
  collection.distinct('make.name'),
  collection.distinct('model'),
]);
// use array[0] and array[1]
A big advantage of async/await is the error handling:
try {
  var x = await asyncFunc1();
  var array = await Promise.all([asyncFunc2(x), asyncFunc3(x)]);
  var y = await asyncFunc4(array);
  console.log(await asyncFunc5(y));
} catch (err) {
  // handle any error here
}
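Putting it together, the original endpoint could look like this as an async route handler (a minimal sketch; it assumes your MongoDB driver returns a promise when distinct is called without a callback):

router.post('/search', async (req, res) => {
  try {
    const collection = db.get().collection('styles');
    // both queries run in parallel; await suspends until both settle
    const [make, model] = await Promise.all([
      collection.distinct('make.name'),
      collection.distinct('model'),
    ]);
    res.send([{ make }, { model }]);
  } catch (err) {
    // ALWAYS HANDLE ERRORS!
    res.status(500).send(err);
  }
});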
You can use await only inside a function created with the async keyword, as in the sketch above. For more info, see:
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/await
For support in browsers, see:
http://caniuse.com/async-functions
For support in Node, see:
http://node.green/#ES2017-features-async-functions
In places where you don't have native support for async and await you can use Babel:
https://babeljs.io/docs/plugins/transform-async-to-generator/
or, with slightly different syntax, a generator-based approach as in co or Bluebird coroutines:
https://www.npmjs.com/package/co
http://bluebirdjs.com/docs/api/promise.coroutine.html
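With co, for example, the same sequence could be written as a generator-based coroutine (a minimal sketch, assuming the same promise-returning driver methods as above):

const co = require('co');

co(function* () {
  // yield suspends the coroutine the same way await suspends an async function
  const make = yield collection.distinct('make.name');
  const model = yield collection.distinct('model');
  return [{ make }, { model }];
}).then(data => res.send(data))
  .catch(err => res.status(500).send(err));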
See those answers for more info:
try/catch blocks with async/await
node.js ~ constructing chained sequence of Promise resolves
How to run Generator Functions in Parallel?
Using async/await + Bluebird to promisifyAll
jQuery: Return data after ajax call success
You can do it with Promises
router.post('/search', (req, res) => {
  var collection = db.get().collection('styles');

  // Create promise for "make.name" query
  let firstQuery = new Promise((resolve, reject) => {
    collection.distinct('make.name', (err, docs) => {
      if (!err) {
        resolve(docs);
      } else {
        reject(err);
      }
    });
  });

  // Create promise for "model" query
  let secondQuery = new Promise((resolve, reject) => {
    collection.distinct('model', (err, docs) => {
      if (!err) {
        resolve(docs);
      } else {
        reject(err);
      }
    });
  });

  // Run both queries at the same time and handle both resolved results or the first rejection
  Promise.all([firstQuery, secondQuery])
    .then((results) => {
      res.send({ 'make.name': results[0], 'model': results[1] });
    })
    .catch((err) => {
      // Catch error
      res.send({});
    });
});
You can also use destructuring in the callback functions, like this:
Promise.all([firstQuery, secondQuery])
  .then(([makeName, model]) => res.send({ 'make.name': makeName, model }));
UPD: If you have a bunch of collections to request, you can create an array of collection names, map it to promise-based requests, and handle them with Promise.all. For example:
let collections = ['firstCollection', 'secondCollection', 'nCollection'];
let promises = collections.map((collectionName) => {
  return new Promise((resolve, reject) => {
    collection.distinct(collectionName, (err, docs) => {
      if (!err) {
        resolve(docs);
      } else {
        reject(err);
      }
    });
  });
});
Promise.all(promises)
  .then(results => {
    // Do what you want to do
  })
  .catch(error => {
    // or catch
  });
Related
I have created a function inside my route that uploads all the images to Cloudinary (a third-party library) and returns all the URL links, and I am pushing all the links into my urls variable so they can then be stored in the database.
I want the database insert to wait until the links are available in my urls variable, but I am confused about how to do that using async/await or promises.
This is my route with the function. I am using Node, Express, and Multer.
app.post('/addProduct', async (req, res, next) => {
  let urls = [];

  async function sendImagesToCloudinary() {
    for (let file of req.files) {
      await cloudinary.uploader.upload(
        file.path,
        {
          public_id: `${Date.now()}`,
          resource_type: 'auto'
        }
      ).then(result => {
        // del files after upload on cloudinary
        fs.unlink(file.path, function (err) {
          if (err) {
            console.log(err);
          }
        });
        urls.push(result.url);
      })
      .catch(err => {
        console.log(err);
      });
    }
    res.json(urls);
  }

  sendImagesToCloudinary();

  // Publish on database
  const result = await unityMartMediaCollection.insertOne({ urls: urls });
  res.json(result);
});
It's worthwhile to (a) tease apart the various async ops into smaller, clearer, testable functions, and (b) use only one style of promise syntax...
// isolated version of the OP upload
async function upload(file) {
  const params = { public_id: `${Date.now()}`, resource_type: 'auto' };
  return cloudinary.uploader.upload(file.path, params);
}

// this promisifies fs.unlink (natively available in fs.promises)
async function unlink(file) {
  return new Promise((resolve, reject) => {
    fs.unlink(file.path, error => error ? reject(error) : resolve());
  });
}

// upload, then unlink if the upload succeeds, and return the uploaded url
// catch errors here, so other concurrent uploads can continue
async function uploadAndUnlink(file) {
  try {
    const result = await upload(file);
    await unlink(file);
    return result.url;
  } catch (err) {
    console.log(err);
  }
}
// implement the route, process the files concurrently
app.post('/addProduct', async (req, res, next) => {
  const promises = req.files.map(file => uploadAndUnlink(file));
  const urls = await Promise.all(promises);
  const result = await unityMartMediaCollection.insertOne({ urls: urls });
  res.json(result);
});
I took the liberty of removing the .json invocation on res inside the URL-producing method. The strong implication from the rest of the code is that the aim is to return (to the client) the result of the unityMartMediaCollection call.
I need to parse some XML files from a large array of file_path values.
I'm trying to use async, fs, and xml2js.
When I use a single file_path string, everything works perfectly. But when I use async.filter() with the array, I can't figure out how to get the result from xml.parseString() into the results array.
const fs = require('fs');
const xml2js = require('xml2js');
const async = require('async');

var mass = ['/file1.xml', '/file2.xml', '/file3.xml', ...];

async.filter(mass, async function(file_path, callback) {
  if (fs.statSync(file_path)['size'] > 0) {
    fs.readFileSync(file_path, 'utf8', function(err, data) {
      xml.parseString(data, function (err, result) {
        console.log(Object.keys(result)[0]);
        return result; // need to get this result into the results array
      });
    });
  }
}, function(err, results) {
  console.log(results);
});
Can someone explain how this works and what I need to change in my code?
Thanks a lot!
You are trying to map and filter at the same time. Since your filter condition is synchronously available, use the array filter method for that, and then pass that to async.map.
You should then call the callback function that async.map provides to you, passing it the result. So don't return the result; call the callback with it.
The readFileSync method does not take a callback like its asynchronous counterpart. It just returns the data.
Also, drop the async keyword, as you are not using the await keyword at all.
async.map(mass.filter((file_path) => fs.statSync(file_path).size > 0),
  function(file_path, callback) {
    var data = fs.readFileSync(file_path, 'utf8');
    xml2js.parseString(data, function (err, result) {
      if (err) return callback(err);
      console.log(Object.keys(result)[0]);
      callback(null, result);
    });
  }, function(err, results) {
    console.log(results);
  });
It should be noted however, that since Node now comes with the Promise API, and even the async/await extension to that, the async module has become much less interesting. Consider using Promises.
const promises = mass.filter(file_path => {
  return fs.statSync(file_path).size > 0;
}).map(function(file_path) {
  return new Promise((resolve, reject) => {
    const data = fs.readFileSync(file_path, 'utf8');
    xml2js.parseString(data, function (err, result) {
      if (err) return reject(err);
      console.log(Object.keys(result)[0]);
      resolve(result);
    });
  });
});

Promise.all(promises).then(results => {
  console.log(results);
});
I am trying to call two functions and pass the output of the first function as a parameter into the second.
Function 1:
module.exports.getAllStatisticsByUserId = function(id, callback) {
  User.findById(id, (err, user) => {
    if (err)
      throw err;
    if (user)
      callback(null, user.statistics);
  });
};
Function 2:
module.exports.getGameByStatisticsId = function(id, callback) {
  Statistics.findById(id, (err, statistics) => {
    if (err)
      throw err;
    if (statistics)
      callback(null, statistics.game);
  });
};
I am trying to execute the second method by passing the output of the first method as a parameter but the asynchronous nature of javascript is messing it up. I have tried implementing promises to no avail.
Can anyone suggest some good javascript practices to deal with calling functions asynchronously when they need each other? Any help would be appreciated.
After fixing the error handling (pass the error to the callback instead of throwing inside the asynchronous callback), you can call them in sequence like this:
module.exports.getAllStatisticsByUserId = function(id, callback) {
  User.findById(id, (err, user) => {
    if (err) return callback(err);
    if (user) callback(null, user.statistics);
  });
};

module.exports.getGameByStatisticsId = function(id, callback) {
  Statistics.findById(id, (err, statistics) => {
    if (err) return callback(err);
    if (statistics) callback(null, statistics.game);
  });
};
someService.getAllStatisticsByUserId(id, (err, statistics) => {
  if (err || !statistics) {
    // handle error
    return;
  }
  someService.getGameByStatisticsId(statistics.id, (err, game) => {
    if (err || !game) {
      // handle error
      return;
    }
    // handle game
  });
});
However, as noted in the Mongoose documentation:
When a callback function is not passed, an instance of Query is returned, which provides a special query builder interface.
A Query has a .then() function, and thus can be used as a promise.
So you can simply rewrite the calls like this:
someService.getAllStatisticsByUserId(id).then(statistics =>
  someService.getGameByStatisticsId(statistics.id)
).then(game => {
  // handle game
}).catch(err => {
  // handle error
});
or convert it into an async/await function:
async function getGameByUserId(id) {
  try {
    const statistics = await someService.getAllStatisticsByUserId(id);
    const game = await someService.getGameByStatisticsId(statistics.id);
    // handle game
  } catch (error) {
    // handle error
  }
}
Note that an async function always returns a Promise, so you must await it or chain it with a .then() to ensure completion of the query and resolve the returned value, if any.
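For example, a caller of the function above (someUserId is a hypothetical value):

// getGameByUserId returns a promise even without an explicit return value,
// so chain .then() (or await it) to know when both queries have completed
getGameByUserId(someUserId).then(() => {
  console.log('both queries completed');
});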
It looks like you should be able to write:
getAllStatisticsByUserId("me", (err, stats) => {
getGameByStatisticsId(stats.id, (err, game) => {
console.log(game);
});
});
Here's how it would look if these functions returned promises instead.
getAllStatisticsByUserId("me")
.then(stats => getGameByStatisticsId(stats.id))
.then(game => console.log(game))
Even better, if you're able to use a version of Node that supports async/await, then you could write:
let stats = await getAllStatisticsByUserId("me");
let game = await getGameByStatisticsId(stats.id);
console.log(game);
This would mean slightly rewriting the original functions (unless User.findById and Statistics.findById already return promises).
module.exports.getAllStatisticsByUserId = function(id) {
  return new Promise((resolve, reject) => {
    User.findById(id, (err, user) => {
      if (err) return reject(err);
      return resolve(user.statistics);
    });
  });
};
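And the second function would be rewritten the same way:

module.exports.getGameByStatisticsId = function(id) {
  return new Promise((resolve, reject) => {
    Statistics.findById(id, (err, statistics) => {
      if (err) return reject(err);
      return resolve(statistics.game);
    });
  });
};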
Is it possible to use JavaScript promises instead of regular callbacks within CosmosDB (DocumentDB) stored procedure API calls? One use case would be implementing pagination.
For example
token = getToken();
doSomething(token);
// ....

function getToken(....) {
  // ...
  collection.queryDocuments(link, query, queryOptions, function(error, documents, responseOptions) {
    return responseOptions.continuation;
  });
}
would not work because the token is returned within a callback, and the execution continues. Could you please give an example of how you would implement this?
The version of ECMAScript referenced in Cosmos DB docs supports async/await and Promises. I am able to use both of those in my stored procedures.
Here's a function that returns a promise that makes a parameterized document query:
function queryDocumentsAsync(sql, parameters, options) {
  const querySpec = {
    query: sql,
    parameters: parameters
  };
  return new Promise((resolve, reject) => {
    let isAccepted = __.queryDocuments(__.getSelfLink(), querySpec, options || {}, (err, feed, options) => {
      if (err) return reject(err);
      resolve({
        feed: feed,
        options: options
      });
    });
    if (!isAccepted) throw new Error("Query was not accepted.");
  });
}
I am seeing some limitations around forcing a rollback with this approach, though. If you throw an Error, it gets swallowed by the promise chain and never gets out.
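One workaround (used by the next answer as well) is to let errors travel to the end of the chain and abort there explicitly, instead of throwing from inside a .then() handler. A minimal sketch, where doWorkAsync stands in for any promise-returning function:

doWorkAsync()
  .then(result => getContext().getResponse().setBody(result))
  // an explicit abort at the end of the chain forces the rollback
  // that a swallowed throw inside the chain never triggers
  .catch(err => getContext().abort(err));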
Here's an example of how to use async/await for a query-and-replace scenario.
function async_sample() {
  const ERROR_CODE = {
    NotAccepted: 429
  };

  const asyncHelper = {
    queryDocuments(sqlQuery, options) {
      return new Promise((resolve, reject) => {
        const isAccepted = __.queryDocuments(__.getSelfLink(), sqlQuery, options, (err, feed, options) => {
          if (err) return reject(err);
          resolve({ feed, options });
        });
        if (!isAccepted) reject(new Error(`${ERROR_CODE.NotAccepted}: queryDocuments was not accepted.`));
      });
    },

    replaceDocument(doc) {
      return new Promise((resolve, reject) => {
        const isAccepted = __.replaceDocument(doc._self, doc, (err, result, options) => {
          if (err) return reject(err);
          resolve({ result, options });
        });
        if (!isAccepted) reject(new Error(`${ERROR_CODE.NotAccepted}: replaceDocument was not accepted.`));
      });
    }
  };

  async function main() {
    let continuation;
    do {
      let { feed, options } = await asyncHelper.queryDocuments("SELECT * from c", { continuation });
      for (let doc of feed) {
        doc.newProp = 1;
        await asyncHelper.replaceDocument(doc);
      }
      continuation = options.continuation;
    } while (continuation);
  }

  main().catch(err => getContext().abort(err));
}
With some cleverness you can use webpack to inline node dependencies, including promisify, which lets you do this:
https://github.com/Oblarg/cosmosdb-storedprocs-ts/blob/master/BuildStoredProcs.js
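For instance, a hypothetical sketch of what the bundled stored-procedure source can then do (util.promisify is inlined into the bundle by webpack; note that promisify resolves with only the callback's first success argument, here the feed):

const { promisify } = require('util'); // inlined by webpack at build time

// hypothetical wrapper around the callback-style query API
const queryDocuments = promisify((querySpec, options, callback) => {
  const accepted = __.queryDocuments(__.getSelfLink(), querySpec, options, callback);
  if (!accepted) callback(new Error('Query was not accepted.'));
});

// usage inside the stored procedure body:
// const feed = await queryDocuments('SELECT * FROM c', {});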
In my Node.js code I need to make 2 or 3 API calls, and each will return some data. After all API calls are complete, I want to collect all the data into a single JSON object to send to the frontend.
I know how to do this using the API callbacks (the next call will happen in the previous call's callback) but this would be slow:
// 1st request
request('http://www.example.com', function (err1, res1, body) {
  // 2nd request
  request('http://www.example2.com', function (err2, res2, body2) {
    // combine data and do something with it
  });
});
I know you could also do something similar and neater with promises, but I think the same concept applies where the next call won't execute until the current one has finished.
Is there a way to call all functions at the same time, but for my final block of code to wait for all API calls to complete and supply data before executing?
Promises give you Promise.all() (this is true for native promises as well as library ones like bluebird's).
Update: Since Node 8, you can use util.promisify() like you would with Bluebird's .promisify()
const util = require('util');

var requestAsync = util.promisify(request);
var urls = ['url1', 'url2'];
Promise.all(urls.map(requestAsync)).then(allData => {
  // All data available here, in the order of the elements in the array
});
So what you can do (native):
function requestAsync(url) {
  return new Promise(function(resolve, reject) {
    request(url, function(err, res, body) {
      if (err) { return reject(err); }
      return resolve([res, body]);
    });
  });
}

Promise.all([requestAsync('url1'), requestAsync('url2')])
  .then(function(allData) {
    // All data available here, in the order it was called.
  });
If you have bluebird, this is even simpler:
var requestAsync = Promise.promisify(request);
var urls = ['url1', 'url2'];
Promise.all(urls.map(requestAsync)).then(allData => {
  // All data available here, in the order of the elements in the array
});
Sounds like async.parallel() would also do the job if you'd like to use async:
var async = require('async');
var request = require('request');

async.parallel({
  one: function(parallelCb) {
    request('http://www.example1.com', function (err, res, body) {
      parallelCb(null, { err: err, res: res, body: body });
    });
  },
  two: function(parallelCb) {
    request('http://www.example2.com', function (err, res, body) {
      parallelCb(null, { err: err, res: res, body: body });
    });
  },
  three: function(parallelCb) {
    request('http://www.example3.com', function (err, res, body) {
      parallelCb(null, { err: err, res: res, body: body });
    });
  }
}, function(err, results) {
  // results will have the results of all 3
  console.log(results.one);
  console.log(results.two);
  console.log(results.three);
});
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/all
Promise.all is now included with ES6 so you don't need any 3rd party libraries at all.
"Promise.all waits for all fulfillments (or the first rejection)"
I've set up a gist demonstrating Promise.all() with refactoring iterations at: https://gist.github.com/rainabba/21bf3b741c6f9857d741b69ba8ad78b1
I'm using an IIFE (Immediately Invoked Function Expression). If you're not familiar with IIFEs, you'll want to be for the example below; the gist shows how it's used. https://en.wikipedia.org/wiki/Immediately-invoked_function_expression
TL;DR
(function(promises) {
  return new Promise((resolve, reject) => {
    Promise.all(promises)
      .then(values => {
        console.log("resolved all promises");
        console.dir(values);
        // Use Array.prototype.reduce() to sum the values in the array
        resolve(values.reduce((sum, value) => { return sum + value; }));
      })
      .catch(err => {
        console.dir(err);
        throw err;
      });
  });
})([
  new Promise((resolve, reject) => {
    console.log("resolving 1");
    resolve(1);
  }),
  new Promise((resolve, reject) => {
    console.log("resolving 2");
    resolve(2);
  })
]).then(sum => { console.dir({ sum: sum }); });
I had a similar use case where I had to do 10 concurrent calls. I did it with the combination of async/await and Promise.all.
async function getData() {
  try {
    let result = null;
    const ids = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15];
    let promises = ids.map(async (id) => {
      return fetch(
        `https://jsonplaceholder.typicode.com/todos/${id}`
      ).then((data) => data.json());
    });
    result = await Promise.all(promises);
    return result;
  } catch (err) {
    console.log("error: ", err);
  }
}

getData().then(data => console.log(data));