Why is line 27 executed before line 24? - javascript

I'm a beginner with JavaScript and am trying to build a clean object to use the Dockerode library in my use case. I have an async problem here: my line 27 executes before line 24 and I don't understand why or how to fix it!
Also if it's easier for you, please visit this public gist: https://gist.github.com/msitruk/2cdb655a0bebdb29c61d8bc5606a2695
const Docker = require('dockerode');
const docker = new Docker({
  socketPath: '/var/run/docker.sock'
});

// CONSTRUCTOR
function SearchUtils() {
  this.listContainersPromise = docker.listContainers({ all: true });
  this.scraperListId = [];
}

// "METHODS"
SearchUtils.prototype.run = function() {
  this.getScraperContainersListPromise()
    .then((containers) => {
      for (let i = 0; i < containers.length; i++) {
        if (containers[i].Names.toString().indexOf("scraper") !== -1) {
          this.addToScraperList(containers[i].Id, "wait");
        }
      }
    }, (err) => { console.log(err); })
    .then(() => {
      this.checkReadyScraper();
    }, (err) => { console.log(err); })
    .then(() => {
      this.scrap();
    }, (err) => { console.log(err); });
};

SearchUtils.prototype.checkReadyScraper = function() {
  for (let i = 0; i < this.scraperListId.length; i++) {
    this.exec("getStatus", this.scraperListId[i].id);
  }
};

SearchUtils.prototype.getScraperContainersListPromise = function() {
  return this.listContainersPromise; // <- Not working
};

SearchUtils.prototype.exec = function(type, containerId) {
  let container = docker.getContainer(containerId);
  if (type === "getStatus") {
    this.runExec(container, 'cat /home/immobot/status');
  } else if (type === "scrap") {
    this.runExec(container, 'torify scrapy crawl seloger -o seloger.json');
  }
};

SearchUtils.prototype.scrap = function() {
  let localRdyScraperList = [];
  for (let i = 0; i < this.scraperListId.length; i++) {
    if (this.scraperListId[i].status.toString('utf8').indexOf("ready") !== -1) {
      localRdyScraperList.push(this.scraperListId[i].id);
    }
  }
  console.log("test de localRdyScraperList : " + localRdyScraperList);
  // this.exec("scrap", this.scraperListId[i].id);
};

SearchUtils.prototype.addToScraperList = function(containerId, status) {
  this.scraperListId.push({ id: containerId, status: status });
};

SearchUtils.prototype.getScraperList = function() {
  return this.scraperListId;
};
SearchUtils.prototype.runExec = function(container, cmd) {
  let options = {
    Cmd: ['/bin/bash', '-c', cmd],
    AttachStdout: true,
    AttachStderr: true
  };
  container.exec(options, (err, exec) => {
    if (err) return;
    exec.start((err, stream) => {
      if (err) {
        console.log("error : " + err);
        return;
      }
      // container.modem.demuxStream(stream, process.stdout, process.stderr)
      if (cmd === "cat /home/immobot/status") {
        let newStream = require('stream');
        let logStream = new newStream.PassThrough();
        logStream.on('data', (chunk) => {
          // console.log(chunk.toString('utf8'));
          if (chunk.toString('utf8').indexOf("ready") !== -1) {
            console.log("CONTAINER READY !!");
            // EDIT CONTAINER STATUS IN SCRAPERLIST TO READY
            this.changeStatusToReady(container.id);
          }
        });
        container.modem.demuxStream(stream, logStream, process.stderr);
      } else if (cmd === "torify scrapy crawl seloger -o seloger.json") {
        console.log("on lance le scrape sur un des scraper rdy");
        container.modem.demuxStream(stream, process.stdout, process.stderr);
      }
      // container.modem.demuxStream(stream, logStream, process.stderr);
      exec.inspect(function(err, data) {
        if (err) {
          console.log("error : " + err);
          return;
        }
      });
    });
  });
};

SearchUtils.prototype.changeStatusToReady = function(containerId) {
  for (let i = 0; i < this.scraperListId.length; i++) {
    if (this.scraperListId[i].id === containerId) {
      this.scraperListId[i].status = "ready";
    }
  }
  // console.log(this.getScraperList());
};

module.exports = SearchUtils;

If you're chaining promises, don't forget to return the next promise,
e.g.:
.then(() => {
  this.checkReadyScraper();
})
If checkReadyScraper() returns a promise, then you will want to return it,
e.g.:
.then(() => {
  return this.checkReadyScraper();
})
Otherwise all you're doing is running checkReadyScraper() and totally ignoring the returned Promise.
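To see the difference concretely, here is a minimal, self-contained sketch (delay is a hypothetical stand-in for any async step such as checkReadyScraper):

// delay() resolves after ms milliseconds and logs its label.
function delay(ms, label) {
  return new Promise((resolve) => setTimeout(() => {
    console.log(label);
    resolve(label);
  }, ms));
}

// Without return: "second" can log before "first" has finished,
// because the promise from delay() is ignored by the chain.
Promise.resolve()
  .then(() => { delay(100, 'first'); })
  .then(() => { delay(10, 'second'); });

// With return: the chain waits, so "first" always logs before "second".
Promise.resolve()
  .then(() => delay(100, 'first'))
  .then(() => delay(10, 'second'));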
Here is how I think your runExec should look. I'm assuming the exec.inspect is what you want to resolve on.
SearchUtils.prototype.runExec = function(container, cmd) {
  return new Promise((resolve, reject) => {
    let options = {
      Cmd: ['/bin/bash', '-c', cmd],
      AttachStdout: true,
      AttachStderr: true
    };
    container.exec(options, (err, exec) => {
      if (err) return reject(err); // return error
      exec.start((err, stream) => {
        if (err) {
          console.log("error : " + err);
          return reject(err); // return error
        }
        // container.modem.demuxStream(stream, process.stdout, process.stderr)
        if (cmd === "cat /home/immobot/status") {
          let newStream = require('stream');
          let logStream = new newStream.PassThrough();
          logStream.on('data', (chunk) => {
            // console.log(chunk.toString('utf8'));
            if (chunk.toString('utf8').indexOf("ready") !== -1) {
              console.log("CONTAINER READY !!");
              // EDIT CONTAINER STATUS IN SCRAPERLIST TO READY
              this.changeStatusToReady(container.id);
            }
          });
          container.modem.demuxStream(stream, logStream, process.stderr);
        } else if (cmd === "torify scrapy crawl seloger -o seloger.json") {
          console.log("on lance le scrape sur un des scraper rdy");
          container.modem.demuxStream(stream, process.stdout, process.stderr);
        }
        // container.modem.demuxStream(stream, logStream, process.stderr);
        exec.inspect(function(err, data) {
          if (err) {
            console.log("error : " + err);
            // don't forget to return the rejection
            return reject(err);
          }
          // looks like everything was ok, let's resolve
          resolve(data);
        });
      });
    });
    // resolve("ok"); too early
    // TODO ADD ERROR STRATEGY
    // reject("error"), pointless
  });
};

Executing a task (with container.exec, on line 81 in your code) runs asynchronously from the rest of your steps, invoking a callback when it is done.
You would have to make sure all the scraper status checks have finished before running the scrap command, since order matters here.
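One way to enforce that ordering, assuming exec is changed to forward the promise from runExec (as in the rewrite above), is to have checkReadyScraper return a combined promise; a rough sketch:

SearchUtils.prototype.checkReadyScraper = function() {
  // collect one promise per container and wait for all of them
  const checks = this.scraperListId.map(
    (scraper) => this.exec("getStatus", scraper.id)
  );
  return Promise.all(checks);
};

With run() returning this promise from its then callback, the chain only advances to scrap() once every status check has completed.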

First - there is no need to handle errors in every then() call.
You can implement a single error catch, which will catch an error thrown by any then() in the sequence:
.then(() => {
  this.checkReadyScraper();
})
.then(() => {
  this.scrap();
})
.catch(e => console.log(e))
Also note that an arrow function like catch(e => console.log(e)) doesn't require {} or ;.
Your problem is that your task is async. If you want to chain tasks, you should make each task return a Promise.
This is a rough example of what you should refactor:
// Should return a Promise
SearchUtils.prototype.exec = function(type, containerId) {
  let container = docker.getContainer(containerId);
  if (type === "getStatus") {
    // runExec should return us a Promise
    return this.runExec(container, 'cat /home/immobot/status');
  } else if (type === "scrap") {
    return this.runExec(container, 'torify scrapy crawl seloger -o seloger.json');
  }
};

SearchUtils.prototype.runExec = function(container, cmd) {
  return new Promise(function(resolve, reject) {
    // do some stuff
    // then call resolve(result)
    // or call reject(error)
  });
};
After this, you will be able to chain Promises (which is quite awesome actually and helps to solve callback hell):
.then(() => {
  // this now returns a Promise, so it will be correctly chained
  return this.checkReadyScraper();
})
.then(() => {
  // this now returns a Promise, so it will be correctly chained
  return this.scrap();
})
.catch(e => console.log(e))
Also, to make this look cleaner, I would even recommend some small refactoring, which finally gives you a one-liner:
.then(this.checkReadyScraper).then(this.scrap).catch(console.log)
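One caveat with that one-liner: passing prototype methods by reference, as in .then(this.checkReadyScraper), detaches them from the instance, so this inside them no longer points at the SearchUtils object. A sketch of a variant that keeps the binding:

// Arrow functions preserve `this`, so the methods still see the instance.
this.getScraperContainersListPromise()
  .then(() => this.checkReadyScraper())
  .then(() => this.scrap())
  .catch(console.log);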

Related

Calling async function multiple times

So I have a method, which I want to call multiple times in a loop. This is the function:
function PageSpeedCall(callback) {
  var pagespeedCall = `https://www.googleapis.com/pagespeedonline/v4/runPagespeed?url=https://${websites[0]}&strategy=mobile&key=${keys.pageSpeed}`;
  // second call
  var results = '';
  https.get(pagespeedCall, resource => {
    resource.setEncoding('utf8');
    resource.on('data', data => {
      results += data;
    });
    resource.on('end', () => {
      callback(null, results);
    });
    resource.on('error', err => {
      callback(err);
    });
  });
  // callback(null, );
}
As you see, this is an async function that calls the PageSpeed API. It then gets the response thanks to the callback and renders it in the view. Now how do I get this to work in a for/while loop? For example:
function PageSpeedCall(websites, i, callback) {
  var pagespeedCall = `https://www.googleapis.com/pagespeedonline/v4/runPagespeed?url=https://${websites[i]}&strategy=mobile&key=${keys.pageSpeed}`;
  // second call
  var results = '';
  https.get(pagespeedCall, resource => {
    resource.setEncoding('utf8');
    resource.on('data', data => {
      results += data;
    });
    resource.on('end', () => {
      callback(null, results);
    });
    resource.on('error', err => {
      callback(err);
    });
  });
  // callback(null, );
}
var websites = ['google.com', 'facebook.com', 'stackoverflow.com'];

for (let i = 0; i < websites.length; i++) {
  PageSpeedCall(websites, i);
}
I want to get a report for each of these sites. The length of the array will change depending on what the user does.
I am using async.parallel to call the functions like this:
let freeReportCalls = [PageSpeedCall, MozCall, AlexaCall];

async.parallel(freeReportCalls, (err, results) => {
  if (err) {
    console.log(err);
  } else {
    res.render('reports/report', {
      title: 'Report',
      // bw: JSON.parse(results[0]),
      ps: JSON.parse(results[0]),
      moz: JSON.parse(results[1]),
      // pst: results[0],
      // mozt: results[1],
      // bw: results[1],
      al: JSON.parse(results[2]),
      user: req.user,
    });
  }
});
I tried to use promise chaining, but for some reason I cannot put it together in my head. This is my attempt.
return Promise.all([PageSpeedCall, MozCall, AlexaCall]).then(([ps, mz, al]) => {
  if (awaiting != null)
    var areAwaiting = true;
  res.render('admin/', {
    title: 'Report',
    // bw: JSON.parse(results[0]),
    ps: JSON.parse(results[0]),
    moz: JSON.parse(results[1]),
    // pst: results[0],
    // mozt: results[1],
    // bw: results[1],
    al: JSON.parse(results[2]),
    user: req.user,
  });
}).catch(e => {
  console.error(e)
});
I tried doing this:
return Promise.all([for(let i = 0; i < websites.length; i++){PageSpeedCall(websites, i)}, MozCall, AlexaCall])
  .then(([ps, mz, al]) => {
    if (awaiting != null)
      var areAwaiting = true;
    res.render('admin/', {
      title: 'Report',
      // bw: JSON.parse(results[0]),
      ps: JSON.parse(results[0]),
      moz: JSON.parse(results[1]),
      // pst: results[0],
      // mozt: results[1],
      // bw: results[1],
      al: JSON.parse(results[2]),
      user: req.user,
    });
  }).catch(e => {
    console.error(e)
  });
But node just said it's stupid.
And this would work if I didn't want to pass the websites and the iterator into the functions. Any idea how to solve this?
To recap. So far the functions work for single websites. I'd like them to work for an array of websites.
I'm basically not sure how to call them, and how to return the responses.
It's much easier if you use fetch and async/await
const fetch = require('node-fetch');

async function PageSpeedCall(website) {
  const pagespeedCall = `https://www.googleapis.com/pagespeedonline/v4/runPagespeed?url=https://${website}&strategy=mobile&key=${keys.pageSpeed}`;
  const result = await fetch(pagespeedCall);
  return await result.json();
}
async function callAllSites(websites) {
  const results = [];
  for (const website of websites) {
    results.push(await PageSpeedCall(website));
  }
  return results;
}

callAllSites(['google.com', 'facebook.com', 'stackoverflow.com'])
  .then(results => console.log(results))
  .catch(error => console.error(error));
Which is better with a Promise.all
async function callAllSites(websites) {
  return await Promise.all(websites.map(website => PageSpeedCall(website)));
}
Starting with Node 7.6.0 (which enabled async/await by default) you can use native async/await:
async function PageSpeedCall(website) {
  var pagespeedCall = `https://www.googleapis.com/pagespeedonline/v4/runPagespeed?url=https://${website}&strategy=mobile&key=${keys.pageSpeed}`;
  return await promisify(pagespeedCall);
}

async function getResults() {
  const websites = ['google.com', 'facebook.com', 'stackoverflow.com'];
  // the map callback must itself be async for await to be legal inside it;
  // map then yields an array of promises, so wait for all of them
  return Promise.all(websites.map(async website => {
    try {
      return await PageSpeedCall(website);
    } catch (ex) {
      // handle exception
    }
  }));
}
Node http "callback" to promise function:
function promisify(url) {
  // return new pending promise
  return new Promise((resolve, reject) => {
    // select http or https module, depending on requested url
    const lib = url.startsWith('https') ? require('https') : require('http');
    const request = lib.get(url, (response) => {
      // handle http errors
      if (response.statusCode < 200 || response.statusCode > 299) {
        reject(new Error('Failed to load page, status code: ' + response.statusCode));
      }
      // temporary data holder
      const body = [];
      // on every content chunk, push it to the data array
      response.on('data', (chunk) => body.push(chunk));
      // we are done, resolve promise with those joined chunks
      response.on('end', () => resolve(body.join('')));
    });
    // handle connection errors of the request
    request.on('error', (err) => reject(err));
  });
}
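For completeness, the helper can then be consumed directly with then or await; a hypothetical usage (example.com stands in for a real PageSpeed URL):

// Hypothetical usage of the promisify helper above.
promisify('https://example.com')
  .then(body => console.log('received', body.length, 'bytes'))
  .catch(err => console.error(err));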
Make PageSpeedCall a promise and push that promise to an array as many times as you need, e.g. myArray.push(PageSpeedCall(foo)), then myArray.push(PageSpeedCall(foo2)), and so on. Then you Promise.all the array.
If a subsequent async call requires the result of a prior async call, that is what .then is for.
Promise.all()
Promise.all([promise1, promise2, promise3]).then(function(values) {
  console.log(values);
});
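Applied to the question's setup, that advice could look roughly like this, assuming PageSpeedCall, MozCall and AlexaCall have each been wrapped to return a Promise:

// Assumes each call has been promisified to return a Promise of its result.
const websites = ['google.com', 'facebook.com', 'stackoverflow.com'];
const pageSpeedPromises = websites.map(site => PageSpeedCall(site));

Promise.all([...pageSpeedPromises, MozCall(), AlexaCall()])
  .then(results => console.log(results))
  .catch(err => console.error(err));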

Updating many (100k+) documents in the most efficient way in MongoDB

I have a function that runs periodically, that updates the item.price of some Documents in my Prices Collection. The Price Collection has 100k+ items. The function looks like this:
// Just a helper function for multiple GET requests with request.
let _request = (urls, cb) => {
  let results = {}, i = urls.length, c = 0;
  // `handler` was an implicit global here; declare it explicitly
  const handler = (err, response, body) => {
    let url = response.request.uri.href;
    results[url] = { err, response, body };
    if (++c === urls.length) {
      cb(results);
    }
  };
  while (i--) {
    request(urls[i], handler);
  }
};
// function to update the prices in our Prices collection.
const update = (cb) => {
  Price.remove({}, (err, remove) => {
    if (err) {
      return logger.error(`Error removing items...`);
    }
    logger.info(`Removed all items... Beginning to update.`);
    _request(urls, (responses) => {
      let url, response, gameid;
      for (url in responses) {
        id = url.split('/')[5].split('?')[0];
        response = responses[url];
        if (response.err) {
          logger.error(`Error in request to ${url}: ${err}`);
          return;
        }
        if (response.body) {
          logger.info(`Request to ${url} successful.`)
          let jsonResult = {};
          try {
            jsonResult = JSON.parse(response.body);
          } catch (e) {
            logger.error(`Could not parse.`);
          }
          logger.info(`Response body for ${id} is ${Object.keys(jsonResult).length}.`);
          let allItemsArray = Object.keys(jsonResult).map((key, index) => {
            return {
              itemid: id,
              hash_name: key,
              price: jsonResult[key]
            };
          });
          Price.insertMany(allItemsArray).then(docs => {
            logger.info(`Saved docs for ${id}`)
          }, (e) => {
            logger.error(`Error saving docs.`);
          });
        }
      }
      if (cb && typeof cb == 'function') {
        cb();
      }
    });
  });
}
As you can see, to avoid iterating through 100k+ documents and updating each and every one of them separately, I delete them all at the beginning, call the API that gives me these items with prices, and use insertMany to insert all of them into my Prices collection.
This updating process will happen every 30 minutes.
But I just now realised: what if some user wants to check the prices while my Prices Collection is empty because it's in the middle of updating itself?
The Question
So do I have to iterate through all of them in order to avoid deleting them? (Remember, there are MANY documents to be updated every 30 mins.) Or is there another solution?
Here's a picture of how my Prices Collection looks (there are 100k docs like these, I just want to update the price property):
Update:
I have re-written my update function a bit and now it looks like this:
const update = (cb = null) => {
  Price.remove({}, (err, remove) => {
    if (err) {
      return logger.error(`Error removing items...`);
    }
    logger.info(`Removed all items... Beginning to update.`);
    _request(urls, (responses) => {
      let url, response, gameid;
      for (url in responses) {
        gameid = url.split('/')[5].split('?')[0];
        response = responses[url];
        if (response.err) {
          logger.error(`Error in request to ${url}: ${err}`);
          return;
        }
        if (response.body) {
          logger.info(`Request to ${url} successful.`)
          let jsonResult = {};
          try {
            jsonResult = JSON.parse(response.body);
          } catch (e) {
            logger.error(`Could not parse.`);
          }
          logger.info(`Response body for ${gameid} is ${Object.keys(jsonResult).length}.`);
          let allItemsArray = Object.keys(jsonResult).map((key, index) => {
            return {
              game_id: gameid,
              market_hash_name: key,
              price: jsonResult[key]
            };
          });
          let bulk = Price.collection.initializeUnorderedBulkOp();
          allItemsArray.forEach(item => {
            bulk.find({ market_hash_name: item.market_hash_name })
              .upsert().updateOne(item);
          });
          bulk.execute((err, bulkers) => {
            if (err) {
              return logger.error(`Error bulking: ${e}`);
            }
            logger.info(`Updated Items for ${gameid}`)
          });
          // Price.insertMany(allItemsArray).then(docs => {
          //   logger.info(`Saved docs for ${gameid}`)
          // }, (e) => {
          //   logger.error(`Error saving docs.`);
          // });
        }
      }
      if (cb && typeof cb == 'function') {
        cb();
      }
    });
  });
}
Notice the bulk variable now (thanks #Rahul), but now the collection takes ages to update. My processor is burning up and it literally takes 3+ minutes to update 60k+ documents. I honestly feel like the previous method, even though it deletes everything and then reinserts it, was also about 10x faster.
Anyone?
From my experience (updating millions of mongo docs on an hourly basis), here's a realistic approach to very large bulk updates:
- do all your API calls separately and write the results into a file as newline-delimited JSON (mongoimport reads JSON/CSV/TSV; raw BSON dumps are what mongorestore consumes)
- invoke mongoimport and import that file into a new empty collection prices_new; JavaScript, let alone high-level OO wrappers, is just too slow for that
- rename prices_new -> prices with dropTarget=true (this will be atomic, hence no downtime)
Schematically, it would look like this in JS:
let fname = '/tmp/data.json';
let apiUrls = [...];

async function doRequest(url) {
  // perform a request and return an array of records
}

let responses = await Promise.all(apiUrls.map(doRequest));

// if the data is too big to fit in memory, use streams instead of this
// (one JSON document per line, which is what mongoimport expects):
let data = flatMap(responses, records => records.map(r => JSON.stringify(r))).join('\n');
await fs.promises.writeFile(fname, data);
// child_process.exec is assumed promisified here (e.g. via util.promisify)
await child_process.exec(`mongoimport --collection prices_new --drop ${fname}`);
await db.prices_new.renameCollection('prices', true);
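If the final rename is issued through the Node driver rather than the mongo shell, the equivalent call is roughly this (assuming db is a connected Db instance from the official driver):

// Driver-side rename; atomically swaps prices_new in place of prices.
await db.collection('prices_new').rename('prices', { dropTarget: true });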
There's no need to clear the database and do a fresh insert. You can use the bulkWrite() method for this, or use the updateMany() method to do the updates.
You can refactor the existing code to:
const update = (cb) => {
  _request(urls, responses => {
    let bulkUpdateOps = [], gameid;
    // responses is an object keyed by url, so iterate over its keys
    Object.keys(responses).forEach(url => {
      let response = responses[url];
      gameid = url.split('/')[5].split('?')[0];
      if (response.err) {
        logger.error(`Error in request to ${url}: ${response.err}`);
        return;
      }
      if (response.body) {
        logger.info(`Request to ${url} successful.`);
        let jsonResult = {};
        try {
          jsonResult = JSON.parse(response.body);
        } catch (e) {
          logger.error(`Could not parse.`);
        }
        Object.keys(jsonResult).forEach(key => {
          bulkUpdateOps.push({
            "updateOne": {
              "filter": { market_hash_name: key },
              "update": { "$set": {
                game_id: gameid,
                price: jsonResult[key]
              } },
              "upsert": true
            }
          });
        });
      }
      // flush in batches; >= because many ops are pushed per response
      if (bulkUpdateOps.length >= 1000) {
        Price.bulkWrite(bulkUpdateOps).then(result => {
          logger.info(`Updated Items`);
        }).catch(e => logger.error(`Error bulking: ${e}`));
        bulkUpdateOps = [];
      }
    });
    if (bulkUpdateOps.length > 0) {
      Price.bulkWrite(bulkUpdateOps).then(result => {
        logger.info(`Updated Items`);
      }).catch(e => logger.error(`Error bulking: ${e}`));
    }
    // invoke the callback inside the _request handler, so it fires
    // after the responses have been processed
    if (cb && typeof cb == 'function') {
      cb();
    }
  });
}
I have not tested anything, but you can try this; it might be helpful. I am using the bluebird library for concurrency.
let _request = (url) => {
  return new Promise((resolve, reject) => {
    request(url, (err, response, body) => {
      if (err) {
        return reject(err);
      }
      resolve(body);
    });
  });
};

const formatRespose = async (response) => {
  // do stuff
  return {
    query: {}, // itemid: id,
    body: {}
  };
};

const bulkUpsert = (allItemsArray) => {
  let bulk = Price.collection.initializeUnorderedBulkOp();
  return new Promise((resolve, reject) => {
    allItemsArray.forEach(item => {
      bulk.find(item.query).upsert().updateOne(item.body);
    });
    bulk.execute((err, bulkers) => {
      if (err) {
        return reject(err);
      }
      return resolve(bulkers);
    });
  });
};

const getAndUpdateData = async (urls) => {
  // the map callback must be async for await to be legal inside it,
  // and map then yields promises, so wait for all of them
  const allItemsArray = await Promise.all(urls.map(async (url) => {
    const requestData = await _request(url); // you can also make this parallel
    const formattedData = await formatRespose(requestData); // returns {query: {}, body: {}}
    return formattedData;
  }));
  return await bulkUpsert(allItemsArray);
};

function update() {
  // split urls as per your need: 100/1000
  var i, j, chunkUrls = [],
    chunk = 100;
  for (i = 0, j = urls.length; i < j; i += chunk) {
    chunkUrls.push(getAndUpdateData(urls.slice(i, i + chunk)));
  }
  // note: the chunks above are already-started promises; to truly limit
  // concurrency you would push functions and invoke them here instead
  Bluebird.map(chunkUrls, function (chunk) {
    return chunk; // returning the promise is enough; await is not legal here
  }, {
    concurrency: 1 // 1 = one chunk of 100 requests fetched and inserted at a time
  }).then(function () {
    console.log("done");
  }).catch(function () {
    console.log("error");
  });
}

Mongoose fetching data, setting in a list (undefined)

I am trying to fetch all messages of two users from MongoDB using a custom function. I am not able to set the list. I have no idea how to use a callback here.
fetchall = function(chatuser, me) {
  var allmessage = [];
  var promise = this.model.find({}, function(err, docs) {
    if (err) {
      return console.error(err);
    }
    console.log(docs); // working
    // doSomethingElse(docs);
    allmessage = JSON.stringify(docs);
    return allmessage;
  }).exec();
  // promise.then(doSomethingElse(cc));
  console.log('all', allmessage); // undefined
  return allmessage;
};
Here is an example:
fetchall = function(chatuser, me, cb) {
  var allmessage = [];
  var promise = this.model.find({}, function(err, docs) {
    if (err) {
      // here cb is the callback
      return cb(err);
    } else {
      console.log(docs); // working
      if (Array.isArray(docs) && docs.length > 0) {
        // do it here
        cb(null, docs);
      } else {
        // no result found; handle the error here
        cb();
      }
    }
  }).exec();
};
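Alternatively, since Mongoose queries return then-ables, the callback can be skipped entirely and the query's promise handed back to the caller; a minimal sketch:

// Promise-based variant: return the query's promise and let the
// caller decide what to do with the docs.
fetchall = function(chatuser, me) {
  return this.model.find({}).exec();
};

// Usage:
fetchall(chatuser, me)
  .then(docs => console.log('all', JSON.stringify(docs)))
  .catch(err => console.error(err));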

Loop not pushing game data to Array before console dump?

I have been looking over this code and redoing it so often, trying to get this loop to work. I have also come to realise I am getting a 429 error from S3, which I have to look into too. In the matter at hand, I am attempting to save the data to my gameData array and then output it via console after the promise has fulfilled, so that ultimately the promise variable can return the array. But I am not able to get all the data to store in the array. Something to do with the loop, I guess, but I can't pinpoint it. Any ideas?
var getGameData = function(matchIdArray) {
  var promise = new Promise(function(resolve, reject) {
    s3.headObject({ Bucket: 'lolsearchgames', Key: '347341' }, function(err, result) {
      if (err && err.code === 'NotFound') {
        var gameData = new Array();
        for (var i = 0; i < matchIdArray.length; i++) {
          gameData.push(new Promise(function(res, rej) {
            lolapi.Match.get(matchIdArray[i], function(error, gamedata) {
              if (error) {
                rej(error);
              }
              if (gamedata) {
                res(gamedata);
              }
            });
          })
          .then(function() {
            if (i === 9) {
              console.log(gameData);
              resolve(gameData);
            }
          }));
        }
      } else {
        // Retrieve from s3 Bucket
      }
    });
  });
  return promise;
};
Here's a snippet of your code that needs to be rewritten
if (err && err.code === 'NotFound') {
  var gameData = new Array();
  for (var i = 0; i < matchIdArray.length; i++) {
    gameData.push(new Promise(function(res, rej) {
      lolapi.Match.get(matchIdArray[i], function(error, gamedata) {
        if (error) {
          rej(error);
        }
        if (gamedata) {
          res(gamedata);
        }
      });
    }));
  }
  // gameData (not the undefined `gamedata`) holds the promises
  Promise.all(gameData).then(function(results) {
    console.log(results);
    resolve(results);
  });
}
This uses Promise.all on the array of gamedata promises to wait for them all to resolve before resolving the "main" promise.
Note: this is a quick and dirty fix; the code may be able to be rewritten to remove the need for the outer new Promise - however, without seeing the code for the else branch, that's only speculation on my part.
Still "quick and dirty", but a better rewrite:
if (err && err.code === 'NotFound') {
  Promise.all(matchIdArray.map(function(matchId) {
    return new Promise(function(res, rej) {
      lolapi.Match.get(matchId, function(error, gamedata) {
        if (error) {
          rej(error);
        }
        if (gamedata) {
          res(gamedata);
        }
      });
    });
  })).then(function(gameData) {
    // Promise.all resolves with the array of results
    console.log(gameData);
    resolve(gameData);
  });
}
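Taking the answer's own note one step further, the outer new Promise wrapper can be dropped entirely by returning the Promise.all chain; a sketch of getGameData along those lines (the S3 branch is elided, as in the original):

var getGameData = function(matchIdArray) {
  // Build one promise per match id and return the combined promise.
  return Promise.all(matchIdArray.map(function(matchId) {
    return new Promise(function(res, rej) {
      lolapi.Match.get(matchId, function(error, gamedata) {
        if (error) return rej(error);
        res(gamedata);
      });
    });
  }));
};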

How to Send response after for each loop completed

res.json should execute after the forEach loop completes its execution:
var todoarr = (req.body.data) ? req.body.data : undefined;

todoarr.forEach(function(element) {
  if (element.done == true) {
    TodoService.removeTodo(element, function(success) {
    });
  }
});

res.json("success");
You can try to use async.js (http://caolan.github.io/async/), e.g. its each method (http://caolan.github.io/async/docs.html#each).
Or you can try to use Promise.all.
For example:
let promiseArr = [];

todoarr.forEach(function(element) {
  if (element.done == true) {
    promiseArr.push(somePromiseMethod(element));
  }
});

// now execute promise all
Promise.all(promiseArr)
  .then((result) => res.send("success"))
  .catch((err) => res.send(err));
More info here: https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Promise/all
An example promise method:
function somePromiseMethod(element) {
  return new Promise((resolve, reject) => {
    TodoService.removeTodo(element, function(success) {
      resolve();
    });
  });
}
Hope this helps.
You can't send multiple responses for a single request; the only thing you can do is send a single response with the array of results, e.g. with async:
const async = require('async');

// FIX ME: this isn't correctly handled!
const todoarr = (req.body.data) ? req.body.data : undefined;
let results = [];

async.each(todoarr, function(element, callback) {
  console.log('Processing todo ' + element);
  if (element.done == true) {
    TodoService.removeTodo(element, function(err, success) {
      if (err) {
        callback(err);
      } else {
        results.push(success);
        callback(null);
      }
    });
  } else {
    // the iteratee callback must be invoked for every element,
    // otherwise async.each never finishes
    callback(null);
  }
}, function(err) {
  if (err) {
    console.log('An element failed to process', err);
    res.status(500).json(err);
  } else {
    console.log('All elements have been processed successfully');
    // array with the results of each removeTodo job
    res.status(200).json(results);
  }
});
You can send the response inside the callback function of forEach.
Modify your function so that it will call res.json() on the last iteration only.
Example:
var todoarr = (req.body.data) ? req.body.data : undefined;

todoarr.forEach(function(element, index) {
  if (element.done == true) {
    TodoService.removeTodo(element, function(success) {
    });
  }
  if (index == todoarr.length - 1) {
    res.json("success");
  }
});
However, it may not be according to coding standards, and note that while it sends the response on the last iteration of the loop, it still does not wait for the asynchronous removeTodo calls themselves to finish.
