Processing array of data with promise - javascript

I'm looking for guidance on how best to handle the following scenario, as I'm fairly new to JS and async development. I call one API (callApi) which returns an array of items. I need to take those items and send them to another API (callApi2), but that API doesn't have a bulk method, so I have to call it once for each item in the array. Below is how I have it structured: the getArray promise returns the first array, then I send that array to a second promise (getIndividualData) where I loop and call the second API. Is there a better way to structure this? If I pass in a large array, I may need to pace the calls to the API so I don't get throttled... so maybe I need some version of Promise.all?
let getArray = function() {
    return new Promise(function(resolve, reject) {
        callApi.get().on('success', function(result, response) {
            resolve(result);
        });
    });
}

let getIndividualData = function(arrayOfItems) {
    return new Promise(function(resolve, reject) {
        var responseArray = [];
        for (var i = 0; i < arrayOfItems.length; i++) {
            callApi2.get(arrayOfItems[i]).on('success', function(result, response) {
                responseArray.push(result);
            });
        }
        resolve(responseArray);
    });
}

let failureCallback = function() {
    return "Error!";
}

getArray().then(function(response) {
    return getIndividualData(response);
}).then(function(finalArray) {
    console.log(`The final array is ${JSON.stringify(finalArray)}`);
}).catch(failureCallback);

You can make a request for each item in a large array without getting throttled by implementing a concurrency throttler with a Set of Promises and async/await syntax. I've duplicated your code below, modifying the implementation of getIndividualData and passing in concurrency as an option.
let getArray = function() {
    return new Promise(function(resolve, reject) {
        callApi.get().on('success', function(result, response) {
            resolve(result);
        });
    });
}
let getIndividualData = async function(arrayOfItems, { concurrency }) {
    var promiseSet = new Set(),
        responseArray = [],
        i = 0;
    while (i < arrayOfItems.length) {
        if (promiseSet.size >= concurrency) {
            // Wait for at least one in-flight request to settle before starting another
            await Promise.race(promiseSet)
        }
        const promise = new Promise(function(resolve, reject) {
            callApi2.get(arrayOfItems[i]).on('success', function(result, response) {
                resolve(result)
            })
        })
        // Track the in-flight promise so the concurrency check above can see it
        promiseSet.add(promise)
        responseArray.push(promise.then(result => {
            promiseSet.delete(promise)
            return result
        }))
        i += 1
    }
    return Promise.all(responseArray)
}
let failureCallback = function() {
    return "Error!";
}

getArray().then(function(response) {
    return getIndividualData(response, { concurrency: 10 });
}).then(function(finalArray) {
    console.log(`The final array is ${JSON.stringify(finalArray)}`);
}).catch(failureCallback);
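Note that neither the original code nor this version ever calls reject, so a failed request would leave its promise pending forever. If callApi2 also emits an 'error' event (an assumption; only 'success' is shown in the question), you could wire up rejection with a small helper, for example:

// callApi2Request is a hypothetical helper name; the 'error' event is assumed
function callApi2Request(item) {
    return new Promise(function(resolve, reject) {
        const req = callApi2.get(item);
        req.on('success', function(result, response) { resolve(result); });
        req.on('error', function(err) { reject(err); });
    });
}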

Reformulating this into a helper that promisifies an object exposing an .on('success') event handler, plus an async function for the top-level .then()-ish code, gives us something like this...
To pace the API calls, add p-limit or similar to getIndividualData (a sketch follows the code below).
function promisifyOnSuccessObj(onSuccessObj) {
    return new Promise((resolve) => {
        onSuccessObj.on("success", (result, response) => resolve(result));
        // TODO: what about `response`?
        // TODO: onSuccessObj.on('error')..?
    });
}

function getIndividualData(arrayOfItems) {
    // Returns an array of promises
    return arrayOfItems.map((item) =>
        promisifyOnSuccessObj(callApi2.get(item)),
    );
}

async function doThings() {
    const result = await promisifyOnSuccessObj(callApi.get());
    const individualDatas = await Promise.all(getIndividualData(result));
    console.log(`The final array is ${JSON.stringify(individualDatas)}`);
}
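For the pacing mentioned above, a minimal sketch using the p-limit package (the concurrency of 10 is an arbitrary example value):

const pLimit = require('p-limit'); // newer p-limit versions are ESM-only; use `import pLimit from 'p-limit'` there

const limit = pLimit(10); // at most 10 requests in flight at a time

function getIndividualData(arrayOfItems) {
    // Each call is queued through the limiter, so no more than 10 run concurrently
    return arrayOfItems.map((item) =>
        limit(() => promisifyOnSuccessObj(callApi2.get(item))),
    );
}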

You could combine Promise.all, map and async/await syntax and end up with one array of resolved individual results, based on the previously resolved array promise.
const mockApi = {
    request: (response) => {
        return new Promise((resolve, reject) => {
            setTimeout(() => resolve(response), 1000)
        })
    },
    getArray() {
        return this.request(['foo', 'bar', 'baz'])
    },
    getItem(item) {
        return this.request(`Resolved: ${item}`)
    }
}

async function getData() {
    const array = await mockApi.getArray();
    const final = await Promise.all(array.map(e => mockApi.getItem(e)));
    console.log(final)
}

getData()
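If any individual request can reject, Promise.all rejects as soon as the first one fails, so you may want a try/catch around the awaits. A sketch using the same mockApi (getDataSafely is a hypothetical name; the empty-array fallback is just an example choice):

async function getDataSafely() {
    try {
        const array = await mockApi.getArray();
        return await Promise.all(array.map(e => mockApi.getItem(e)));
    } catch (err) {
        // One of the requests failed; log it and fall back to an empty result
        console.error(err);
        return [];
    }
}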

Related

How to use Iterators and generators to perform the following operations?

I want to get the data.json file, parse it into an object, and make it iterable. (Don't change the object's data type into an array.)
Make it iterable using:
1. Iterators
2. Generators
When that object is passed to a for...of loop, I should get the output for each item in the following pattern:
Post Id: 1 Title: Some title
Post Id: 2 Title: Some other title
And so on …
This is the link to the JSON file: https://raw.githubusercontent.com/attainu/curriculum-master-fullstack/master/coding-challenges/deep-dive/iterators-data.json?token=AOGF265VMPYWFKXO6RNGXPS67WAMM
console.log("connected");
function fetchJSONFile(path, callback) {
var httpRequest = new XMLHttpRequest();
httpRequest.onreadystatechange = function() {
if (httpRequest.readyState === 4) {
if (httpRequest.status === 200) {
var data = JSON.parse(httpRequest.responseText);
if (callback) callback(data);
}
}
};
httpRequest.open('GET', path);
httpRequest.send();
}
fetchJSONFile(`https://raw.githubusercontent.com/attainu/curriculum-master-fullstack/master/coding-challenges/deep-dive/iterators-data.json?token=AOGF265VMPYWFKXO6RNGXPS67WAMM`, function(data)
{
data[Symbol.iterator] = function()
{
var c=0;
return {
// I Don't know how to access the key of the object inside the object , Can you help me please??
next() {
c++;
if (c <= data.length) {
return { done: false, value: "value" };
} else {
return { done: true };
}
}
}};
for (let val of data) {
console.log(val);
}
});
I have tried this with fetch; you can adapt it as needed. I did a little digging and came up with this. You will have to define your own iterator. Is this what you wanted?
const fetch = require('node-fetch');

const arr = [];
let status;

// Call the fetch function, passing the URL of the API as a parameter
fetch('https://raw.githubusercontent.com/attainu/curriculum-master-fullstack/master/coding-challenges/deep-dive/iterators-data.json?token=AOGF265VMPYWFKXO6RNGXPS67WAMM')
    .then((res) => {
        status = res.status;
        return res.json();
    })
    .then((jsonData) => {
        jsonData[Symbol.iterator] = function () {
            var self = this;
            var values = Object.keys(this);
            var i = 0;
            return {
                next: function () {
                    return {
                        value: self[values[i++]],
                        done: i > values.length
                    };
                }
            };
        };
        // now we can iterate over the object
        for (var p of jsonData) {
            const obj = {
                "PostID": p.id,
                "Title": p.title
            };
            arr.push(obj);
        }
        console.log(arr);
    })
    .catch((err) => {
        // handle the error, for example:
        console.error(err);
    });
So you are asked to create a generator and iterator.
It is not clear what is expected: if you create an iterator on the response, then this is only possible when the response has already been received, not at the time you initiate the HTTP request. So that iterator can only exist in the future, at the time the response comes back.
There are at least two distinct ways to do this:
1. Create an iterator immediately, but an async one
This way you can create it immediately, but the resulting iterator will yield promises, not the actual response values. With for await ... of you can get the values from that iterator in an asynchronous manner.
Here is how that looks:
async function* generator(path) {
    const obj = await fetch(path).then(resp => resp.json());
    for (const postId in obj) yield obj[postId];
}

// The main program has to be asynchronous:
(async (path) => {
    // Consume the async iterator that you get from the async generator
    console.log("wait for it...");
    for await (let { id, title } of generator(path)) {
        console.log("postId: ", id, "title: ", title);
    }
})("https://raw.githubusercontent.com/attainu/curriculum-master-fullstack/master/coding-challenges/deep-dive/iterators-data.json?token=AOGF265VMPYWFKXO6RNGXPS67WAMM");
2. Create an iterator only after you have received the response
Here you create a normal iterator, and only at the time you receive the response:
function* generator(obj) {
    for (const postId in obj) yield obj[postId];
}

// The main program has to be asynchronous:
(async (path) => {
    console.log("wait for it...");
    // Perform the request
    const obj = await fetch(path).then(resp => resp.json());
    // Consume the iterator that you get from the generator
    for (let { id, title } of generator(obj)) {
        console.log("postId: ", id, "title: ", title);
    }
})("https://raw.githubusercontent.com/attainu/curriculum-master-fullstack/master/coding-challenges/deep-dive/iterators-data.json?token=AOGF265VMPYWFKXO6RNGXPS67WAMM");

Wait for callback to be returned in a controller

I want to upload some files, add them to a database and return the ids of the new objects.
fn: async function (inputs) {
    let docIds = []
    let settings = {...}
    await inputs.filesToUpload.upload(settings, async (err, files) => {
        if (err)
            throw {'invalid': 'The provided data is invalid.'}
        for (let i = 0; i < files.length; i += 1) {
            let newDocument = await Document.create({
                name: file.filename
            }).fetch()
            docIds.push(newDocument.id)
        }
    })
    return {
        ids: docIds
    }
})
Unfortunately, the controller doesn't wait for the objects to be created in the database and returns {ids: []} immediately; only afterwards are the documents uploaded and the objects created. I have tried passing the ids via a callback and via a promise, but the controller always executes the return without waiting for the results.
inputs.filesToUpload.upload takes a callback, which is always going to be asynchronous.
The async keyword in front of a callback-based function cannot make it wait; async/await only works if the function returns a promise.
Check the following code, where I have extracted the upload flow into a separate function that returns a promise.
You can then await that promise and get the generated ids.
async function test(inputs) {
    const docIds = await upload(inputs, {});
    return { ids: docIds };
}

function upload(inputs, settings) {
    return new Promise((resolve, reject) => {
        const ids = [];
        inputs.filesToUpload.upload(settings, async (err, files) => {
            if (err) {
                return reject({ 'invalid': 'The provided data is invalid.' });
            }
            for (let i = 0; i < files.length; i += 1) {
                let newDocument = await Document.create({ name: files[i].filename }).fetch();
                ids.push(newDocument.id);
            }
            resolve(ids);
        });
    });
}
Please note that the above function is just clarifying the use of promises.
It can be implemented in various ways, especially if we want to optimise it.
EDIT
As an example, Promise.all can be used to create the documents concurrently if the order in which they are created is not a concern, something like this:
function upload(inputs, settings) {
    return new Promise((resolve, reject) => {
        inputs.filesToUpload.upload(settings, async (err, files) => {
            if (err) {
                return reject({ 'invalid': 'The provided data is invalid.' });
            }
            const newDocuments = await Promise.all(files.map(file =>
                Document.create({ name: file.filename }).fetch()
            ));
            resolve(newDocuments.map(newDocument => newDocument.id));
        });
    });
}
I hope this helps.

For loop not updating array from mongoose callback

I think my problem is with the asynchronous nature of JS. I'm trying to push items into an array, but it doesn't seem to be updating... I added console.log statements inside the for loop and can see it populate the array with numbers, but when I console.log the array outside the loop, I get an empty array. I am using Mongoose.
Any suggestions?
Here's the code:
let collections = [];
return Promise.all(courts.map(court => {
    return new Promise((resolve, reject) => {
        return Promise.all(court.users.map(async user => {
            let tempPromise = new Promise((resolve, reject) => {
                setTimeout(() => {
                    resolve();
                }, 5000);
            });
            return SignDetail.find({
                userName: user.userName,
                signStatus: "signIn",
            }).then(function(sign) {
                if (user.userName.endsWith('zs')) {
                    let signCount = 0;
                    if (sign.length > 1) {
                        for (let j = 0; j < sign.length; j++) {
                            let courtObj = { courtName: sign[j].userName }; // make court object
                            signCount++; // increment each time there's a signature
                            if (j === sign.length - 1) { // only push the object once all signatures have been counted
                                courtObj.signCount = signCount;
                                collections.push(courtObj);
                                console.log(collections);
                            }
                        }
                    }
                } // end here
            });
            return tempPromise;
        })).then(_ => resolve(collections));
    })
})).then(collections => {
    // HERE you will have your collections, and you can use this promise wherever this function is called.
    console.log(collections);
});
SignDetail.find() is an async function, so you cannot return res.render synchronously. You need to return a promise from this function which resolves to the desired output.
You can do something like this:
let collections = [];
return Promise.all(courts.map(court => {
    return new Promise((resolveCourt, rejectCourt) => {
        Promise.all(court.users.map(oUser => {
            // Wrap each user's lookup in a promise that resolves once its result has been pushed
            return new Promise((resolve, reject) => {
                if (oUser.userName.endsWith('zs')) {
                    SignDetail.find(
                        { userName: oUser.userName, signStatus: 'signIn' },
                        function(err, sign) {
                            if (err) return reject(err);
                            collections.push(sign.length);
                            resolve();
                        });
                } else {
                    resolve();
                }
            });
        })).then(_ => resolveCourt(), rejectCourt);
    });
})).then(_ => {
    // HERE you will have your collections, and you can use this promise wherever this function is called.
    console.log(collections);
});
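For reference, recent Mongoose versions return a thenable query when no callback is passed to find(), so the same idea can be written without manual promise wrappers. A sketch under that assumption (collectSignCounts is a hypothetical name; it pushes sign.length per matching user, as above):

async function collectSignCounts(courts) {
    const collections = [];
    // Gather every user across all courts and run their queries concurrently
    const users = courts.flatMap(court => court.users);
    await Promise.all(users.map(async oUser => {
        if (!oUser.userName.endsWith('zs')) return;
        const sign = await SignDetail.find({ userName: oUser.userName, signStatus: 'signIn' });
        collections.push(sign.length);
    }));
    return collections;
}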

Recursion with promises on async calls

I am using an external API as my data source. That API returns its data paginated, so I can get data for page 1, 2, 3, etc. The problem is that I need all of the data at once; the API doesn't support this, so I have to write it myself.
I have written the code for the actual call:
function getTransactionPart(start) {
    return new Promise(function(resolve, reject) {
        const options = {
            url: 'myurl?limit=40&start=' + start,
            json: true
        };
        let result = { total_transactions: 0, transactions: [] };
        request(options, function (error, response, body) {
            if (error) {
                return reject(result);
            }
            body.data.forEach(function (transaction) {
                result.transactions.push({
                    timestamp: transaction.timestamp,
                    amount: transaction.amount,
                    confirmed: transaction.confirmed
                });
            });
            result.total_transactions = body.total;
            return resolve(result);
        });
    });
}
The above code returns the expected results with the limit that I gave. I also get a number back (result.total_transactions); when this is more than 40, I need to make another call with 40 as the start, and so on.
The code where I need to combine it:
function getTransactions(start) {
    return new Promise(function(resolve, reject) {
        getTransactionPart(start).then(result => {
            if (result.total_transactions > 40) {
                // next call
            } else {
                // return all?
            }
            console.log(result);
        }).catch(error => {
            console.error(error);
            return r.json({
            });
        });
    });
}
So I do the first call with getTransactionPart(0); after that the method itself needs to combine the results from all the sub-calls and return the whole result as expected. How can I do this with recursion and promises?
This is easier if you use an async function and await the request:
async function getTransactions(start) {
    const result = [];
    for (let pos = start; ; pos += 40) {
        const { total_transactions, transactions } = await getTransactionPart(pos);
        result.push(...transactions);
        // Stop once every transaction reported by the API has been collected
        if (result.length >= total_transactions) break;
    }
    return result;
}
For sure you could also do this recursively, but do you really need that?
async function getTransactions(start) {
    const { total_transactions, transactions } = await getTransactionPart(start);
    // Stop recursing once this page reaches the end of the reported total
    if (start + 40 >= total_transactions)
        return transactions;
    return transactions.concat(await getTransactions(start + 40));
}
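Either version is consumed the same way; a usage sketch:

getTransactions(0)
    .then(allTransactions => {
        console.log(`Fetched ${allTransactions.length} transactions in total`);
    })
    .catch(error => console.error(error));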

Iterate array and wait for promises

How can I iterate through an array of data using promises and return the resulting data? I have seen the promises.push(asyncFunc) approach, but some of the entries in my array will fail, so from what I gather I can't use that.
var filesFromDisk = [
    '41679_4_2015-09-06_17-02-12.mp4',
    '41679_4_2015-09-06_17-02-12.smil',
    '41680_4_2015-09-09_10-44-05.mp4'
];

start(filesFromDisk)
    .then((data) => {
        console.log(data); // Want my data here
    });
I call start(dbFiles) from another file, which is why I want the data returned there.
function start(dbFiles) {
    var listOfFiles = [],
        promises = [];
    return new Promise((fulfill, reject) => {
        for (var i = 0; i < dbFiles.length; i++) {
            getMp4(dbFiles[i])
                .then((data) => {
                    listOfFiles = listOfFiles.concat(data);
                    console.log(listOfFiles);
                });
        }
        fulfill(listOfFiles); // Need to happen AFTER for loop has filled listOfFiles
    });
}
So for every entry in my array I want to check whether the file with the new extension exists and read that file. If the file with that extension does not exist, I fulfill with the original filename. My Promise.all chain works and all the data is returned in the for loop above (getMp4(dbFiles[i])).
function getMp4(filename) {
    var mp4Files = [];
    var smil = privateMethods.setSmileExt(localData.devPath + filename.toString());
    return new Promise((fulfill, reject) => {
        Promise.all([
            privateMethods.fileExists(smil),
            privateMethods.readTest(smil)
        ]).then(() => {
            readFile(filename).then((files) => {
                fulfill(files);
            });
        }).catch((err) => {
            if (!err.exists) fulfill([filename]);
        });
    });
}

function readFile(filename) {
    var filesFromSmil = [];
    return new Promise((fulfill, reject) => {
        fs.readFile(localData.devPath + filename, function (err, res) {
            if (err) {
                reject(err);
            }
            else {
                xmlParser(res.toString(), {trim: true}, (err, result) => {
                    var entry = JSON.parse(JSON.stringify(result.smil.body[0].switch[0].video));
                    for (var i = 0; i < entry.length; i++) {
                        filesFromSmil.push(privateMethods.getFileName(entry[i].$.src));
                    }
                });
                fulfill(filesFromSmil);
            }
        });
    });
};
These are the methods used in the Promise.all chain in getMp4; no known problems with these.
var privateMethods = {
    getFileName: (str) => {
        var rx = /[a-zA-Z-1\--9-_]*.mp4/g;
        var file = rx.exec(str);
        return file[0];
    },
    setSmileExt: (videoFile) => {
        return videoFile.split('.').shift() + '.smil';
    },
    fileExists: (file) => {
        return new Promise((fulfill, reject) => {
            try {
                fs.accessSync(file);
                fulfill({exists: true});
            } catch (ex) {
                reject({exists: false});
            }
        });
    },
    readTest: (file) => {
        return new Promise((fulfill, reject) => {
            fs.readFile(file, (err, res) => {
                if (err) reject(err);
                else fulfill(res.toString());
            });
        });
    }
}
If you need them to run in parallel, Promise.all is what you want:
function start(dbFiles) {
    return Promise.all(dbFiles.map(getMp4));
}
That starts the getMp4 operation for all of the files and waits until they all complete, then resolves with an array of the results. (getMp4 will receive multiple arguments: the value, its index, and a reference to the dbFiles array; but since it only uses the first, that's fine.)
Usage:
start(filesFromDisk).then(function(results) {
    // `results` is an array of the results, in order
});
Just for completeness, if you needed them to run sequentially, you could use the reduce pattern:
function start(dbFiles) {
    return dbFiles.reduce(function(p, file) {
        return p.then(function(results) {
            return getMp4(file).then(function(data) {
                results.push(data);
                return results;
            });
        });
    }, Promise.resolve([]));
}
Same usage. Note how we start with a promise resolved with [], then queue up a bunch of then handlers, each of which receives the array, does the getMp4 call, and when it gets the result pushes the result on the array and returns it; the final resolution value is the filled array.
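For comparison, the same sequential flow can also be written as a plain loop with async/await (a sketch under the same assumptions as above):

async function start(dbFiles) {
    const results = [];
    for (const file of dbFiles) {
        // Each getMp4 call starts only after the previous one has settled
        results.push(await getMp4(file));
    }
    return results;
}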
