Execute a sequential requests handling each one - javascript

I need to send requests in sequence, but stop the sequence when one returns an error. I'm using the $http provider from Angular.
My code is this:
// The requests to run strictly one after another.
const steps = [
{ url: '/api/test/1' },
{ url: '/api/test/2' },
{ url: '/api/test/3' },
{ url: '/api/test/4' },
]
// Fire the request at `index`; on success, recurse to index+1.
const executeStep = index => {
// Validate step existence
if (typeof steps[index] == 'undefined') return
// Request
$http({method: 'get', url: steps[index].url})
.success(response => {
// NOTE(review): a throw here is likely NOT routed to the .error handler
// below (.error fires for HTTP-level failures) — verify against the
// Angular $http docs for how .success callbacks handle exceptions.
if (!response.success) throw response
executeStep(index+1)
}).error(err => {
console.error(err)
alert('Error')
})
}
// Start the sequence from the first step.
executeStep(0)
My question is: Is there a better way to do this job?

Here is the code, very similar to yours, but without passing index numbers around, so it is a bit clearer.
// Requests to perform strictly in order; entries are removed as they finish.
const queue = [
{ url: '/api/test/1' },
{ url: '/api/test/2' },
{ url: '/api/test/3' },
{ url: '/api/test/4' },
];
// Issue the request at the head of the queue; on success, drop it and keep
// going until the queue drains. On failure, log and stop — nothing recurses.
function runQueue() {
  const head = queue[0];
  const onSuccess = (response) => {
    console.log(response);
    queue.shift();
    if (queue.length > 0) {
      runQueue();
    }
  };
  const onError = (err) => {
    console.error(err);
  };
  $http({ method: 'get', url: head.url }).success(onSuccess).error(onError);
}
Solution using Bluebird Promises.
Another good option can be (especially if your code is server-side nodejs) using of bluebird Promise library.
That library is not very popular nowadays because Promises are supported natively almost everywhere. But some of its methods are still very useful. In your case, the Promise.each method will work perfectly.
const Promise = require('bluebird'); // or import * as Promise from 'bluebird';
const queue = [
  { url: '/api/test/1' },
  { url: '/api/test/2' },
  { url: '/api/test/3' },
  { url: '/api/test/4' },
];
// Promise.each runs the iterator over the items strictly in sequence and
// stops at the first rejection — exactly the required behaviour.
Promise.each(queue, (req) => {
  // FIX: the original snippet called `http$`, a typo for the Angular `$http`
  // service used everywhere else on this page.
  return $http({method: 'get', url: req.url}).then((resp) => {
    console.log(resp);
  });
}).then(() => {
  console.log("done");
}).catch(err => {
  console.error(err);
})

What you need is promise chaining and recursion.
const steps = [
  { url: '/api/test/1' },
  { url: '/api/test/2' },
  { url: '/api/test/3' },
  { url: '/api/test/4' },
]
// Run the request at `index`, chaining the next request off its success.
// Returns a promise that resolves once every remaining step has finished,
// or rejects as soon as any request fails (HTTP error or success:false body).
const executeStep = index => {
  // Base case: past the end of the list.
  // FIX: the original returned the bare value `true` here, so
  // `executeStep(0).then(...)` would crash with "then is not a function"
  // whenever `steps` is empty. Always return a promise instead.
  if (typeof steps[index] == 'undefined') return Promise.resolve(true);
  // Request
  return $http({method: 'get', url: steps[index].url})
    .then(response => {
      if (!response.success) throw response
      return executeStep(index+1)
    })
}
executeStep(0)
  .then( () => console.log('all done'))
  .catch(e => console.error('something failed', e))
Here, every time a request is finished it returns back a new promise that contains the next request. That way executeStep(0) will be resolved only when every other promise has finished or it will be rejected immediately if any promise encounters an error.

You can use async function in combination with promises for that.
const queue = [
  { url: '/api/test/1' },
  { url: '/api/test/2' },
  { url: '/api/test/3' },
  { url: '/api/test/4' },
];
// Adapt the callback-style $http call into a standard Promise.
function oneRequest(req) {
  return new Promise((resolve, reject) => {
    $http({ method: 'get', url: req.url })
      .success((response) => resolve(response))
      .error((err) => reject(err));
  });
}
// Await each request in turn; the first rejection aborts the sequence and
// propagates to the caller. Resolves with all responses, in order.
async function allRequests(reqs) {
  const resps = [];
  for (const req of reqs) {
    resps.push(await oneRequest(req));
  }
  return resps;
}
allRequests(queue)
  .then((resps) => console.log(resps))
  .catch((err) => console.log(err));

Related

Resolve promises created within Array.map on a timeout

I'm trying to resolve an array of promises using a timeout to avoid rate limit exceptions on API requests. However, I'm still hitting the rate limit, as the timeout doesn't seem to be working.
Not really sure how to make this work.
router.route("/").put(async (req, res) => {
const { apiKey: access_token, course, assignments } = req.body;
try {
const returnedIds = [];
// NOTE(review): .map() fires every axios request immediately and returns a
// plain array; awaiting an array is a no-op, so nothing is throttled here.
const promises = await assignments.map((assignment) => {
return axios({
method: "PUT",
url: `https://url.to.api/api/v1/courses/${course}/assignments/${assignment.id}`,
params: {
access_token,
},
data: {
assignment: {
points_possible: assignment.points_possible,
},
},
});
});
// NOTE(review): this 5s timeout only delays *handling* of the responses;
// the requests themselves were already fired above, so the rate limit is
// still hit.
const promiseResolution = function() {
Promise.all([...promises]).then((values) => {
values.forEach((_, index) => {
returnedIds.push(assignments[index].id);
});
res.status(201).json({
returnedIds,
});
});
};
setTimeout(promiseResolution, 5000);
} catch (e) {
// NOTE(review): status is set but no body is sent — presumably .json() or
// .end() was intended here; verify.
res.status(401);
}
});
If you just want to put some time between API calls this should do.
// Process assignments strictly one at a time, pausing 5s between calls to
// stay under the API's rate limit. Responds 201 with the collected IDs, or
// 401 if any call throws.
router.route("/").put(async (req, res) => {
  const { apiKey, course, assignments } = req.body;
  try {
    const ids = [];
    for (const assignment of assignments) {
      ids.push(await loadID(apiKey, course, assignment));
      await wait(5000);
    }
    res.status(201).json({ returnedIds: ids });
  } catch (e) {
    res.status(401);
  }
});
// Promisified setTimeout: resolves (with no value) after `duration` ms.
function wait(duration) {
  return new Promise((resolve) => {
    setTimeout(resolve, duration);
  });
}
// Stub left for the reader to fill in: performs the single assignment
// update request and extracts the wanted field(s) from the response.
// Currently returns undefined.
function loadID(apiKey, course, assignment) {
// Makes the request, parses out the stuff you want from the response...
}
I would caution against using Promise.all since you probably want to check the result of each request before making the next one. For example, if the third request gets rate limited, you probably shouldn't bother making further requests.
It's because Promise.all fires all the promises at once, so your setTimeout sets a single timeout for the whole batch, not one per individual promise.
You should try to make delay for each promise:
// NOTE(review): as posted, .map() still fires every axios call immediately —
// the "delay function" below must defer the request itself (e.g. an async
// callback that awaits a timeout before calling axios) to have any effect.
const promises = await assignments.map((assignment) => {
// some delay function
return axios({
method: "PUT",
url: `https://url.to.api/api/v1/courses/${course}/assignments/${assignment.id}`,
params: {
access_token,
},
data: {
assignment: {
points_possible: assignment.points_possible,
},
},
});
});
You can try this: (It's React but you should only focus on fetchData function) and see the logs:
https://codesandbox.io/s/zen-feynman-ql833?file=/src/App.js

Creating a promise for web worker's onmessage event with nth number of REST calls

I am using web workers to fetch information of websites including their subsites(nth number) recursively. When all of the calls are done I want to fire off a function that would format the data it receives (allSites array). So I thought it would be a great idea to use a Promise.all with an object that has all my resolved promises.
The problem is that it doesn't wait for all the resolved promises because it's waiting to hear messages posted from the worker. I can't define a length because it could be any number of websites + subsites.
Bonus: I have an object with resolved promises. Can I call a certain resolve like this?
keyName[index]()
It says it's not a function but shouldn't I be able to call it like that? Any help is greatly appreciated.
// Walks a site tree via a web worker, accumulating results in allSites and
// tracking one resolve/reject function per requested URL.
function getTreeData(cb) {
let allSites = [];
let baseUrl = "https://www.somewebsite.com/"
let resolver = {};
let rejecter = {};
let workerUrl =
"https://www.somewebsite.com/siteassets/worker.js";
let myWorker = new Worker(workerUrl);
// Post the root request; its resolve/reject are stored under baseUrl.
function firstIteration() {
return new Promise((resolve, reject) => {
resolver[baseUrl] = resolve;
rejecter[baseUrl] = reject;
myWorker.postMessage({
requestDigest: document.getElementById("__REQUESTDIGEST").value,
qs1: "/_api/web/webinfos?$select=ServerRelativeUrl,Title",
qs2:
"/_api/Web/RoleAssignments?$expand=Member/Users,RoleDefinitionBindings",
url: baseUrl,
});
});
}
// NOTE(review): the promise returned by firstIteration() is discarded, so
// nothing awaits it.
firstIteration();
//spawn a worker
myWorker.onmessage = function (e) {
allSites = allSites.concat([
{ pathname: e.data.url, groups: e.data.permissions },
]);
// NOTE(review): the promises created below are discarded by forEach, and
// each resolver is stored under props.url — but the worker echoes back
// "www.somewebsite.com" + props.url (see the postMessage url), so the
// lookup further down will not find these entries.
e.data.sites.forEach(function (props) {
return new Promise((resolve, reject) => {
myWorker.postMessage({
requestDigest: document.getElementById("__REQUESTDIGEST").value,
qs1: "/_api/web/webinfos?$select=ServerRelativeUrl,Title",
qs2:
"/_api/Web/RoleAssignments?$expand=Member/Users,RoleDefinitionBindings",
url: "www.somewebsite.com" + props.url,
});
resolver[props.url] = resolve;
rejecter[props.url] = reject;
});
});
// NOTE(review): for subsite replies the key mismatch above makes this
// undefined — hence the "not a function" error.
resolver[e.data.url](); //it says that it is not a function
};
myWorker.onerror = function (e) {
rejecter[e.data.url]();
};
//After my first inital promises resovles resolve the rest (checks object of resolves)
// NOTE(review): a promise's resolve() function returns undefined, so .then()
// here throws. Also, Object.values(resolver) is a list of functions, not
// promises, so Promise.all would resolve immediately regardless.
resolver[baseUrl]().then(() => {
Promise.all(Object.values(resolver)).then(() => {
reduceData(cb, allSites);
});
});
}
Though it is working properly here's the code for the web worker. (worker.js)
// Map a raw site entry to the minimal shape used downstream.
// NOTE: returns a fixed placeholder URL; the `props` argument is unused.
function formatSites(props) {
  return { url: "www.someSite.com" };
}
// Flatten a role-assignment record into the shape used downstream: a sorted,
// comma-separated access string plus per-member detail objects.
function formatSitesInfo(props) {
  const roleNames = props.RoleDefinitionBindings.results.map((role) => role.Name);
  const access = roleNames.sort().join(", ");
  const member = props.Member;
  const users = member.Users?.results || [];
  return {
    access: access,
    // Presence of the AllowRequestToJoinLeave property is used as the
    // group-vs-user marker.
    isGroup: member.hasOwnProperty("AllowRequestToJoinLeave") ? true : false,
    name: member.Title,
    members: users.map(function (user) {
      return {
        access: access,
        email: user.Email,
        groupName: member.Title,
        id: user.Id,
        isAdmin: user.IsSiteAdmin,
        title: user.Title,
      };
    }),
  };
}
// GET props.url + props.qs with the SharePoint verbose-JSON headers and
// resolve with the parsed JSON body.
// FIX: fetch() already returns a promise, so wrapping it in `new Promise`
// (the promise-constructor anti-pattern) added nothing — just return the
// chain. Rejections from fetch/json propagate identically.
function _getRequest(props) {
  return fetch(props.url + props.qs, {
    method: "GET",
    headers: {
      Accept: "application/json; odata=verbose",
      "Content-type": "application/json; odata=verbose",
      "X-RequestDigest": props.requestDigest,
    },
  }).then(function (resp) {
    return resp.json();
  });
}
// Worker entry point: for each posted job, fire the two REST calls in
// parallel and post back the combined, formatted result.
self.addEventListener("message", function (e) {
  const job = e.data;
  // Jobs that already carry a `data` payload need no further processing.
  if (job.data) {
    return;
  }
  const sitesCall = _getRequest(Object.assign(job, { qs: job.qs1 }));
  const permsCall = _getRequest(Object.assign(job, { qs: job.qs2 }));
  Promise.all([sitesCall, permsCall]).then(function ([data1, data2]) {
    self.postMessage({
      info: data2.d.results.map(formatSitesInfo),
      sites: data1.d.results.map(formatSites),
      url: job.url,
    });
  });
});
I ended up making a timer that is reset whenever there is another call to the worker (another subsite was found to have children). If the timer stops, that means we have reached the end of the children for the sites. It then goes to the next function. There will be a dash of latency, but it's something I can live with.

Jest is green even if Expected is not equal Received

it('User is already present as a supplier', (done) => {
const store = mockStore({}, [{ type: 'get_user', data: { } }]);
return store.dispatch(userGetAction({ role: 'supplier' }, () => {})).then(() => {
// NOTE(review): this try/catch swallows assertion failures — the expect
// errors are logged and done() is still called, so Jest reports green.
try {
expect(store.getActions()[0].data.disabled).toEqual(true);
expect(store.getActions()[0].data.errormessage).toEqual('User is already present as a assitantbuyer');
} catch (err) {
console.log(err);
}
done();
}).catch(() => {
// NOTE(review): rejections are also muted here and the test still "passes".
done();
});
});
Why is it passing and showing a green status even though Expected is not equal to Received?
PASS src/actions/user-get-action-assistant-buyer.test.jsx
● Console
console.error node_modules/fbjs/lib/warning.js:33
console.log src/actions/user-get-action-assistant-buyer.test.jsx:25
{ Error: expect(received).toEqual(expected)
Expected value to equal:
"User is already present"
Received:
"User is already present"
at store.dispatch.then (/Users/prakashchandrabarnwal/Desktop/myProductInduct_CE_Admin/src/actions/user-get-action-assistant-buyer.test.jsx:23:57)
matcherResult:
{ actual: 'User is already present as a assitant buyer',
expected: 'User is already present as a assitantbuyer',
message: [Function],
name: 'toEqual',
pass: false } }
If I do not wrap the expect calls inside try/catch, it silently goes into .catch().
thunk code returning UnhandledPromiseRejectionWarning:
// Thunk: creates a user, then (for buyers only) populates the lookup table.
const buyerAction = (data = {}, cb) => dispatch => axios({
method: 'POST',
url: `http://localhost:3001/api/manageUsers`,
headers: {
'x-access-token': authService.getAccessToken()
},
data
}).then(res => new Promise((resolve, reject) => {
if (res.status === 200 && res.data) {
dispatch({ type: 'buyer_created', data: res.data.message });
// NOTE(review): when data.role !== 'buyer', neither resolve nor reject is
// ever called on this promise, so it never settles and the outer chain
// hangs. Wrapping an axios promise in `new Promise` is also the
// promise-constructor anti-pattern.
if (data.role === 'buyer') {
axios({
method: 'POST',
url: `http://localhost:3001/api/populateBuyerLookUp`,
headers: {
'x-access-token': authService.getAccessToken()
},
data
})
.then((response) => {
resolve(response);
}).catch((err) => {
reject(err);
});
}
cb(res.data.message);
} else {
reject(res);
}
}))
// NOTE(review): this swallows every error and logs a constant string,
// hiding rejection details from callers and tests.
.catch(() => {
console.log('error');
});
(node:44182) UnhandledPromiseRejectionWarning: Unhandled promise rejection. This error originated either by throwing inside of an async function without a catch block, or by rejecting a promise which was not handled with .catch(). (rejection id: 1)
So, your expect fails with an error, you catch this error and just log it, in other words, you mute the error. After all, you call "done" just like there is no error.
The code is doing exactly what you wrote: it ignores and mutes any errors. You have to remove all the catch blocks from your test.
it('User is already present as a supplier', () => {
  // Returning the promise lets Jest fail the test on any rejection or
  // failed expectation — no done() callback, no try/catch.
  const store = mockStore({}, [{ type: 'get_user', data: { } }]);
  const assertActions = () => {
    expect(store.getActions()[0].data.disabled).toEqual(true);
    expect(store.getActions()[0].data.errormessage).toEqual('User is already present as a assitantbuyer');
  };
  return store
    .dispatch(userGetAction({ role: 'supplier' }, () => {}))
    .then(assertActions);
});
You can return Promise from your test (There is no need to use done) and if promise fails, the whole test fails. that's all
UPD: regarding the UnhandledPromiseRejectionWarning, I think it could be linked with your request to "populateBuyerLookUp"; this request is completely out of the flow. I tried to fix it, but it is difficult to understand what you want to do exactly.
// Thunk: create the user, dispatch the confirmation action, optionally
// populate the buyer lookup table, then hand the server message to `cb`.
const buyerAction = (data = {}, cb) => dispatch => axios({
  method: 'POST',
  url: `http://localhost:3001/api/manageUsers`,
  headers: {
    'x-access-token': authService.getAccessToken()
  },
  data
})
  .then((res) => {
    dispatch({ type: 'buyer_created', data: res.data.message });
    // Only buyers need the extra lookup-table population call.
    let promise;
    if (data.role === 'buyer') {
      promise = axios({
        method: 'POST',
        url: `http://localhost:3001/api/populateBuyerLookUp`,
        headers: {
          'x-access-token': authService.getAccessToken()
        },
        data
      });
    }
    // Promise.resolve(undefined) settles immediately, so non-buyers fall
    // straight through to the callback.
    return Promise.resolve(promise).then(() => res.data.message);
  })
  .then((message) => {
    cb(message)
  })
  .catch((err) => {
    // FIX: the original logged only the constant string 'error', swallowing
    // all failure details; include the error object for diagnosis.
    console.log('error', err)
  });

Using jQuery when to defer ajax processing

I have a list of 15+ ajax requests that need to be called in a specific order. I need each ajax call to wait until the previous function finishes before making the next call. This issue arises because my ajax call, has a direct callback that is also an ajax call.
// Fires one check-in POST per selected item; each success triggers the
// matching weigh-in via its callback.
createCheckIn() {
this.selectedList = [...] // long list of objects
count = 0
// NOTE(review): `count` (and `i` below) are implicit globals, and `count`
// is incremented inside the async success callbacks — so the index passed
// to createWeighIn depends on response arrival order, not list order.
for ( i=0; i < this.selectedList.length; i++ ) {
// NOTE(review): this loop does not wait for the ajax call, so all POSTs
// are in flight at once.
$.ajax({
method: "POST",
url: url,
data: {
check_in: {
client_id: this.selectClient.id,
program_id: this.program_id
}
},
success: function(res) {
// NOTE(review): `that` is never defined in this snippet — presumably
// `let that = this` was intended before the loop; verify.
that.createWeighIn(count, res.id)
count = count + 1
},
error: function(err) {
console.log(err)
}
})
}
},
// POSTs the weigh-in for selectedList[index], tied to check_in_id.
createWeighIn(index, check_in_id) {
let data = {}
let that = this
data.weigh_in = this.selectedList[index]
$.ajax({
method: "POST",
url: url,
data: data,
success: function(res) {
console.log(res)
},
error: function(err) {
console.log(err)
}
})
}
the correct data is generated but I believe the ordering is off because eventually there is a call to createCheckIn() that begins before the previous entry has completed.
Is there a way to chain these functions such that createCheckIn() and createWeighIn() are called (and complete) before selectedList iterates.
Your for loop in createCheckIn() will not stop to wait for your ajax call to return. You can do something like:
// Serialized version: each check-in is issued only after the previous one's
// success callback runs, carrying the loop state through the arguments.
function createCheckIn(oldI, oldCount){
var count = 0;
var currentI = 0;
if(oldCount != null){
count = oldCount;
}
if(oldI != null){
currentI = oldI;
}
// NOTE(review): `this` inside a plain function will not be the component
// here, and `that` (used below) is never defined in this snippet — both
// need to be captured/bound by the caller; verify.
if(currentI < this.selectedList.length){
$.ajax({
method: "POST",
url: url,
data: {
check_in: {
client_id: this.selectClient.id,
program_id: this.program_id
}
},
success: function(res) {
that.createWeighIn(count, res.id)
// Recursing only after this request succeeded is what serializes the
// sequence.
createCheckIn(currentI + 1, count + 1);
},
error: function(err) {
console.log(err)
}
}); //ajax
} // if
}
seems likely that you can eliminate one of those counters too, the i or the count
Seems like this is missing some potentially really important details about what you need to do leading up to this (ie. this.selectedItems generation) and what happens after (what if one call checkin fails, what if a checkin succeeds but its corresponding weighIn fails, etc..). That said...
It seems you are not actually using the counter for anything other than to reference data you already have, so why not just pass that in directly like:
// Variant of createWeighIn that receives the weigh-in record directly
// instead of an index into this.selectedList.
createWeighIn(weighInData, check_in_id) {
let data = {};
let that = this;
data.weigh_in = weighInData;
// ... your other code
}
I would make createCheckIn only handle doing the ajax request and making a single "reservation" in your system. Then i would make a new method called checkIn that uses the two previous method to process all of selected items:
checkIn() {
let self = this;
let promises = [];
let this.selectedList = [...];
for (let = 0; i < this.selectedList.length; i++) {
// always create the deferred outside the call
let def = $.Deferred();
promises.push(def.promise());
this.createCheckIn().done(function (res) {
self.createWeighIn(self.selectedList[i], res.id))
.done(function () {
// resolve
def.resolve.apply(def, Array.prototype.slice.call(arguments);
})
.fail(function () {
def.reject.apply(def, Array.prototype.slice.call(arguments);
});
}).fail(function () {
// if checkin fails, always reject because we know weighIn wont be called
def.reject.apply(def, Array.prototype.slice.call(arguments);
});
};
// this will resolve/fail when all promises (from createWeighIn) resolve/fail
return $.when.apply(null, promises);
}
so putting it all together:
{
createCheckIn() {
let request = $.ajax({
method: "POST",
url: url,
data: {
check_in: {
client_id: this.selectClient.id,
program_id: this.program_id
}
}
})
.fail(function(err) {
console.log(err)
});
};
return request;
},
createWeighIn(data, check_in_id) {
let params = {};
params.weigh_in = data;
let request = $.ajax({
method: "POST",
url: url,
data: params,
success: function(res) {
console.log(res)
},
error: function(err) {
console.log(err)
}
});
return request;
},
checkIn() {
let self = this;
let promises = [];
let this.selectedList = [...];
for (let = 0; i < this.selectedList.length; i++) {
// always create the deferred outside the call
let def = $.Deferred();
promises.push(def.promise());
this.createCheckIn().done(function (res) {
self.createWeighIn(self.selectedList[i], res.id))
.done(function () {
// resolve
def.resolve.apply(def, Array.prototype.slice.call(arguments);
})
.fail(function () {
def.reject.apply(def, Array.prototype.slice.call(arguments);
});
}).fail(function () {
// if checkin fails, always reject because we know weighIn wont be called
def.reject.apply(def, Array.prototype.slice.call(arguments);
});
};
// this will resolve/fail when all promises (from createWeighIn) resolve/fail
return $.when.apply(null, promises);
}
}
I ended up introducing promises, and some recursion and removing the loop altogether. I basically begin the process by calling createCheckIn() with an index of 0:
// Kick off the chain at index 0; each check-in resolves into its weigh-in,
// which in turn starts the next check-in.
this.createCheckIn(0)
createCheckIn(index) {
this.selectedList = [...] // long list of objects
count = 0
// NOTE(review): `count` is an implicit global and is never used here.
let prom = new Promise(function(resolve, reject) {
// NOTE(review): `that` is referenced below but never defined in this
// method — presumably `let that = this` was omitted (createWeighIn has
// it); verify.
$.ajax({
method: "POST",
url: url,
data: {
check_in: {
client_id: that.selectClient.id,
program_id: that.program_id
}
},
success: function(res) {
resolve(that.createWeighIn(index, res.id))
},
error: function(err) {
reject(console.log(err))
}
})
})
// NOTE(review): `prom` is never returned, so callers cannot chain on it.
},
// POSTs the weigh-in for selectedList[index]; on success either finishes
// the whole run or recurses into the next check-in.
createWeighIn(index, check_in_id) {
let data = {}
let that = this
data.weigh_in = this.selectedList[index]
let prom = new Promise(function(resolve, reject) {
$.ajax({
method: "POST",
url: url,
data: data,
success: function(res) {
console.log(res)
if ( index == (that.selectedList.length - 1) ) {
that.complete = true
resolve(console.log("complete"))
} else {
index++
resolve(that.createCheckIn(index))
}
},
error: function(err) {
// NOTE(review): the error path never calls reject, so this promise
// never settles on failure.
console.log(err)
}
})
})
}

How to use promises for parallel uploads and not be constrained by having to wait for a promise to resolve?

I'm trying to parallelize uploading files to dropbox and am stuck at the maximum number of parallel requests a browser allows AND those requests needing to finish before the next requests commence. The second part slows things down considerably so I'm wondering if someone has an idea where I'm making things too complicated... Here is the code:
...
// Storage constructor; in batch mode it prepares the buffers used to group
// many putAttachment calls into one Dropbox batch commit.
function DropboxStorage(spec) {
...
if (spec.batch_upload) {
this._batch_upload = spec.batch_upload;
// Flush window in ms. NOTE: with `||`, a falsy batch_buffer (including an
// explicit 0) falls back to 3000.
this._batch_buffer = parseInt(spec.batch_buffer || 3000, 10);
// entries: pending commit entries; done: finished entries awaiting
// dispatch; defer: per-path deferreds used to answer each caller.
this._batch_dict = {"entries": [], "done": [], "defer": {}};
this._batch_resolver_list = [];
}
this._access_token = spec.access_token;
}
// I'm calling putAttachment for both single and batch upload. My idea
// was to use a trigger, which fires after _buffer_batch milliseconds
// finishing pending items unless resolved "redundant" - rejecting would
// also reject the RSVP.all call. Here is the putAttachment code:
// Upload one attachment. In batch mode: start an upload session, append the
// blob, queue the commit entry, and arm/refresh the delayed batch flush.
DropboxStorage.prototype.putAttachment = function (id, name, blob) {
var context = this,
id = restrictDocumentId(id),
path;
restrictAttachmentId(name);
if (!context._batch_upload) {
...
}
return new RSVP.Queue()
// Step 1: open a Dropbox upload session.
.push(function () {
return jIO.util.ajax({
type: "POST",
url: BATCH_START_URL,
headers: {
"Authorization": "Bearer " + context._access_token,
"Content-Type": "application/octet-stream",
"Dropbox-API-Arg": JSON.stringify({"close": false}),
}
});
})
// Step 2: record the commit entry for the later batch/finish call, then
// append the blob to the session and close it.
.push(function (evt) {
var session_id = JSON.parse(evt.target.response).session_id;
path = id + "/" + name;
context._batch_dict.entries.push({
"cursor": {"session_id": session_id, "offset": blob.size},
"commit": {
"path": path,
"mode": "overwrite",
"autorename": false,
"mute": false
}
});
return jIO.util.ajax({
type: "POST",
url: BATCH_UPLOAD_URL,
headers: {
"Authorization": "Bearer " + context._access_token,
"Content-Type": "application/octet-stream",
"Dropbox-API-Arg": JSON.stringify({
"cursor": {"session_id": session_id, "offset": 0},
"close": true
})
},
data: blob
});
})
// Step 3: each call arms its own delayed flush and resolves the previous
// call's trigger with true, marking that older flush redundant — so only
// the last call of a burst actually commits the batch.
.push(function () {
var len = context._batch_resolver_list.length,
call_result_defer = new RSVP.defer(),
trigger = new RSVP.defer(),
resolver = triggerCancelableBatchResolver(context, trigger.promise);
// resolve previous call without finishing the batch
if (0 < len && len < 1000 &&
context._batch_dict.entries.length !== 0) {
context._batch_resolver_list[len - 1].resolve(true);
}
context._batch_dict.defer[path] = call_result_defer;
context._batch_resolver_list.push(trigger);
// ISSUE: doing the below will parallelize to the max of concurrent
// requests a browser supports and wait for those to finish
// return RSVP.all([call_result_defer.promise, resolver]);
// this works, but without an answer for every request made
return;
})
.push(undefined, function (error) {
throw error;
});
};
The triggerCancelableBatchResolver looks like this:
// Arms a delayed flush: wait batch_buffer ms, or abort quietly if `trigger`
// resolves truthy first (a newer call took over). On timeout, commit every
// pending entry and poll Dropbox until the commit job completes.
function triggerCancelableBatchResolver(context, trigger) {
return new RSVP.Queue()
// wait for 3000ms OR the trigger being resolved
.push(function () {
return RSVP.any([RSVP.delay(context._batch_buffer), trigger]);
})
.push(function (is_redundant) {
// trigger => nothing happens
// NOTE(review): this relies on the delay path resolving with undefined
// while the trigger is resolved with true elsewhere — verify with the
// RSVP.delay docs.
if (is_redundant) {
return;
}
// finish all pending items
return new RSVP.Queue()
.push(function () {
return jIO.util.ajax({
type: "POST",
url: BATCH_FINISH_URL,
headers: {
"Authorization": "Bearer " + context._access_token,
"Content-Type": "application/json",
},
data: JSON.stringify({
"entries": context._batch_dict.entries
})
});
})
// Reset the pending buffers, then wait for Dropbox's async commit job.
.push(function (evt) {
context._batch_dict.entries = [];
context._batch_resolver_list = [];
return pollForCommitFinish(
context, JSON.parse(evt.target.response).async_job_id
);
})
.push(function (entries) {
context._batch_dict.done = context._batch_dict.done.concat(entries);
// no more requests coming in, finish
if (context._batch_dict.entries.length === 0) {
return finishBatch(context);
}
});
});
}
// recursively loop until commits on Dropbox have completed
// Poll Dropbox's batch status endpoint for `job_id`, retrying every
// batch_buffer ms while the job is still in progress; resolves with the
// per-entry results once the commit completes.
function pollForCommitFinish(context, job_id) {
return new RSVP.Queue()
.push(function () {
return jIO.util.ajax({
type: "POST",
url: BATCH_STATUS_URL,
headers: {
"Authorization": "Bearer " + context._access_token,
"Content-Type": "application/json",
},
data: JSON.stringify({"async_job_id": job_id})
});
})
.push(function (evt) {
var status = JSON.parse(evt.target.response);
if (status['.tag'] === "in_progress") {
// Still committing: wait one buffer interval and poll again.
return new RSVP.Queue()
.push(function () {
return RSVP.delay(context._batch_buffer);
})
.push(function () {
return pollForCommitFinish(context, job_id);
});
}
return status.entries;
})
}
// finish the batch returning the correct response for every request
// Answer every queued putAttachment call by settling its stored deferred
// with the matching entry from the finished batch, then reset batch state.
function finishBatch(context) {
return new RSVP.Queue()
.push(function () {
var defers = context._batch_dict.defer;
return RSVP.all(context._batch_dict.done.map(function (item) {
var path = item.path_display;
if (defers[path] === undefined) {
// NOTE(review): with the throw commented out, an unknown path falls
// through and the .reject/.resolve below throws a TypeError on
// undefined — verify the intended handling.
//throw;
}
// return a response to every batch request made
if (item[".tag"] === "failure") {
return context._batch_dict.defer[path].reject(item);
}
return context._batch_dict.defer[path].resolve(item);
}));
})
.push(function () {
context._batch_dict.done = [];
context._batch_dict.defer = {};
console.log("DONE")
});
}
The code works, but as mentioned above, if I want to return a response for every request made (or throw in case of issues), I need to return:
return RSVP.all([call_result_defer.promise, resolver]);
which will stall until call_result_defer.promise is resolved at the end of the batch with batch size being limited to number of parallel requests allowed by the browser.
If I instead just
return;
I can load up to 1000 files into the batch before resolving. However at the cost of being able to return a reply for individual calls to putAttachment.
I have tried returning a promise instead of a defer and calling it's resolve/reject methods once done, but with the same result.
Question:
Is there a promise-based way to return something which "flags" the request as done and returns a result once available? I thought promises were just that, but somehow I cannot get it to work here.
Thanks for help!

Categories

Resources