Using jQuery to defer ajax processing - javascript

I have a list of 15+ ajax requests that need to be called in a specific order. Each ajax call must wait until the previous one finishes before the next call is made. This is complicated by the fact that each ajax call has a success callback that itself makes another ajax call.
createCheckIn() {
  let that = this
  this.selectedList = [...] // long list of objects
  let count = 0
  for (let i = 0; i < this.selectedList.length; i++) {
    $.ajax({
      method: "POST",
      url: url,
      data: {
        check_in: {
          client_id: this.selectClient.id,
          program_id: this.program_id
        }
      },
      success: function(res) {
        that.createWeighIn(count, res.id)
        count = count + 1
      },
      error: function(err) {
        console.log(err)
      }
    })
  }
},
createWeighIn(index, check_in_id) {
  let data = {}
  let that = this
  data.weigh_in = this.selectedList[index]
  $.ajax({
    method: "POST",
    url: url,
    data: data,
    success: function(res) {
      console.log(res)
    },
    error: function(err) {
      console.log(err)
    }
  })
}
The correct data is generated, but I believe the ordering is off because eventually a call to createCheckIn() begins before the previous entry has completed.
Is there a way to chain these functions so that createCheckIn() and createWeighIn() are called (and complete) before selectedList iterates to the next item?

The for loop in createCheckIn() will not stop to wait for your ajax call to return. You can do something like:
function createCheckIn(oldI, oldCount) {
  var that = this;
  var count = 0;
  var currentI = 0;
  if (oldCount != null) {
    count = oldCount;
  }
  if (oldI != null) {
    currentI = oldI;
  }
  if (currentI < this.selectedList.length) {
    $.ajax({
      method: "POST",
      url: url,
      data: {
        check_in: {
          client_id: this.selectClient.id,
          program_id: this.program_id
        }
      },
      success: function(res) {
        that.createWeighIn(count, res.id)
        createCheckIn(currentI + 1, count + 1);
      },
      error: function(err) {
        console.log(err)
      }
    }); // ajax
  } // if
}
It seems likely that you can eliminate one of those counters too, either i or count, since they always move in lockstep.
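For illustration, a minimal sketch of that simplification, assuming the same selectedList, selectClient, url and createWeighIn as in the question (the .call(that, ...) is just one way to keep the right this):

// Hypothetical single-index version: `index` both selects the item passed
// to createWeighIn and acts as the recursion stop condition.
function createCheckIn(index) {
  var that = this;
  index = index || 0;
  if (index >= that.selectedList.length) return; // all items processed
  $.ajax({
    method: "POST",
    url: url,
    data: {
      check_in: {
        client_id: that.selectClient.id,
        program_id: that.program_id
      }
    },
    success: function (res) {
      that.createWeighIn(index, res.id);
      // only start the next check-in once this one has succeeded
      createCheckIn.call(that, index + 1);
    },
    error: function (err) {
      console.log(err);
    }
  });
}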

This seems to be missing some potentially important details about what needs to happen leading up to this (i.e. how this.selectedList is generated) and what happens after (what if one check-in call fails, what if a check-in succeeds but its corresponding weigh-in fails, etc.). That said...
It seems you are not actually using the counter for anything other than to reference data you already have, so why not just pass that in directly like:
createWeighIn(weighInData, check_in_id) {
  let data = {};
  let that = this;
  data.weigh_in = weighInData;
  // ... your other code
}
I would make createCheckIn only handle doing the ajax request and making a single "reservation" in your system. Then I would make a new method called checkIn that uses the two previous methods to process all of the selected items:
checkIn() {
  let self = this;
  let promises = [];
  this.selectedList = [...];
  for (let i = 0; i < this.selectedList.length; i++) {
    // always create the deferred outside the call
    let def = $.Deferred();
    promises.push(def.promise());
    this.createCheckIn().done(function (res) {
      self.createWeighIn(self.selectedList[i], res.id)
        .done(function () {
          // resolve
          def.resolve.apply(def, Array.prototype.slice.call(arguments));
        })
        .fail(function () {
          def.reject.apply(def, Array.prototype.slice.call(arguments));
        });
    }).fail(function () {
      // if the check-in fails, always reject because we know createWeighIn won't be called
      def.reject.apply(def, Array.prototype.slice.call(arguments));
    });
  }
  // this will resolve/fail when all promises (from createWeighIn) resolve/fail
  return $.when.apply(null, promises);
}
So, putting it all together:
{
  createCheckIn() {
    let request = $.ajax({
      method: "POST",
      url: url,
      data: {
        check_in: {
          client_id: this.selectClient.id,
          program_id: this.program_id
        }
      }
    })
    .fail(function(err) {
      console.log(err)
    });
    return request;
  },
  createWeighIn(data, check_in_id) {
    let params = {};
    params.weigh_in = data;
    let request = $.ajax({
      method: "POST",
      url: url,
      data: params,
      success: function(res) {
        console.log(res)
      },
      error: function(err) {
        console.log(err)
      }
    });
    return request;
  },
  checkIn() {
    let self = this;
    let promises = [];
    this.selectedList = [...];
    for (let i = 0; i < this.selectedList.length; i++) {
      // always create the deferred outside the call
      let def = $.Deferred();
      promises.push(def.promise());
      this.createCheckIn().done(function (res) {
        self.createWeighIn(self.selectedList[i], res.id)
          .done(function () {
            // resolve
            def.resolve.apply(def, Array.prototype.slice.call(arguments));
          })
          .fail(function () {
            def.reject.apply(def, Array.prototype.slice.call(arguments));
          });
      }).fail(function () {
        // if the check-in fails, always reject because we know createWeighIn won't be called
        def.reject.apply(def, Array.prototype.slice.call(arguments));
      });
    }
    // this will resolve/fail when all promises (from createWeighIn) resolve/fail
    return $.when.apply(null, promises);
  }
}
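With those methods in place, a caller could (hypothetically) wait for the whole batch like this:

// Hypothetical usage: .done fires once every check-in/weigh-in pair has
// succeeded; .fail fires on the first rejection, since $.when fails fast.
this.checkIn()
  .done(function () {
    console.log("all check-ins and weigh-ins finished");
  })
  .fail(function (err) {
    console.log("at least one step failed", err);
  });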

I ended up introducing promises and some recursion, and removing the loop altogether. I begin the process by calling createCheckIn() with an index of 0:
this.createCheckIn(0)
createCheckIn(index) {
  let that = this
  this.selectedList = [...] // long list of objects
  let prom = new Promise(function(resolve, reject) {
    $.ajax({
      method: "POST",
      url: url,
      data: {
        check_in: {
          client_id: that.selectClient.id,
          program_id: that.program_id
        }
      },
      success: function(res) {
        resolve(that.createWeighIn(index, res.id))
      },
      error: function(err) {
        reject(console.log(err))
      }
    })
  })
},
createWeighIn(index, check_in_id) {
  let data = {}
  let that = this
  data.weigh_in = this.selectedList[index]
  let prom = new Promise(function(resolve, reject) {
    $.ajax({
      method: "POST",
      url: url,
      data: data,
      success: function(res) {
        console.log(res)
        if ( index == (that.selectedList.length - 1) ) {
          that.complete = true
          resolve(console.log("complete"))
        } else {
          index++
          resolve(that.createCheckIn(index))
        }
      },
      error: function(err) {
        console.log(err)
      }
    })
  })
}
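For comparison, the same serial behaviour can be written more compactly with async/await. This is only a sketch and assumes createCheckIn and createWeighIn are changed to simply return the promise of their $.ajax call and no longer call each other, which the methods above do not do:

// Hypothetical async/await version; assumes createCheckIn(index) and
// createWeighIn(index, check_in_id) each RETURN a promise for their request.
async processSelectedList() {
  for (let index = 0; index < this.selectedList.length; index++) {
    const res = await this.createCheckIn(index)   // wait for the check-in
    await this.createWeighIn(index, res.id)       // then its weigh-in
  }
  this.complete = true
}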


Return promise after multiple calls with the same instance of an Ajax method

I'm using Ajax with jQuery to fetch data from an API that only returns 100 records at a time. If my query matches more than 100 records, the API includes an "offset" parameter in the response. I have to use this offset parameter in a new API call to get the next 100 records. The API includes a new offset parameter if there are even more records to fetch, and so on until all records are fetched.
As you can see, I've solved this by having the function call itself until the "offset" parameter is no longer included, i.e. until there are no more records to fetch.
Because of this behavior of the API, I cannot use the Ajax call's own .done function, since it would be executed multiple times (once for each iteration of the Ajax call).
How can I adjust the function below to return a promise when all the Ajax calls are done?
function getContracts(offset) {
  var data = {};
  if (offset !== undefined) {
    data["offset"] = offset;
  }
  $.ajax({
    url: url,
    headers: {
      Authorization: apiKey
    },
    data: data,
    success: function(result){
      $.each(result.records, function() {
        contracts.push(this);
      });
      if (result.hasOwnProperty("offset")) {
        getContracts(result.offset);
      }
    }
  });
}
Real and full code as requested:
var objectContracts = [];
var landContracts = [];
var locations = [];
var customers = [];
var landOwners = [];
var frameworkAgreements = [];
function getObjectContracts(offset) {
return new Promise((resolve, reject) => {
var data = {};
data["view"] = 'Alla Objektsavtal';
if (offset !== undefined) {
data["offset"] = offset;
}
$.ajax({
url: url + "Objektsavtal",
headers: {
Authorization: apiKey
},
data: data,
success: function(result){
$.each(result.records, function() {
objectContracts.push(this);
});
if (result.hasOwnProperty("offset")) {
getObjectContracts(result.offset);
} else {
resolve();
}
}
});
});
}
function getLandContracts(offset) {
return new Promise((resolve, reject) => {
var data = {};
data["view"] = 'Alla Markavtal';
if (offset !== undefined) {
data["offset"] = offset;
}
$.ajax({
url: url + "Markavtal",
headers: {
Authorization: apiKey
},
data: data,
success: function(result){
$.each(result.records, function() {
landContracts.push(this);
});
if (result.hasOwnProperty("offset")) {
getLandContracts(result.offset);
} else {
resolve();
}
}
});
});
}
function getLocations(offset) {
return new Promise((resolve, reject) => {
var data = {};
data["view"] = 'Alla Uppställningsplatser';
if (offset !== undefined) {
data["offset"] = offset;
}
$.ajax({
url: url + "Uppställningsplatser",
headers: {
Authorization: apiKey
},
data: data,
success: function(result){
$.each(result.records, function() {
locations.push(this);
});
if (result.hasOwnProperty("offset")) {
getLocations(result.offset);
} else {
resolve();
}
}
});
});
}
function getCustomers(offset) {
return new Promise((resolve, reject) => {
var data = {};
data["view"] = 'Alla Kunder';
if (offset !== undefined) {
data["offset"] = offset;
}
$.ajax({
url: url + "Kunder",
headers: {
Authorization: apiKey
},
data: data,
success: function(result){
$.each(result.records, function() {
customers.push(this);
});
if (result.hasOwnProperty("offset")) {
getCustomers(result.offset);
} else {
resolve();
}
}
});
});
}
function getLandOwners(offset) {
return new Promise((resolve, reject) => {
var data = {};
data["view"] = 'Alla Markägare';
if (offset !== undefined) {
data["offset"] = offset;
}
$.ajax({
url: url + "Markägare",
headers: {
Authorization: apiKey
},
data: data,
success: function(result){
$.each(result.records, function() {
landOwners.push(this);
});
if (result.hasOwnProperty("offset")) {
getLandOwners(result.offset);
} else {
resolve();
}
}
});
});
}
function getFrameworkAgreements(offset) {
return new Promise((resolve, reject) => {
var data = {};
data["view"] = 'Alla Ramavtal';
if (offset !== undefined) {
data["offset"] = offset;
}
$.ajax({
url: url + "Ramavtal",
headers: {
Authorization: apiKey
},
data: data,
success: function(result){
$.each(result.records, function() {
frameworkAgreements.push(this);
});
if (result.hasOwnProperty("offset")) {
getFrameworkAgreements(result.offset);
} else {
resolve();
}
}
});
});
}
If I've understood your question correctly, you want to resolve a Promise when there is no offset in the response from your Ajax request.
I haven't tested this code but you can do something like this:
function getContracts(offset) {
  return new Promise((resolve, reject) => {
    var data = {};
    if (offset !== undefined) {
      data['offset'] = offset;
    }
    $.ajax({
      url: url,
      headers: {
        Authorization: apiKey,
      },
      data: data,
      success: function(result) {
        $.each(result.records, function() {
          contracts.push(this);
        });
        if (result.hasOwnProperty('offset')) {
          // chain the recursive call so the outer promise only settles
          // once the last page has been fetched
          getContracts(result.offset).then(resolve, reject);
        } else {
          // I guess this is what you want
          // If there is no offset property => resolve the promise
          resolve('Your result goes here');
        }
      },
    });
  });
}
See the else block.
You can pass your final result (whatever you want to produce once the task completes) to resolve. For example, you can create an array, append each batch of results to it, and pass that array to resolve at the end.
You can consume the promise using .then() or async/await:
async () => {
  const result = await getContracts(offset);
};
or
getContracts(offset).then(result => { console.log(result) });
If you see an Unhandled Promise Rejection warning or error, you can always use a try/catch block with async/await, or add a .catch after .then.
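For example, a rough sketch of the try/catch variant, assuming getContracts is the promise-returning version above:

// Hypothetical wrapper showing error handling with async/await.
(async () => {
  try {
    const result = await getContracts(offset);
    console.log(result);
  } catch (err) {
    console.error("Fetching contracts failed", err);
  }
})();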
EDIT:
First, you're not passing anything to resolve. Whatever you pass to resolve will show up as result in .then(result => ...).
Second, you have global variables and are storing all your data in them. That means you don't strictly need to pass anything to resolve, but it is not a good approach because any other code can modify those arrays. So I'll give you one example.
function getObjectContracts(offset) {
  return new Promise((resolve, reject) => {
    var data = {};
    const objectContracts = [];
    data['view'] = 'Alla Objektsavtal';
    if (offset !== undefined) {
      data['offset'] = offset;
    }
    $.ajax({
      url: url + 'Objektsavtal',
      headers: {
        Authorization: apiKey,
      },
      data: data,
      success: function(result) {
        $.each(result.records, function() {
          objectContracts.push(this);
        });
        if (result.hasOwnProperty('offset')) {
          // chain the recursive call so this promise only resolves once every
          // page has been fetched, and include the records from those pages too
          getObjectContracts(result.offset)
            .then(rest => resolve(objectContracts.concat(rest)), reject);
        } else {
          resolve(objectContracts);
        }
      },
    });
  });
}
Now the other question is how to wait for all of these promises at once:
const finalFunction = async () => {
  const [result1, result2, result3] = await Promise.all([
    getObjectContracts(offset1),
    getLandContracts(offset2),
    getLocations(offset3),
  ]);
  console.log(result1, result2, result3);
};
finalFunction();
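One caveat: Promise.all rejects as soon as any one of the input promises rejects, so a single failed request hides the results of the others. If you want every request to run to completion regardless, Promise.allSettled (available in modern browsers) is one option; a rough sketch:

// Hypothetical variant: collect an outcome for every request instead of
// failing fast on the first rejection.
const finalFunctionSettled = async () => {
  const outcomes = await Promise.allSettled([
    getObjectContracts(offset1),
    getLandContracts(offset2),
    getLocations(offset3),
  ]);
  outcomes.forEach((o, i) => {
    if (o.status === "fulfilled") {
      console.log("request", i, "succeeded with", o.value);
    } else {
      console.error("request", i, "failed with", o.reason);
    }
  });
};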

Execute sequential requests, handling each one

I need to send requests in sequence, but stop the sequence when one returns an error. I'm using the $http provider from Angular.
My code is this:
const steps = [
  { url: '/api/test/1' },
  { url: '/api/test/2' },
  { url: '/api/test/3' },
  { url: '/api/test/4' },
]
const executeStep = index => {
  // Validate step existence
  if (typeof steps[index] == 'undefined') return
  // Request
  $http({method: 'get', url: steps[index].url})
    .success(response => {
      if (!response.success) throw response
      executeStep(index+1)
    }).error(err => {
      console.error(err)
      alert('Error')
    })
}
executeStep(0)
My question is: is there a better way to do this job?
Here is the code, very similar to yours, but without handling or adding index numbers, so it is a bit clearer.
const queue = [
  { url: '/api/test/1' },
  { url: '/api/test/2' },
  { url: '/api/test/3' },
  { url: '/api/test/4' },
];
function runQueue() {
  const req = queue[0];
  $http({method: 'get', url: req.url})
    .success(response => {
      console.log(response);
      queue.shift();
      if (queue.length > 0) {
        runQueue();
      }
    })
    .error(err => {
      console.error(err);
    })
}
Solution using Bluebird Promises.
Another good option (especially if your code is server-side Node.js) is to use the Bluebird promise library.
That library is less popular nowadays because promises are supported natively almost everywhere, but some of its methods are still very useful. In your case the Promise.each method will work perfectly: it runs the iterator over the items one at a time, waiting for each returned promise before moving on.
const Promise = require('bluebird'); // or import * as Promise from 'bluebird';
const queue = [
  { url: '/api/test/1' },
  { url: '/api/test/2' },
  { url: '/api/test/3' },
  { url: '/api/test/4' },
];
Promise.each(queue, (req) => {
  return $http({method: 'get', url: req.url}).then((resp) => {
    console.log(resp);
  });
}).then(() => {
  console.log("done");
}).catch(err => {
  console.error(err);
})
What you need is promise chaining and recursion.
const steps = [
  { url: '/api/test/1' },
  { url: '/api/test/2' },
  { url: '/api/test/3' },
  { url: '/api/test/4' },
]
const executeStep = index => {
  // Validate step existence
  if (typeof steps[index] == 'undefined') return true;
  // Request
  return $http({method: 'get', url: steps[index].url})
    .then(response => {
      if (!response.success) throw response
      return executeStep(index+1)
    })
}
executeStep(0)
  .then(() => console.log('all done'))
  .catch(e => console.error('something failed', e))
Here, every time a request finishes it returns a new promise that wraps the next request. That way executeStep(0) resolves only when every subsequent promise has finished, or rejects immediately if any promise encounters an error.
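In effect, for the four steps above the recursion builds a chain equivalent to this hypothetical unrolled sketch (ignoring the response.success check):

// Hypothetical unrolled equivalent of executeStep(0) for four steps.
$http({method: 'get', url: steps[0].url})
  .then(() => $http({method: 'get', url: steps[1].url}))
  .then(() => $http({method: 'get', url: steps[2].url}))
  .then(() => $http({method: 'get', url: steps[3].url}))
  .then(() => console.log('all done'))
  .catch(e => console.error('something failed', e));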
You can use an async function in combination with promises for that.
const queue = [
  { url: '/api/test/1' },
  { url: '/api/test/2' },
  { url: '/api/test/3' },
  { url: '/api/test/4' },
];
function oneRequest(req) {
  return new Promise((resolve, reject) => {
    $http({method: 'get', url: req.url})
      .success(response => resolve(response))
      .error(err => reject(err));
  });
}
async function allRequests(reqs) {
  let resps = [];
  for (var i = 0; i < reqs.length; i++) {
    let resp = await oneRequest(reqs[i]);
    resps.push(resp);
  }
  return resps;
}
allRequests(queue)
  .then(resps => console.log(resps))
  .catch(err => console.log(err));

How to assign an Ajax response to a variable in another function in React?

I have an ajax call that responds with a list of movies, and another function with a second ajax call that returns the genre names, since the first call only has the genre ids. I then match each genre id to its name and assign the names onto the movie objects from the first call, kind of like assigning them to a variable. The problem is that since ajax is asynchronous, the genres always end up undefined. I don't want to make the call synchronous since that would result in a bad user experience.
First ajax call
$.ajax({
  url: urlString,
  success: (searchResults) => {
    const results = searchResults.results
    var movieRows = [];
    results.forEach(movie => {
      movie.genres = this.getMovieGenres(movie.media_type, movie.genre_ids);
      const movieRow = <MovieRow key={movie.id} movie={movie}/>;
      movieRows.push(movieRow)
    });
    this.setState({rows: movieRows})
  },
  error: (xhr, status, err) => {
    console.log("Failed to fetch data...")
  }
})
The function I call, which makes the second ajax call:
getMovieGenres (mediaType, movieGenreIds) {
  urlString = `https://api.themoviedb.org/3/genre/movie/list?api_key=${config.movieDBbApiKey}&language=en-US`
  var genres = []
  $.ajax({
    url: urlString,
    async: true,
    crossDomain: true,
    method: "GET",
    success: searchResults => {
      for (let i = 0; i < movieGenreIds.length; i++) {
        for (let j = 0; j < searchResults.genres.length; j++) {
          console.log(searchResults.genres[j].id)
          if (movieGenreIds[i] === searchResults.genres[j].id) {
            genres.push(searchResults.genres[j].name)
          }
        }
      }
    },
    error: (xhr, status, err) => {
      console.log("Failed to fetch data...")
    }
  })
  return genres
}
There are various solutions to your problem.
The simplest is to make the second ajax call in the success callback of the first one. Inside the second call's callback you'll have the results of both calls, so you can do your transforms and set state there (a rough, hypothetical sketch follows below).
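A rough, hypothetical sketch of that nesting, reusing the question's urlString and MovieRow, and assuming a genreUrl that points at the genre list endpoint used by getMovieGenres:

// Hypothetical sketch: the genre request runs inside the movie request's
// success callback, so both results are available before setState is called.
$.ajax({
  url: urlString,
  success: (searchResults) => {
    $.ajax({
      url: genreUrl, // assumption: the genre list endpoint from getMovieGenres
      success: (genreResults) => {
        const movieRows = searchResults.results.map(movie => {
          // translate each genre id on the movie into its name
          movie.genres = movie.genre_ids.map(id =>
            (genreResults.genres.find(g => g.id === id) || {}).name);
          return <MovieRow key={movie.id} movie={movie}/>;
        });
        this.setState({rows: movieRows});
      },
      error: (xhr, status, err) => console.log("Failed to fetch genres...")
    });
  },
  error: (xhr, status, err) => console.log("Failed to fetch movies...")
});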
Another option (the recommended one) is to work with Axios or Fetch, which rely on Promises.
Multiple promises can be combined with Promise.all:
var p1 = Promise.resolve(3);
var p2 = 1337;
var p3 = new Promise((resolve, reject) => {
  setTimeout(resolve, 100, "foo");
});
Promise.all([p1, p2, p3]).then(values => {
  console.log(values); // [3, 1337, "foo"]
});
You can try this:
var promise = new Promise((resolve, reject) => {
  return $.ajax({
    url: urlString,
    success: (searchResults) => {
      return resolve(searchResults.results)
    },
    error: (xhr, status, err) => {
      return reject("Failed to fetch data...")
    }
  })
})
A function that returns a promise:
getMovieGenres (mediaType, movieGenreIds) {
  urlString = `https://api.themoviedb.org/3/genre/movie/list?api_key=${config.movieDBbApiKey}&language=en-US`
  var genres = []
  return new Promise((resolve, reject) => {
    $.ajax({
      url: urlString,
      async: true,
      crossDomain: true,
      method: "GET",
      success: searchResults => {
        for (let i = 0; i < movieGenreIds.length; i++) {
          for (let j = 0; j < searchResults.genres.length; j++) {
            console.log(searchResults.genres[j].id)
            if (movieGenreIds[i] === searchResults.genres[j].id) {
              genres.push(searchResults.genres[j].name)
            }
          }
        }
        return resolve(genres)
      },
      error: (xhr, status, err) => {
        return reject("Failed to fetch data...")
      }
    })
  })
}
Finally:
promise.then(results => {
  var tasks = []
  results.forEach(movie => {
    tasks.push(getMovieGenres(movie.media_type, movie.genre_ids))
  });
  Promise.all(tasks).then(output => {
    // output[i] is the resolved genres array for results[i]
    var movieRows = results.map((movie, i) => {
      movie.genres = output[i];
      return <MovieRow key={movie.id} movie={movie}/>;
    });
    this.setState({rows: movieRows})
  })
})
Hope this helps!

How to use promises for parallel uploads and not be constrained by having to wait for a promise to resolve?

I'm trying to parallelize uploading files to Dropbox and am stuck at the maximum number of parallel requests a browser allows AND those requests needing to finish before the next requests commence. The second part slows things down considerably, so I'm wondering if someone has an idea where I'm making things too complicated... Here is the code:
...
function DropboxStorage(spec) {
...
if (spec.batch_upload) {
this._batch_upload = spec.batch_upload;
this._batch_buffer = parseInt(spec.batch_buffer || 3000, 10);
this._batch_dict = {"entries": [], "done": [], "defer": {}};
this._batch_resolver_list = [];
}
this._access_token = spec.access_token;
}
// I'm calling putAttachment for both single and batch upload. My idea
// was to use a trigger, which fires after _buffer_batch milliseconds
// finishing pending items unless resolved "redundant" - rejecting would
// also reject the RSVP.all call. Here is the putAttachment code:
DropboxStorage.prototype.putAttachment = function (id, name, blob) {
var context = this,
id = restrictDocumentId(id),
path;
restrictAttachmentId(name);
if (!context._batch_upload) {
...
}
return new RSVP.Queue()
.push(function () {
return jIO.util.ajax({
type: "POST",
url: BATCH_START_URL,
headers: {
"Authorization": "Bearer " + context._access_token,
"Content-Type": "application/octet-stream",
"Dropbox-API-Arg": JSON.stringify({"close": false}),
}
});
})
.push(function (evt) {
var session_id = JSON.parse(evt.target.response).session_id;
path = id + "/" + name;
context._batch_dict.entries.push({
"cursor": {"session_id": session_id, "offset": blob.size},
"commit": {
"path": path,
"mode": "overwrite",
"autorename": false,
"mute": false
}
});
return jIO.util.ajax({
type: "POST",
url: BATCH_UPLOAD_URL,
headers: {
"Authorization": "Bearer " + context._access_token,
"Content-Type": "application/octet-stream",
"Dropbox-API-Arg": JSON.stringify({
"cursor": {"session_id": session_id, "offset": 0},
"close": true
})
},
data: blob
});
})
.push(function () {
var len = context._batch_resolver_list.length,
call_result_defer = new RSVP.defer(),
trigger = new RSVP.defer(),
resolver = triggerCancelableBatchResolver(context, trigger.promise);
// resolve previous call without finishing the batch
if (0 < len && len < 1000 &&
context._batch_dict.entries.length !== 0) {
context._batch_resolver_list[len - 1].resolve(true);
}
context._batch_dict.defer[path] = call_result_defer;
context._batch_resolver_list.push(trigger);
// ISSUE: doing the below will parallelize to the max of concurrent
// requests a browser supports and wait for those to finish
// return RSVP.all([call_result_defer.promise, resolver]);
// this works, but without an answer for every request made
return;
})
.push(undefined, function (error) {
throw error;
});
};
The triggerCancelableBatchResolver looks like this:
function triggerCancelableBatchResolver(context, trigger) {
return new RSVP.Queue()
// wait for 3000ms OR the trigger being resolved
.push(function () {
return RSVP.any([RSVP.delay(context._batch_buffer), trigger]);
})
.push(function (is_redundant) {
// trigger => nothing happens
if (is_redundant) {
return;
}
// finish all pending items
return new RSVP.Queue()
.push(function () {
return jIO.util.ajax({
type: "POST",
url: BATCH_FINISH_URL,
headers: {
"Authorization": "Bearer " + context._access_token,
"Content-Type": "application/json",
},
data: JSON.stringify({
"entries": context._batch_dict.entries
})
});
})
.push(function (evt) {
context._batch_dict.entries = [];
context._batch_resolver_list = [];
return pollForCommitFinish(
context, JSON.parse(evt.target.response).async_job_id
);
})
.push(function (entries) {
context._batch_dict.done = context._batch_dict.done.concat(entries);
// no more requests coming in, finish
if (context._batch_dict.entries.length === 0) {
return finishBatch(context);
}
});
});
}
// recursively loop until commits on Dropbox have completed
function pollForCommitFinish(context, job_id) {
return new RSVP.Queue()
.push(function () {
return jIO.util.ajax({
type: "POST",
url: BATCH_STATUS_URL,
headers: {
"Authorization": "Bearer " + context._access_token,
"Content-Type": "application/json",
},
data: JSON.stringify({"async_job_id": job_id})
});
})
.push(function (evt) {
var status = JSON.parse(evt.target.response);
if (status['.tag'] === "in_progress") {
return new RSVP.Queue()
.push(function () {
return RSVP.delay(context._batch_buffer);
})
.push(function () {
return pollForCommitFinish(context, job_id);
});
}
return status.entries;
})
}
// finish the batch returning the correct response for every request
function finishBatch(context) {
return new RSVP.Queue()
.push(function () {
var defers = context._batch_dict.defer;
return RSVP.all(context._batch_dict.done.map(function (item) {
var path = item.path_display;
if (defers[path] === undefined) {
//throw;
}
// return a response to every batch request made
if (item[".tag"] === "failure") {
return context._batch_dict.defer[path].reject(item);
}
return context._batch_dict.defer[path].resolve(item);
}));
})
.push(function () {
context._batch_dict.done = [];
context._batch_dict.defer = {};
console.log("DONE")
});
}
The code works, but as mentioned above, if I want to return a response for every request made (or throw in case of issues), I need to return:
return RSVP.all([call_result_defer.promise, resolver]);
which will stall until call_result_defer.promise is resolved at the end of the batch, with the batch size being limited to the number of parallel requests allowed by the browser.
If I instead just
return;
I can load up to 1000 files into the batch before resolving, but at the cost of not being able to return a reply for individual calls to putAttachment.
I have tried returning a promise instead of a defer and calling its resolve/reject methods once done, but with the same result.
Question:
Is there a promise-based way to return something which "flags" the request as done and returns a result once it is available? I thought promises were just that, but somehow I cannot get it to work here.
Thanks for the help!

How to call an http request inside a for loop after the success of the previous http call?

I want to call the api only after the previous call has completed successfully, but in my code the next api call is made before the first one has completed.
for (var w = 0; w < Ids.length; w++) {
  $scope.msgObject = {
    "SenderID": $scope.pageId,
    "PageID": $scope.pageId,
    "Date": Date.now().toString(),
  };
  $http({
    method: 'POST',
    url: '///url',
    async: true,
    data: $scope.msgObject,
    headers: {
      'Content-Type': 'application/json'
    }
  })
  .then(function(response) {
    console.log("success posting");
  })
  .catch(function(response) {
  });
  $(".messageInput").val('');
}
// Chain the requests with Array.reduce so each call starts only after
// the previous promise has resolved.
function asyncForEach(arr, cb) {
  return arr.reduce((p, c) => {
    return p.then(() => cb(c));
  }, Promise.resolve());
}
function fetch(id) {
  return new Promise(resolve =>
    setTimeout(resolve, 100)) // replace with your AJAX call
    .then(() => console.log('success posting', id));
}
function done() {
  console.log('all done!');
}
const ids = [1, 2, 3, 4, 5];
asyncForEach(ids, fetch).then(done);
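For the ids above, the reduce call effectively builds this chain (a hypothetical expansion, shown only to make the sequencing explicit):

// Hypothetical expansion of asyncForEach([1, 2, 3, 4, 5], fetch)
Promise.resolve()
  .then(() => fetch(1))
  .then(() => fetch(2))
  .then(() => fetch(3))
  .then(() => fetch(4))
  .then(() => fetch(5))
  .then(done);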
Put the next call inside the .then callback, something like:
function urPostMethod(url) {
  $scope.msgObject = {
    "SenderID": $scope.pageId,
    "PageID": $scope.pageId,
    "Date": Date.now().toString(),
  };
  $http({
    method: 'POST',
    url: url,
    async: true,
    data: $scope.msgObject,
    headers: {
      'Content-Type': 'application/json'
    }
  })
  .then(function(response) {
    console.log("success posting");
    // fire the next request only after this one has succeeded
    if (Ids.length > 0) urPostMethod(Ids.pop());
  })
  .catch(function(response) {
  });
  $(".messageInput").val('');
}
What you are trying to do is mix async and sync operations together, which does not work.
If you need to call those APIs in the order of the elements in your array, you can use a different approach and pipe the requests using a Deferred:
var dfd = $.Deferred(),
  dfdNext = dfd, // head of the pipe chain
  x = 0,         // loop index
  Ids = [],
  msgObject = {
    "SenderID": $scope.pageId,
    "PageID": $scope.pageId,
    "Date": Date.now().toString(),
  },
  callAjax = function (value) {
    var dfdAjax = $.Deferred();
    $.ajax({
      method: 'POST',
      url: '///url',
      async: true,
      data: msgObject,
      headers: {
        'Content-Type': 'application/json'
      }
    })
    .then(function (response) {
      dfdAjax.resolve(response);
    })
    .catch(function (response) {
      dfdAjax.reject(response);
    });
    return dfdAjax.promise();
  },
  requestAjax = function (value) {
    return callAjax(value);
  };
dfd.resolve();
for (x = 0; x < Ids.length; x++) {
  (function (value) {
    // pipe each request onto the previous one so they run sequentially
    dfdNext = dfdNext.pipe(function () {
      return requestAjax(value)
        .done(function (response) {
          // Process the response here.
        });
    });
  }(Ids[x]));
}
You could use $q.all() and simplify your syntax; you'll just need to inject the $q service.
The code below adds all the promises returned by $http to a single array, waits for them with $q.all(), and then gathers their results.
var requests = [];
for (var w = 0; w < Ids.length; w++) {
  var req = $http({
    method: 'POST',
    url: '///url',
    headers: { 'Content-Type': 'application/json' },
    data: {
      SenderID: $scope.pageId,
      PageID: $scope.pageId,
      Date: Date.now().toString(),
    }
  })
  .catch(function(err) {
    // handle err
  });
  requests.push(req);
}
$q.all(requests)
  .then(function (results) {
    // handle results
  });
