I am working on a page that uses JavaScript to manage a queue. My challenge is that my code has nested callbacks, and they are confusing me with regard to the scope of my queue. Currently, I have the following:
function MyApp() {}
module.exports = MyApp;

MyApp.myQueue = [];
MyApp.queueIsLocked = false;

MyApp.enqueue = function(item, onSuccess, onFailure) {
  if (!MyApp.queueIsLocked) {
    MyApp.queueIsLocked = true;
    MyApp.myQueue.push(item);
    MyApp.queueIsLocked = false;
    item.send(
      function() {
        console.log('item: ' + item.id);
        MyApp.queueIsLocked = true;
        MyApp.findItemById(item.id,
          function(index) {
            if (index !== -1) {
              MyApp.myQueue.splice(index, 1);
              MyApp.queueIsLocked = false;
              if (onSuccess) {
                onSuccess(item.id);
              }
            }
          }
        );
      },
      function() {
        alert('Unable to send item to the server.');
        if (onFailure) {
          onFailure();
        }
      }
    );
  }
};

MyApp.findItemById = function(id, onComplete) {
  var index = -1;
  if (MyApp.queueIsLocked) {
    setTimeout(function() {
      // Attempt to find the index again.
    }, 100);
  } else {
    MyApp.queueIsLocked = true;
    for (var i = 0; i < MyApp.myQueue.length; i++) {
      if (MyApp.myQueue[i].id === id) {
        index = i;
        break;
      }
    }
  }
  if (onComplete) {
    onComplete(index);
  }
};
The send function behaves differently based on the details of item. Sometimes the item will be sent to one server. Sometimes it will be sent to multiple servers. Either way, I do not know when the item will be done being "sent". For that reason, I'm using a callback to manage the queue. When the item is done being "sent", I want to remove it from the queue. I need to use either a timeout or an interval to check whether the queue is locked. If it's not locked, I want to remove the item from the queue. This check is adding another level of nesting that is confusing me.
My challenge is that I do not believe the scope of index is working as I expected. I feel like I'm getting a race condition. I'm basing this on the fact that I've written the following Jasmine test:
describe('Queue', function() {
  describe('Approach 1', function() {
    it('should do something', function() {
      MyApp.enqueue({id: 'QRA', text: 'Test A'});
    });
  });
  describe('Approach 2', function() {
    it('should successfully queue and dequeue items', function() {
      MyApp.enqueue({id: 'WX1', text: 'Test 1'});
      MyApp.enqueue({id: 'QV2', text: 'Test 2'});
      MyApp.enqueue({id: 'ZE3', text: 'Test 3'});
    });
  });
});
When I execute the test, I see the following in the console window:
item: QRA index: 1
item: WX1 index: 2
item: QV2 index: 3
item: ZE3 index: 4
It's like the items aren't getting dequeued like I would expect. Am I way off base in my approach to managing a queue? What am I doing wrong?
Thank you for any assistance.
Here are some questions you need to think through and answer for yourself about your intent and design:
It sounds like the queue represents items you are trying to send to the server. You are adding items to the queue that need to be sent, and removing them from the queue after they have been successfully sent.
Do you want your code to send multiple items simultaneously, in parallel? For example, item A is added to the queue, then sent. Before the asynchronous send for A finishes, item B is added to the list. Should the code try to send item B before the send of item A finishes? Based on your code, it sounds like yes.
It seems that you don't really want/need a queue, per se, so much as you want a list to track which items are in the process of being sent. "Queue" implies that objects are being processed in some kind of FIFO order.
If you just want to track items based on id, then you can use an object instead. For example:
MyApp.items = {};

MyApp.addItem = function(item, onSuccess) {
  MyApp.items[item.id] = item;
  item.send(
    function() { // success
      MyApp.removeItem(item.id);
      if (onSuccess) {
        onSuccess(item.id);
      }
    }
  );
};

MyApp.removeItem = function(id) {
  delete MyApp.items[id];
};
Also, I don't think you need a lock on the queue. JavaScript is single-threaded, so you'll never have a case where two parts of your code are trying to operate on the queue at the same time. When an Ajax call finishes asynchronously, your callback code won't actually be executed until any other code currently executing finishes.
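To see that ordering in action, here is a small self-contained sketch (using setTimeout to stand in for an asynchronous send; none of this is from the original code):
var queue = [];

function enqueueAndSend(item) {
  queue.push(item);
  // Simulate an async "send" completing; the callback runs only after the
  // currently executing code (including the console.log below) has finished.
  setTimeout(function() {
    queue.splice(queue.indexOf(item), 1);
    console.log('dequeued', item.id, 'queue length:', queue.length);
  }, 0);
  console.log('enqueued', item.id, 'queue length:', queue.length);
}

enqueueAndSend({id: 'A'});
enqueueAndSend({id: 'B'});
// Logs: enqueued A (1), enqueued B (2), dequeued A (1), dequeued B (0)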
The big flaw I'm seeing is that you call MyApp.queueIsLocked = true immediately before MyApp.findItemById. Because it's locked, the function sets up a timeout (that does nothing), and proceeds to call onComplete(-1). -1 is then explicitly ignored by onComplete, failing to dequeue, and locking your queue.
You probably meant to retry the find, like this:
setTimeout(function() {
  // Attempt to find the index again.
  MyApp.findItemById(id, onComplete);
}, 100);
I'm not sure, but I think Jasmine requires explicit instruction to get timeout functions to fire, using jasmine.clock().tick.
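For reference, a minimal Jasmine 2.x sketch of how fake timers are advanced; this demonstrates the clock API itself rather than the queue code:
describe('retrying after a locked queue', function() {
  beforeEach(function() {
    jasmine.clock().install();
  });

  afterEach(function() {
    jasmine.clock().uninstall();
  });

  it('fires the 100ms retry timeout', function() {
    var retried = false;
    setTimeout(function() { retried = true; }, 100);

    expect(retried).toBe(false); // with fake timers, nothing fires on its own
    jasmine.clock().tick(101);   // advance virtual time past the timeout
    expect(retried).toBe(true);
  });
});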
That said, I suggest removing all of the references to queueIsLocked, including the above timeout code. Also, if item.id is always a unique string, you can use an object instead of an array to store your values.
Here is a suggested iteration, staying as true to the original API as possible:
function MyApp() {}
module.exports = MyApp;

MyApp.myQueue = {};

// Sends the item, calling onSuccess or onFailure when finished.
// item will appear in MyApp.myQueue while waiting for a response from send.
MyApp.enqueue = function(item, onSuccess, onFailure) {
  MyApp.myQueue[item.id] = item;
  item.send(function() {
    console.log('item: ' + item.id);
    delete MyApp.myQueue[item.id];
    if (onSuccess) {
      onSuccess(item.id);
    }
  }, function() {
    alert('Unable to send item to the server.');
    if (onFailure) {
      onFailure();
    }
  });
};

// Returns the item in the queue, or undefined if not found.
MyApp.findItemById = function(id, onComplete) {
  if (onComplete) {
    onComplete(id);
  }
  return MyApp.myQueue[id];
};
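A quick usage sketch of that API, with a stubbed item whose send simply succeeds after a short delay (the stub is an assumption, not part of the original code):
var item = {
  id: 'QRA',
  text: 'Test A',
  // Stub: pretend the server accepts the item after 50ms.
  send: function(onSuccess, onFailure) {
    setTimeout(onSuccess, 50);
  }
};

MyApp.enqueue(item, function(id) {
  console.log('sent and dequeued:', id);              // 'QRA'
  console.log('still queued?', id in MyApp.myQueue);  // false
});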
Try to use an ECMAScript 6 Promise, or a promise implementation from any JS framework. A Promise is more suitable for this task; see more at https://developer.mozilla.org/
function MyApp() {}
module.exports = MyApp;

MyApp.myQueue = [];
MyApp.queueIsLocked = false;

MyApp.enqueue = function(item) {
  return new Promise(function(resolve, reject) {
    if (!MyApp.queueIsLocked) {
      MyApp.queueIsLocked = true;
      MyApp.myQueue.push(item);
      MyApp.queueIsLocked = false;
      var onResolve = function() {
        console.log('item: ' + item.id);
        MyApp.queueIsLocked = true;
        MyApp.findItemById(item.id).then(function(index) {
          if (index !== -1) {
            MyApp.myQueue.splice(index, 1);
            MyApp.queueIsLocked = false;
            resolve(item.id);
          }
        });
      };
      item.send(onResolve, reject);
    }
  });
};

MyApp.findItemById = function(id) {
  return new Promise(function(resolve, reject) {
    var index = -1;
    if (MyApp.queueIsLocked) {
      setTimeout(function() {
        // Attempt to find the index again.
      }, 100);
    } else {
      MyApp.queueIsLocked = true;
      for (var i = 0; i < MyApp.myQueue.length; i++) {
        if (MyApp.myQueue[i].id === id) {
          index = i;
          break;
        }
      }
      resolve(index);
    }
  });
};
Move MyApp.queueIsLocked = false; into the callback of the server send.
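A sketch of what that change might look like, simplified to use indexOf rather than findItemById (this is an interpretation of the suggestion, not the commenter's exact code):
MyApp.enqueue = function(item) {
  return new Promise(function(resolve, reject) {
    MyApp.queueIsLocked = true;
    MyApp.myQueue.push(item);
    item.send(function() {
      // Dequeue and unlock only once the server has responded.
      MyApp.myQueue.splice(MyApp.myQueue.indexOf(item), 1);
      MyApp.queueIsLocked = false;
      resolve(item.id);
    }, reject);
  });
};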
I'm trying to execute a class method after another one has finished. The first one calls an API and fills an array and a dictionary. Then the next method takes that data and creates a new dictionary. I cannot make it work sequentially. Any ideas?
I have tried adding a callback when calling the first method, but it isn't working.
this.fillListDictionary(function() {
  this.fillCatDictionary();
});

fillListDictionary(callback) {
  this._categoryList = [];
  this._propertyDictionary = [];
  // Fill list and dictionary calling the API ()
  callback();
}

fillCatDictionary() {
  this._catDictionary = [];
  this._categoryList.forEach((category) => {
    this._propertyDictionary.forEach((property) => {
      if (property.properties.includes(category)) {
        var f_index = this._catDictionary.findIndex(x => x.category == category);
        if (f_index >= 0) {
          this._catDictionary[f_index].dbIds.push(property.dbId);
        } else {
          var cat = new Object();
          cat.category = category;
          cat.dbIds = [property.dbId];
          this._catDictionary.push(cat);
        }
      }
    });
  });
}
I'd like to make it work sequentially: fillCatDictionary has to execute after fillListDictionary has done its job.
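A hedged sketch of one way to get that sequencing: the plain function() {} callback loses the class's this, and the API call itself is asynchronous, so callback() fires before the data has arrived. Assuming the API call can return a promise (fetchFromApi is a hypothetical stand-in), the two methods can be chained like this:
// Sketch only: fetchFromApi is a hypothetical stand-in for the real API call.
// The key points are (1) return the promise and (2) use arrow functions so
// `this` keeps pointing at the class instance.
class Dictionaries {
  fetchFromApi() {
    // Stub standing in for the real request.
    return Promise.resolve({ categories: ['doors'], properties: [] });
  }

  fillListDictionary() {
    return this.fetchFromApi().then((data) => {
      this._categoryList = data.categories;
      this._propertyDictionary = data.properties;
    });
  }

  fillCatDictionary() {
    this._catDictionary = [];
    // ...same grouping logic as in the question...
  }

  buildDictionaries() {
    return this.fillListDictionary().then(() => {
      this.fillCatDictionary(); // runs only after the API data has arrived
    });
  }
}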
I have a problem with a library that calls a function on each item. I have to check the state for each item via an Ajax request, and I don't want to make one request per item, but instead fetch a range of item states.
Because these items are dates, I can compute such a range pretty easily. That's the good part :)
So, to give some code ...
var itemStates = {};

var libraryObj = {
  itemCallback: function(item) {
    return checkState(item);
  }
};

function checkState(item) {
  if (!itemStates.hasOwnProperty(item)) {
    $.get('...', function(result) {
      $.extend(true, itemStates, result);
    });
  }
  return itemStates[item];
}
The library is now calling libraryObj.itemCallback() on each item, but I want to wait for the request made in checkState() before calling checkState() again (because the chance is extremely high that the next item's state was already requested within the previous request).
I read about defer, wait(), then() and so on, but couldn't really get an idea of how to implement this.
Many thanks to everybody who could help me with this :)
You can achieve this by using jQuery.Deferred or a JavaScript Promise. In the following code, itemCallback() waits for previous calls to finish before calling checkState().
var queue = [];
var itemStates = {};

var libraryObj = {
  itemCallback: function(item) {
    var def = $.Deferred();
    $.when.apply(null, queue)
      .then(function() {
        return checkState(item);
      })
      .then(function(result) {
        def.resolve(result);
      });
    queue.push(def.promise());
    return def.promise();
  }
};

function checkState(item) {
  var def = $.Deferred();
  if (!itemStates.hasOwnProperty(item)) {
    $.get('...', function(result) {
      $.extend(true, itemStates, result);
      def.resolve(itemStates[item]);
    });
  } else {
    def.resolve(itemStates[item]);
  }
  return def.promise();
}

// These will execute in order, waiting for the previous call.
libraryObj.itemCallback(1).done(function(r) { console.log(r); });
libraryObj.itemCallback(2).done(function(r) { console.log(r); });
libraryObj.itemCallback(3).done(function(r) { console.log(r); });
libraryObj.itemCallback(4).done(function(r) { console.log(r); });
libraryObj.itemCallback(5).done(function(r) { console.log(r); });
The same example, built with JavaScript Promises:
var queue = [];
var itemStates = {};

var libraryObj = {
  itemCallback: function(item) {
    var promise = new Promise(resolve => {
      Promise.all(queue)
        .then(() => checkState(item))
        .then((result) => resolve(result));
    });
    queue.push(promise);
    return promise;
  }
};

function checkState(item) {
  return new Promise(resolve => {
    if (item in itemStates) {
      resolve(itemStates[item]);
    } else {
      $.get('...', function(result) {
        $.extend(true, itemStates, result);
        resolve(itemStates[item]);
      });
    }
  });
}

// These will execute in order, waiting for the previous call.
libraryObj.itemCallback(1).then(function(r) { console.log(r); });
libraryObj.itemCallback(2).then(function(r) { console.log(r); });
libraryObj.itemCallback(3).then(function(r) { console.log(r); });
libraryObj.itemCallback(4).then(function(r) { console.log(r); });
libraryObj.itemCallback(5).then(function(r) { console.log(r); });
The library is now calling libraryObj.itemCallback() on each item, but I want to wait for the request made in checkState() before calling checkState() again (because the chance is extremely high that the next item's state was already requested within the previous request).
One thing I can think of doing is making a caching function: depending on how long ago the function was last called with the same parameters, it either returns the previous value or makes a new request.
var cached = function(self, cachingTime, fn) {
  var paramMap = {};
  return function() {
    var arr = Array.prototype.slice.call(arguments);
    var parameters = JSON.stringify(arr);
    var returning;
    if (!paramMap[parameters]) {
      returning = fn.apply(self, arr);
      paramMap[parameters] = {timeCalled: new Date(), value: returning};
    } else {
      var diffMs = Math.abs(paramMap[parameters].timeCalled - new Date());
      var diffMins = (diffMs / 1000) / 60;
      if (diffMins > cachingTime) {
        returning = fn.apply(self, arr);
        paramMap[parameters] = {timeCalled: new Date(), value: returning};
      } else {
        returning = paramMap[parameters].value;
      }
    }
    return returning;
  };
};
Then you'd wrap the ajax call into the function you've made
var fn = cached(null, 1, function(item) {
  return $.get('...', function(result) {
    $.extend(true, itemStates, result);
  });
});
Calling the new function either returns the promise from a request already made with those parameters within the last minute, or makes a new request.
The simplest (and dirtiest) way of taking control over the library is to override its methods.
But I don't really know the core problem here, so other hints are below.
If you have control over checkState, then just collect your data and change your controller on the server side to work with arrays. That's it.
And if you don't know when the next checkState will be called, count your collection and make the request: use setTimeout to check the collection after some time, or setInterval to check it continuously.
If you don't want to fetch the same item multiple times, then store your checked items in some variable like alreadyChecked, and before making a request, look the item up in alreadyChecked.
To be notified when the library is using one of your items, use a getter, and collect your items there.
When you have collected enough items, you can make the request.
When you do not have enough items yet, use setTimeout and wait for some time. If nothing changes, it means the library has finished its iteration for now, and you can make the request with the items that are left.
let collection = [];   // collection for request
let _items = {};       // real items for you when you don't want to perform actions while getting values
let itemStates = {};   // items for library
let timeoutId;

// Instead of itemStates[someState] = someValue; use:
function setItem(someState, someValue) {
  Object.defineProperty(itemStates, someState, {
    get: function() {
      if (typeof timeoutId == "number") clearTimeout(timeoutId);
      // Here you can add someState to the collection for the request.
      collection.push(_items[someState]);
      if (collection.length >= 10) {
        makeRequest();
      } else {
        timeoutId = setTimeout(() => {
          // ...check the collection and make the request...
        }, someTime);
      }
      return someValue;
    }
  });
}
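A minimal usage sketch; makeRequest and someTime stand in for the placeholders in the snippet above, and the date value is made up:
var someTime = 200;
function makeRequest() {
  console.log('requesting states for:', collection);
  collection = [];
}

_items['2024-01-15'] = '2024-01-15';
setItem('2024-01-15', 'unknown');

// When the library reads itemStates['2024-01-15'], the getter fires, the date
// is pushed into `collection`, and makeRequest() runs once enough dates have
// accumulated (or the timeout placeholder decides to flush the batch).
console.log(itemStates['2024-01-15']); // 'unknown'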
I'm new to promises and I'm sure there's an answer/pattern out there but I just couldn't find one that was obvious enough to me to be the right one. I'm using node.js v4.2.4 and https://www.promisejs.org/
This should be pretty easy, I think... I need to run multiple blocks of async work in a specific order, and one of the middle blocks will loop through an array of HTTP GETs.
// New Promise = asyncBlock1 - FTP list, resolve the returned list array
// .then(asyncBlock2(list)) - loop through the list array and HTTP GET the needed files
// .then(asyncBlock3(list)) - update the local log
I tried creating a new Promise, resolving it, passing the list to the .then, doing the GET loop, then the file update. I tried using a nested Promise.all inside asyncBlock2, but it's actually going in reverse order (3, 2, 1) due to the timing of those events. Thanks for any help.
EDIT: Ok, this is the pattern that I'm using which works, I just need a GET loop in the middle one now.
var p = new Promise((resolve, reject) => {
  setTimeout(() => {
    console.log('2 sec');
    resolve(1);
  }, 2000);
}).then(() => {
  return new Promise((resolve) => {
    setTimeout(() => {
      console.log('1.5 sec');
      // Instead of this section, here I'd like to do something like:
      // for (var i = 0; i < dynamicarray.length; i++) {
      //   globalvar[i] = ftpclient.getfile(dynamicarray[i]);
      // }
      // After this loop is done, resolve.
      resolve(1);
    }, 1500);
  });
}).then(() => {
  return new Promise((resolve) => {
    setTimeout(() => {
      console.log('1 sec');
      resolve(1);
    }, 1000);
  });
});
EDIT: Here is the almost-working code!
var pORecAlert = (function() {
  var pa;
  var newans = [];
  var anstodownload = [];
  var anfound = false; // anfound in log file
  var nexttab;
  var lastchar;
  var po;
  var fnar = [];
  var antext = '';

  //-->> This section works fine; it's just creating a JSON object from a local file
  try {
    console.log('trying');
    porfile = fs.readFileSync('an_record_files.json', 'utf8');
    if (porfile == null || porfile == '') {
      console.log('No data in log file - uploaded_files_data.json being initialized!');
      plogObj = [];
    } else {
      plogObj = JSON.parse(porfile);
    }
  } catch (jpfp) {
    console.log('Error parsing log file for PO Receiving Alert: ' + jpfp);
    return endPORecAlertProgram();
  }

  if ((typeof plogObj) === 'object') {
    console.log('an_record_files.json log file found and parsed for PO Receiving Alert!');
  } else {
    return mkError(ferror, 'pORecAlert');
  }
  // finish creating JSON object

  pa = new Client();
  pa.connect(ftpoptions);
  console.log('FTP Connection for FTP Check Acknowledgement begun...');

  pa.on('greeting', function(msg) {
    console.log('FTP Received Greeting from Server for ftpCheckAcknowledgement: ' + msg);
  });

  pa.on('ready', function() {
    console.log('on ready');
    // START PROMISE LIST
    var listpromise = new Promise((reslp, rejlp) => {
      pa.list('/public_html/test/out', false, (cerr, clist) => {
        if (cerr) {
          return mkError(ferror, 'pORecAlert');
        } else {
          console.log('Resolving clist');
          reslp(clist);
        }
      });
    });

    listpromise.then((reclist) => {
      ftpplist:
      for (var pcl = 0; pcl < reclist.length; pcl++) {
        console.log('reclist iteration: ' + pcl);
        console.log('checking name: ', reclist[pcl].name);
        if (reclist[pcl].name.substring(0, 2) !== 'AN') {
          console.log('Not AN - skipping');
          continue ftpplist;
        } else { // found an AN
          for (var plc = 0; plc < plogObj.length; plc++) {
            if (reclist[pcl].name === plogObj[plc].anname) {
              //console.log('Found reclist[pcl].name in local log');
              anfound = true;
            }
          }
          if (anfound === false) {
            console.log('Found AN file to download: ', reclist[pcl].name);
            anstodownload.push(reclist[pcl].name);
          }
        }
      }
      console.log('anstodownload array:');
      console.dir(anstodownload);
      return anstodownload;
    }).then((fnar) => {
      // for simplicity/transparency, here is the array being overwritten
      fnar = new Array('AN_17650_37411.699.txt', 'AN_17650_37411.700', 'AN_17650_37411.701', 'AN_17650_37411.702.txt', 'AN_17650_37411.801', 'AN_17650_37411.802.txt');
      return Promise.all(fnar.map((gfname) => {
        var nsalertnames = [];
        console.log('Getting: ', gfname);
        debugger;
        pa.get(('/public_html/test/out/' + gfname), function(err, anstream) { // THE PROBLEM IS THAT THIS GET GETS TRIGGERED AN EXTRA TIME FOR EVERY OTHER FILE!!!
          antext = '';
          console.log('Get begun for: ', gfname);
          debugger;
          if (err) {
            ferror.nsrest_trace = 'Error - could not download new AN file!';
            ferror.details = err;
            console.log('Error - could not download new AN file!');
            console.log('************************* Exiting *************************');
            logError(ferror, gfname);
          } else {
            // anstream.on('data', (anchunk) => {
            //   console.log('Receiving data for: ', gfname);
            //   antext += anchunk;
            // });
            // anstream.on('end', () => {
            //   console.log('GET end for: ', gfname);
            //   //console.log('path to update - gfname ', gfname, '|| end text.');
            //   fs.appendFileSync(path.resolve('test/from', gfname), antext);
            //   console.log('Appended file');
            //   return antext;
            // }); // end end
          }
        }); // get end
      })); // end Promise.all and map
    }).then((res99) => {
      // pa.end();
      // return Promise(() => {
      console.log('end all. res99: ', res99);
      //   //res4(1);
      //   return 1;
      // });
    });
  });
})();
-->> What happens here:
So I added the almost-working code. What is happening is that for every other file, an additional GET request gets made (I don't know how it's being triggered), which fails with an "Unable to make data connection".
So for my iteration over this array of 6, there end up being 9 GET requests. Element 1 gets requested (works, and expected), then 2 (works, and expected), then 2 again (fails, and unexpected; I don't know why it was triggered). Then 3 (works, and expected), then 4 (works, and expected), then 4 again (fails, and unexpected), and so on.
What you need is Promise.all(). Sample code for your app:
...
}).then(() => {
  return Promise.all(arry.map(item => ftpclient.getFile(item)));
}).then((resultArray) => {
  ...
So thanks for the help (and the negative votes with no useful direction!)
I actually reached out to a good Node.js programmer and he said that there seemed to be a bug in the ftp module I was using; even when trying to use a blackbird .map, the quick succession of requests somehow kicked off an error. I ended up using promise-ftp, blackbird, and promiseTaksQueue - the kicker was that I needed an interval. Without it, the quick succession of requests would end up causing a strange, illogical error in the ftp module.
You need the async library. Use async.eachSeries in situations where you need asynchronous operations inside a loop and then want to execute a function once all of them are complete. There are many variations depending on the flow you want, but this library does it all.
https://github.com/caolan/async
async.each(theArrayToLoop, function(item, callback) {
  // Perform async operation on item here.
  doSomethingAsync(item).then(function() {
    callback();
  });
}, function(err) {
  // All your async calls are finished; continue along here.
});
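Since the recommendation above is eachSeries (which processes the items one at a time, unlike each), here is a hedged sketch of the sequential variant; theArrayToLoop and doSomethingAsync are the same placeholders as in the snippet above:
var async = require('async');

async.eachSeries(theArrayToLoop, function(item, callback) {
  // Each item is processed only after the previous item's callback has fired.
  doSomethingAsync(item).then(function() {
    callback();
  }, function(err) {
    callback(err); // propagate failures so the final callback sees them
  });
}, function(err) {
  if (err) {
    console.error('A call failed:', err);
  } else {
    console.log('All items processed in order.');
  }
});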
I know this has been asked quite a few times already, but after a day of searching I still can't get it to work, although it's just like the solutions shown everywhere...
I have an async request to a database which returns an array of data. For each object in this array I need to start another async request to the database, and as soon as ALL of these requests resolve, I want to return them. I read you could do it with $q.all(...).
So here's the code:
Factory.firstAsyncRequest(id).then(function (arrayWithObjects) {
  var promises = [];
  var dataArr = [];
  angular.forEach(arrayWithObjects, function (object, key) {
    var deferred = $q.defer();
    promises.push(deferred);
    Factory.otherAsyncRequest(key).then(function (objectData) {
      dataArr.push({
        name: objectData.name,
        key: key,
        status: objectData.status
      });
      deferred.resolve();
      console.info('Object ' + key + ' resolved');
    });
  });
  $q.all(promises).then(function () {
    $rootScope.data = dataArr;
    console.info('All resolved');
  });
});
From the console I see that the $q.all is resolved BEFORE each object. Did I get something wrong? This seems to work for everyone else...
Your help is highly appreciated; I've been looking the whole night, it's 5:30am now lol..
Cheers
EDIT:
So for anyone who's coming here later: it was just the promises.push(deferred.PROMISE) bit. Though, I read that angular.forEach is actually not a recommended way to loop through an array, because it was originally not meant to be used by the end user. I don't know if that's correct, but I figured out another way if you don't want to use angular.forEach:
Users.getAll(uid).then(function (users) {
  var uids = ObjHandler.getKeys(users); // own function just iterating through Object.keys and pushing them to the array
  var cntr = 0;
  function next() {
    if (cntr < uids.length) {
      Users.getProfile(uids[cntr]).then(function (profile) {
        var Profile = {
          name: profile.name,
          key: uids[cntr],
          status: profile.status
        };
        dataArr[uids[cntr]] = Profile;
        if (cntr === uids.length - 1) {
          defer.resolve();
          console.info('Service: query finished');
        } else {
          cntr++;
          next();
        }
      });
    }
  }
  next();
});
And the getKeys function:
.factory('ObjHandler', [
  function () {
    return {
      getKeys: function(obj) {
        var r = [];
        for (var k in obj) {
          if (!obj.hasOwnProperty(k))
            continue;
          r.push(k);
        }
        return r;
      }
    };
  }])
Instead of
promises.push(deferred);
Try this:
promises.push(deferred.promise);
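As a side note (a sketch, not part of the original answer): with $q you can often skip the deferred entirely and push the promise returned by .then, which resolves to the built object:
var promises = [];

angular.forEach(arrayWithObjects, function (object, key) {
  promises.push(
    Factory.otherAsyncRequest(key).then(function (objectData) {
      return {
        name: objectData.name,
        key: key,
        status: objectData.status
      };
    })
  );
});

$q.all(promises).then(function (results) {
  $rootScope.data = results; // same data, no manual deferreds needed
});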
I have a list of names separated by commas. What I want is to call a server request for each name in sequence and store the results in an array. It works when I hard-code the number of names in the string.
See Here - This is working when I know the number of names.
Now I want to make this code generic. If I add a name to the string, it should be handled automatically without adding any code for the Ajax request.
See Here - This is what I've tried. It's not working as expected.
shoppingList = shoppingList.split(",");
var result = [];

function fetchData(shoppingItem) {
  var s1 = $.ajax('/items/' + shoppingItem);
  s1.then(function(res) {
    result.push(new Item(res.label, res.price));
    console.log("works fine");
  });
  if (shoppingList.length == 0) {
    completeCallback(result);
  } else {
    fetchData(shoppingList.splice(0, 1)[0]);
  }
}

fetchData(shoppingList.splice(0, 1)[0]);
Problem
I am not getting how to detect that all the promise objects have been resolved so that I can call the callback function.
To make the ajax requests in sequence, you have to put the recursive call in the callback:
function fetchList(shoppingList, completeCallback) {
  var result = [];
  function fetchData() {
    if (shoppingList.length == 0) {
      completeCallback(result);
    } else {
      $.ajax('/items/' + shoppingList.shift()).then(function(res) {
        result.push(new Item(res.label, res.price));
        console.log("works fine");
        fetchData();
        // ^^^^^^^^^^^
      });
    }
  }
  fetchData();
}
or you actually use promises and do
function fetchList(shoppingList) {
  return shoppingList.reduce(function(resultPromise, shoppingItem) {
    return resultPromise.then(function(result) {
      return $.ajax('/items/' + shoppingItem).then(function(res) {
        result.push(new Item(res.label, res.price));
        return result;
      });
    });
  }, $.when([]));
}
(updated jsfiddle)
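The promise-based version can then be consumed like this (the item names are made up):
fetchList(['milk', 'eggs', 'bread']).then(function(items) {
  // items is an array of Item objects, built one request at a time.
  console.log('fetched', items.length, 'items');
});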
Notice there is nothing in the requirements of the task about the Ajax requests being made sequentially. You could also let them run in parallel and wait for all of them to finish:
function fetchList(shoppingList) {
  return $.when.apply($, shoppingList.map(function(shoppingItem) {
    return $.ajax('/items/' + shoppingItem).then(function(res) {
      return new Item(res.label, res.price);
    });
  })).then(function() {
    return Array.prototype.slice.call(arguments);
  });
}
(updated jsfiddle)
// global:
var pendingRequests = 0;

// after each ajax request:
pendingRequests++;

// inside the callback:
if (--pendingRequests == 0) {
  // all requests have completed
}
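Putting that counter together with the shopping-list loop from the question (a sketch; completeCallback and Item come from the original code, and the requests here run in parallel):
function fetchList(shoppingList, completeCallback) {
  var result = [];
  var pendingRequests = 0;

  shoppingList.forEach(function(shoppingItem) {
    pendingRequests++; // one outstanding request per item
    $.ajax('/items/' + shoppingItem).then(function(res) {
      result.push(new Item(res.label, res.price));
      if (--pendingRequests == 0) {
        completeCallback(result); // all requests have completed
      }
    });
  });
}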
I have modified your code minimally to make it work - Click here.
Please note your last assertion will fail, as the item promises are not resolved in a linear manner; thus the sequence of the items will change.