Rally custom data store will not update - javascript

My Rally custom data store will not update. I'm having the problem described in [this][1] post.
My scenario: I add rows to a grid that is backed by a custom data store. Then I sort a grid column, and all the new rows I added are deleted. There is nothing fancy about my custom store, and I've tried autoSync: true, but that does nothing.
Are custom stores read-only, in the sense that any changes made to the original data are transient and will be discarded on a reload()?
This is the store that I add to the rallygrid:
me.customStore = Ext.create('Rally.data.custom.Store', {
    data: customData,
    listeners: {
        load: function(customStore) {
            //do some stuff
        }
    }
});

I looked at the source code for the memory proxy, and it makes sense why nothing was being added, removed, or updated correctly with the Rally.data.custom.Store: you have to override the create and destroy methods of the memory proxy.
CURRENT MEMORY PROXY FUNCTIONS
These are the functions the memory proxy uses to create and destroy records. As you can see, they don't actually create or destroy anything...
updateOperation: function(operation, callback, scope) {
    var i = 0,
        recs = operation.getRecords(),
        len = recs.length;

    for (i; i < len; i++) {
        recs[i].commit();
    }

    operation.setCompleted();
    operation.setSuccessful();
    Ext.callback(callback, scope || this, [operation]);
},

create: function() {
    this.updateOperation.apply(this, arguments);
},

destroy: function() {
    this.updateOperation.apply(this, arguments);
},
CORRECT MEMORY PROXY SETUP
Below is how to instantiate a custom store that will actually add and remove records:
me.customStore = Ext.create('Rally.data.custom.Store', {
    data: customData,   // your custom data
    model: modelType,   // your model type
    autoSync: true,
    proxy: {
        type: 'memory',
        create: function(operation) {
            var me = this;
            operation.getRecords().forEach(function(record) {
                console.log('adding record', record);
                me.data.push(record);
            });
            this.updateOperation.apply(this, arguments);
        },
        destroy: function(operation) {
            var me = this;
            operation.getRecords().forEach(function(record) {
                console.log(record);
                for (var i = 0; i < me.data.length; ++i) {
                    if (me.data[i] === record) {
                        me.data.splice(i, 1);
                        return;
                    }
                }
            });
            this.updateOperation.apply(this, arguments);
        }
    },
    listeners: {
        // listener stuff here
    }
});
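As a hypothetical usage sketch (the record fields here are made up): with autoSync enabled, add() triggers the overridden create and remove() triggers the overridden destroy, so the changes survive a subsequent sort or reload.
// Hypothetical usage of the store configured above.
var added = me.customStore.add({ Name: 'New row' }); // returns the added records
me.customStore.remove(added[0]);                     // actually splices it out of the proxy data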

Related

Using Backbone localStorage on a nested Collection

I'm trying to implement nested Collections exactly like the example I found here: https://stackoverflow.com/a/17453870/295133
The only difference is that I'm trying to store the data locally using the localStorage plugin.
Here, my Lists would be the Hotels in the example above:
var app = app || {};

(function () {
    'use strict';

    // List Collection - list of words
    //---------------------
    var listCollection = Backbone.Collection.extend({
        // reference to this collection's model
        model: app.ListModel,
        localStorage: new Backbone.LocalStorage('translate-lists')
    });

    app.listCollection = new listCollection();
})();
(function () {
    'use strict';

    app.ListModel = Backbone.Model.extend({
        initialize: function() {
            // because initialize is called after parse
            _.defaults(this, {
                words: new app.wordCollection
            });
        },
        parse: function(response) {
            if (_.has(response, "words")) {
                this.words = new app.wordCollection(response.words, {
                    parse: true
                });
                delete response.words;
            }
            return response;
        }
    });
})();
localStorage stores the ListModels, but anything I add to the words collection disappears after I refresh.
Any ideas how I should be saving the entire nested collection?
I got this working, and it came down to something in parse. Also, if you want to get just the attributes out of your nested collection, you should override toJSON; otherwise you get the full collection in what it returns.
Backbone.Model.prototype.toJSON = function() {
    var json = _.clone(this.attributes);
    for (var attr in json) {
        if ((json[attr] instanceof Backbone.Model) || (json[attr] instanceof Backbone.Collection)) {
            json[attr] = json[attr].toJSON();
        }
    }
    return json;
};
The main thing that was breaking is in parse: it assigns words directly to the model,
this.words = new app.wordCollection(response.words, {
    parse: true
});
but this means it will not show up when toJSON is called, as it is not in the attributes (it also means you can't access it via model.get), so it should be changed to:
initialize: function () {
    // because initialize is called after parse
    _.defaults(this.attributes, {
        words: new app.WordCollection()
    });
},
parse: function (response) {
    if (_.has(response, "words")) {
        this.attributes.words = new app.WordCollection(response.words, {
            parse: true
        });
        delete response.words;
    }
    return response;
}
This way it is added to the attributes of the model and not directly on the model. If you look at this fiddle http://jsfiddle.net/leighking2/t2qcc7my/ and keep hitting Run, it will create a new model in the collection, save it in local storage, then print the results to the console. Each time you hit Run you should see the collection grow by one (as it picks up the previous results from local storage) and contain the full information that you want.
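For illustration, a hypothetical usage sketch of the fixed model (the list title and word attributes here are made up; app.WordCollection is assumed to exist as above):
// Create a list, add a word to its nested collection, and save;
// the toJSON override serializes the nested words into the
// persisted attributes.
var list = app.listCollection.create({ title: 'Greetings' });
list.get('words').add({ source: 'hello', target: 'hola' });
list.save(); // the words array is now part of the JSON in localStorage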

amplify.js - amplify.store not storing data

I am using amplify.js with Knockout.js and I want to store data locally. I tried using the code from the amplify guide, but it isn't working for me.
My view model
define(['services/datacontext'], function (dataContext) {
    var store = amplify.store("offlineData"); // Why is agency undefined after retrieving from the store?!?!?!
    var agency = ko.observableArray([]);
    var initialized = false;

    var save = function (agency) {
        return dataContext.saveChanges(agency);
    };

    var vm = { // This is my view model, my functions are bound to it.
        // These are wired up to my agency view
        activate: activate,
        agency: agency,
        title: 'agency',
        refresh: refresh, // call refresh function which calls get Agencies
        save: save
    };

    return vm;

    function activate() {
        if (initialized) {
            return;
        }
        initialized = true;
        if (initialized == true) {
            amplify.store("offlineData", vm.agency);
        }
        return refresh();
    }

    function refresh() {
        return dataContext.getAgency(agency);
    }
});
After refresh retrieves the data, I save it to the local store, so when I make another request for this page I would expect var store to contain this data, but it is undefined.
Does anyone know how to use amplify?
amplify.store("offlineData", vm.agency);
vm.agency is a function, therefore you need to invoke it to get its value
amplify.store("offlineData", vm.agency());

Backbone JS: self populate Collection

I have a Backbone collection like this
var ContactsCollection = Backbone.Collection.extend({
model: Contact,
initialize: function () {
//retrieves contacts from web service
//contactsLoaded: is the callback that gets called after
//the contacts get received
loadContacts(this.contactsLoaded);
},
contactsLoaded: function (contacts) {
for (var i = 0; i < contacts.length; i++) {
//TODO populate the collection [models][1]
}
}
});
In other words, I want the collection to populate its own models. How can I do this?
Consider using the REST API, as Collection#fetch should do exactly what you want.
var ContactsCollection = Backbone.Collection.extend({
    model: Contact,
    url: '', // set the URL
    initialize: function () {
        this.fetch();
    }
});
For a Backbone collection you needn't add the models one by one; the framework will do the job. Simply:
var ContactsCollection = Backbone.Collection.extend({
    model: Contact,
    url: '/contacts', // whatever web service url you are using
    initialize: function () {
        this.fetch();
    }
});
If the server's response is not a JSON array, or it's wrapped in some node, you can override the parse method:
parse: function(response) {
    // if it's like {data: [model1, model2, model3...]}
    return response.data;
}
Assuming you're passing in a list of contact models, do this (you don't need a for loop):
this.add(contacts);
The docs mention you can add models one by one, or as an array.
I agree with #Loamhoof: you should use Backbone's fetch to accomplish this.
I suggest you move the call to fetch outside the collection's initialize method; it would be clearer and handier to put it with the rest of your application's control flow / router logic. fetch returns a jqXHR object that implements a promise interface, allowing you to do something like this:
var contactsCollection = new ContactsCollection({});
var p = contactsCollection.fetch();
p.done(function() { ... });
p.error(function() { ... });
p.always(function() { ... });

ExtJS4 memory leak

I have an application written in ExtJS 4.1.1 which uses one store a lot. I get data samples from the server and, after some validation, I add them to the store using its add method. I do this periodically, and I remove records I don't need from the store as well.
The problem is that my application eats more and more RAM over time. It seems that I have found the source of the problem, but I do not know how to handle it.
Here is my store definition:
this.store = Ext.create('Ext.data.Store', {
    fields: ['when', 'data1', 'data2', 'data3', 'data4', 'data5', 'data6', 'data7', 'data8', 'data9'],
    proxy: {
        type: 'memory',
        reader: {
            type: 'json',
            root: 'users'
        }
    },
    sorters: [{
        property: 'when',
        direction: 'ASC'
    }]
});
And this is how I delete records from it:
var record = self.store.getAt(j);
if ((record.get('when') <= newMinDate) && (record.get('data' + id) !== ' ')) {
    self.store.remove(record);
    record.destroy();
    record = null;
    j--;
    ln--;
}
But when I checked the console while debugging this issue, I could see that records are in fact deleted from the store, but not from memory.
EDIT/UPDATE:
I tried to fix the issue using the advice in your answers, but none of it fixed the problem. To be sure that I got the problem source right, I extracted my store code to examine it more closely and see if it is really causing the problem. You can see the whole code below:
Ext.define('TestApp.App', {
    extend: 'Ext.app.Application'
});

Ext.application({
    extend: 'MyApp.app.Application',
    store: null,

    launch: function() {
        var self = this;
        self.store = Ext.create('Ext.data.Store', {
            fields: ['when', 'data1', 'data2', 'data3', 'data4', 'data5', 'data6', 'data7', 'data8', 'data9'],
            proxy: {
                type: 'memory'
            },
            sorters: [{
                property: 'when',
                direction: 'ASC'
            }]
        });
        self.beginTask();
    },

    beginTask: function() {
        var self = this;
        Ext.TaskManager.start({
            run: function() {
                var jsonRaw = *very large json*; // about 650 samples
                var json = Ext.JSON.decode(jsonRaw, true);
                // self.store.add(json.data.samples);
                // var ln = self.store.getCount();
                // for (var j = 0; j < ln; j++) {
                //     var record = self.store.getAt(j);
                //     self.store.remove(self.store.getAt(j));
                //     j--;
                //     ln--;
                //     record.destroy();
                //     delete record;
                // }
                json = null;
                jsonRaw = null;
            },
            interval: 1000
        });
    }
});
Now the strange part: the memory leak is present even when the store part is commented out, as in the listing above. Did I make some mistake with the task management?
One of the many gotchas I've discovered using Ext JS is that Ext.data.Model#destroy doesn't actually clean up the record locally. The destroy method uses the store's proxy to send a destroy request for that record (if you needed to remove the corresponding record from a database, for instance). If that's your intended behavior, then no worries.
When you remove a record from a store, that store keeps a reference to that record in an array called removed. You can see it towards the bottom of the Ext.data.Store#remove method. I recommend using a JavaScript debugger and inspecting your store object after a few removes to see if your records are being cached. If they are, it's simple enough to call store.removed.length = 0; to clear it out.
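For debugging, a minimal sketch of that check (store.removed is an internal array, so treat the details as an assumption about this Ext JS version):
// Inspect the store's internal cache of removed records after a few
// deletions; if it keeps growing, clear it so the records can be GC'd.
console.log('cached removals:', self.store.removed.length);
self.store.removed.length = 0; // drop the lingering references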
What happens if you do a store.sync() after the remove? I don't know if that helps for the memory proxy, but it should remove references to removed records, I think. Just doing a remove(record) doesn't really remove the record; it just marks it for removal and stops exposing it as available in the store. At least that's true for other proxy types. The actual remove can only be performed after the store's modified records (add, remove, update) have been synced through store.sync(); that's when the store holds the records in their new state in its internal list.
Perhaps sra is right anyway, and you yourself hold a reference to the record in a closure somewhere else?
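A sketch of the store.sync() suggestion above, under the assumption that syncing flushes the store's internal removed list:
// Remove the record, then sync so the store flushes its
// modified/removed bookkeeping instead of caching the record.
self.store.remove(record);
self.store.sync();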

Backbone.js - saving a model before the previous save completes issues a POST (create) instead of a PUT (update) request

I've developed a nice rich application interface using Backbone.js where users can add objects very quickly, and then start updating properties of those objects by simply tabbing to the relevant fields. The problem I am having is that sometimes the user beats the server to its initial save and we end up saving two objects.
An example of how to recreate this problem is as follows:
The user clicks the Add person button; we add it to the DOM but don't save anything, as we don't have any data yet.
person = new Person();
User enters a value into the Name field, and tabs out of it (name field loses focus). This triggers a save so that we update the model on the server. As the model is new, Backbone.js will automatically issue a POST (create) request to the server.
person.set ({ name: 'John' });
person.save(); // create new model
The user then very quickly tabs into the age field, enters 20, and tabs to the next field (so age loses focus). This again triggers a save to update the model on the server.
person.set ({ age: 20 });
person.save(); // update the model
So in this scenario we would expect one POST request to create the model, and one PUT request to update it.
However, if the first request is still being processed and we have not had a response before the code in point 3 above has run, then what we actually get is two POST requests and thus two objects created instead of one.
So my question is whether there is some best-practice way of dealing with this problem in Backbone.js. Should Backbone.js have a queuing system for save actions, so that one request is not sent until the previous request on that object has succeeded or failed? Or should I build something to handle this gracefully, by sending only one create request instead of multiple update requests, by throttling of some sort, or by checking whether the Backbone model is in the middle of a request and waiting until that request completes?
Your advice on how to deal with this issue would be appreciated.
And I'm happy to take a stab at implementing some sort of queuing system, although you may need to put up with my code, which just won't be as well formed as the existing code base!
I have tested and devised a patch solution, inspired by both #Paul and #Julien who posted in this thread. Here is the code:
(function() {
    function proxyAjaxEvent(event, options, dit) {
        var eventCallback = options[event];
        options[event] = function() {
            // check if a callback for this event exists and if so pass on the request
            if (eventCallback) { eventCallback(arguments); }
            dit.processQueue(); // move on to the next save request in the queue
        };
    }

    Backbone.Model.prototype._save = Backbone.Model.prototype.save;

    Backbone.Model.prototype.save = function(attrs, options) {
        if (!options) { options = {}; }
        if (this.saving) {
            this.saveQueue = this.saveQueue || [];
            this.saveQueue.push({ attrs: _.extend({}, this.attributes, attrs), options: options });
        } else {
            this.saving = true;
            proxyAjaxEvent('success', options, this);
            proxyAjaxEvent('error', options, this);
            Backbone.Model.prototype._save.call(this, attrs, options);
        }
    };

    Backbone.Model.prototype.processQueue = function() {
        if (this.saveQueue && this.saveQueue.length) {
            var saveArgs = this.saveQueue.shift();
            proxyAjaxEvent('success', saveArgs.options, this);
            proxyAjaxEvent('error', saveArgs.options, this);
            Backbone.Model.prototype._save.call(this, saveArgs.attrs, saveArgs.options);
        } else {
            this.saving = false;
        }
    };
})();
The reason this works is as follows:
When an update or create request on a model is still executing, the next request is simply put into a queue, to be processed when one of the success or error callbacks is called.
The attributes at the time of each request are captured and passed on with the queued save request. This means that when the server responds to the first request with an updated model, the attributes from the queued request are not lost.
I have uploaded a Gist which can be forked here.
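Hypothetical usage of the patched save, using the person example from the question:
// With the patch applied, rapid saves are serialized: the second call
// is queued and replayed once the first completes, so it goes out as a
// PUT against the id returned by the create.
person.set({ name: 'John' });
person.save();             // POST (create)
person.set({ age: 20 });
person.save();             // queued; sent as PUT after the POST returns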
A light-weight solution would be to monkey-patch Backbone.Model.save, so you'll only try to create the model once; any further saves should be deferred until the model has an id. Something like this should work?
Backbone.Model.prototype._save = Backbone.Model.prototype.save;

Backbone.Model.prototype.save = function(attrs, options) {
    if (this.isNew() && this.request) {
        var dit = this, args = arguments;
        $.when(this.request).always(function() {
            Backbone.Model.prototype._save.apply(dit, args);
        });
    } else {
        this.request = Backbone.Model.prototype._save.apply(this, arguments);
    }
};
I have some code I call EventedModel:
EventedModel = Backbone.Model.extend({
    save: function(attrs, options) {
        var complete, self, success, value;
        self = this;
        options || (options = {});

        success = options.success;
        options.success = function(resp) {
            self.trigger("save:success", self);
            if (success) {
                return success(self, resp);
            }
        };

        complete = options.complete;
        options.complete = function(resp) {
            self.trigger("save:complete", self);
            if (complete) {
                return complete(self, resp);
            }
        };

        this.trigger("save", this);
        value = Backbone.Model.prototype.save.call(this, attrs, options);
        return value;
    }
});
You can use it as a regular Backbone model, but it will also trigger save and save:complete events. You can boost this a little:
EventedSynchroneModel = Backbone.Model.extend({
    save: function(attrs, options) {
        var complete, self, success, value;

        if (this.saving) {
            if (this.needsUpdate) {
                this.needsUpdate = {
                    attrs: _.extend(this.needsUpdate.attrs, attrs),
                    options: _.extend(this.needsUpdate.options, options)
                };
            } else {
                this.needsUpdate = { attrs: attrs, options: options };
            }
            return;
        }

        self = this;
        options || (options = {});

        success = options.success;
        options.success = function(resp) {
            self.trigger("save:success", self);
            if (success) {
                return success(self, resp);
            }
        };

        complete = options.complete;
        options.complete = function(resp) {
            self.trigger("save:complete", self);
            // call previous callback if any
            if (complete) {
                complete(self, resp);
            }
            // note: use self here, not this, so the flag is reset on the model
            self.saving = false;
            if (self.needsUpdate) {
                self.save(self.needsUpdate.attrs, self.needsUpdate.options);
                self.needsUpdate = null;
            }
        };

        this.trigger("save", this);
        // we are saving
        this.saving = true;
        value = Backbone.Model.prototype.save.call(this, attrs, options);
        return value;
    }
});
(untested code)
Upon the first save call, it will save the record normally. If you quickly do a new save, it will buffer that call (merging the different attributes and options into a single call). Once the first save succeeds, it goes forward with the second save.
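Hypothetical usage, mirroring the question's person example (assumes the model has a urlRoot configured):
// Two rapid saves on an EventedSynchroneModel: the first goes out
// immediately, the second is buffered and replayed on completion.
var person = new EventedSynchroneModel();
person.save({ name: 'John' }); // sent to the server now
person.save({ age: 20 });      // buffered until the first save completes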
As an alternative to the above answer, you could achieve the same effect by overloading the Backbone.sync method to be synchronous for this model. Doing so would force each call to wait for the previous one to finish.
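A minimal sketch of that idea (an assumption on my part, not the answerer's code): override sync on the model to pass jQuery's async: false, which blocks until the request completes. Note that synchronous XHR blocks the UI thread and is deprecated in modern browsers.
// Sketch: force blocking requests for this model only.
var Person = Backbone.Model.extend({
    sync: function(method, model, options) {
        options = _.extend(options || {}, { async: false }); // jQuery blocking request
        return Backbone.sync(method, model, options);
    }
});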
Another option would be to just do the sets while the user is filling things out and do one save at the end. That would also reduce the number of requests the app makes.
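In terms of the question's example, that would look like:
// Accumulate changes locally, then persist once at the end.
person.set({ name: 'John' });
person.set({ age: 20 });
person.save(); // a single POST creating the fully populated model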
