I have many JavaScript objects in my application, something like:
function Person(age) {
    this.age = age;
    this.isOld = function () {
        return this.age > 60;
    };
}
// before serialize, ok
var p1 = new Person(77);
alert("Is old: " + p1.isOld());
// after, got error Object #<Object> has no method 'isOld'
var serialize = JSON.stringify(p1);
var _p1 = JSON.parse(serialize);
alert("Is old: " + _p1.isOld());
My question is: is there a best practice/pattern/tip for restoring my object to the same type it had before serialization (an instance of Person, in this case)?
Requirements that I have:
Optimize disk usage: I have a big tree of objects in memory, so I don't want to store functions.
The solution can use jQuery and/or another library to serialize/deserialize.
JSON has no function data type. You can only serialize strings, numbers, objects, arrays, and booleans (and null).
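You can see the function being dropped with a quick check in the console (using the Person constructor from the question):
var p1 = new Person(77);
JSON.stringify(p1); // '{"age":77}' (the isOld function is silently dropped)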
You could create your own toJson method, only passing the data that really has to be serialized:
Person.prototype.toJson = function() {
    return JSON.stringify({age: this.age});
};
Similar for deserializing:
Person.fromJson = function(json) {
    var data = JSON.parse(json); // Parsing the json string.
    return new Person(data.age);
};
The usage would be:
var serialize = p1.toJson();
var _p1 = Person.fromJson(serialize);
alert("Is old: " + _p1.isOld());
To reduce the amount of work, you could consider storing all the data that really needs to be serialized in a special data property on each Person instance. For example:
function Person(age) {
    this.data = {
        age: age
    };
    this.isOld = function () {
        return this.data.age > 60;
    };
}
Serializing is then merely a call to JSON.stringify(this.data), and restoring an instance is just assigning instance.data = JSON.parse(json).
This keeps the toJson and fromJson methods simple, but you'd have to adjust your other functions.
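A minimal sketch of what those two methods could then look like, assuming the data-based Person above:
Person.prototype.toJson = function () {
    // only the plain data goes to disk
    return JSON.stringify(this.data);
};

Person.fromJson = function (json) {
    var p = new Person();      // age is overwritten on the next line
    p.data = JSON.parse(json);
    return p;
};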
Side note:
You should add the isOld method to the constructor's prototype:
Person.prototype.isOld = function() {}
Otherwise, every instance gets its own copy of that function, which also increases memory usage.
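Applied to the original constructor, that refactoring could look like this:
function Person(age) {
    this.age = age;
}

// defined once and shared by all instances instead of copied per instance
Person.prototype.isOld = function () {
    return this.age > 60;
};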
I wrote serialijse because I faced the same problem as you.
You can find it at https://github.com/erossignon/serialijse.
It can be used in nodejs or in a browser and can serve to serialize and deserialize a complex set of objects from one context (nodejs) to the other (browser) or vice-versa.
var s = require("serialijse");
var assert = require("assert");
// testing serialization of a simple javascript object with date
function testing_javascript_serialization_object_with_date() {

    var o = {
        date: new Date(),
        name: "foo"
    };
    console.log(o.name, o.date.toISOString());

    // JSON will fail as JSON doesn't preserve dates
    try {
        var jstr = JSON.stringify(o);
        var jo = JSON.parse(jstr);
        console.log(jo.name, jo.date.toISOString());
    } catch (err) {
        console.log(" JSON has failed to preserve Date during stringify/parse ");
        console.log(" and has generated the following error message", err.message);
    }
    console.log("");

    var str = s.serialize(o);
    var so = s.deserialize(str);
    console.log(" However Serialijse knows how to preserve date during serialization/deserialization :");
    console.log(so.name, so.date.toISOString());
    console.log("");
}
testing_javascript_serialization_object_with_date();
// serializing an instance of a class
function testing_javascript_serialization_instance_of_a_class() {

    function Person() {
        this.firstName = "Joe";
        this.lastName = "Doe";
        this.age = 42;
    }

    Person.prototype.fullName = function () {
        return this.firstName + " " + this.lastName;
    };

    // testing serialization using JSON.stringify/JSON.parse
    var o = new Person();
    console.log(o.fullName(), " age=", o.age);

    try {
        var jstr = JSON.stringify(o);
        var jo = JSON.parse(jstr);
        console.log(jo.fullName(), " age=", jo.age);
    } catch (err) {
        console.log(" JSON has failed to preserve the object class ");
        console.log(" and has generated the following error message", err.message);
    }
    console.log("");

    // now testing serialization using serialijse serialize/deserialize
    s.declarePersistable(Person);
    var str = s.serialize(o);
    var so = s.deserialize(str);
    console.log(" However Serialijse knows how to preserve object classes during serialization/deserialization:");
    console.log(so.fullName(), " age=", so.age);
}
testing_javascript_serialization_instance_of_a_class();
// serializing an object with cyclic dependencies
function testing_javascript_serialization_objects_with_cyclic_dependencies() {

    var Mary = { name: "Mary", friends: [] };
    var Bob = { name: "Bob", friends: [] };

    Mary.friends.push(Bob);
    Bob.friends.push(Mary);

    var group = [ Mary, Bob ];
    console.log(group);

    // testing serialization using JSON.stringify/JSON.parse
    try {
        var jstr = JSON.stringify(group);
        var jo = JSON.parse(jstr);
        console.log(jo);
    } catch (err) {
        console.log(" JSON has failed to manage object with cyclic deps");
        console.log(" and has generated the following error message", err.message);
    }

    // now testing serialization using serialijse serialize/deserialize
    var str = s.serialize(group);
    var so = s.deserialize(str);
    console.log(" However Serialijse knows to manage object with cyclic deps !");
    console.log(so);

    assert(so[0].friends[0] == so[1]); // Mary's friend is Bob
}
testing_javascript_serialization_objects_with_cyclic_dependencies();
I am the author of https://github.com/joonhocho/seri.
Seri is JSON + custom (nested) class support.
You simply need to provide toJSON and fromJSON to serialize and deserialize any class instances.
Here's an example with nested class objects:
import seri from 'seri';
class Item {
  static fromJSON = (name) => new Item(name)

  constructor(name) {
    this.name = name;
  }

  toJSON() {
    return this.name;
  }
}

class Bag {
  static fromJSON = (itemsJson) => new Bag(seri.parse(itemsJson))

  constructor(items) {
    this.items = items;
  }

  toJSON() {
    return seri.stringify(this.items);
  }
}
// register classes
seri.addClass(Item);
seri.addClass(Bag);
const bag = new Bag([
  new Item('apple'),
  new Item('orange'),
]);
const bagClone = seri.parse(seri.stringify(bag));
// validate
bagClone instanceof Bag;
bagClone.items[0] instanceof Item;
bagClone.items[0].name === 'apple';
bagClone.items[1] instanceof Item;
bagClone.items[1].name === 'orange';
Hope it helps address your problem.
The browser's native JSON API will not give you back your isOld function after you call JSON.stringify. However, if you build the string yourself (perhaps with Crockford's json2.js instead of the browser's API), so that you have something like
var person_json = "{ \"age\": 20, isOld: function () { return this.age > 60; } }";
then you can call
eval("(" + person_json + ")")
and you will get the function back in the resulting object. Note that such a string is no longer strict JSON, and eval comes with the usual security caveats.
I had the exact same problem, and wrote a small tool to do the mixing of data and model. See https://github.com/khayll/jsmix
This is how you would do it:
//model object (or whatever you'd like the implementation to be)
var Person = function() {};

Person.prototype.isOld = function() {
    return this.age > RETIREMENT_AGE;
};

//then you could say:
var result = JSMix(jsonData).withObject(Person.prototype, "persons").build();

//and use
console.log(result.persons[3].isOld());
It can handle complex objects, such as nested collections, recursively as well.
As for serializing JS functions, I wouldn't do such a thing, for security reasons.
I've added yet another JavaScript serializer repo to GitHub.
Rather than serializing and deserializing JavaScript objects to an internal format, the approach here is to serialize JavaScript objects out to native JavaScript. This has the advantage that the format is totally independent of the serializer, and the object can be recreated simply by calling eval().
https://github.com/iconico/JavaScript-Serializer
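To illustrate the general idea with a hand-rolled sketch (this is not the library's actual API): the serialized form is JavaScript source text rather than JSON, so eval() rebuilds a full instance, methods included.
// hand-rolled illustration of "serialize to JavaScript source"
var p1 = new Person(77);
var source = "new Person(" + JSON.stringify(p1.age) + ")"; // "new Person(77)"
var restored = eval(source); // a real Person again
restored.isOld(); // true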
I had a similar problem and since I couldn't find a sufficient solution, I also created a serialization library for javascript: https://github.com/wavesoft/jbb (as a matter of fact it's a bit more, since it's mainly intended for bundling resources)
It is close to Binary-JSON but it adds a couple of additional features, such as metadata for the objects being encoded and some extra optimizations like data de-duplication, cross-referencing to other bundles and structure-level compression.
However, there is a catch: in order to keep the bundle size small, there is no type information in the bundle. Such information is provided in a separate "profile" that describes your objects for encoding and decoding. For optimization reasons this information is given in the form of a script.
But you can make your life easier using the gulp-jbb-profile (https://github.com/wavesoft/gulp-jbb-profile) utility for generating the encoding/decoding scripts from simple YAML object specifications like this:
# The 'Person' object has the 'age' and 'isOld'
# properties
Person:
  properties:
    - age
    - isOld
For example, you can have a look at the jbb-profile-three profile.
When you have your profile ready, you can use JBB like this:
var JBBEncoder = require('jbb/encode');
var MyEncodeProfile = require('profile/profile-encode');
// Create a new bundle
var bundle = new JBBEncoder( 'path/to/bundle.jbb' );
// Add one or more profile(s) in order for JBB
// to understand your custom objects
bundle.addProfile(MyEncodeProfile);
// Encode your object(s) - They can be any valid
// javascript object, or objects described in
// the profiles you added previously.
var p1 = new Person(77);
bundle.encode( p1, 'person' );
var people = [
    new Person(45),
    new Person(77),
    ...
];
bundle.encode( people, 'people' );
// Close the bundle when you are done
bundle.close();
And you can read it back like this:
var JBBDecoder = require('jbb/decode');
var MyDecodeProfile = require('profile/profile-decode');
// Instantiate a new binary decoder
var binaryLoader = new JBBDecoder( 'path/to/bundle' );
// Add your decoding profile
binaryLoader.addProfile( MyDecodeProfile );
// Add one or more bundles to load
binaryLoader.add( 'bundle.jbb' );
// Load and callback when ready
binaryLoader.load(function( error, database ) {
    // Your objects are in the database
    // and ready to use!
    var people = database['people'];
});
You can create an empty instance of your class and assign values to it with Object.assign.
let p1 = new Person(77);
let serialized = JSON.stringify(p1);
let deserialized = Object.assign(new Person(), JSON.parse(serialized))
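A small wrapper makes the pattern reusable. Note that, as sketched here, only the top-level prototype is restored; nested class instances come back as plain objects:
// minimal sketch: revive a flat class instance from JSON
function revive(Ctor, json) {
    return Object.assign(new Ctor(), JSON.parse(json));
}

var p = revive(Person, JSON.stringify(new Person(77)));
p.isOld(); // true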
I tried to do this with Date with native JSON...
function stringify (obj: any) {
  return JSON.stringify(
    obj,
    function (k, v) {
      if (this[k] instanceof Date) {
        return ['$date', +this[k]]
      }
      return v
    }
  )
}

function clone<T> (obj: T): T {
  return JSON.parse(
    stringify(obj),
    (_, v) => (Array.isArray(v) && v[0] === '$date') ? new Date(v[1]) : v
  )
}
What does this show? It shows that there needs to be a unique identifier, better than $date, if you want it to be more secure.
class Klass {
  static fromRepr (repr: string): Klass {
    return new Klass(...)
  }

  static guid = '__Klass__'

  __repr__ (): string {
    return '...'
  }
}
This is a serializable Klass, with
function serialize (obj: any) {
  return JSON.stringify(
    obj,
    function (k, v) { return this[k] instanceof Klass ? [Klass.guid, this[k].__repr__()] : v }
  )
}

function deserialize (repr: string) {
  return JSON.parse(
    repr,
    (_, v) => (Array.isArray(v) && v[0] === Klass.guid) ? Klass.fromRepr(v[1]) : v
  )
}
I tried to do it with a Mongo-style object ({ $date }) as well, but it failed in JSON.parse; supplying k doesn't matter at that point.
BTW, if you don't care about libraries, you can use yaml.dump / yaml.load from js-yaml. Just make sure you do it the dangerous way.
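For what it's worth, a sketch of that js-yaml route, assuming js-yaml 3.x where dump/load use the full ("dangerous") schema that understands !!js/function (the js types were removed in 4.x):
var yaml = require('js-yaml');            // assuming js-yaml 3.x
var text = yaml.dump(new Person(77));     // isOld is emitted as a !!js/function scalar
var back = yaml.load(text);               // a plain object, but back.isOld() works again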
I've made an npm module named esserializer to solve this problem: it saves JavaScript class instance values during serialization, in plain JSON format, without storing any functions. The only overhead incurred during serialization is saving the class name information, so disk usage stays optimized.
Later, during the deserialization stage, esserializer can recursively deserialize the object instance, with all type/function information retained. It works in both browser and Node.js environments.
In the OP's case, the code would be pretty easy:
var ESSerializer = require('esserializer');
function Person(age) {
    this.age = age;
    this.isOld = function () {
        return this.age > 60;
    };
}
// before serialize, ok
var p1 = new Person(77);
alert("Is old: " + p1.isOld());
// serialize
var serializedText = ESSerializer.serialize(p1);
//...do something, or send the above serializedText to another JavaScript environment.
// deserialize
var deserializedObj = ESSerializer.deserialize(serializedText, [Person]);
alert("Is old: " + deserializedObj.isOld());
The deserializedObj is a Person instance, which contains all value, function, and superclass information.
Hope it helps.
Related
I am trying to create a generic document update handler.
I am using:
function(doc, req) {
    var field = req.query.field;
    var value = req.query.value;
    var message = 'set ' + field + ' to ' + value;
    doc[field] = value;
    return [doc, message];
}
This works OK with simple JSON but not with a nested object such as
"abc":{"ax":"one", "by":"two" ...}
my curl command is:
curl -X PUT 'http://127.0.0.1:5984/db/_design/updatehandler/_update/inplace/id?field=abc.ax&value=three'
The result is that a new field is created and the existing abc:{ax:one} is left untouched.
With a simpler example:
if I have: "xyz":"five"
curl -X PUT 'http://127.0.0.1:5984/db/_design/updatehandler/_update/inplace/id?field=xyz&value=ten'
... works correctly.
I have not yet tried the generic process on "pqr":[s, t, u], but I guess this may require a different design modification as well.
Ideally, one wants something that works in at least the three cases mentioned above, as long as it is not so complex that it isn't worth the effort.
Could someone kindly help here or refer me to some JavaScript examples, please?
Many thanks.
John
function (doc, req) {
    function merge(nDoc, oDoc) {
        for (var f in nDoc) {
            var tmpNewDoc = nDoc[f],
                tmpDoc = oDoc[f];
            var type = typeof(tmpNewDoc);
            if (type === 'object' && tmpNewDoc.length === undefined && tmpDoc !== undefined) {
                merge(tmpNewDoc, tmpDoc);
            } else {
                oDoc[f] = tmpNewDoc;
            }
        }
    }

    if (!doc) {
        return [null, toJSON({
            error: 'not_found',
            reason: 'No document was found with the specified ID, or an incorrect method was used.'
        })];
    }

    try {
        var newDoc = JSON.parse(req.body);
        merge(newDoc, doc);
    }
    catch (e) {
        return [null, toJSON({
            error: 'bad_request',
            reason: 'Invalid JSON or processing error'
        })];
    }

    return [doc, toJSON({
        doc: doc,
        ok: true
    })];
}
Simply pass the new document to this handler. It will merge the new values into it (warning: arrays will be overwritten). If you also want to merge arrays, you can either use a third-party library or build your own recursive merge function.
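For example, the call could look something like this (reusing the design-document path from the question; the body is the partial document to merge):
curl -X PUT 'http://127.0.0.1:5984/db/_design/updatehandler/_update/inplace/id' \
     -H 'Content-Type: application/json' \
     -d '{"abc": {"ax": "three"}}'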
I have an arbitrary (E)JSON that gets created and sent over the wire from client to server in my Meteor app. It uses RegExp objects to zero in on results:
# on the client
selector =
"roles.user": { "$ne": null }
"profile.email": /^admin#/gi
All is fine and dandy on the client side, but if I pass this to the server via Meteor.call or Meteor.subscribe, the resulting (E)JSON takes this form:
# on the server
selector =
"roles.user": { "$ne": null }
"profile.email": {}
...and somewhere an engineer dies a little on the inside.
There are plenty of resources on the Web explaining why RegEx is unserializable via JSON.stringify/JSON.parse or the equivalent EJSON methods.
I'm not convinced RegEx serialization is impossible. So how can it be done?
After reviewing this HowTo and the Meteor EJSON Docs, we may serialize RegEx using the EJSON.addType method.
Extend RegExp - Provide RegExp with the methods EJSON.addType requires for implementation.
RegExp::options = ->
  opts = []
  opts.push 'g' if @global
  opts.push 'i' if @ignoreCase
  opts.push 'm' if @multiline
  return opts.join('')

RegExp::clone = ->
  self = @
  return new RegExp(self.source, self.options())

RegExp::equals = (other) ->
  self = @
  if other not instanceof RegExp
    return false
  return EJSON.stringify(self) is EJSON.stringify(other)

RegExp::typeName = ->
  return "RegExp"

RegExp::toJSONValue = ->
  self = @
  return {
    'regex': self.source
    'options': self.options()
  }
Call EJSON.addType - Do this anywhere. It's best to make it available to client AND server though. This is going to deserialize the object defined in toJSONValue above.
EJSON.addType "RegExp", (value) ->
  return new RegExp(value['regex'], value['options'])
Test In Your Console - Don't take my word for it. See for yourself.
> o = EJSON.stringify(/^Mooo/ig)
"{"$type":"RegExp","$value":{"regex":"^Mooo","options":"ig"}}"
> EJSON.parse(o)
/^Mooo/gi
And there you have a RegExp being serialized and parsed on client and server, able to be passed in over the wire, saved in a Session, and even possibly stored in a Collection of queries!
EDIT to address the IE10+ error "Assignment to read-only properties is not allowed in strict mode", courtesy of @Tim Fletcher in the comments:
import { EJSON } from 'meteor/ejson';
function getOptions(self) {
  const opts = [];
  if (self.global) opts.push('g');
  if (self.ignoreCase) opts.push('i');
  if (self.multiline) opts.push('m');
  return opts.join('');
}

RegExp.prototype.clone = function clone() {
  return new RegExp(this.source, getOptions(this));
};

RegExp.prototype.equals = function equals(other) {
  if (!(other instanceof RegExp)) return false;
  return EJSON.stringify(this) === EJSON.stringify(other);
};

RegExp.prototype.typeName = function typeName() {
  return 'RegExp';
};

RegExp.prototype.toJSONValue = function toJSONValue() {
  return { regex: this.source, options: getOptions(this) };
};
EJSON.addType('RegExp', value => new RegExp(value.regex, value.options));
There is a far simpler solution:
stringify your RegExp via .toString(), send it to the server, and then parse it back into a RegExp.
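A minimal sketch of that round trip in plain JavaScript:
var pattern = /^admin@/gi;
var wire = pattern.toString();                // "/^admin@/gi"
// on the receiving side, split the source from the flags:
var parts = wire.match(/^\/(.*)\/([a-z]*)$/);
var revived = new RegExp(parts[1], parts[2]);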
I'm using parse.com as the backend to my project and am creating a web page using JavaScript.
I'm extending PFObject as follows:
var Match = Parse.Object.extend("Match");
On the match object I have a couple of properties; let's say the first one is "player1".
My question is: how can I make it so that when I try to get a property of my match object, it succeeds?
i.e.:
var matchQuery = new Parse.Query("Match");
matchQuery.find({
    success: function (results) {
        _.each(results, function (element, index, list) {
            var test = element.player1; // <<<< here player1 is undefined
        });
    },
    error: function (error) {
        alert("Error: " + error.code + " " + error.message);
    }
});
Thanks for any tips!
Setting values on the backbone object works like regular JS. The value is retained for as long as the object is in memory, but no longer.
match.memoryOnlyAttribute = "I'll be gone soon";
If match is released and then queried again, memoryOnlyAttribute will be null, as you have observed.
To get a value for a property that persists, it must first be a property on the object. This is typically done in the data browser with the "+ Col" button. (It can also be done in code if your CLP permits).
With that done, the object can only be assigned persistent property values via the set() method...
var Match = Parse.Object.extend("Match");
var match = new Match();
match.set("player1", /* an object here that is of the right type */);
match.save();
Once the object is retrieved, the property in the parse data can be retrieved with the get() method...
matchQuery.first().then(function(matchResult) {
    var player1 = matchResult.get("player1");
    // player1 will have a value
});
I want to log objects using log4javascript. For example consider the following code:
function LogObject() {
    var blah = {
        one: 42,
        two: "486"
    };
    logger.info(blah);
}
Assuming that logger is an instance of a log4javascript logger that is properly set up:
var logger = log4javascript.getLogger("InternalLogger");
var ajaxAppender = new log4javascript.AjaxAppender(url),
    jsonLayout = new log4javascript.JsonLayout(false, false);
ajaxAppender.setLayout(jsonLayout);
ajaxAppender.addHeader("Content-Type", "application/json");
logger.addAppender(ajaxAppender);
I expect the following result: the request payload contains an array of messages, the first of which is my object serialized into JSON. What I see instead is an array of messages, the first of which contains the string "[object Object]" (as if the toString() method had been invoked). How can I achieve what I want?
JsonLayout formats the logging event (which includes log level, timestamp and logger name in addition to the log message(s)) as JSON, rather than formatting the log message, which is pretty much assumed to be a string. The reason for this is to avoid a dependency on a JSON library in older browsers; generating JSON for the simple, known data that JsonLayout deals with is no problem without a JSON library, but handling arbitrary objects definitely requires one.
The workaround I'd suggest is simply to format the message before you pass it to the logging call:
logger.info( JSON.stringify(blah) );
We were following @Tim Down's suggestion:
logger.info( JSON.stringify(blah) );
But we had performance issues: since JSON.stringify happens before logger.info is called, it will always run, even if the logging level is set to ignore this log entry.
To work around this I wrote a new lazy layout, so that the stringification only happens if the log entry is actually output. To be more flexible, it also allows passing a function, in which case it outputs the result of running that function.
Usage:
logger.trace("Received ", widget, " which has ", () => countFrimbles(widget), ' frimbles');
Implementation:
function LazyFormatLayout() { }

LazyFormatLayout.prototype = new log4javascript.Layout();

LazyFormatLayout.prototype.format = function (loggingEvent) {
    var time = loggingEvent.timeStamp.toTimeString().split(/\s/)[0];
    var head = time + ' ' + loggingEvent.logger.name + ' [' + loggingEvent.level.name + '] - ';
    var body = loggingEvent.messages.map(function (arg) {
        try {
            switch (typeof (arg)) {
                case 'function':
                    return arg();
                case 'object':
                    return JSON.stringify(arg);
            }
        }
        catch (e) {
            return '<<error while logging: ' + e.stack + '>>';
        }
        return arg;
    }).join('');
    if (!loggingEvent.exception)
        return head + body;
    return head + body + ' ==> Exception: ' + loggingEvent.exception.stack;
};

LazyFormatLayout.prototype.ignoresThrowable = function () { return false; };
LazyFormatLayout.prototype.toString = function () { return "LazyFormatLayout"; };
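Wiring it up is the same as for any other layout; for example (the appender choice here is just for illustration):
var logger = log4javascript.getLogger("LazyLogger");
var appender = new log4javascript.BrowserConsoleAppender();
appender.setLayout(new LazyFormatLayout());
logger.addAppender(appender);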
The question is somewhat dated, but a simple Google search turned up this question, and there seems to be a built-in way to log objects:
var log = log4javascript.getDefaultLogger();
log.info("log following object",{ data:5, text:"bla" });
Output:
12:49:43 INFO - log following object {
  data: 5,
  text: bla
}
I have a Single Page Application that is working pretty well so far, but I have run into an issue I am unable to figure out. I am using Breeze to populate a list of projects to be displayed in a table. There is way more info than I actually need, so I am doing a projection on the data. I want to add a Knockout computed onto the entity, so to accomplish this I registered an entity constructor like so:
metadataStore.registerEntityTypeCtor(entityNames.project, function () { this.isPartial = false; }, initializeProject);
The initializeProject function uses some of the values in the project to determine what the values should be for the computed. For example, if Project.Type == "P" then rowClass should be "Red".
The problem I am having is that all the properties of Project are null except for ProjNum, which happens to be the key. I believe the issue is caused by the projection, because I have registered other initializers for other types and they work just fine. Is there a way to make this work?
EDIT: I thought I would just add a little more detail for clarification. The values of all the properties are set to Knockout observables; when I interrogate the properties using the JavaScript debugger in Chrome, the _latestValue of any of the properties is null. The only property that is set is ProjNum, which is also the entity key.
EDIT2: Here is the client side code that does the projection
var getProjectPartials = function (projectObservable, username, forceRemote) {
    var p1 = new breeze.Predicate("ProjManager", "==", username);
    var p2 = new breeze.Predicate("ApprovalStatus", "!=", "X");
    var p3 = new breeze.Predicate("ApprovalStatus", "!=", "C");
    var select = 'ProjNum,Title,Type,ApprovalStatus,CurrentStep,StartDate,ProjTargetDate,CurTargDate';
    var isQaUser = cookies.getCookie("IsQaUser");
    if (isQaUser == "True") {
        p1 = new breeze.Predicate("QAManager", "==", username);
        select = select + ',QAManager';
    } else {
        select = select + ',ProjManager';
    }
    var query = entityQuery
        .from('Projects')
        .where(p1.and(p2).and(p3))
        .select(select);
    if (!forceRemote) {
        var p = getLocal(query);
        if (p.length > 1) {
            projectObservable(p);
            return Q.resolve();
        }
    }
    return manager.executeQuery(query).then(querySucceeded).fail(queryFailed);

    function querySucceeded(data) {
        var list = partialMapper.mapDtosToEntities(
            manager,
            data.results,
            model.entityNames.project,
            'ProjNum'
        );
        if (projectObservable) {
            projectObservable(list);
        }
        log('Retrieved projects using breeze', data, true);
    }
};
and the code for the partialMapper.mapDtosToEntities function.
var defaultExtension = { isPartial: true };

function mapDtosToEntities(manager, dtos, entityName, keyName, extendWith) {
    return dtos.map(dtoToEntityMapper);

    function dtoToEntityMapper(dto) {
        var keyValue = dto[keyName];
        var entity = manager.getEntityByKey(entityName, keyValue);
        if (!entity) {
            extendWith = $.extend({}, extendWith || defaultExtension);
            extendWith[keyName] = keyValue;
            entity = manager.createEntity(entityName, extendWith);
        }
        mapToEntity(entity, dto);
        entity.entityAspect.setUnchanged();
        return entity;
    }

    function mapToEntity(entity, dto) {
        for (var prop in dto) {
            if (dto.hasOwnProperty(prop)) {
                entity[prop](dto[prop]);
            }
        }
        return entity;
    }
}
EDIT3: Looks like it was my mistake. I found the error when I looked closer at initializeProject. Below is what the function looked like before I fixed it.
function initializeProject(project) {
    project.rowClass = ko.computed(function () {
        if (project.Type == "R") {
            return "project-list-item info";
        } else if (project.Type == "P") {
            return "project-list-item error";
        }
        return "project-list-item";
    });
}
The issue was with project.Type: I should have used project.Type(), since it is an observable. It's a silly mistake that I have made too many times since starting this project.
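For reference, the corrected initializer just adds the parentheses:
function initializeProject(project) {
    project.rowClass = ko.computed(function () {
        if (project.Type() == "R") {          // read the observable
            return "project-list-item info";
        } else if (project.Type() == "P") {
            return "project-list-item error";
        }
        return "project-list-item";
    });
}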
EDIT4: Inside initializeProject some parts are working and others aren't. When I try to access project.ProjTargetDate() I get null, and the same with project.StartDate(). Because of the null value, an error is thrown from the moment library, as I work with these dates to determine when a project is late. I tried removing the select from the client query and the call to the partial entity mapper, and when I did that everything worked fine.
You seem to be getting closer. I think a few more guard clauses in your initializeProject method would help and, when working with Knockout, one is constantly battling the issue of parentheses.
Btw, I highly recommend the Knockout Context Debugger plugin for Chrome for diagnosing binding problems.
Try toType()
You're working very hard with your DTO mapping, following along with John's code from his course. Since then there's a new way to get projection data into an entity: add toType(...) to the end of the query like this:
var query = entityQuery
    .from('Projects')
    .where(p1.and(p2).and(p3))
    .select(select)
    .toType('Project'); // cast to Project
It won't solve everything but you may be able to do away with the dto mapping.
Consider DTOs on the server
I should have pointed this out first. If you're always cutting this data down to size, why not define the client-facing model to suit your client? Create DTO classes of the right shape(s) and project into them on the server before sending data over the wire.
You can also build metadata to match those DTOs so that Project on the client has exactly the properties it should have there ... and no more.
I'm writing about this now. Should have a page on it in a week or so.