I am using Cradle to store objects in CouchDB from my Node.js server. The objects contain functions:
function AnObject(a, b) {
  this.a = a;
  this.b = b;
  this.addparts = function() { return this.a + this.b; };
}
var cradle = require('cradle');
var db = new(cradle.Connection)('http://localhost', 5984, {cache: true, raw: false}).database('myDB');
var myObject = new AnObject(1, 2);
console.log("addparts:" + myObject.addparts());
db.save('myObjectId', myObject);
This works fine and the document is stored, but when I retrieve it I can no longer call the function on the returned document:
db.get('myObjectId', function(err, myRetrievedObject){
console.log("addparts:" + myRetrievedObject.addparts());
});
This fails with a "Property is not a function" TypeError:
node cradle_test
cradle_test.js:21
console.log("addparts:" + myRetrievedObject.addparts());
^
TypeError: Property 'addparts' of object {"_id":"myObjectId","_rev":"2-83535db5101fedfe30a1548fa2641f29","a":1,"b":2,"addparts":"function (){return this.a + this.b;}"} is not a function
CouchDB stores JSON, and functions are not valid JSON, so functions are never stored in the database as functions (at best, as your error output shows, they get serialized to plain strings).
I recommend you move the functions onto the prototype.
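As a quick illustration (plain JSON here, no Cradle involved), a round trip through JSON simply drops the function:
var o = { a: 1, addparts: function() { return this.a; } };
JSON.parse(JSON.stringify(o)); // => { a: 1 }, addparts is gone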
function AnObject(a, b){
this.a = a; this.b = b;
}
AnObject.prototype.addparts = function(){
return this.a + this.b;
};
db.get('myObjectId', function(err, myRetrievedObject){
var obj = Object.create(AnObject.prototype);
// for some value of extend ( https://github.com/Raynos/pd#pd.extend )
extend(obj, myRetrievedObject);
console.log("addparts:" + obj.addparts());
});
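If you don't want to pull in a library for that, a minimal sketch of such an extend (a hypothetical shallow-copy helper, enough for flat documents like this one):
function extend(target, source) {
  // copy the document's own properties onto the prototype-bearing object
  for (var key in source) {
    if (source.hasOwnProperty(key)) target[key] = source[key];
  }
  return target;
}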
This way you're not saving functions, and you can still operate on your object using your methods. You just have to make sure that the retrieved object is made an instance of AnObject.
There is a way to store functions in CouchDB: as attachments.
Define your functions in a separate .js file
(for example, a set of functions you want to share across multiple servers or app instances).
/modules/db.js:
var db = {}
db.insert = function(nanoDb, object, cb){
//insert new doc
nanoDb.insert(object, function(err, body, header) {
if (!err) {
cb.call(null, {success: true, data: body});
}else{
console.log('[insert] ', err.message);
cb.call(null, {success: false, error: err.message});
}
});
}
db.bulkInsert = function(nanoDb, array, cb){
//structure for bulk insert
var data = {
docs: array
}
//insert new doc
nanoDb.bulk(data, function(err, body, header) {
if (!err) {
cb.call(null, {success: true, data: body});
}else{
console.log('[bulkInsert] ', err.message);
cb.call(null, {success: false, error: err.message});
}
});
}
db.bulkDelete = function(nanoDb, array, cb){
for (var i in array) {
array[i]._deleted = true;
}
var data = {
docs: array
}
//bulk-update the docs so CouchDB deletes them
nanoDb.bulk(data, function(err, body) {
if (!err){
cb.call(null, {success: true, data: data});
}else{
console.log('[bulkDelete] ', err.message);
cb.call(null, {success: false, error: err.message});
}
});
}
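//note: util.extractDocs / util.extractDocsSearch used below are project helpers (not shown here)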
db.view = function(nanoDb, design, view, params, cb){
nanoDb.view(design, view, params, function(err, body) {
if (!err){
var docs = util.extractDocs(body);
cb.call(null, {success: true, data: docs});
}else{
console.log('[view] ', err.message);
cb.call(null, {success: false, error: err.message});
}
});
}
db.search = function(nanoDb, design, index, params, cb){
nanoDb.search(design, index, params, function(err, body) {
if (!err) {
var docs = util.extractDocsSearch(body);
cb.call(null, {success: true, data: docs});
}else{
console.log('[search] ', err.message);
cb.call(null, {success: false, error: err.message});
}
});
}
db.follow = function(nanoDb, params){
var feed = nanoDb.follow(params);
return feed;
}
module.exports = db;
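For context, here is a hedged sketch of calling these helpers once you have the module in hand (locally via require, or via the attachment fetch shown further down); it assumes a local CouchDB, a database named mydb, and the nano driver these functions expect:
var nano = require('nano')('http://localhost:5984');
var nanoDb = nano.db.use('mydb');
var db = require('./modules/db.js');
//insert a doc and log the outcome
db.insert(nanoDb, { type: 'example', created: Date.now() }, function (result) {
  console.log(result.success ? result.data : result.error);
});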
Use a CouchApp to deploy the functions as attachments (in a design doc):
//your couchapp
var couchapp = require('couchapp')
//url to your database
var url = '...';
//empty design doc (for attachments)
ddoc = {
_id: '_design/mods'
};
//folder containing .js files
couchapp.loadAttachments(ddoc, './modules/');
//this function uploads your attachments
couchapp.createApp(ddoc, url, function(app) {
app.push(function(){
//do something
});
});
Now, get the functions wherever you need them:
//use Mikeal's request module if you like
var request = require('request');
//tell the app where to get your .js file (the design doc created above is _design/mods)
//sometimes a good idea to persist these references in memory or even in your couchdb
var fileUrl = dbUrl + '/_design/mods/db.js'; //dbUrl: the full url to your database, as used in the deploy step
//set this variable in the proper scope
var db;
//we'll use this to 'require' the .js file
var _require = function(src, file) {
var m = new module.constructor();
m.paths = module.paths;
m._compile(src, file);
return m.exports;
}
request({ url: fileUrl }, function (err, response, body) {
if (!err && response.statusCode === 200) {
//now we compile the fetched .js source (object w/functions) back into usable form
//woot woot!
db = _require(body, fileUrl);
}else{
console.log('[request]', err);
}
});
Do stuff (keeping in mind that db is only assigned once the request callback has fired)!
db.insert(nanoDb, doc, callback) //or any of the other helpers defined above
Related
I have a Swift project in which I want to use push notifications. I tried Parse Server, using a scheduled job with .js files. The problem is that when I run the job, the job status window shows this error:
"TypeError cannot read property 'entry' of undefined at main.js at 39:27"
Here is my main.js file:
var xmlreader = require('cloud/xmlreader.js');
var url = "http://www.ilsecoloxix.it/homepage/rss/homepage.xml";
function SavePost(title, link){
var PostClass = Parse.Object.extend("Post");
var post = new PostClass();
post.set("title", title);
post.set("link", link);
post.save();
}
function SendPush(title, link){
var query = new Parse.Query(Parse.Installation);
Parse.Push.send({
where: query,
data: {
url: link,
alert: title,
sound: "default"
}
}, {
success: function() {
SavePost(title, link);
response.success("Push sent to everyone");
},
error: function(error) {
response.error("Error sending push: "+error);
}
});
}
Parse.Cloud.job("fetchPosts", function(request, response) {
Parse.Cloud.httpRequest({
url: url,
success: function(httpResponse) {
xmlreader.read(httpResponse.text, function (err, res){
var newPost = res.feed.entry.at(0);
var title = newPost.title.text();
var link = "";
newPost.link.each(function (i, linkObj){
if (linkObj.attributes().rel == "alternate"){
link = linkObj.attributes().href;
}
});
var PostClass = Parse.Object.extend("Post");
var query = new Parse.Query(PostClass);
query.equalTo("link", link);
query.find({
success: function(results) {
console.log(results);
if (results.length == 0){
SendPush(title, link);
} else {
response.error("Post already pushed");
}
}
});
});
},
error: function(httpResponse) {
console.error('Request failed with response code ' + httpResponse.status);
response.error("Error fetching posts from feed");
}
});
});
How can I avoid this problem?
I used Fiddler to inspect the XML and there is no feed node; it should be res.rss.channel:
http://www.ilsecoloxix.it/homepage/rss/homepage.xml
Response data res.feed is undefined. You can console.log(res) to figure out what the parser actually returned. Also double-check the callback signature of xmlreader.read(httpResponse.text, function (err, res)) against the library's docs (an err-first callback is the usual Node convention). In any case, guard the access before using it:
if(res && res.feed && res.feed.entry){
var newPost = res.feed.entry.at(0);
var title = newPost.title.text();
var link = "";
newPost.link.each(function (i, linkObj){
if (linkObj.attributes().rel == "alternate"){
link = linkObj.attributes().href;
}
});
var PostClass = Parse.Object.extend("Post");
var query = new Parse.Query(PostClass);
query.equalTo("link", link);
query.find({
success: function(results) {
console.log(results);
if (results.length == 0){
SendPush(title, link);
} else {
response.error("Post already pushed");
}
}
});
} else {
console.log(res); // inspect what the parser actually returned
response.error("Unexpected feed structure");
}
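Since the feed turned out to be RSS rather than Atom, here is a hedged sketch of reading the first item via res.rss.channel instead; it assumes xmlreader exposes the same at()/text() accessors used above, and that the RSS <link> element is plain text rather than an Atom-style attribute list:
xmlreader.read(httpResponse.text, function (err, res) {
  if (err || !res.rss || !res.rss.channel) {
    return response.error("Unexpected feed structure");
  }
  var newPost = res.rss.channel.item.at(0);
  var title = newPost.title.text();
  var link = newPost.link.text();
  //continue with the duplicate check and SendPush as before
});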
I have a db.js with database-related functions. I want to make a call into db.js and wait until it returns the query result.
But the result is returned only after the calling code has already executed. Can anyone please help me solve this?
Code sample:
var Q = require('q');
var db = require("./dbaccess.js");
function waitfor(ms){
var deferred = Q.defer();
setTimeout(function() {
deferred.resolve(db);
}, 5000);
return deferred.promise;
}
waitfor(2000).done(function(dbcall) {
console.log('contrived example '+ dbcall.query1());
});
dbaccess.js:
var sql = require('mssql');
var config = {
user: 'xx',
password: 'xxx',
server: 'aaa',
database: 'RequestCenter',
stream: true,
}
this.query1=function() {
sql.connect(config, function(err) {
var result;
var request = new sql.Request();
request.query("select * from dbo.AcAccount where Name like 'AutomationCli%' ");
request.on('row', function(row) {
console.log(row.Name);
result = row.Name;
});
request.on('error', function(err) {
console.log("err : "+err);
});
request.on('done', function(returnValue) {
console.log("done");
});
return result;
});
sql.on('error', function(err) {
console.log("sql err : "+err);
});
}
Output:
contrived example undefined
in db: AutomationClient
Expected output:
in db: AutomationClient
contrived example AutomationClient
Not sure why your main code passes 2000 for the ms argument and then does a 5000ms timeout. In fact, why are you using a timeout at all? If that was an attempt to wait for the db function to complete, you don't need it.
If you must use promises: personally I'd use a simple callback for code this simple; however, I get that you want to learn how to use Promises.
Your original code looked like it was attempting to return the value of the LAST row.Name, whereas this code returns an array of row.Name values. Not knowing the type of data you'd be getting, I don't know which is correct.
dbaccess.js
var Q = require('q');
var sql = require('mssql');
var config = {
user: 'xx',
password: 'xxx',
server: 'aaa',
database: 'RequestCenter',
stream: true,
}
this.query1 = function() {
var deferred = Q.defer();
sql.connect(config, function(err) {
var result = []; // return all rows - modify as required
var request = new sql.Request();
request.query("select * from dbo.AcAccount where Name like 'AutomationCli%' ");
request.on('row', function(row) {
console.log(row.Name);
result.push(row.Name);
});
request.on('error', function(err) {
console.log("err : " + err);
deferred.reject(err);
});
request.on('done', function(returnValue) {
deferred.resolve(result);
});
});
sql.on('error', function(err) {
console.log("sql err : " + err);
deferred.reject(err);
});
return deferred.promise;
}
Code sample:
var db = require("./dbaccess.js");
db.query1().then(function(result) {
console.log('contrived example ' + result);
});
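Since query1 can now reject (on connection or query errors), it's worth handling that too; Q promises expose fail (also aliased as catch):
db.query1().then(function(result) {
  console.log('contrived example ' + result);
}).fail(function(err) {
  console.error('query failed: ' + err);
});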
I am using Azure Mobile Services, with the following custom API:
var returnVal = new Object;
exports.post = function (request, response) {
// Use "request.service" to access features of your mobile service, e.g.:
// var tables = request.service.tables;
// var push = request.service.push;
var merchantdetailsTable = request.service.tables.getTable("merchantdetails");
var resourceName;
//console.log(JSON.stringify(request.parameters));
merchantdetailsTable.insert({
name: request.body.workerNameInput,
emailid: request.body.workerEmailIDInput,
contact: request.body.workerContactNumberInput
}).then(function (merchantInserted) {
returnVal.workerId = merchantInserted.id;
resourceName = returnVal.workerId.toLowerCase();
var shopworkersTable = request.service.tables.getTable("shopworkers");
return shopworkersTable.insert({
id: merchantInserted.id,
shopid: request.body.shopId
});
}, function(err){
return response.send(statusCodes.INTERNAL_SERVER_ERROR, err);
}).then(function () {
var accountName = appSettings.STORAGE_ACCOUNT_NAME;
var accountKey = appSettings.STORAGE_ACCOUNT_ACCESS_KEY;
var host = accountName + '.blob.core.windows.net';
var blobService = azure.createBlobService(accountName, accountKey, host);
return blobService.createContainerIfNotExists("merchant-image", { publicAccessLevel: 'blob' });
}, function (err) {
return response.send(statusCodes.INTERNAL_SERVER_ERROR, err);
}).then(function(error){
if (!error) {
// Provide write access to the container for the next 5 mins.
var sharedAccessPolicy = {
AccessPolicy: {
Permissions: azure.Constants.BlobConstants.SharedAccessPermissions.WRITE,
Expiry: new Date(new Date().getTime() + 5 * 60 * 1000)
}
};
// Generate the upload URL with SAS for the new image.
var sasQueryUrl =
blobService.generateSharedAccessSignature("merchant-image",
'', sharedAccessPolicy);
// Set the query string.
returnVal["merchantImage"].sasQueryString = qs.stringify(sasQueryUrl.queryString);
// Set the full path on the new item,
// which is used for data binding on the client.
returnVal["merchantImage"].imageUri = sasQueryUrl.baseUrl + sasQueryUrl.path + '/'
+ resourceName;
var accountName = appSettings.STORAGE_ACCOUNT_NAME;
var accountKey = appSettings.STORAGE_ACCOUNT_ACCESS_KEY;
var host = accountName + '.blob.core.windows.net';
var blobService = azure.createBlobService(accountName, accountKey, host);
return blobService.createContainerIfNotExists("pharmacy-certificate", { publicAccessLevel: 'blob' });
}
else {
return response.send(statusCodes.INTERNAL_SERVER_ERROR);
}
}, function (err) {
return response.send(statusCodes.INTERNAL_SERVER_ERROR, err);
}).done(function (error) {
if (!error) {
// Provide write access to the container for the next 5 mins.
var sharedAccessPolicy = {
AccessPolicy: {
Permissions: azure.Constants.BlobConstants.SharedAccessPermissions.WRITE,
Expiry: new Date(new Date().getTime() + 5 * 60 * 1000)
}
};
// Generate the upload URL with SAS for the new image.
var sasQueryUrl =
blobService.generateSharedAccessSignature("pharmacy-certificate",
'', sharedAccessPolicy);
// Set the query string.
returnVal["pharmacyCertificate"].sasQueryString = qs.stringify(sasQueryUrl.queryString);
// Set the full path on the new item,
// which is used for data binding on the client.
returnVal["pharmacyCertificate"].imageUri = sasQueryUrl.baseUrl + sasQueryUrl.path + '/'
+ resourceName;
response.send(statusCodes.OK, returnVal);
}
else {
return response.send(statusCodes.INTERNAL_SERVER_ERROR);
}
}, function (err) {
return response.send(statusCodes.INTERNAL_SERVER_ERROR, err);
});
response.send(statusCodes.OK, { message : 'Hello World!' });
};
exports.get = function(request, response) {
response.send(statusCodes.OK, { message : 'Hello World!' });
};
I am getting the following error:
TypeError: Cannot call method 'then' of undefined
at exports.post (D:\home\site\wwwroot\App_Data\config\scripts\api\addWorker.js:17:8)
Azure Mobile Services does not return promises from table operations. You need to pass an options object that contains success and error callbacks, as described at https://msdn.microsoft.com/en-us/library/azure/jj554210.aspx.
I highly recommend that you take a look at the newer implementation of the product, Azure Mobile Apps - https://www.npmjs.com/package/azure-mobile-apps. (Disclaimer: I work for Microsoft on the Azure Mobile Apps team)
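For illustration, here is a minimal sketch of the question's first insert rewritten with the success/error options object those docs describe (names are taken from the question's own code):
var merchantdetailsTable = request.service.tables.getTable("merchantdetails");
merchantdetailsTable.insert({
  name: request.body.workerNameInput,
  emailid: request.body.workerEmailIDInput,
  contact: request.body.workerContactNumberInput
}, {
  success: function (merchantInserted) {
    returnVal.workerId = merchantInserted.id;
    //continue with the shopworkers insert and blob work from here
  },
  error: function (err) {
    response.send(statusCodes.INTERNAL_SERVER_ERROR, err);
  }
});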
Is there a better way to call the same function from the client and from another Node.js module, without having two separate functions? The only difference is how the value is returned.
exports.getFiles = function(req,res){
var globPattern = req.body.globPattern;
var globOptions =req.body.globOptions;
glob(globPattern, globOptions, function (err, files) {
if(err)
{
res.status(400);
winston.log('error', err);
return res.send({success:false,reason: err});
}
res.send({success:true,data:files});
});
};
exports.getFilesFunc = function(payload){
var deferred = q.defer();
var globPattern = payload.globPattern;
var globOptions = payload.globOptions;
glob(globPattern, globOptions, function (err, files) {
if(err)
return deferred.resolve({success:false,reason: err});
deferred.resolve({success:true,data: files});
});
return deferred.promise;
};
You can basically call getFilesFunc from getFiles:
exports.getFilesFunc = function(payload){
return Q.nfcall(glob, payload.globPattern, payload.globOptions);
};
exports.getFiles = function(req,res){
this.getFilesFunc(req.body).then(function(files) {
res.send({success:true, data:files});
}, function(err) {
res.status(400);
winston.log('error', err);
return res.send({success:false, reason: err});
});
};
Since you're using Promises, you can shorten your code to the following:
var Q = require('q'),
glob = Q.denodeify(require('glob'));
exports.getFiles = function(pattern, options) {
return glob(pattern, options);
};
Then in your controller, or wherever you're calling the function, you'd control what you're going to do with your returned data:
module
.getFiles(req.body.globPattern, req.body.globOptions)
//or .getFiles(payload.globPattern, payload.globOptions)
.then(function (files) {
// use the files array here
}, function (error) {
// handle the error here
});
I am upgrading to Sails.js version 0.10 and now need to use Skipper to manage my file uploads.
When I upload a file I generate a new name for it using a UUID, and save it in the public/files/ folder (this will change when I've got this all working but it's good for testing right now)
I save the original name, and the uploaded name + path into a Mongo database.
This was all quite straightforward under Sails v0.9.x, but using Skipper I can't figure out how to read the new file name and path. (Obviously if I could read the name I could construct the path, so it's really only the name I need.)
My Controller looks like this
var uuid = require('node-uuid'),
path = require('path'),
async = require('async'), // needed for async.forEach below
blobAdapter = require('skipper-disk');
module.exports = {
upload: function(req, res) {
var receiver = blobAdapter().receive({
dirname: sails.config.appPath + "/public/files/",
saveAs: function(file) {
var filename = file.filename,
newName = uuid.v4() + path.extname(filename);
return newName;
}
}),
results = [];
req.file('docs').upload(receiver, function (err, files) {
if (err) return res.serverError(err);
async.forEach(files, function(file, next) {
Document.create({
name: file.filename,
size: file.size,
localName: // ***** how do I get the `saveAs()` value from the uploaded file *****,
path: // *** and likewise how do i get the path ******
}).exec(function(err, savedFile){
if (err) {
next(err);
} else {
results.push({
id: savedFile.id,
url: '/files/' + savedFile.localName
});
next();
}
});
}, function(err){
if (err) {
sails.log.error('caught error', err);
return res.serverError({error: err});
} else {
return res.json({ files: results });
}
});
});
},
_config: {}
};
How do I do this?
I've worked this out now and thought I'd share my solution for the benefit of others struggling with similar issues.
The solution was to not use skipper-disk at all but to write my own custom receiver. I've created this as a Sails Service object.
So in file api/services/Uploader.js
// Uploader utilities and helper methods
// designed to be relatively generic.
var fs = require('fs'),
Writable = require('stream').Writable;
exports.documentReceiverStream = function(options) {
var defaults = {
dirname: '/dev/null',
saveAs: function(file){
return file.filename;
},
completed: function(file, done){
done();
}
};
// I don't have access to jQuery here so this is the simplest way I
// could think of to merge the options.
var opts = defaults;
if (options.dirname) opts.dirname = options.dirname;
if (options.saveAs) opts.saveAs = options.saveAs;
if (options.completed) opts.completed = options.completed;
var documentReceiver = Writable({objectMode: true});
// This `_write` method is invoked each time a new file is received
// from the Readable stream (Upstream) which is pumping filestreams
// into this receiver. (filename === `file.filename`).
documentReceiver._write = function onFile(file, encoding, done) {
var newFilename = opts.saveAs(file),
fileSavePath = opts.dirname + newFilename,
outputs = fs.createWriteStream(fileSavePath, encoding);
file.pipe(outputs);
// Garbage-collect the bytes that were already written for this file.
// (called when a read or write error occurs)
function gc(err) {
sails.log.debug("Garbage collecting file '" + file.filename + "' located at '" + fileSavePath + "'");
fs.unlink(fileSavePath, function (gcErr) {
if (gcErr) {
return done([err].concat([gcErr]));
} else {
return done(err);
}
});
};
file.on('error', function (err) {
sails.log.error('READ error on file ' + file.filename, '::', err);
});
outputs.on('error', function failedToWriteFile (err) {
sails.log.error('failed to write file', file.filename, 'with encoding', encoding, ': done =', done);
gc(err);
});
outputs.on('finish', function successfullyWroteFile () {
sails.log.debug("file uploaded")
opts.completed({
name: file.filename,
size: file.size,
localName: newFilename,
path: fileSavePath
}, done);
});
};
return documentReceiver;
}
and then my controller just became (in api/controllers/DocumentController.js):
var uuid = require('node-uuid'),
path = require('path');
module.exports = {
upload: function(req, res) {
var results = [],
streamOptions = {
dirname: sails.config.appPath + "/public/files/",
saveAs: function(file) {
var filename = file.filename,
newName = uuid.v4() + path.extname(filename);
return newName;
},
completed: function(fileData, next) {
Document.create(fileData).exec(function(err, savedFile){
if (err) {
next(err);
} else {
results.push({
id: savedFile.id,
url: '/files/' + savedFile.localName
});
next();
}
});
}
};
req.file('docs').upload(Uploader.documentReceiverStream(streamOptions),
function (err, files) {
if (err) return res.serverError(err);
res.json({
message: files.length + ' file(s) uploaded successfully!',
files: results
});
}
);
},
_config: {}
};
I'm sure it can be improved further but this works perfectly for me.
The uploaded file object contains all data you need:
req.file('fileTest').upload({
// You can apply a file upload limit (in bytes)
maxBytes: maxUpload,
adapter: require('skipper-disk')
}, function whenDone(err, uploadedFiles) {
if (err) {
var error = { "status": 500, "error" : err };
res.status(500);
return res.json(error);
} else {
for (var u in uploadedFiles) {
//"fd" contains the actual file path (and name) of your file on disk
var fileOnDisk = uploadedFiles[u].fd;
// I suggest you stringify the object to see what it contains and might be useful to you
console.log(JSON.stringify(uploadedFiles[u]));
}
}
});
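Incidentally, this also covers the original question's need for the saved name: fd is the full path on disk, so path.basename can recover the generated file name (a small sketch, reusing the loop variable above):
var path = require('path');
var localName = path.basename(uploadedFiles[u].fd); //just the file name portion of fd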