Creating database in PouchDB - javascript

I am currently using Microsoft Visual Studio 2015 & Cordova for mobile development.
I went through the PouchDB documentation and tried the example code it gives:
function insertDB() {
    var db = new PouchDB("todos");
    var remoteDB = new PouchDB("http://localhost:5984/todos");
    db.put({
        _id: 'mydoc',
        title: 'Heroes'
    }).then(function (response) {
        // handle response
    }).catch(function (err) {
        console.log(err);
    });
    PouchDB.sync('remoteDB', 'http://localhost:5984/todos');
}
I used a button to invoke the insertDB() function, but I have errors.
<button onclick="insertDB()">Click Me!</button>
I keep getting errors saying localDB is not defined and PouchDB is not defined. I know I did something wrong, but I am very new to PouchDB and have never used it before.
Can someone tell me where I went wrong?

Solution:
I realized that I had linked pouchdb.min.js locally but the file could not be found, hence the "not defined" errors. I then used <script src="//cdn.jsdelivr.net/pouchdb/5.4.5/pouchdb.min.js"></script> and the problem was solved.

"not defined" means your PouchDB package is not loaded correctly. Install it via npm or include it from a CDN, then import it:
const PouchDB = require('pouchdb').default;
Everything is correct except the sync line below (its syntax is wrong):
var db = new PouchDB("todos");
var remoteDB = new PouchDB("http://localhost:5984/todos");
PouchDB.sync('remoteDB', 'http://localhost:5984/todos');
Correction
var db = new PouchDB("todos"); //local db
var remoteDB = new PouchDB("http://localhost:5984/todos"); //remote db
PouchDB.sync(db, remoteDB);
In PouchDB, the line below creates the database if it does not exist, and simply opens the existing database if it does:
var db = new PouchDB("todos");
So that single line is all you need to create the database. Data is then written with the put function: if you pass an _id and a _rev it updates the existing document, and if you pass an _id without a _rev it inserts a new document.
Import PouchDB:
const PouchDB = require('pouchdb').default;
then:
function insertDB() {
    var db = new PouchDB("todos");
    var remoteDB = new PouchDB("http://localhost:5984/todos");
    db.put({
        _id: 'mydoc',
        title: 'Heroes'
    }).then(function (response) {
        // handle response
    }).catch(function (err) {
        console.log(err);
    });
    PouchDB.sync(db, remoteDB);
}
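As a rough sketch of the update case described above (my own example, assuming the local "todos" database and a CouchDB instance at localhost:5984), you can fetch the current document with get so its _rev is included, then put it back; the live/retry options passed to sync are optional and simply keep replication running:
// Sketch: update an existing document (put with _id and _rev) and keep syncing
var db = new PouchDB("todos");
var remoteDB = new PouchDB("http://localhost:5984/todos");
db.get('mydoc').then(function (doc) {
    doc.title = 'Heroes, revised';   // doc already carries _id and _rev from get()
    return db.put(doc);              // with a _rev this is an update, not an insert
}).then(function (response) {
    console.log('updated to revision ' + response.rev);
}).catch(function (err) {
    console.log(err);
});
// Optional: continuous two-way replication instead of a one-off sync
PouchDB.sync(db, remoteDB, { live: true, retry: true });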

Related

MongoDB - Mongoose - TypeError: save is not a function

I am attempting to perform an update to a MongoDB document (using mongoose) by first using .findById to get the document, then updating the fields in that document with new values. I am still a bit new to this so I used a tutorial to figure out how to get it working, then I have been updating my code for my needs. Here is the tutorial: MEAN App Tutorial with Angular 4. The original code had a schema defined, but my requirement is for a generic MongoDB interface that will simply take whatever payload is sent to it and send it along to MongoDB. The original tutorial had something like this:
exports.updateTodo = async function(todo){
    var id = todo.id
    try{
        //Find the old Todo Object by the Id
        var oldTodo = await ToDo.findById(id);
    }catch(e){
        throw Error("Error occured while Finding the Todo")
    }
    // If no old Todo Object exists return false
    if(!oldTodo){
        return false;
    }
    console.log(oldTodo)
    //Edit the Todo Object
    oldTodo.title = todo.title
    oldTodo.description = todo.description
    oldTodo.status = todo.status
    console.log(oldTodo)
    try{
        var savedTodo = await oldTodo.save()
        return savedTodo;
    }catch(e){
        throw Error("And Error occured while updating the Todo");
    }
}
However, since I don't want a schema and want to allow anything through, I don't want to assign static values to specific field names like, title, description, status, etc. So, I came up with this:
exports.updateData = async function(update){
    var id = update.id
    // Check the existence of the query parameters. If they don't exist then assign a default value
    var dbName = update.dbName ? update.dbName : 'test'
    var collection = update.collection ? update.collection : 'testing';
    const Test = mongoose.model(dbName, TestSchema, collection);
    try{
        //Find the existing Test object by the Id
        var existingData = await Test.findById(id);
    }catch(e){
        throw Error("Error occurred while finding the Test document - " + e)
    }
    // If no existing Test object exists return false
    if(!existingData){
        return false;
    }
    console.log("Existing document is " + existingData)
    //Edit the Test object
    existingData = JSON.parse(JSON.stringify(update))
    //This was another way to overwrite existing field values, but
    //it performs a "shallow copy" so it's not desirable
    //existingData = Object.assign({}, existingData, update)
    //existingData.title = update.title
    //existingData.description = update.description
    //existingData.status = update.status
    console.log("New data is " + existingData)
    try{
        var savedOutput = await existingData.save()
        return savedOutput;
    }catch(e){
        throw Error("An error occurred while updating the Test document - " + e);
    }
}
My original problem with this was that I had a lot of issues getting the new values to overwrite the old ones. Now that that's been solved, I am getting the error of "TypeError: existingData.save is not a function". I am thinking the data type changed or something, and now it is not being accepted. When I uncomment the static values that were in the old tutorial code, it works. This is further supported by my console logging before and after I join the objects, because the first one prints the actual data and the second one prints [object Object]. However, I can't seem to figure out what it's expecting. Any help would be greatly appreciated.
EDIT: I figured it out. Apparently Mongoose has its own data type of "Model" which gets changed if you do anything crazy to the underlying data by using things like JSON.stringify. I used Object.prototype.constructor to figure out the actual object type like so:
console.log("THIS IS BEFORE: " + existingData.constructor);
existingData = JSON.parse(JSON.stringify(update));
console.log("THIS IS AFTER: " + existingData.constructor);
And I got this:
THIS IS BEFORE: function model(doc, fields, skipId) {
    model.hooks.execPreSync('createModel', doc);
    if (!(this instanceof model)) {
        return new model(doc, fields, skipId);
    }
    Model.call(this, doc, fields, skipId);
}
THIS IS AFTER: function Object() { [native code] }
Which showed me what was actually going on. I added this to fix it:
existingData = new Test(JSON.parse(JSON.stringify(update)));
On a related note, I should probably just use the native MongoDB driver at this point, but it's working, so I'll just put it on my to do list for now.
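An alternative I have not tried (and which assumes TestSchema is created with { strict: false } so arbitrary fields are not stripped) would be to merge the payload into the fetched document with Mongoose's set(), which keeps the Model type and therefore keeps save() available. A sketch of what that would look like inside the same async updateData function:
// Hypothetical sketch, assuming TestSchema = new mongoose.Schema({}, { strict: false })
var existingData = await Test.findById(id);
if (!existingData) return false;
existingData.set(update);                     // merges fields; existingData is still a Mongoose document
var savedOutput = await existingData.save();  // save() remains a function here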
You've now found a solution, but I would suggest using the MongoDB driver, which would make your code look something along the lines of this and would make the original issue disappear:
// MongoDB Settings
const MongoClient = require(`mongodb`).MongoClient;
const mongodb_uri = `mongodb+srv://${REPLACE_mongodb_username}:${REPLACE_mongodb_password}@url-here.gcp.mongodb.net/test`;
const db_name = `test`;
let client; // MongoClient instance
let db; // allows us to reuse the database connection once it is opened
// Open MongoDB Connection
const open_database_connection = async () => {
    try {
        client = await MongoClient.connect(mongodb_uri);
    } catch (err) { throw new Error(err); }
    db = client.db(db_name);
};
exports.updateData = async update => {
    // open database connection if it isn't already open
    try {
        if (!db) await open_database_connection();
    } catch (err) { throw new Error(err); }
    // update document
    let savedOutput;
    try {
        savedOutput = await db.collection(`testing`).updateOne( // .save() is being deprecated
            { // filter
                _id: update.id // the '_id' might need to be 'id' depending on how you have set your collection up; usually it is '_id'
            },
            {
                $set: update // I've assumed you are overwriting the fields you are updating, hence the '$set' operator; this also assumes the update object only contains fields that should be updated
            }
            // If you want to add a new document when the id isn't found, add the line below
            // , { upsert: true }
        );
    } catch (err) { throw new Error(`An error occurred while updating the Test document - ${err}`); }
    if (savedOutput.matchedCount !== 1) return false; // if you add '{ upsert: true }' above, remove this line as it would create a new document
    return savedOutput;
}
The testing collection would need to be created before this code runs, but that is a one-time task and very easy - if you are using MongoDB Atlas you can use MongoDB Compass or the online admin UI to create the collection without a single line of code.
As far as I can see you shouldn't need to duplicate the update object. The above reduces the database calls from two to one and lets you reuse the database connection, potentially anywhere else in the application, which helps speed things up. Also, don't store your MongoDB credentials directly in the code.
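For the credentials point, a minimal sketch (the variable names here are just placeholders, not from the original code) is to read them from environment variables instead of hard-coding them:
// Read credentials from the environment instead of hard-coding them (set via the shell or a .env file)
const mongodb_username = process.env.MONGODB_USERNAME;
const mongodb_password = process.env.MONGODB_PASSWORD;
const mongodb_uri = `mongodb+srv://${mongodb_username}:${mongodb_password}@url-here.gcp.mongodb.net/test`;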

JS SDK, Thrift error code 12 when getting SharedNotebooksList

This is the first time I have worked with Evernote.
Following the example given in the JS SDK, I create my client with the token I get from OAuth, and I can fetch all the notebooks of the current user, so that part works for me.
But I'm facing a problem that I can't understand: whenever I use any method of my shared store, it throws a Thrift exception with error code 12, giving the shard id in the message.
I know that error code 12 means the shard is temporarily unavailable, but I suspect it is something else, because it is not temporary.
I have a full-access API key and it works with the note store. Did I miss something?
// This is the example in the JS SDK
var linkedNotebook = noteStore.listLinkedNotebooks()
    .then(function(linkedNotebooks) {
        // just pick the first LinkedNotebook for this example
        return client.getSharedNoteStore(linkedNotebooks[0]);
    }).then(function(sharedNoteStore) {
        // /!\ Here is the problem: this throws the Thrift exception!
        return sharedNoteStore.listNotebooks().then(function(notebooks) {
            return sharedNoteStore.listTagsByNotebook(notebooks[0].guid);
        }).then(function(tags) {
            // tags here is a list of Tag objects
        });
    });
This seems to be an error in the SDK. I created a PR (https://github.com/evernote/evernote-sdk-js/pull/90).
You can work around this by using authenticateToSharedNotebook yourself.
// Assumes token, sandbox and the target notebook guid are already defined, and that this runs inside an async function (for await)
const client = new Evernote.Client({ token, sandbox });
const noteStore = client.getNoteStore();
const notebooks = await noteStore
    .listLinkedNotebooks()
    .catch(err => console.error(err));
const notebook = notebooks.find(x => x.guid === guid);
const { authenticationToken } = await client
    .getNoteStore(notebook.noteStoreUrl)
    .authenticateToSharedNotebook(notebook.sharedNotebookGlobalId);
const client2 = new Evernote.Client({
    token: authenticationToken,
    sandbox
});
const noteStore2 = client2.getNoteStore();
const [notebook2] = await noteStore2.listNotebooks();
noteStore2.listTagsByNotebook(notebook2.guid)

Model return values from another file not working with Sequelize.js

I have the following code that does not work currently.
var config = require('./libs/sequelize-lib.js');
var connection = config.getSequelizeConnection(); //Choosing to not pass in variable this time since this should only run via script.
var models = config.setModels(connection); //Creates live references to the models.
//Alter table as needed but do NOT force the change. If an error occurs we will fix manually.
connection.sync({ alter: true, force: false }).then(function() {
    models.users.create({
        name: 'joe',
        loggedIn: true
    }).then( task => {
        console.log("saved user!!!!!");
    });
    process.exit(); //close the nodeJS Script
}).catch(function(error) {
    console.log(error);
});
sequelize-lib.js
var Sequelize = require('sequelize');
exports.getSequelizeConnection = function(stage){
    var argv = require('minimist')(process.argv.slice(2)); //If this file is being used in a script, this will attempt to get information from the stage argument if it exists
    //Change connection settings based on stage variable. Assume localhost by default.
    var dbname = argv['stage'] ? argv['stage']+"_db" : 'localdb';
    var dbuser = argv['stage'] ? process.env.RDS_USERNAME : 'admin';
    var dbpass = argv['stage'] ? process.env.RDS_PASSWORD : 'local123';
    var dbhost = argv['stage'] ? "database-"+argv['stage']+".whatever.com" : 'localhost';
    //If the stage variable is passed during require, override any arguments passed.
    if(stage){
        dbname = stage+"_db";
        dbuser = process.env.RDS_USERNAME
        dbpass = process.env.RDS_PASSWORD
        dbhost = "database-"+stage+".whatever.com"
    }
    var connection = new Sequelize(dbname, dbuser, dbpass, {
        dialect: 'mysql',
        operatorsAliases: false, //This gets rid of a sequelize deprecation warning, refer to https://github.com/sequelize/sequelize/issues/8417
        host: dbhost
    });
    return connection;
}
exports.setModels = function(connection){
    //Import all the known models for the project.
    const fs = require('fs');
    const dir = __dirname+'/../models';
    var models = {}; //empty model object for adding model instances in file loop below.
    //#JA - Wait until this function finishes ~ hence readdirSync vs regular readdir which is async
    fs.readdirSync(dir).forEach(file => {
        console.log(file);
        //Split the .js part of the filename
        var arr = file.split(".");
        var name = arr[0].toLowerCase();
        //Create a model entry keyed by the filename (without .js) pointing to a sequelize instance imported from the file.
        models[name] = connection.import(__dirname + "/../models/"+file);
    })
    //Showcase the final models.
    console.log(models);
    return models; //This returns an object with references to the sequelize models
}
I can't get the create command to work with this setup, however. My guess is that the variables are not being passed through correctly somehow, but I'm not sure what I'm doing wrong.
The create command definitely works, because if I modify the setModels function in sequelize-lib.js to this...
exports.setModels = function(connection){
    //Import all the known models for the project.
    const fs = require('fs');
    const dir = __dirname+'/../models';
    var models = {}; //empty model object for adding model instances in file loop below.
    //#JA - Wait until this function finishes ~ hence readdirSync vs regular readdir which is async
    fs.readdirSync(dir).forEach(file => {
        console.log(file);
        //Split the .js part of the filename
        var arr = file.split(".");
        var name = arr[0].toLowerCase();
        //Create a model entry keyed by the filename (without .js) pointing to a sequelize instance imported from the file.
        models[name] = connection.import(__dirname + "/../models/"+file);
        models[name].create({
            "name":"joe",
            "loggedIn":true
        });
    })
    //Showcase the final models.
    console.log(models);
    return models; //This returns an object with references to the sequelize models
}
Then it works and I see the item added to the database (refer to the proof image below).
Note that at this point I am simply running create on the variable. What am I doing wrong that keeps the model object from passing between files correctly? The weird part is that I don't get any errors thrown in the main file. It's as if everything is defined but empty, the command is never run, and nothing is added to the database.
I also tried this in the main file, with no luck:
models["users"].create({
    name: 'joe',
    loggedIn: true
}).then( task => {
    console.log("saved user!!!!!");
});
The purpose of all this is to read models automatically from the model directory and create instances that are ready to go for every model, even if new ones are added in the future.
UPDATE:
So I did another interesting test: it seems the create function won't work inside the .then() callback of the sync command, even though the model appears to be passed correctly. After changing the main file to this...
var config = require('./libs/sequelize-lib.js');
var connection = config.getSequelizeConnection(); //Choosing to not pass in variable this time since this should only run via script.
var models = config.setModels(connection); //Creates live references to the models using the connection created above.
models["users"].create({
    "name":"joe",
    "loggedIn":true
});
//Alter table as needed but do NOT force the change. If an error occurs we will fix manually.
connection.sync({ alter: true, force: false }).then(function() {
    process.exit(); //close the nodeJS Script
}).catch(function(error) {
    console.log(error);
});
Doing this seems to get create to work. I'm not sure it is good form, though, since the tables might not be created at this point. I need a way to get it to work inside the sync callback.
Well, I finally answered my own question, but I'm not sure I like the answer.
var config = require('./libs/sequelize-lib.js');
var connection = config.getSequelizeConnection(); //Choosing to not pass in variable this time since this should only run via script.
var models = config.setModels(connection); //Creates live references to the models using the connection created above.
//Alter table as needed but do NOT force the change. If an error occurs we will fix manually.
connection.sync({ alter: false, force: false }).then( () => {
    models["users"].create({
        "name":"joe",
        "loggedIn":true
    }).then( user => {
        console.log("finished, with user.name="+user.name);
        process.exit();
    }).catch( error => {
        console.log("Error Occurred");
        console.log(error);
    });
}).catch(function(error) {
    console.log(error);
});
It turns out that process.exit was firing before create could finish, because create happens asynchronously. This means all my code will have to run through nested callbacks, which seems like a bit of a nightmare. I wonder if there is a better way?
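For what it's worth, one sketch of that better way (my own example, assuming the same sequelize-lib.js) is to use async/await so the steps read top to bottom without nesting:
// Sketch: same flow with async/await instead of nested .then() callbacks
var config = require('./libs/sequelize-lib.js');
async function run() {
    var connection = config.getSequelizeConnection();
    var models = config.setModels(connection);
    try {
        await connection.sync({ alter: false, force: false });
        var user = await models["users"].create({ name: 'joe', loggedIn: true });
        console.log("finished, with user.name=" + user.name);
    } catch (error) {
        console.log(error);
    } finally {
        process.exit(); // only exit once everything above has settled
    }
}
run();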

Bluebird promisifyAll not creating entire set of Async functions

Maybe I'm not understanding how Promise.promisifyAll works. I'm trying to promisify the coinbase package.
Basically, the client's functions are promisified but the accounts returned by those functions don't appear to have the Async version: [TypeError: acc.getTransactionsAsync is not a function].
I've tried passing {multiArgs:true} as options to Promise.promisifyAll() as suggested in an answer to a similar question, but it didn't solve the problem. Any suggestion is appreciated.
Normal way of using the package (works):
var Client = require('coinbase').Client
var client = new Client({
    'apiKey': '<snip>',
    'apiSecret': '<snip>',
    'baseApiUri': 'https://api.sandbox.coinbase.com/v2/',
    'tokenUri': 'https://api.sandbox.coinbase.com/oauth/token'
});
//Callbacks
client.getAccounts({}, function(err, accounts) {
    accounts.forEach(function(acc) {
        acc.getTransactions(null, function(err, txns) {
            txns.forEach(function(txn) {
                console.log('txn: ' + txn.id);
            });
        });
    });
});
Promisified version not working (getTransactionsAsync is undefined, but the getAccountsAsync returns the accounts correctly):
var Promise = require('bluebird');
var Client = require('coinbase').Client;
var client = new Client({
    'apiKey': '<snip>',
    'apiSecret': '<snip>',
    'baseApiUri': 'https://api.sandbox.coinbase.com/v2/',
    'tokenUri': 'https://api.sandbox.coinbase.com/oauth/token'
});
Promise.promisifyAll(client);
//Promises
client.getAccountsAsync({}) //Works perfectly, returns the accounts
    .then(function(accounts) {
        return Promise.map(accounts, function(acc) {
            return acc.getTransactionsAsync(null); //This function call is throwing the TypeError
        });
    })
    .then(function(transactions) {
        console.log('Transactions:');
        transactions.forEach(function(tx) {
            console.log(tx);
        });
    })
    .catch(function(err) {
        console.log(err);
    });
EDIT:
Looking through the package I want to promisify, I realized that the functions I'm trying to call belong to the model objects returned by the package. I think promisifyAll only processes the client functions, and the models are not being processed. I'm just not that well versed in how the parsing is done :(
This is the index.js (module exported)
var Account = require('./lib/model/Account.js'),
    Address = require('./lib/model/Address.js'),
    Buy = require('./lib/model/Buy.js'),
    Checkout = require('./lib/model/Checkout.js'),
    Client = require('./lib/Client.js'),
    Deposit = require('./lib/model/Deposit.js'),
    Merchant = require('./lib/model/Merchant.js'),
    Notification = require('./lib/model/Notification.js'),
    Order = require('./lib/model/Order.js'),
    PaymentMethod = require('./lib/model/PaymentMethod.js'),
    Sell = require('./lib/model/Sell.js'),
    Transaction = require('./lib/model/Transaction.js'),
    User = require('./lib/model/User.js'),
    Withdrawal = require('./lib/model/Withdrawal.js');
var model = {
    'Account' : Account,
    'Address' : Address,
    'Buy' : Buy,
    'Checkout' : Checkout,
    'Deposit' : Deposit,
    'Merchant' : Merchant,
    'Notification' : Notification,
    'Order' : Order,
    'PaymentMethod' : PaymentMethod,
    'Sell' : Sell,
    'Transaction' : Transaction,
    'User' : User,
    'Withdrawal' : Withdrawal
};
module.exports = {
    'Client' : Client,
    'model' : model
};
EDIT 2:
The Client requires its own model modules, so Promise.promisifyAll should work as expected with the object's properties, without the structure from the previous edit interfering. At this point I think there may be no option other than writing my own Promises for every function I need that doesn't live directly under the client.
I think promisifyAll only parses the client functions, and the models are not being processed.
Not exactly. This has nothing to do with how promisifyAll is looking through the methods and properties. Rather, you are explicitly passing only the client methods to be promisified:
var Client = require('coinbase').Client;
var client = new Client(…);
Promise.promisifyAll(client);
There's no link from client to the models. Rather try to call promisifyAll on the whole module, not only the Client class:
var Client = Promise.promisifyAll(require('coinbase')).Client;
or, if that doesn't work, call promisifyAll on both the Client class and the model collection of classes:
var coinbase = require('coinbase');
var Client = Promise.promisifyAll(coinbase.Client);
Promise.promisifyAll(coinbase.model);
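If neither of those picks up the instance methods, another option (a sketch of my own, assuming getTransactions and friends live on Account.prototype) is to promisify the model prototypes directly, so every Account object the client hands back already has the Async variants:
// Hypothetical sketch: promisify the prototype so instances returned by the client get *Async methods
var Promise = require('bluebird');
var coinbase = require('coinbase');
Promise.promisifyAll(coinbase.model.Account.prototype);
// Now any Account returned by client.getAccountsAsync() should expose getTransactionsAsync()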
So, instead of editing the question again, I'm adding my temporary workaround (with a slightly different function call, but one that had the same problem of missing the Async function). This promisifies the model modules on their own, and I'll need to create model instances on demand whenever I need a promisified function on the non-promisified objects returned by the client.
It's not what I would like to do, since extra allocation is needed for each model on which I want to make a promisified call, but I prefer this approach over creating new modules just to group new Promise(...) wrappers.
If a better solution is suggested, I'll still mark that as the answer.
var Promise = require('bluebird');
var Client = require('coinbase').Client;
var cbModel = require('coinbase').model;
var client = new Client({
    'apiKey': '<snip>',
    'apiSecret': '<snip>',
    'baseApiUri': 'https://api.sandbox.coinbase.com/v2/',
    'tokenUri': 'https://api.sandbox.coinbase.com/oauth/token'
});
Promise.promisifyAll(client);
Promise.promisifyAll(cbModel);
//Promises
client.getAccountAsync('primary')
    .then(function(account) {
        account = new cbModel.Account(client, account);
        return account.getTransactionsAsync(null);
    })
    .then(function(transactions) {
        console.log('Transactions:');
        transactions.forEach(function(tx) {
            console.log(tx.id);
        });
    })
    .catch(function(err) {
        console.log(err);
    });

How do I get a hold of a Strongloop loopback model?

This is maddening. How do I get hold of a LoopBack model so I can programmatically work with it? I have a persisted model named "Notification". I can interact with it using the REST explorer. I want to be able to work with it within the server, i.e. Notification.find(...). I execute app.models() and can see it listed. I have done this:
var Notification = app.models.Notification;
and get a big fat "undefined". I have done this:
var Notification = loopback.Notification;
app.model(Notification);
var Notification = app.models.Notification;
and another big fat "undefined".
Please explain all I have to do to get a hold of a model I have defined using:
slc loopback:model
Thanks in advance
You can use ModelCtor.app.models.OtherModelName to access other models from your custom methods.
/** common/models/product.js **/
module.exports = function(Product) {
    Product.createRandomName = function(cb) {
        var Randomizer = Product.app.models.Randomizer;
        Randomizer.createName(cb);
    }
    // this will not work as `Product.app` is not set yet
    var Randomizer = Product.app.models.Randomizer;
}
/** common/models/randomizer.js **/
module.exports = function(Randomizer) {
    Randomizer.createName = function(cb) {
        process.nextTick(function() {
            cb(null, 'random name');
        });
    };
}
/** server/model-config.js **/
{
    "Product": {
        "dataSource": "db"
    },
    "Randomizer": {
        "dataSource": null
    }
}
I know this post is from a long time ago, but since I ran into the same question recently, here's what I figured out with the latest LoopBack API:
LoopBack 2.19.0 (the latest as of 12th July)
API: Get the Application object to which the Model is attached: http://apidocs.strongloop.com/loopback/#model-getapp
You can get the application to which your model is attached as follows:
ModelX.js
module.exports = function(ModelX) {
    //Example of disabling the parent 'find' REST api, and creating a remote method called 'findA'
    var isStatic = true;
    ModelX.disableRemoteMethod('find', isStatic);
    ModelX.findA = function (filter, cb) {
        //Get the Application object the model is attached to, and then do whatever we want
        ModelX.getApp(function(err, app){
            if(err) throw err;
            //App object returned in the callback
            app.models.OtherModel.OtherMethod({}, function(){
                if(err) throw err;
                //Do whatever you want with OtherModel.OtherMethod
                //This gives you the ability to access OtherModel within ModelX.
                //...
            });
        });
    }
    //Expose the remote method with settings.
    ModelX.remoteMethod(
        'findA',
        {
            description: ["Remote method instead of the parent method from the PersistedModel",
                "Can help you implement your own business logic"],
            http: {path: '/finda', verb: 'get'},
            accepts: {arg: 'filter',
                type: 'object',
                description: 'Filter defining fields, where, include, order, offset, and limit',
                http: {source: 'query'}},
            returns: {type: 'array', root: true}
        }
    );
};
(Apologies if the code block formatting here isn't great.)
Also, be careful about when getApp is called. The timing matters: if you call this method too early, while the model is still being initialized, you will get an 'undefined' error.
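One more place where the timing is safe (a sketch of my own, with a hypothetical boot script name): a LoopBack boot script receives the app after the models from model-config.js have been registered, so you can grab a model there and call find() on it directly:
/** server/boot/notify.js (hypothetical file name) **/
module.exports = function(app) {
    // Boot scripts run after model-config.js has attached the models, so this is defined
    var Notification = app.models.Notification;
    Notification.find({ limit: 10 }, function(err, notifications) {
        if (err) throw err;
        console.log('found %d notifications', notifications.length);
    });
};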
