My app should update the document if tmx is newer, do nothing if it is older, and insert the document if it doesn't exist.
If the document is inserted, it works perfectly; otherwise it doesn't update properly or fails with an E11000 duplicate key error.
I'm trying to figure out whether my callbacks are wrong or the logic is. (I'm new to Node.js + MongoDB.)
var MongoClient = require('mongodb').MongoClient,
fs = require('fs'),
assert = require('assert'),
url = 'mongodb://localhost:27017/pfc';
MongoClient.connect(url, function (err, db) {
run(db);
});
function run(db) {
fs.readFile('log.log', 'utf8', function (err, source) {
if (err) throw err;
var dataFile = JSON.parse(source);
dataFile.forEach(function (item) {
upsert(db, item, function (err, result) {
if (err) console.dir(err);
});
});
})
}
function upsert(db, doc, callback) {
db.collection('flags').findOne({vid: doc.vid}, function (err, item, result) {
if (item.vid != null) {
if (!(item.tmx instanceof Date)) {
item.tmx = new Date(item.tmx)
}
if(!(doc.tmx instanceof Date)){
doc.tmx = new Date(doc.tmx)
}
if (item.tmx < doc.tmx) {
console.dir("Date validation")
db.collection('flags').updateOne({vid: item.vid}, {
$set: {
"tmx": doc.tmx
}
},{upsert:true}, function (err, result) {
callback(err, result);
}
)
callback(err, result);
}
else{
console.dir("older")
callback(err, result);
}
}
else {
db.collection('flags').insertOne(doc, function(err, result) {
callback(err, result);
});
}
})
}
Edit:
The documents from the 'log.log' file have this structure:
{
    "vid": 2848,
    "tmx": "2015-07-18T23:56:17.000Z"
}
{
    "vid": 2848,
    "tmx": "2015-07-19T00:00:17.000Z"
}
collection.find({vid: doc.vid}, function (err, item) {
if (!item) // didn't find any item with vid: 2848 in the collection
    insert doc into the collection
else if (item) // found an item with vid: 2848
    if (item.tmx < doc.tmx) // only update if doc.tmx is newer
        update the collection with the most recent document
With @Aaron Dufour's help I got rid of the callback problem, thanks :)
But now the problem is that when the collection is already populated and I go looking for the newest documents in log.log, it starts again from the oldest document and works through to the newest :(
Your upsert is vulnerable to race conditions, and run calls it many times in parallel, so that is probably the issue. It is not clear exactly what doc will look like, so you might need slightly more complicated logic, but here's a version that uses Mongo's upsert to make things a bit safer:
function upsert(db, doc, callback) {
db.collection('flags').update({vid: doc.vid}, {$set: doc}, {upsert: true}, function(err) {
db.collection('flags').update({vid: doc.vid, tmx: {$lt: doc.tmx}}, {$set: {tmx: doc.tmx}}, function(err) {
callback();
});
});
}
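If it matters that an existing, newer tmx never be touched at all (the first update above sets tmx from doc unconditionally before the second one runs), here is a sketch of a variation on the same idea using $setOnInsert, assuming doc always carries vid and a tmx that is already a Date:
function upsert(db, doc, callback) {
    // $setOnInsert only writes doc when the upsert actually inserts,
    // so an existing (possibly newer) tmx is left alone in this step
    db.collection('flags').updateOne({vid: doc.vid}, {$setOnInsert: doc}, {upsert: true}, function(err) {
        if (err) return callback(err);
        // bump tmx only when the stored value is older than the incoming one
        db.collection('flags').updateOne({vid: doc.vid, tmx: {$lt: doc.tmx}}, {$set: {tmx: doc.tmx}}, callback);
    });
}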
I'm very new to JavaScript/Node.js and I'm having trouble with the following code. This is the handler for an API call. The second code segment is just like the first, except there is an additional database lookup, Merchant.findOne(...), and therefore the 'newTransaction.save()' call is nested one level deeper.
Both code segments return the 'output' variable value correctly. However, the second code segment does NOT also properly save 'newTransaction' to the Mongo database.
I'm pretty sure the issue has to do with how/when the code returns from newTransaction.save(function (err, transaction){..}, but I can't seem to get it straightened out.
I have been looking all over the internet trying to understand and fix this, with no success. Any help is appreciated.
Here is the older, simpler code that works as expected:
handler : function(request, reply) {
var output = {
"success": true,
"operations": [],
"epoch": Date.now()
};
Terminal.findById(request.payload.deviceNumber, function (err, terminal) {
if (err) {
return reply(Boom.internal('Error looking up terminal.', err));
}
if (terminal) {
ticket.quote("bitstamp", "USD", 1, function (err, exchangeRate) {
if (err) {
console.error(err);
return reply(Boom.internal('Error obtaining ticket quote.', err));
}
var newTransaction = new Transaction({
terminal: request.payload.deviceNumber,
merchant: terminal.merchant,
ccExchangeRate: exchangeRate.buy,
fiatAmtDue: request.payload.transactionValue,
ccAmtDue: ccAmtDueTruncated
});
newTransaction.save(function (err, transaction){
if (err) {
return reply(Boom.internal('Error creating new transaction.', err));
}
output.operations.push(
{
"control": "KeyPairGenControl",
"rand": cc.pseudoRandomBytes(32).toString('hex'),
"follow": {
"url": "/pos/v1/AddressAndEncKey",
"post": {
"transactionId": transaction.transactionId
}
}
}
);
return reply(output);
});
});
} else {
return reply(Boom.internal('Error looking up terminal.', err));
}
});
}
Here is the new code that does NOT save the newTransaction data into the Mongo DB.
handler : function(request, reply) {
var output = {
"success": true,
"operations": [],
"epoch": Date.now()
};
Terminal.findById(request.payload.deviceNumber, function (err, terminal) {
if (err) {
return reply(Boom.internal('Error looking up terminal.', err));
}
if (terminal) {
Merchant.findOne({merchantId: terminal.merchant}, function(err, merchant) {
if (err) {
console.log('Cannot find merchant');
return reply(output);
}
var processor = merchant.backendPaymentProcessor.name;
var localCurrency = merchant.localFiatCurrency;
//###################
ticket.quote(processor, localCurrency, 1, function (err, exchangeRate) {
if (err) {
console.error(err);
return reply(Boom.internal('Error obtaining ticket quote.', err));
}
var newTransaction = new Transaction({
terminal: request.payload.deviceNumber,
merchant: terminal.merchant,
ccExchangeRate: exchangeRate.buy,
fiatAmtDue: request.payload.transactionValue,
ccAmtDue: ccAmtDueTruncated
});
newTransaction.save(function (err, transaction){
if (err) {
return reply(Boom.internal('Error creating new transaction.', err));
}
output.operations.push(
{
"control": "KeyPairGenControl",
"rand": cc.pseudoRandomBytes(32).toString('hex'),
"follow": {
"url": "/pos/v1/AddressAndEncKey",
"post": {
"transactionId": transaction.transactionId
}
}
}
);
return reply(output);
});
//return reply(output);
});
//###################
});
} else {
return reply(Boom.internal('Error looking up terminal.', err));
}
});
}
I did a diff of your two versions:
Check 1
ticket.quote
The callbacks are identical in both versions.
processor and localCurrency are different.
Is the exchangeRate passed into the callback correct?
Check 2
newTransaction.save
newTransaction and the callback for .save are set up identically.
Check (console.log()) the values used in setting up new Transaction({...}).
Check the transaction object received by the callback (a small logging sketch follows at the end of this answer).
Check/debug the code of Transaction.save().
I don't think the issue is with the code you posted. Both versions reach return reply(output); inside newTransaction.save's callback. Very likely the issue is inside the Transaction class or the Transaction.save() logic.
One scenario I can think of is when a transaction fails:
The Transaction object is available (even for a failed transaction).
The Transaction class / Transaction.save() does not write to the db because the transaction failed.
Transaction.save() passes the transaction object to the callback but does NOT set err, even though it should.
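For Check 2, one quick way to make a silent failure visible is to instrument the save callback directly; this is just the handler's own code with two log lines added, nothing else changed:
newTransaction.save(function (err, transaction) {
    // log both arguments: a failed save that does not set err will show up here
    console.log('save err:', err);
    console.log('saved transaction:', transaction);
    if (err) {
        return reply(Boom.internal('Error creating new transaction.', err));
    }
    // ... push onto output.operations and reply(output) as before
});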
Mongoose has a feature for specifying the collection name in the schema, or as the third argument when declaring the model. Otherwise it will use the pluralized version of the name you map to the model.
The official Mongoose docs have the following statement:
Mongoose by default produces a collection name by passing the model name to the utils.toCollectionName method. This method pluralizes the name. Set this option if you need a different name for your collection.
schema-mapped:
new Schema({ <key>: <value>},
{ collection : '<collection name>' }); // collection name
model-mapped:
mongoose.model('<Model name>',
new Schema({ <key>: <value>}),
'<collection name>'); // collection name
You may also find the same information here.
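For example, a hypothetical Flag model (the model and field names here are only for illustration) could be mapped to an existing flags collection either way:
var mongoose = require('mongoose');
// schema-mapped: the collection name lives in the schema options
var flagSchema = new mongoose.Schema({ vid: Number, tmx: Date }, { collection: 'flags' });
var Flag = mongoose.model('Flag', flagSchema);
// model-mapped: the collection name is the third argument to mongoose.model
var FlagAlt = mongoose.model('FlagAlt', new mongoose.Schema({ vid: Number, tmx: Date }), 'flags');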
I have about 30,000 documents in a MongoDB collection, and I have been stuck developing a Node.js script to retrieve only the records with a specific string key-value pair.
This query on the MongoDB server returns exactly the results I've been looking for:
db.getCollection('posts').find({authorName: "Ashwin-kumar"})
It returns about 33 documents instantly. Likewise, I have about 40 authors with different names.
Here's my Node.js script to retrieve posts by authorName (yes, it is based on the name, a string, as there is no ID for these authors :( ):
var fs = require('fs'),
request = require('request'),
async = require("async"),
assert = require('assert'),
_ = require('lodash'),
MongoClient = require('mongodb').MongoClient;
var db, postsCollection, postCol;
async.series([dbConnect, checkCollection, createMeta, dbClose], function(){
console.log("Executed all calls in series.");
process.exit(0);
});
function dbConnect(callback){
MongoClient.connect("mongodb://localhost:27017/jPosts", function(pErr, pDb) {
if(pErr) {
console.dir(pDb);
return 0;
}
db = pDb;
callback();
});
}
function dbClose(callback){
db.close(true, function (err) {
if (err) console.error(err);
else console.log("close complete");
callback();
});
}
function checkCollection(callback) {
db.collection('posts', function(err, collection) {});
postsCollection = db.collection('posts');
postCol = db.collection('posts');
callback();
}
function createMeta(callback){
var meta = [];
postsCollection.aggregate([
{
$group : {_id : "$authorName"}
}]).toArray(function(err, result) {
assert.equal(err, null);
async.forEachLimit(result, 1, function(pPost, callback) {
getPosts(pPost._id, callback);
}, function(err) {
console.log(err);
callback();
});
});
}
function getPosts(pAuthor, callback){
var cursor = postCol.find({ "authorName": pAuthor});
cursor.toArray(function(err,items){
if(err)
callback(err);
else
callback(null, items);
});
}
This does not seem to work for me. cursor.toArray() does nothing but wait forever. Is it because of too many fields in each document?
I tried to get the count of the documents the cursor fetched and it works well.
function getPosts(pAuthor, callback){
var cursor = postCol.find({ "authourName": pAuthor});
cursor.count().then(function(items_count) {
console.log(items_count);
callback();
});
}
Also, I tried the cursor's .each method to iterate over the fetched documents, but no luck yet.
function getPosts(pAuthor, callback){
var cursor = postCol.find({ "authourName": pAuthor});
cursor.each(function(err, doc) {
assert.equal(err, null);
if (doc != null) {
console.dir(doc);
} else {
console.log(err);
}
});
}
Am I missing something here? What else can be done to make this work? Are there any issues with the way I'm using async?
P.S.: The idea here is to query the dump and generate PDFs for the authors in the jPosts collection.
P.S. 2: Here's a sample document:
{
"_id" : ObjectId("571d36b55672f713fe346a66"),
"id" : 56517,
"authorName" : "Ashwin-kumar",
"comment_count" : 380,
"tagline" : "... Opinions you don't really need",
"vote_count" : 5152,
"exclusive" : null,
"post": [
],
"post_comments" : [
//comment_count objects
],
"date" : "2016-03-27"
}
(I've omitted post & post_comments parts for brevity.)
try this:
var collection = db.collection("collection_name");
collection.find({authourName: "Ashwin-kumar"}).toArray(function (err,items) {
if (err) {
console.dir(err);
} else {
//do something with items array
console.dir(items);
}
});
Did you check what the value of pAuthor is in getPosts? When you do the aggregation, you receive a collection of objects with an _id field (not authourName), so you should do:
// not sure why you need meta array, at least it's not used in the code you provided
meta.push({
author: pPost._id
});
getPosts(pPost._id, callback);
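A minimal way to run that check is to reuse getPosts from the question with just a log line added (nothing else changed), and confirm the printed value matches an author name such as "Ashwin-kumar":
function getPosts(pAuthor, callback){
    console.log('querying for author:', pAuthor); // should print the author name, e.g. "Ashwin-kumar"
    var cursor = postCol.find({ "authorName": pAuthor});
    cursor.toArray(function(err, items){
        if (err)
            callback(err);
        else
            callback(null, items);
    });
}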
Background:
I must create or update a document based on a POST request that I have zero control over. I'm calling the function updateOrCreate().
Question:
How can I properly find a document by a field called nuid, without using _id, in Mongo/Mongoose?
example payload:
curl -H "Content-Type: application/json" -X POST -d '{"participant":{"nuid":"98ASDF988SDF89SDF89989SDF9898"}}' http://localhost:9000/api/things
thing.controller:
exports.updateOrCreate = function(req, res) {
//Thing.findByNuid() will not work, but it explains what I'm trying to accomplish
/**
Thing.findByNuid(req.body.participant.nuid, function (err, thing) {
if (err) { return handleError(res, err); }
if(!thing) {
Thing.create(req.body.participant, function(err, thing) {
if(err) { return handleError(res, err); }
});
}
var updated = _.merge(thing, req.body.participant);
updated.save(function (err) {
if (err) { return handleError(res, err); }
});
});
**/
//this block will fetch all the things that have nuids but that seems really heavy and awful practice
Thing.find({'nuid':req.body.participant.nuid}, function(err, thing){
console.log(thing);
});
// This block is here to communicate this will create a new thing as expected.
Thing.create(req.body.participant, function(err, thing) {
if(err) { return handleError(res, err); }
});
}
Schema
var ThingSchema = new Schema({
nuid: String
});
UPDATE:
var query = {"nuid": req.body.participant.nuid};
var update = {nuid: 'heyyy'};
Thing.findOneAndUpdate(
query,
update,
{upsert: true},
function(err, thing){
console.log(thing, "thing");
console.log(err, "err");
}
);
I would use findOneAndUpdate first and then, based on the result, do an insert. findOneAndUpdate uses MongoDB's findAndModify command.
You should also look at its new and upsert options, which will create a document if one is not found.
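A minimal sketch of that, reusing the query from the UPDATE above (passing the participant payload straight through as the update is only for illustration); new: true makes Mongoose return the modified or freshly created document rather than the original:
var query = {nuid: req.body.participant.nuid};
var update = req.body.participant;
Thing.findOneAndUpdate(query, update, {new: true, upsert: true}, function(err, thing) {
    if (err) { return handleError(res, err); }
    // thing is the updated (or newly created) document
    res.json(thing);
});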
I am doing an online course about MongoDB which is unfortunately a little out of date. It seems some of the functions have changed (the course uses version 1.4 while I am using 3.0).
Here is the code I am having trouble with, which I have tried to bring up to date with the current version of MongoDB:
app.js
var MongoClient = require('mongodb').MongoClient;
MongoClient.connect('mongodb://localhost:27017/course', function(err, db) {
if (err) throw err;
db.collection['counters'].findAndModify({
query: {
name: 'comments'
},
update: {
$inc: {
counter: 1
}
},
new: true
}, function(err, doc) {
if (err) throw err;
if (!doc) {
console.dir('No counter found for comments.');
} else {
console.dir('Number of comments: ' + doc.counter);
}
return db.close();
});
});
If I run the same findAndModify through the Mongo shell I get the anticipated result (it increments the counter and displays the new document), but when I run this with Node it has no effect on the database and throws this error:
TypeError: Cannot call method 'findAndModify' of undefined
Any tips?
Please try:
db.counters('counters').findAndModify
instead of:
db.collection['counters'].findAndModify
use this now:
db.collection('counters').findOneAndUpdate(
{name: 'comments'}, //query
{$inc: {counter: 1}}, //update
{ //options
upsert: true, // create the doc when it's not there
returnOriginal: false // return the modified doc (new is not supported here!)
},
function(err, r){ //callback
if(err) throw err;
console.log('counter: '+r.value.counter);
}
);
Whoops, I just had the wrong kind of brackets. Should have had:
db.collection('counters')
instead of
db.collection['counters']
Almost like T_G said.
From the mongodb docs:
Existing collections can be opened with collection
db.collection([[name[, options]], callback);
If strict mode is off, then a new collection is created if not already
present.
So you need to do this:
db.collection('counters', function(err, collection){
collection.findAndModify({
query: {
name: 'comments'
},
update: {
$inc: {
counter: 1
}
},
new: true
}, function(err, doc) {
if (err) throw err;
if (!doc) {
console.dir('No counter found for comments.');
} else {
console.dir('Number of comments: ' + doc.counter);
}
});
});
Suppose I have a CMS application written in Node.js which persists data in a Redis database. When this application creates a new piece of content, it should increment the id counter, add the new id to a list of ids, and then set a new hash with the content.
What I would do for now is create a function to perform this sequence (let us call it createArticle()). This function would take a callback and execute the increment. Once the increment has executed, a callback would push the new id onto the list of ids. After that, another callback would create the hash. The hash-creating callback would call the function passed as a parameter to createArticle():
function createArticle(title, content, callback) {
var client = redis.createClient();
client.incr("idCounter", function(err, id) {
if (err) return callback(err, data);
client.lpush("articleIds", id, function (err, data) {
if (err) return callback(err, data);
var key = "article:"+id;
client.hmset(key, "title", title, "content", content, callback);
});
});
}
I would use this function more or less this way (using Express in this example):
app.post('/createarticle', function(req, res) {
var title = req.body.article.title,
content = req.body.article.content;
createArticle(title, content, function(err, data) {
if (err) return res.render('error', { status: 500, message: 'Internal Server Error' });
res.render('index', { status: 200, message: 'Article created!' });
});
});
However, this code looks a bit cumbersome to me. Is this the way to go, or is there a better way to do a series of I/O steps? I used Express and Redis in my example, but the answer does not need to use them.
You can make those error checks one-liners:
function createArticle(title, content, callback) {
var client = redis.createClient()
client.incr("idCounter", function(err, id) {
if (err) return callback(err, data)
client.lpush("articleIds", id, function (err, data) {
if (err) return callback(err, data)
var key = "article:"+id
client.hmset(key, "title", title, "content", content, callback)
})
})
}
And you could use a helper for handling errors:
function noError(errorCb, cb) {
var slice = Array.prototype.slice
return function (err) {
var currentCb = err ? errorCb : cb
currentCb.apply(this, slice.call(arguments, err ? 0 : 1))
}
}
function createArticle(title, content, cb) {
var client = redis.createClient()
client.incr("idCounter", noError(cb, function(id) {
client.lpush("articleIds", id, noError(cb, function (data) {
var key = "article:"+id
client.hmset(key, "title", title, "content", content, cb)
}))
}))
}
Or something like that.
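For what it's worth, the helper works with any Node-style (err, result) callback; a hypothetical one-off use outside createArticle (errorHandler is just a stand-in name):
client.get("idCounter", noError(errorHandler, function (value) {
    // only reached when there was no error; err has already been stripped off
    console.log("current id counter:", value)
}))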