Suppose I have a CMS application written in Node.js which persists data in a Redis database. When this application creates new content, it should increment the id counter, add the new id to a list of ids, and then set a new hash with the content. What I would do for now is create a function to perform this sequence. This function (let us call it createArticle()) would take a callback and would execute the increment. Once the increment was executed, a callback function would push the new id into the list of ids. After that, another callback would create the hash. The hash-creating callback would call the function passed as a parameter to createArticle():
function createArticle(title, content, callback) {
  var client = redis.createClient();
  client.incr("idCounter", function(err, id) {
    if (err) return callback(err);
    client.lpush("articleIds", id, function(err, data) {
      if (err) return callback(err, data);
      var key = "article:" + id;
      client.hmset(key, "title", title, "content", content, callback);
    });
  });
}
I would use this function more or less this way (using Express in this example):
app.post('/createarticle', function(req, res) {
  var title = req.body.article.title,
      content = req.body.article.content;
  createArticle(title, content, function(err, data) {
    if (err) return res.render('error', { status: 500, message: 'Internal Server Error' });
    res.render('index', { status: 200, message: 'Article created!' });
  });
});
However, this code looks a bit cumbersome to me. Is this the way to go, or is there a better way to perform a series of I/O steps? I used Express and Redis in my example, but the answer does not need to use them.
You can make those error-catchers single-line:
function createArticle(title, content, callback) {
  var client = redis.createClient()
  client.incr("idCounter", function(err, id) {
    if (err) return callback(err)
    client.lpush("articleIds", id, function(err, data) {
      if (err) return callback(err, data)
      var key = "article:" + id
      client.hmset(key, "title", title, "content", content, callback)
    })
  })
}
And you could use a helper for handling errors:
function noError(errorCb, cb) {
  var slice = Array.prototype.slice
  return function (err) {
    var currentCb = err ? errorCb : cb
    // Forward all arguments on error, drop the leading err on success
    currentCb.apply(this, slice.call(arguments, err ? 0 : 1))
  }
}
function createArticle(title, content, cb) {
  var client = redis.createClient()
  client.incr("idCounter", noError(cb, function (id) {
    client.lpush("articleIds", id, noError(cb, function (data) {
      var key = "article:" + id
      client.hmset(key, "title", title, "content", content, cb)
    }))
  }))
}
Or something like that.
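For comparison, the whole sequence also flattens out with promises. Here is a minimal sketch using Node's util.promisify (Node 8+), assuming the same callback-style redis client as above:

const { promisify } = require('util');
const redis = require('redis');

async function createArticle(title, content) {
  const client = redis.createClient();
  const incr = promisify(client.incr).bind(client);
  const lpush = promisify(client.lpush).bind(client);
  const hmset = promisify(client.hmset).bind(client);

  const id = await incr('idCounter');  // 1. allocate a new id
  await lpush('articleIds', id);       // 2. record it in the list of ids
  await hmset('article:' + id, 'title', title, 'content', content); // 3. store the content hash
  return id;
}

The steps still run strictly in order, but the error handling collapses into a single try/catch at the call site instead of one check per callback.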
I am trying to scan the top 100 movie torrents on The Pirate Bay using Node and add a movie poster for each result.
I am using these libraries:
thepiratebay
imdb-api
I am able to find the top 100 and return the results with no problems:
app.get('/movies', function(req, res){
  tpb.topTorrents(207).then(function(topMovies){
    async.map(topMovies, tpb.getTorrent, function(err, results){
      res.send(results);
    })
  })
});
I am also able to look up movies via an IMDB ID and return the results
app.get('/imdb', function(req, res){
  imdb.getReq({ id: 'tt2660888' }, function(err, things) {
    res.send(things);
  });
});
What I am trying to do is loop over the top 100 results, pull the IMDB id out of the description field, query IMDB, and replace the picture field with the result.
app.get('/movies', function(req, res){
  tpb.topTorrents(207).then(function(topMovies){
    async.map(topMovies, tpb.getTorrent, function(err, results){
      for (var value of results) {
        if (S(value.description).contains('www.imdb.com/title/')) {
          var imdbId = S(value.description).between('www.imdb.com/title/', '/').s
          imdb.getReq({ id: imdbId }, function(err, movie) {
            value["picture"] = movie.poster
          });
        }
      }
      res.send(results);
    })
  })
});
This isn't working for some reason, but it makes sense to me intuitively. If I remove the imdb-api call and replace it with value["picture"] = "foo", it does work. I'm not sure if this is related to how Node handles loops. I'm new to the JS world and have a Ruby background.
Thanks in advance
You are on the right track with the async module, but the imdb requests are also asynchronous, so res.send just gets called with the initial result of async.map.
You can use another async.map for the imdb calls and then chain the two with async.waterfall, which passes the results of the first function as an argument to the second (async.apply just invokes the torrent-fetching function with your topMovies). The functions are named fetchTorrents and addPosters below so they don't shadow the tpb and imdb modules they call:
function fetchTorrents(topMovies, done) {
  async.map(topMovies, tpb.getTorrent, done);
}

function addPosters(movies, done) {
  function lookup(value, callback) {
    if (S(value.description).contains('www.imdb.com/title/')) {
      var imdbId = S(value.description).between('www.imdb.com/title/', '/').s;
      imdb.getReq({ id: imdbId }, function(err, movie) {
        value["picture"] = movie && movie.poster;
        return callback(err, value);
      });
    } else {
      // keep entries without an imdb link instead of mapping them to undefined
      return callback(null, value);
    }
  }
  async.map(movies, lookup, done);
}
app.get('/movies', function(req, res){
  tpb.topTorrents(207).then(function(topMovies){
    async.waterfall([async.apply(fetchTorrents, topMovies), addPosters], function (err, results) {
      if (err) {
        // do error handling
      }
      return res.send(results);
    });
  });
});
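For what it's worth, the same fan-out can also be expressed without the async module, using native Promises (a sketch, assuming imdb.getReq keeps the callback signature shown above):

function lookupPoster(value) {
  return new Promise(function (resolve, reject) {
    if (!S(value.description).contains('www.imdb.com/title/')) return resolve(value);
    var imdbId = S(value.description).between('www.imdb.com/title/', '/').s;
    imdb.getReq({ id: imdbId }, function (err, movie) {
      if (err) return reject(err);
      value.picture = movie.poster;
      resolve(value);
    });
  });
}

// Usage: Promise.all waits for every lookup before the response is sent
// Promise.all(results.map(lookupPoster)).then(function (all) { res.send(all); });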
I have about 30,000 documents in a MongoDB collection and have been stuck developing a Node.js script to retrieve only the records with a specific string key-value pair.
This query on the MongoDB server returns exactly the results I've been looking for:
db.getCollection('posts').find({authorName: "Ashwin-kumar"})
It returns about 33 documents instantly. Likewise, I have about 40 authors with different names.
Here's my Node.js script to retrieve posts by authorName (yes, it is based on name, a string, as there is no ID for these authors :( ):
var fs = require('fs'),
    request = require('request'),
    async = require('async'),
    assert = require('assert'),
    _ = require('lodash'),
    MongoClient = require('mongodb').MongoClient;
var db, postsCollection, postCol;
async.series([dbConnect, checkCollection, createMeta, dbClose], function(){
  console.log("Executed all calls in series.");
  process.exit(0);
});
function dbConnect(callback){
  MongoClient.connect("mongodb://localhost:27017/jPosts", function(pErr, pDb) {
    if (pErr) {
      console.dir(pErr);
      return callback(pErr);
    }
    db = pDb;
    callback();
  });
}
function dbClose(callback){
  db.close(true, function (err) {
    if (err) console.error(err);
    else console.log("close complete");
    callback();
  });
}
function checkCollection(callback) {
  postsCollection = db.collection('posts');
  postCol = db.collection('posts');
  callback();
}
function createMeta(callback){
  var meta = [];
  postsCollection.aggregate([
    { $group: { _id: "$authorName" } }
  ]).toArray(function(err, result) {
    assert.equal(err, null);
    async.forEachLimit(result, 1, function(pPost, callback) {
      getPosts(pPost._id, callback);
    }, function(err) {
      console.log(err);
      callback();
    });
  });
}

function getPosts(pAuthor, callback){
  var cursor = postCol.find({ "authorName": pAuthor });
  cursor.toArray(function(err, items){
    if (err)
      callback(err);
    else
      callback(null, items);
  });
}
This does not seem to work for me: cursor.toArray() does nothing but wait forever. Is it because of too many fields in each document?
I tried to get the count of the documents the cursor fetched, and that works well:
function getPosts(pAuthor, callback){
  var cursor = postCol.find({ "authourName": pAuthor });
  cursor.count().then(function(items_count) {
    console.log(items_count);
    callback();
  });
}
Also, I tried the cursor's .each method to iterate over the fetched documents, but no luck yet:
function getPosts(pAuthor, callback){
  var cursor = postCol.find({ "authourName": pAuthor });
  cursor.each(function(err, doc) {
    assert.equal(err, null);
    if (doc != null) {
      console.dir(doc);
    } else {
      console.log(err);
    }
  });
}
Am I missing something here? What else can be done to make this work? Are there any issues with the way I'm using async?
P.S.: The idea here is to query the dump and generate the PDFs for authors in the jPosts collection.
P.S 2: Here's a sample document
{
  "_id" : ObjectId("571d36b55672f713fe346a66"),
  "id" : 56517,
  "authorName" : "Ashwin-kumar",
  "comment_count" : 380,
  "tagline" : "... Opinions you don't really need",
  "vote_count" : 5152,
  "exclusive" : null,
  "post" : [
  ],
  "post_comments" : [
    // comment_count objects
  ],
  "date" : "2016-03-27"
}
(I've omitted the post & post_comments contents for brevity.)
Try this:
var collection = db.collection("collection_name");
collection.find({authourName: "Ashwin-kumar"}).toArray(function (err, items) {
  if (err) {
    console.dir(err);
  } else {
    // do something with the items array
    console.dir(items);
  }
});
Did you check what the value of pAuthor is in getPosts? Because when you do the aggregation, you receive a collection of objects with an _id field (not authourName), so you should do:
// not sure why you need the meta array; at least it's not used in the code you provided
meta.push({
  author: pPost._id
});
getPosts(pPost._id, callback);
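If the query still hangs once the field name is right, it may simply be the size of each document (the post and post_comments arrays). Here is a minimal sketch of getPosts that projects away the heavy field before calling toArray(), assuming the 2.x driver's cursor API:

function getPosts(pAuthor, callback) {
  postCol.find({ authorName: pAuthor })
    .project({ post_comments: 0 }) // skip the bulky comments array
    .toArray(function (err, items) {
      callback(err, items);
    });
}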
I've got this error when trying to POST
> process.nextTick(function() { throw err; });
> ^
>
> TypeError: first argument must be a string or Buffer
> at ServerResponse.OutgoingMessage.end (_http_outgoing.js:524:11)
The error suggests that something's wrong inside utils and cursor, both from the mongodb module, but what are they?
Everything works nicely on GET but breaks on POST (Postman, passing {"name":"Computer","price":2500} as text); I cannot trace which module or instance is breaking the code.
This is my connection to the db:
// Our primary interface for the MongoDB instance
var MongoClient = require('mongodb').MongoClient;
// Used in order to verify correct return values
var assert = require('assert');

var connect = function (databaseName, callBack) {
  var url = 'mongodb://localhost:27017/' + databaseName;
  MongoClient.connect(url, function (error, database) {
    assert.equal(null, error);
    console.log("Successfully connected to MongoDB instance!");
    callBack(database);
  });
};

exports.find = function (databaseName, collectionName, query, callback) {
  connect(databaseName, function (database) {
    var collection = database.collection(collectionName);
    collection.find(query).toArray(function (err, documents) {
      // Make sure nothing went wrong
      assert.equal(err, null);
      // Print all the documents which we found, if any
      console.log("MongoDB returned the following documents:");
      console.dir(documents);
      callback(err, documents);
      // Close the database connection to free resources
      database.close();
    });
  });
};

exports.insert = function (databaseName, collectionName, object, callback) {
  connect(databaseName, function (database) {
    var collection = database.collection(collectionName);
    collection.insert(object, {w: 1}, function (err, documents) {
      console.log("Added a new document");
      console.log(documents[0]);
      callback(err, documents[0]);
    });
  });
};

exports.remove = function (databaseName, collectionName, object, callback) {
  connect(databaseName, function (database) {
    var collection = database.collection(collectionName);
    collection.remove(object, function (err, result) {
      callback(err, result);
      database.close();
    });
  });
};
The issue is actually pretty straightforward, so I'm surprised that you're not getting a better error message.
In your code:
collection.insert(object, {w: 1}, function (err, documents) {
  console.log("Added a new document");
  console.log(documents[0]); // I expect this to log undefined
  callback(err, documents[0]);
});
The second argument passed into the collection.insert callback is actually a results object, not the array of documents that were inserted. So documents[0] ends up being undefined because the value is not an array of documents, and when you try to send undefined as a response, it fails.
If your intention is to pass along the newly created document, you're going to have to use the result object to get the _id and attach it to the document you inserted.
As a side note, I would consider keeping a connection open to your database rather than creating a new connection every time you want to talk with Mongo.
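For example, with the 2.x driver the callback's second argument exposes the inserted documents on its ops property, so a sketch of the fix (assuming that driver version) might look like:

exports.insert = function (databaseName, collectionName, object, callback) {
  connect(databaseName, function (database) {
    var collection = database.collection(collectionName);
    collection.insert(object, {w: 1}, function (err, result) {
      // result.ops holds the inserted documents, including the generated _id
      var inserted = result && result.ops ? result.ops[0] : undefined;
      console.log("Added a new document");
      console.log(inserted);
      callback(err, inserted);
      database.close();
    });
  });
};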
My app should update if tmx is newer, do nothing if it is older, and insert the document if it doesn't exist.
If the document is inserted it works perfectly; otherwise it doesn't update properly or says E11000 dup key.
I'm trying to figure out whether my callbacks are wrong or the logic. (I'm new to Node.js + MongoDB.)
var MongoClient = require('mongodb').MongoClient,
    assert = require('assert'),
    url = 'mongodb://localhost:27017/pfc';
MongoClient.connect(url, function (err, db) {
  run(db);
});
function run(db) {
  fs.readFile('log.log', 'utf8', function (err, source) {
    if (err) throw err;
    var dataFile = JSON.parse(source);
    dataFile.forEach(function (item) {
      upsert(db, item, function (err, result) {
        if (err) console.dir(err);
      });
    });
  });
}
function upsert(db, doc, callback) {
  db.collection('flags').findOne({vid: doc.vid}, function (err, item, result) {
    if (item.vid != null) {
      if (!(item.tmx instanceof Date)) {
        item.tmx = new Date(item.tmx);
      }
      if (!(doc.tmx instanceof Date)) {
        doc.tmx = new Date(doc.tmx);
      }
      if (item.tmx < doc.tmx) {
        console.dir("Date validation");
        db.collection('flags').updateOne({vid: item.vid}, {
          $set: { "tmx": doc.tmx }
        }, {upsert: true}, function (err, result) {
          callback(err, result);
        });
        callback(err, result);
      } else {
        console.dir("older");
        callback(err, result);
      }
    } else {
      db.collection('flags').insertOne(doc, function (err, result) {
        callback(err, result);
      });
    }
  });
}
Edit:
The documents from the log.log file have this structure:
{
  vid: 2848,
  tmx: "2015-07-18T23:56:17.000Z"
}
{
  vid: 2848,
  tmx: "2015-07-19T00:00:17.000Z"
}
collection.find({vid: doc.vid}, function (err, item) {
  if (!item)                  // didn't find an item with vid: 2848 in the collection
    // insert doc into the collection
  else                        // found an item with vid: 2848
    if (item.tmx < doc.tmx)   // only update if doc.tmx is newer
      // update the collection with the most recent document
})
With @Aaron Dufour's help I got rid of the callback problem, thanks :)
But now the problem is that when the collection is already populated and I go looking for the newest documents in log.log, it starts from the oldest document and works up to the newest again :(
Your upsert is vulnerable to race conditions, and run calls it many times in parallel, so that is probably the issue. It is not clear exactly what doc will look like, so you might need slightly more complicated logic, but here's a version that uses Mongo's upsert to make things a bit safer:
function upsert(db, doc, callback) {
  // Write the doc, inserting it if no document with this vid exists yet
  db.collection('flags').update({vid: doc.vid}, {$set: doc}, {upsert: true}, function(err) {
    // Then bump tmx only where the stored value is strictly older
    db.collection('flags').update({vid: doc.vid, tmx: {$lt: doc.tmx}}, {$set: {tmx: doc.tmx}}, function(err) {
      callback();
    });
  });
}
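If the unconditional $set in the first update is what keeps resetting tmx to older values (which would match the follow-up edit above), a variant with $setOnInsert only writes the full document when it is first created. A sketch, assuming MongoDB 2.4+:

function upsert(db, doc, callback) {
  // Only write the full doc when the upsert actually inserts it
  db.collection('flags').update({vid: doc.vid}, {$setOnInsert: doc}, {upsert: true}, function (err) {
    if (err) return callback(err);
    // Then advance tmx only when the stored value is strictly older
    db.collection('flags').update(
      {vid: doc.vid, tmx: {$lt: doc.tmx}},
      {$set: {tmx: doc.tmx}},
      function (err, result) {
        callback(err, result);
      }
    );
  });
}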
I feel like a complete moron, but a friend and I are working on a project together and are having trouble getting the first route to return the items requested from the Etsy API while having getAllListings add the items to the database. If you can see something glaringly obvious that we are doing wrong, please let me know.
I should also mention that while the statement console.dir(body) does print the items to the terminal, it does not look like the contents are being passed to GET '/api/etsy/getListings'.
Thanks!
routes.js
// This should return a list of active listings from the user's shop
app.get('/api/etsy/getListings', function(req, res){
  bEtsy.getAllListings(req, res, function(err, body) {
  });
  res.json(req.body);
});
bEtsy.js
var standardCallback = function (err, status, body, headers, callback) {
  if (err) {
    console.log(err);
    return callback(err, null);
  }
  if (body) {
    console.dir(body);
    return callback(null, body); // this gives me an error
  }
}

var getAllListings = function(itemId, callback){
  var Item = mongoose.model('Item');
  var listingsParams = {
    include_private: true
  }
  etsy.auth().get(
    '/shops/' + etsy.shop + '/listings/active',
    listingsParams,
    function(err, status, body, headers){
      var newi = new Item({
        name: body.title,
        stock: body.count,
        owner: "00000000000000000000",
        etsy: { listingId: body.listing_id, stock: body.count }
      });
      newi.save(function(err){
        if (err) return handError(err);
      });
      standardCallback(err, status, body, headers, callback);
    }
  );
};
You are calling this function with three parameters when it only takes two:
bEtsy.getAllListings(req, res, function(err, body) {
});
On top of that, the first argument itemId is being passed the request object, and it is never actually used inside the function itself; the listingsParams variable is what feeds the API call instead.
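A sketch of how the route might look once the response is sent from inside the callback (the null first argument is just a placeholder for the unused itemId parameter):

app.get('/api/etsy/getListings', function (req, res) {
  // Send the response only after the Etsy call (and the save) has completed
  bEtsy.getAllListings(null, function (err, body) {
    if (err) return res.status(500).json({ error: String(err) });
    res.json(body);
  });
});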