NodeJS - forEach does not run over all elements and stops - javascript

I have stored 2775 URLs in my mLab database, and I then fetch each URL to get more information. I store all of the URLs in an array and pass it into a function for processing. However, the code only runs through about 1700 URLs, processes them, and then stops. Here is my code (sorry about the code formatting, this is my first time using Stack Overflow):
Product.find({}, (err, foundProducts) => {
    if (err) {
        console.log("err " + err);
    } else {
        foundProducts.forEach(function(foundProduct) {
            var updateProduct = service.updateTikiProduct(foundProduct.url);
        });
    }
});
updateTikiProduct: function(url) {
    const options = {
        url: url,
        json: true
    };
    request(options, function(err, res, body) {
        // SOME code to crawl data
        Product.findOneAndUpdate({
            url: options.url
        }, {
            $set: {
                name: name,
                brand: brand,
                store: store,
                location: location,
                base_category: categoryType,
                top_description: topDescription,
                feature_description: featureDescription
            }
        }, {
            upsert: true,
            new: true
        }, (err, createdProduct) => {
            if (err) {
                reject(err); // `reject` presumably comes from an enclosing Promise not shown here
            } else {
                var currentDate = new Date();
                if (!createdProduct.hasOwnProperty("price")) {
                    createdProduct.price.push({
                        current: currentPrice,
                        origin: originPrice
                    });
                    createdProduct.save();
                } else if (createdProduct.hasOwnProperty("price") &&
                    createdProduct.price[0].date.getDate() != currentDate.getDate()) {
                    createdProduct.price.push({
                        current: currentPrice,
                        origin: originPrice
                    });
                    createdProduct.save();
                    console.log("Update price");
                }
                counter++;
                console.log("url : " + options.url);
                console.log("Created product " + counter + " success!");
            }
        });
    });
}

I guess Mongo has limits on how many items it returns from the db; you should try findAll, or see https://stackoverflow.com/a/3705615/4187058
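If the limit really is memory, another option is to stream the documents instead of buffering all 2775 at once. A sketch, assuming Mongoose 4.5+ (where Query#cursor() and eachAsync() are available) and that service.updateTikiProduct returns a Promise; neither assumption is confirmed by the question:

// Process products one at a time straight off a MongoDB cursor,
// so the whole result set is never held in memory at once.
Product.find({})
    .cursor()
    .eachAsync(function(foundProduct) {
        // returning the Promise makes eachAsync wait before fetching the next doc
        return service.updateTikiProduct(foundProduct.url);
    })
    .then(function() {
        console.log("All products processed");
    })
    .catch(function(err) {
        console.log("err " + err);
    });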

I think your code is not processing all the elements because you are processing them all in parallel, which will stall once memory fills up.
foundProducts.forEach(function(foundProduct) {
    var updateProduct = service.updateTikiProduct(foundProduct.url);
});
What you should do is process them in series. You can use async/await for that; make the following change and it will work (note that the enclosing function must be declared async, and updateTikiProduct must return a Promise):
for (let foundProduct of foundProducts) {
    var updateProduct = await service.updateTikiProduct(foundProduct.url);
}
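Putting it together, here is a minimal sketch of both sides. The Promise wrapper around updateTikiProduct and the async callback are my additions (assuming request is the npm request package); the original code does not show them:

// updateTikiProduct must return a Promise so it can be awaited.
updateTikiProduct: function(url) {
    return new Promise(function(resolve, reject) {
        request({ url: url, json: true }, function(err, res, body) {
            if (err) return reject(err);
            // ... crawl the data and findOneAndUpdate as in the question ...
            resolve(body);
        });
    });
}

// The enclosing callback must be async for `await` to be legal inside it.
Product.find({}, async (err, foundProducts) => {
    if (err) return console.log("err " + err);
    for (let foundProduct of foundProducts) {
        await service.updateTikiProduct(foundProduct.url);
    }
    console.log("All " + foundProducts.length + " products processed");
});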

Related

Google Cloud Function frozen for over a minute

I have a strange thing happening when running a Google Cloud Function. The function starts and logs the user id and job id as expected. Then it calls the Firestore db and basically sits there for a minute, sometimes two, before it executes the first call... It was even timing out at 240 seconds.
const AWS = require('aws-sdk');
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp();

exports.run = functions.https.onCall((data, context) => {
    var id = data.id;
    var userid = data.uid;
    var retry = data.retry;
    var project;
    var db = admin.firestore();
    var storage = admin.storage();
    console.log("Starting Collect");
    console.log("UID: " + userid);
    console.log("id ID: " + id);
    // Times out on this call
    db.collection("users").doc(userid).collection("ids").doc(id).get().then(function(doc) {
        console.log("Loaded DB");
        project = doc.data();
        createexport();
    }).catch(function(err) {
        console.log(err);
        error('Loading DB Error, ' + err, false);
    });

    function createexport() {
        db.collection("exports").doc(id).set({
            status: 'Collecting',
            stage: 'Export Checker',
            percent: 0,
            id: id,
        }).then(function() {
            console.log("Creating Export");
            setdb();
        }).catch(function(err) {
            error("Error creating export in database :" + err, true)
        });
    }

    function setdb() {
        db.collection("users").doc(userid).collection("ids").doc(id).update({
            status: 'Analyzing Files',
            stage: 'Collecting'
        }).then(function() {
            getaudio();
        }).catch(function(err) {
            error("Error updating users id in database :" + err, true)
        });
    }

    function getaudio() {
        const from = userid + '/projects/' + project.originalproject.id + '/audio.' + project.originalproject.extension;
        const to = userid + '/' + id + '/audio.' + project.originalproject.extension;
        storage.bucket('---------').file(from).copy(storage.bucket('---------').file(to)).then(function() {
            console.log("Collecting files");
            copy2();
        }).catch(function(err) {
            error('Collecting Audio Error, ' + err, true);
        });
    }

    function copy2() {
        const from = userid + '/projects/' + project.originalproject.id + '/overlay.png';
        const to = userid + '/' + id + '/overlay.png';
        storage.bucket('--------.appspot.com').file(from).copy(storage.bucket('---------').file(to)).then(function() {
            updateexport();
        }).catch(function(err) {
            error('Collecting Overlay Error, ' + err, true);
        });
    }

    function updateexport() {
        db.collection("exports").doc(id).update({ status: "Waiting" }).then(function() {
            console.log("All files collected");
            return { status: 'Success' };
        }).catch(function(err) {
            error("Error creating export entry in database :" + err, true)
        });
    }

    function error(evt, evt2) {
        AWS.config.update({ region: "us-east-1" });
        var html;
        var sub = 'Error with id ' + id;
        console.log(evt);
        if (evt2) {
            db.collection('users').doc(userid).collection('ids').doc(id).update({
                status: 'Error'
            }).catch(function(err) {
                console.log(err);
            });
            db.collection("exports").doc(id).update({
                status: 'Error',
                stage: 'Collecting',
                error: evt,
            }).catch(function(err) {
                console.log(err);
            });
            html = `
Username: ${project.username} <br>
UserID: ${userid} <br>
Email: ${project.email} <br>
id: ${id}
`
        } else {
            html = `id: ${id}<br>
UserID: ${userid} <br>
Message: Error logged was: ${evt}
`
        }
        var params = {
            Destination: {
                ToAddresses: [
                    'errors@mail.com'
                ]
            },
            Message: {
                Body: {
                    Html: {
                        Charset: "UTF-8",
                        Data: html
                    },
                },
                Subject: {
                    Charset: 'UTF-8',
                    Data: sub
                }
            },
            Source: 'errors@mail.com',
            ReplyToAddresses: [
                project.email
            ],
        };
        var sendPromise = new AWS.SES({
            apiVersion: "2010-12-01",
            "accessKeyId": "-----------",
            "secretAccessKey": "------------------------",
            "region": "--------",
        }).sendEmail(params).promise();
        sendPromise.then(function(data) {
            return { data: data };
        }).catch(function(err) {
            return { err: err };
        });
    }
});
This seems way too long for a database call of only a few KB. I will attach the cloud log to show the time difference. After this initial slump it performs as expected.
Cloud log image
Anyone got any ideas as to why this could be happening? Many thanks...
Your function appears to hang because it isn't handling promises correctly. Also, it doesn't appear to be sending a specific response to the client app. The main point of callable functions is to send a response.
I suggest reviewing the documentation, where you will learn that callable functions are required to return a promise that resolves with an object to send to the client app, after all the async work is complete.
Minimally, it will take a form like this:
return db.collection("users").doc(userid).collection("files").doc(id).get().then(function(doc) {
console.log("Loaded DB");
project = doc.data();
return { "data": "to send to the client" };
}
Note that the promise chain is being returned, and the promise itself resolves to an object to send to the client.
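Applied to the question's code, that means chaining each step and returning the whole chain. The flattened chain below is my restructuring of the question's nested functions, a sketch rather than the original code:

exports.run = functions.https.onCall((data, context) => {
    const id = data.id;
    const userid = data.uid;
    const db = admin.firestore();
    let project;

    // Return the entire promise chain so the callable function
    // stays alive until all async work completes, then resolves
    // with the object to send back to the client.
    return db.collection("users").doc(userid).collection("ids").doc(id).get()
        .then((doc) => {
            project = doc.data();
            return db.collection("exports").doc(id).set({
                status: 'Collecting',
                stage: 'Export Checker',
                percent: 0,
                id: id,
            });
        })
        .then(() => db.collection("users").doc(userid).collection("ids").doc(id).update({
            status: 'Analyzing Files',
            stage: 'Collecting'
        }))
        // ... chain the storage copies and the final update the same way ...
        .then(() => ({ status: 'Success' }));
});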

nodejs mongoDB findOneAndUpdate() returns true even after database is updated

I am working on an Ionic 1 + Node.js + Angular application. My MongoDB findOneAndUpdate() call returns true on every call, even though the first call already updates the database.
nodejs:
app.post('/booking', function (req, res) {
    var collection = req.db.get('restaurant');
    var id = req.body.id;
    var status = req.body.status;
    collection.findOneAndUpdate({status: status, id: id}, {$set: {status: "booked"}}, function (e, doc) {
        console.log(id, status);
        if (e) {
            console.log(e);
        }
        else if (!doc) {
            res.send(false);
        }
        else {
            res.send(true);
        }
    });
});
controller.js
$scope.bookMe = function(id) {
    var Obj = {status: "yes", id: id};
    myService.booking(Obj).success(function(res) {
        console.log(Obj, "Checking status");
        console.log(res);
        if (res == true) {
            var alertPopup = $ionicPopup.alert({
                title: 'Booking Confirm',
                template: 'Thanks For Booking'
            });
        }
        else {
            var alertPopup = $ionicPopup.alert({
                title: 'Error',
                template: ' Not available'
            });
        }
    })
};
Where am I going wrong? My DB gets updated, but it always returns true on the next call.
The documentation for findOneAndUpdate says:
Finds a matching document, updates it according to the update arg, passing any options, and returns the found document (if any) to the callback. The query executes immediately if callback is passed.
So it's regular behavior you got a doc.
Note:
Since you are checking availability with status = "yes", it is better to hard-code it instead of taking it from the request query/data.
Change the response according to your requirements: res.send(true) / res.send(false).
The following code will work:
app.post('/booking', function (req, res) {
    var collection = req.db.get('restaurant');
    collection.findOneAndUpdate({
        status: "yes",
        _id: req.body.id
    }, {
        $set: {
            status: "booked"
        }
    }, function (err, result) {
        // Error handling
        if (err) {
            return res.status(500).send('Something broke!');
        }
        // Send response based on whether a document was actually found and updated
        if (result.hasOwnProperty("value") &&
            result.value !== null) {
            res.send(true);
        } else {
            res.send(false);
        }
    });
});

Getting records from DynamoDB recursively using Q.Promises

I am having trouble implementing Q promises with a recursive DynamoDB call; I'm new to Node.js and Q. Given DynamoDB's limits on how many results a single query can return, we need to run the query recursively to get all the required results.
Normally we use the query with a Q implementation, something like this:
function getDBResults() {
    var q = Q.defer();
    var params = {
        TableName: mytable,
        IndexName: 'mytable-index',
        KeyConditionExpression: 'id = :id',
        FilterExpression: 'deliveryTime between :startTime and :endTime',
        ExpressionAttributeValues: {
            ':startTime': {
                N: startTime.toString()
            },
            ":endTime": {
                N: endTime.toString()
            },
            ":id": {
                S: id.toString()
            }
        },
        Select: 'ALL_ATTRIBUTES',
        ScanIndexForward: false,
    };
    dynamodb.query(params, function(err, data) {
        if (err) {
            console.log('Dynamo fail ' + err);
            q.reject(err);
        } else {
            console.log('DATA' + data);
            var results = data.Items;
            q.resolve(results);
        }
    });
    return q.promise;
}
getDBResults().then(
    function(data) {
        // handle data
    },
    function(err) {
        // handle error
    }
);
Using a recursive query I can get the results, but I need those results in another function. Because of Node.js's async nature, the next function call happens before the recursive query function finishes its job. I want to get all the results from the recursive query function, pass them as a promise to a new function, and finally handle all the data there.
The recursive query for DynamoDB looks like this:
function getDBResults() {
    //var q = Q.defer();
    params = {
        TableName: mytable,
        IndexName: 'mytable-index',
        KeyConditionExpression: 'id = :id',
        FilterExpression: 'deliveryTime between :startTime and :endTime',
        ExpressionAttributeValues: {
            ':startTime': {
                N: startTime.toString()
            },
            ":endTime": {
                N: endTime.toString()
            },
            ":id": {
                S: id.toString()
            }
        },
        Select: 'ALL_ATTRIBUTES',
        ScanIndexForward: false,
    };
    dynamodb.query(params, onQueryCallBack);
}

function onQueryCallBack(err, data) {
    if (err) {
        console.log('Dynamo fail ' + err);
        console.error("Could not query db" + err);
    } else {
        if (typeof data.LastEvaluatedKey != "undefined") {
            console.log("query for more...");
            params.ExclusiveStartKey = data.LastEvaluatedKey;
            dynamodb.query(params, onQueryCallBack);
        }
        data.Items.forEach(function(item) {
            allResults.push(item);
        });
        //console.log('NO:OF Results:' + allResults.length);
        //q.resolve(tickets);
        //});
    }
}
Now I want to get the results as a promise so I can finally handle them in the next function, like this:
getDBResults().then(
    function(data) {
        // handle data
    },
    function(err) {
        // handle error
    }
);
Please help me with this. Sorry if it's a stupid question, but recursive calls with promises have become a hurdle for me.
Thanks
First of all, keep the promisified function you already have. Use it as a building block for the recursive solution, instead of trying to alter it!
It might need two small adjustments though:
function getDBResults(startKey) {
//                    ^^^^^^^^
    var q = Q.defer();
    var params = {
        ExclusiveStartKey: startKey,
//      ^^^^^^^^^^^^^^^^^^^^^^^^^^^
        … // rest as before
    };
    dynamodb.query(params, function(err, data) {
        if (err) {
            q.reject(err);
        } else {
            q.resolve(data);
//                    ^^^^ Not `data.Items`
        }
    });
    return q.promise;
}
Now we can use that to trivially implement the recursive solution:
function getRecursiveDBResults(key) {
    return getDBResults(key).then(function(data) {
        if (typeof data.LastEvaluatedKey != "undefined") {
            return getRecursiveDBResults(data.LastEvaluatedKey).then(function(items) {
                return data.Items.concat(items);
            });
        } else {
            return data.Items;
        }
    });
}
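Usage is then the same as before; a sketch (the first call passes no key, relying on the AWS SDK ignoring an undefined ExclusiveStartKey):

getRecursiveDBResults().then(
    function(items) {
        // all pages, concatenated into a single array
    },
    function(err) {
        // handle error
    }
);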
Here is how I solved the problem. Thanks Bergi for your solution as well.
function getDBResults() {
    var q = Q.defer();
    var dynamodb = core.getDynamoDB();
    params = {
        TableName: mytable,
        IndexName: 'mytable-index',
        KeyConditionExpression: 'id = :id',
        FilterExpression: 'deliveryTime between :startTime and :endTime',
        ExpressionAttributeValues: {
            ':startTime': {
                N: startTime.toString()
            },
            ":endTime": {
                N: endTime.toString()
            },
            ":id": {
                S: id.toString()
            }
        },
        Select: 'ALL_ATTRIBUTES',
        ScanIndexForward: false,
    };
    var results = [];
    var callback = function(err, data) {
        if (err) {
            console.log('Dynamo fail ' + err);
            q.reject(err);
            return; // don't touch `data` on error
        }
        // collect this page before deciding whether to fetch the next one
        data.Items.forEach(function(item) {
            results.push(item);
        });
        if (data.LastEvaluatedKey) {
            params.ExclusiveStartKey = data.LastEvaluatedKey;
            dynamodb.query(params, callback);
        } else {
            q.resolve(results);
        }
    };
    dynamodb.query(params, callback);
    return q.promise;
}

Using async.js for deep populating sails.js

I have a big issue with my function in Sails.js (v12). I'm trying to get all the user details, using async (v2.3) to deep-populate my user info:
UserController.js:
userDetail: function (req, res) {
    var currentUserID = authToken.getUserIDFromToken(req);
    async.auto({
        // Find the user
        user: function (cb) {
            User
                .findOne({ id: req.params.id })
                .populate('userFollowing')
                .populate('userFollower')
                .populate('trips', { sort: 'createdAt DESC' })
                .exec(function (err, foundedUser) {
                    if (err) {
                        return res.negotiate(err);
                    }
                    if (!foundedUser) {
                        return res.badRequest();
                    }
                    // console.log('foundedUser :', foundedUser);
                    cb(null, foundedUser);
                });
        },
        // Find me
        me: function (cb) {
            User
                .findOne({ id: currentUserID })
                .populate('myLikedTrips')
                .populate('userFollowing')
                .exec(function (err, user) {
                    var likedTripIDs = _.pluck(user.myLikedTrips, 'id');
                    var followingUserIDs = _.pluck(user.userFollowing, 'id');
                    cb(null, { likedTripIDs, followingUserIDs });
                });
        },
        populatedTrip: ['user', function (results, cb) {
            Trip.find({ id: _.pluck(results.user.trips, 'id') })
                .populate('comments')
                .populate('likes')
                .exec(function (err, tripsResults) {
                    if (err) {
                        return res.negotiate(err);
                    }
                    if (!tripsResults) {
                        return res.badRequest();
                    }
                    cb(null, _.indexBy(tripsResults, 'id'));
                });
        }],
        isLiked: ['populatedTrip', 'me', 'user', function (results, cb) {
            var me = results.me;
            async.map(results.user.trips, function (trip, callback) {
                trip = results.populatedTrip[trip.id];
                if (_.contains(me.likedTripIDs, trip.id)) {
                    trip.hasLiked = true;
                } else {
                    trip.hasLiked = false;
                }
                callback(null, trip);
            }, function (err, isLikedTrip) {
                if (err) {
                    return res.negotiate(err);
                }
                cb(null, isLikedTrip);
            });
        }]
    },
    function finish(err, data) {
        if (err) {
            console.log('err = ', err);
            return res.serverError(err);
        }
        var userFinal = data.user;
        //userFinal.trips = data.isLiked;
        userFinal.trips = "test";
        return res.json(userFinal);
    });
},
I tried almost everything to fix this but nothing is working...
I am able to get my array of trips (data.isLiked), but I couldn't set it on my userFinal trips. I even tried to set a plain string value on userFinal.trips:
JSON response:
{
    "trips": [], // <-- my pb is here !!
    "userFollower": [
        {
            "user": "5777fce1eeef472a1d69bafb",
            "follower": "57e44a8997974abc646b29ca",
            "id": "57efa5cf605b94666aca0f11"
        }
    ],
    "userFollowing": [
        {
            "user": "57e44a8997974abc646b29ca",
            "follower": "5777fce1eeef472a1d69bafb",
            "id": "5882099b9c0c9543706d74f6"
        }
    ],
    "email": "test2@test.com",
    "userName": "dany",
    "isPrivate": false,
    "bio": "Hello",
    "id": "5777fce1eeef472a1d69bafb"
}
Question
How do I get my array of trips (isLiked) copied onto my user's trips array?
Why is my result not what I am expecting?
Thank you for your answers.
Use .toJSON() before overwriting any association on the model.
Otherwise the default toJSON implementation overrides any changes made to the model's associated data.
var userFinal = data.user.toJSON(); // Use of toJSON
userFinal.trips = data.isLiked;
return res.json(userFinal);
On another note, use the native JS .map or _.map in place of async.map, as there is no asynchronous operation inside the iterator function. Otherwise you may face a RangeError: Maximum call stack size exceeded.
Also, it might be better to return any response from the final callback only (remove res.negotiate and res.badRequest from async.auto's task functions and pass the error to the callback instead); the response methods are terminal, so they should be called exactly once.
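For example, the isLiked task could drop async.map entirely; a sketch reusing the names from the question's code, with the behavior otherwise unchanged:

isLiked: ['populatedTrip', 'me', 'user', function (results, cb) {
    var me = results.me;
    // plain synchronous map: no async.map needed, and errors are
    // passed to the callback instead of responding from inside a task
    var isLikedTrips = _.map(results.user.trips, function (trip) {
        trip = results.populatedTrip[trip.id];
        trip.hasLiked = _.contains(me.likedTripIDs, trip.id);
        return trip;
    });
    cb(null, isLikedTrips);
}]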

angular promises and nodejs http get response

I would like to use AngularJS promises to fill a grid with data. I'd like to load the data "row by row", as soon as the Node.js server, which uses the "mssql" module with "stream" enabled, sends each single row from the DB back to the client.
On the client side I use these functions:
function asyncGreet() {
    var deferred = $q.defer();
    var _url = 'http://localhost:1212/test';
    $http.get(_url).
        then(function(result) {
            deferred.resolve(result);
        }, function(error) {
            deferred.reject(error);
        }, function(value) {
            deferred.notify(value); //<<-- In "value" I would like to get every single row
        });
    return deferred.promise;
}

$scope.btnTest = function () {
    var promise = asyncGreet();
    promise.then(function(res) {
        console.log('Success: ' + res.data + "\n");
    }, function(reason) {
        console.log('Failed: ' + reason);
    }, function(update) {
        console.log('Got notification: ' + update); //<<--
    });
};
On the Node.js server, these:
app.get('/test', function (req, res) {
    // sql for test
    var _query = 'select top 50 * from tb_test';
    var sql = require('mssql');
    var connection;
    var config = {
        user: 'testUser',
        password: '12345',
        server: 'localhost\\test',
        database: 'testDB',
        stream: true
    };
    connection = new sql.Connection(config, function (err) {
        var request = new sql.Request(connection);
        request.query(_query);
        request.on('recordset', function(columns) {
            // Emitted once for each recordset in a query
            //res.send(columns);
        });
        request.on('row', function(row) {
            res.write(JSON.stringify(row)); //<<-- I would like to intercept this event on the client side
            // and get the result in my angularJS function on deferred.notify
        });
        request.on('error', function(err) {
            // May be emitted multiple times
            console.error(err)
        });
        request.on('done', function(returnValue) {
            // Always emitted as the last one
            res.end('DONE');
        });
    });
});
Can anyone help me with this?
Thanks!
I got it done using socket.io :) (as far as I can tell, $http never invokes the promise's notify callback with partial response chunks, so a socket was the simplest way to push rows as they arrive).
On the AngularJS side:
// count the rows, for test only
$scope.count = 0;
$scope.prova = function () {
    mySocket.emit('getTableByRow', {});
    mySocket.on('resRow', function (data) {
        if (data.event == 'ROW') {
            $scope.count += 1;
        } else {
            $scope.count += " !!DONE!! ";
        }
    });
};
On the Node.js side:
[ ... connection with DB ... ]

io.on('connection', function (socket) {
    socket.on('getTableByRow', function (data) {
        _getTableByRow(socket, data);
    });
});
_getTableByRow function:
var _getTableByRow = function (socket, data) {
    var _query = 'select top 50 * from tb_test';
    request.query(_query);
    request.on('row', function(row) {
        // return only the ids, for test
        socket.emit('resRow', {event: 'ROW', data: row.id.toString()});
    });
    request.on('done', function(returnValue) {
        socket.emit('resRow', {event: 'DONE'});
    });
    request.on('recordset', function(columns) {
        console.log(columns);
    });
    request.on('error', function(err) {
        socket.emit('resRow', {event: 'ERROR', data: err});
    });
}
This way, as soon as a row is read from the DB, it is immediately sent to the client :)
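One caveat with this approach (my observation, not part of the original answer): $scope.prova registers a new 'resRow' handler on every call, so repeated clicks would stack duplicate listeners. A guard along these lines avoids that, assuming removeAllListeners is available on the socket.io client emitter:

$scope.prova = function () {
    // drop any handler left over from a previous call
    mySocket.removeAllListeners('resRow');
    mySocket.on('resRow', function (data) {
        if (data.event == 'ROW') {
            $scope.count += 1;
        } else {
            $scope.count += " !!DONE!! ";
        }
    });
    mySocket.emit('getTableByRow', {});
};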
