Why won't my usernametoid function return the actual id? I'm trying to resolve the promise with the result of the user lookup. In this case, I only want to send the userdata's _id attribute, but it doesn't seem to work.
console.log(userdata._id); // works
return resolve(userdata._id); // wont work.
Output of the userdata variable:
{
cash: 7300002,
bank: 0,
xp: 0,
rank: 1,
points: 1,
location: 1,
health: 100,
protection: 1,
attack: 1,
family: '0',
password: 'jKa4qC7pRCgE5jvzD9Vv1pRUNxFlQEM7Jpq/IoJ/sUWOAv1Wx1RI/j/Vu6Zf8zyNkCFcg3QBtdfAC+lmPS8KIA==',
profileImageURL: 'modules/users/client/img/profile/default.png',
roles: [ 'user' ],
created: Sat Aug 27 2016 12:33:55 GMT-0400 (EDT),
__v: 0,
username: 'signature',
provider: 'local',
salt: '4ySlrr9ggESxBB3dR5bx4Q==',
_id: 57c1c0f3b6b20c011242bf22 }
When I do `return resolve(userdata._id)` I get this error:
/server/factory/user_factory.js:52
return resolve(userdata._id);
^
TypeError: Cannot read property '_id' of null
node.js call:
var articles = require('../controllers/articles.server.controller'),
path = require('path'),
mongoose = require('mongoose'),
Article = mongoose.model('Article'),
Users = mongoose.model('User'),
errorHandler = require(path.resolve('./modules/core/server/controllers/errors.server.controller'));
var userFunc = require('../factory/user_factory.js');
app.post('/api/kill', function (req, res) {
console.log("starting");
var username = "signature";//req.query.username;
var result = ["test service"];
var data = req.user;
userFunc.usernametoid(username).then( function (otherplayerid) {
if (!(otherplayerid)) {
console.log("other player is acually " + otherplayerid);
result.push("denne brukeren finnes ikke! " + otherplayerid);
} else {
userFunc.usernametoid(otherplayerid).then( function (otherplayer) {
if (data.location != otherplayer.location) {
result.push("Du er ikke i samme lokasjon som " + username);
result.push(data.location + " vs " + otherplayer.location);
} else {
userFunc.addCash(req.user._id,100000);
result.push("starter lokasjonisering");
}
});
}
res.json(result);
});
});
user factory:
var articles = require('../controllers/articles.server.controller'),
path = require('path'),
mongoose = require('mongoose'),
Article = mongoose.model('Article'),
Users = mongoose.model('User'),
errorHandler = require(path.resolve('./modules/core/server/controllers/errors.server.controller'));
exports.usernametoid = usernametoid;
function usernametoid(id) {
return new Promise( function (resolve, reject) {
var query = Users.findOne( { username : id } );
// var query = Users.find({_id:id});
query.exec(function(err, userdata) {
if (err){
return reject({err : 'Error while getting user info'});
}
console.log(userdata._id);
return resolve(userdata);
});
}, function (){
return reject({err : 'error while fetching cash'});
});
}
That's because you are not handling the case where the fetched user is null.
You need to do something like this:
var Users = require('../models/users-model.js');

function usernametoid(id) {
  return new Promise(function (resolve, reject) {
    Users.findOne({ username: id })
      .then(function (user) {
        // If you use lodash you can do _.isNull(user)
        if (user == null) {
          return reject({ error: 'User not found' });
        }
        // findOne has already resolved with the document, no further exec() is needed
        return resolve(user);
      })
      .catch(function () {
        return reject({ error: 'Error while executing query' });
      });
  });
}
I also don't really get why you are importing the Users model like that; I don't think Node will be able to resolve it that way.
Also, you should require mongoose in your server.js.
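For illustration only (the file paths and connection string here are assumptions, not taken from your project), the usual wiring is to register the schema once at startup and then retrieve the model through mongoose wherever it is needed:
var mongoose = require('mongoose');
// server.js (or another bootstrap file): connect and register schemas once
mongoose.connect('mongodb://localhost/mydb'); // hypothetical connection string
require('./modules/users/server/models/user.server.model'); // this file calls mongoose.model('User', UserSchema)

// in any other file: fetch the already-registered model
var Users = mongoose.model('User');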
To catch the rejection you need a .catch handler, for example:
UserFactory.usernametoid(id)
  .then(function (user) {
    console.log('Got user ' + user._id);
  })
  .catch(function (response) {
    console.log('error ' + response.error);
  });
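A side note based on the route code above: the second lookup calls usernametoid(otherplayerid), passing an _id into a function that queries by username, so findOne will come back with null there as well. A minimal sketch (idtouser is a hypothetical name, reusing the findById variant that is already commented out in the factory) of a separate id-based lookup the route could call instead:
exports.idtouser = idtouser;

function idtouser(id) {
  return new Promise(function (resolve, reject) {
    Users.findById(id).exec(function (err, userdata) {
      if (err) {
        return reject({ err: 'Error while getting user info' });
      }
      if (!userdata) {
        return reject({ err: 'User not found' });
      }
      return resolve(userdata);
    });
  });
}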
I am trying to automatically verify users' emails when signing them up in my web app, which is connected to Cognito.
I am using the following method to try to do this:
adminUpdateUserAttributes
I call it immediately after I sign a user up. The user is signed up fine, but the user's email and phone are not marked verified in the console.
What am I doing wrong in my code?
Here is my code:
$('#signUp').submit(function(event) {
var poolData = {
UserPoolId : '', // your user pool id here
ClientId : '' // your app client id here
};
var userPool = new AmazonCognitoIdentity.CognitoUserPool(poolData);
var attributeList = [];
var dataEmail = {
Name: 'email',
Value: $('input[name=email]').val(),
};
var dataPhoneNumber = {
Name: 'phone_number',
Value: $('input[name=telephone]').val(),
};
var attributeEmail = new AmazonCognitoIdentity.CognitoUserAttribute(dataEmail);
var attributePhoneNumber = new AmazonCognitoIdentity.CognitoUserAttribute(dataPhoneNumber);
attributeList.push(attributeEmail);
attributeList.push(attributePhoneNumber);
console.log(attributeList)
userPool.signUp($('input[name=usernameSignUp]').val(), $('input[name=passwordSignUp]').val(), attributeList, null, function(err, result) {
if (err) {
alert(err.message || JSON.stringify(err));
return;
}
console.log('signed up')
var cognitoUser = result.user;
console.log('user name is ' + cognitoUser.getUsername());
updateUserAttribute($('input[name=usernameSignUp]').val())
});
// updates the new user to have a verified phone and email using adminUpdateUserAttributes() -- this isn't working right now
var cognitoidentityserviceprovider = new AWSCognito.CognitoIdentityServiceProvider();
cognitoidentityserviceprovider.adminUpdateUserAttributes({
UserAttributes: [{
Name: 'phone_number_verified',
Value: 'true'
}, {
Name: 'email_verified',
Value: 'true'
}
],
UserPoolId: '',
Username: $('input[name=usernameSignUp]').val()
}, function(err) {
if (err) {
console.log(err, err.stack)
} else {
console.log('Success!')
}
})
});
For your use case you should enable auto-verification when signing up.
This is done in a pre sign-up Lambda trigger:
https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-lambda-pre-sign-up.html
You should create a pre sign-up Lambda similar to this:
exports.handler = (event, context, callback) => {
event.response.autoConfirmUser = true;
event.response.autoVerifyEmail = true;
event.response.autoVerifyPhone = true;
callback(null, event);
};
I have a strange thing happening when running a Google Cloud Function. The function starts and logs the user id and job id as expected. Then it calls the Firestore db and basically sits there for 1 minute, sometimes 2, before it executes the first call... It was even timing out at 240 seconds.
const AWS = require('aws-sdk');
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp();
exports.run = functions.https.onCall((data, context) => {
var id = data.id;
var userid = data.uid;
var retry = data.retry;
var project;
var db = admin.firestore();
var storage = admin.storage();
console.log("Starting Collect");
console.log("UID: " + userid);
console.log("id ID: " + id);
// Times out on this call
db.collection("users").doc(userid).collection("ids").doc(id).get().then(function(doc) {
console.log("Loaded DB");
project = doc.data();
createexport();
}).catch(function(err) {
console.log(err);
error('Loading DB Error, ' + err, false);
});
function createexport() {
db.collection("exports").doc(id).set({
status: 'Collecting',
stage: 'Export Checker',
percent: 0,
id: id,
}).then(function() {
console.log("Creating Export");
setdb();
}).catch(function(err) {
error("Error creating export in database :" + err, true)
});
}
function setdb() {
db.collection("users").doc(userid).collection("ids").doc(id).update({
status: 'Analyzing Files',
stage: 'Collecting'
}).then(function() {
getaudio();
}).catch(function(err) {
error("Error updating users id in database :" + err, true)
});
}
function getaudio() {
const from = userid + '/projects/' + project.originalproject.id + '/audio.' + project.originalproject.extension;
const to = userid + '/' + id + '/audio.' + project.originalproject.extension;
storage.bucket('---------').file(from).copy(storage.bucket('---------').file(to)).then(function() {
console.log("Collecting files");
copy2();
}).catch(function(err) {
error('Collecting Audio Error, ' + err, true);
});
}
function copy2() {
const from = userid + '/projects/' + project.originalproject.id + '/overlay.png';
const to = userid + '/' + id + '/overlay.png';
storage.bucket('--------.appspot.com').file(from).copy(storage.bucket('---------').file(to)).then(function() {
updateexport();
}).catch(function(err) {
error('Collecting Overlay Error, ' + err, true);
});
}
function updateexport() {
db.collection("exports").doc(id).update({ status: "Waiting" }).then(function() {
console.log("All files collected");
return { status: 'Success' };
}).catch(function(err) {
error("Error creating export entry in database :" + err, true)
});
}
function error(evt, evt2) {
AWS.config.update({ region: "us-east-1" });
var html;
var sub = 'Error with id ' + id;
console.log(evt);
if (evt2) {
db.collection('users').doc(userid).collection('ids').doc(id).update({
status: 'Error'
}).catch(function(err) {
console.log(err);
});
db.collection("exports").doc(id).update({
status: 'Error',
stage: 'Collecting',
error: evt,
}).catch(function(err) {
console.log(err);
});
html = `
Username: ${project.username} <br>
UserID: ${userid} <br>
Email: ${project.email} <br>
id: ${id}
`
} else {
html = `id: ${id}<br>
UserID: ${userid} <br>
Message: Error logged was: ${evt}
`
}
var params = {
Destination: {
ToAddresses: [
'errors#mail.com'
]
},
Message: {
Body: {
Html: {
Charset: "UTF-8",
Data: html
},
},
Subject: {
Charset: 'UTF-8',
Data: sub
}
},
Source: 'errors#mail.com',
ReplyToAddresses: [
project.email
],
};
var sendPromise = new AWS.SES({
apiVersion: "2010-12-01",
"accessKeyId": "-----------",
"secretAccessKey": "------------------------",
"region": "--------",
}).sendEmail(params).promise();
sendPromise.then(function(data) {
return { data: data };
}).catch(function(err) {
return { err: err };
});
}
});
That seems way too long for a database call that only returns a few KB. I will attach the cloud log to show the time difference. After this initial slump it then performs as expected.
Cloud log image
Anyone got any ideas as to why this could be happening? Many thanks...
Your function is appearing to hang because it isn't handling promises correctly. Also, it doesn't appear to be sending a specific response to the client app. The main point of callable functions is to send a response.
I suggest reviewing the documentation, where you will learn that callable functions are required to return a promise that resolves with an object to send to the client app, after all the async work is complete.
Minimally, it will take a form like this:
return db.collection("users").doc(userid).collection("files").doc(id).get().then(function(doc) {
console.log("Loaded DB");
project = doc.data();
return { "data": "to send to the client" };
}
Note that the promise chain is being returned, and the promise itself resolves to an object to send to the client.
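To make that concrete, here is a rough sketch (same collection names as the question, steps heavily simplified) of how the handler could chain its steps so the callable function returns a single promise:
exports.run = functions.https.onCall((data, context) => {
  const db = admin.firestore();
  const id = data.id;
  const userid = data.uid;
  let project;

  // return the whole chain so Cloud Functions waits for it to finish
  return db.collection("users").doc(userid).collection("ids").doc(id).get()
    .then((doc) => {
      project = doc.data(); // data used by later steps in the real function
      // each step returns its promise so the chain stays connected
      return db.collection("exports").doc(id).set({ status: 'Collecting', id: id });
    })
    .then(() => {
      return db.collection("users").doc(userid).collection("ids").doc(id)
        .update({ status: 'Analyzing Files' });
    })
    .then(() => {
      // the resolved object is what gets sent back to the client app
      return { status: 'Success' };
    });
});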
I'm a beginner on both Stack Overflow and in NodeJS/Mongoose, so I'm sorry if I make a mistake or break a rule. Thank you in advance.
I need a function that returns all the nearby products around my location; the location is obtained through the user whose "id" comes in the request as "user".
I tried writing this function, where finalProducts should collect all the products found by the search, but when I try to add finalProducts to the response body it comes back empty.
The error is the following:
throw er; // Unhandled 'error' event
^
Error: Can't set headers after they are sent.
at ServerResponse.setHeader (_http_outgoing.js:371:11)
at ServerResponse.header (/home/frangaliana/Escritorio/client-thingy/node_modules/express/lib/response.js:730:10)
at ServerResponse.send (/home/frangaliana/Escritorio/client-thingy/node_modules/express/lib/response.js:170:12)
at ServerResponse.json (/home/frangaliana/Escritorio/client-thingy/node_modules/express/lib/response.js:256:15)
at ServerResponse.send (/home/frangaliana/Escritorio/client-thingy/node_modules/express/lib/response.js:158:21)
at /home/frangaliana/Escritorio/client-thingy/controllers/product.js:200:41
at /home/frangaliana/Escritorio/client-thingy/node_modules/mongoose/lib/query.js:2916:18
at newTickHandler (/home/frangaliana/Escritorio/client-thingy/node_modules/mpromise/lib/promise.js:234:18)
at _combinedTickCallback (internal/process/next_tick.js:73:7)
at process._tickCallback (internal/process/next_tick.js:104:9)
I'm showing the code and the models to help understand the problem:
Function that searches for nearby products in the controller product.js:
function getNearbyProducts(req, res) {
let userId = req.user;
let point;
var geoOptions = {
spherical: true,
maxDistance: 500
}
User.findById(userId, {password:0})
.populate('location','coordinates')
.exec(function (err, result) {
if (err) console.log('No se ha podido encontrar la localización')
point = {
type: "Point",
coordinates: [parseFloat(result.location.coordinates[0]),parseFloat(result.location.coordinates[1])]
}
Location.geoNear(point,geoOptions, function(err, resultLocations) {
for(var i = resultLocations.length - 1 ; i >= 0 ; i--){
var nearLocation = resultLocations[i].obj.id
var queryUser = {"location": nearLocation}
User.find(queryUser)
.exec(function (err, resultUsers) {
for(var j = resultUsers.length - 1 ; j >= 0; j--) {
if(resultUsers[j] !== undefined){
var exactUser = resultUsers[j].id
var limit;
if(req.query.limit) {
limit = parseInt(req.query.limit)
if(isNaN(limit)){
return next(new Error())
}
} else {
limit = 10;
}
var queryProduct = {"user": exactUser}
if(req.query.before) {
queryProduct = {"user": exactUser, "_id" : {$lt: req.query.before}};
}else if (req.query.after) {
queryProduct = {"user": exactUser, "_id" : {$gt: req.query.after}};
}
Product.find(queryProduct)
.limit(limit)
.populate('user')
.exec(function (err, resultProducts) {
var finalProducts = [];
for(var k = resultProducts.length - 1 ; k >= 0; k--){
if(resultProducts[k] !== undefined){
finalProducts.push(resultProducts[k])
}
}
if(finalProducts.length > 0){
if(req.query.before){
products.reverse();
}
var finalResult = {
data: finalProducts,
paging: {
cursors: {
before: finalProducts[0].id,
after: finalProducts[finalProducts.length-1].id
},
previous: 'localhost:3000/api/products?before='+finalProducts[0].id,
next: 'localhost:3000/api/products?after='+finalProducts[finalProducts.length-1].id,
},
links: {
self: 'localhost:3000/api/products',
users: 'localhost:3000/api/users'
}
}
} else {
var finalResult = {
data: finalProducts,
paging: {
cursors: {
before:undefined,
after:undefined
},
previous: undefined,
next: undefined
},
links: {
self: 'localhost:3000/api/products',
users: 'localhost:3000/api/users'
}
}
}
res.status(200).send(finalResult);
})
}
}
})
}
})
})
})
Models:
user.js
'use strict';
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
const bcrypt = require('bcrypt-nodejs');
const Location = require('../models/location');
const crypto = require('crypto');
const UserSchema = new Schema({
email: {
type: String,
lowercase: true,
//Add unique: true so that each email can only be registered once
},
name: String,
password: String,
userimg: String,
gender: Boolean,
birthdate: Date,
signUpDate: {
type: Date,
default: Date.now(),
},
location:{
type: Schema.ObjectId,
ref: 'Location'
}
});
UserSchema.pre('save', function(next) {
let user = this;
if (!user.isModified('password')) return next();
bcrypt.genSalt(10, (err, salt) => {
if (err) return next(err);
bcrypt.hash(user.password, salt, null, (err, hash) => {
if (err) return next(err);
user.password = hash;
next();
});
});
});
UserSchema.methods.gravatar = function() {
if(!this.email) return `https://gravatar.com/avatar/?s=200&d=retro`
const md5 = crypto.createHash('md5').update(this.email).digest('hex')
return `https://gravatar.com/avatar/${md5}?s=200&d=retro`
}
module.exports = mongoose.model('User', UserSchema);
product.js
'use strict'
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
const User = require('../models/user');
var max = [5 , 'The value of ({VALUE}) exceeds the limit ({MAX}). ']
var min = [1 , 'The value of ({VALUE}) is beneath the limit ({MIN}). ']
const ProductSchema = Schema({
title: String,
price: {
type: Number,
default: 0
},
user: {
type: Schema.ObjectId,
ref: 'User'
},
categoryproduct: {
type: String,
enum:['Moda y Accesorios', 'Motor', 'Electrónica', 'Deporte', 'Libros, Música y Películas', 'Electrodomésticos', 'Servicios', 'Muebles y Decoración', 'Otros'],
default: 'Electrónica'
},
description: {
type: String,
default: 'Objeto para vender'
},
visits: {
type: Number,
default: 0
},
status: {
type: Boolean,
default: false
},
publicationdate: {
type: Date,
default: Date.now()
},
salesrating: {
type: Number,
max: max,
min: min,
default: 1
},
salescomment: {
type: String,
default: 'Perfecto'
}
})
module.exports = mongoose.model('Product', ProductSchema);
location.js
'use strict';
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
const LocationSchema = new Schema({
type: {
type: String,
default: "Point"
},
coordinates: {
type: [Number],
index: "2dsphere",
default: [38.280153, -0.712901]
}
})
module.exports = mongoose.model('Location', LocationSchema);
I hope this question can be resolved, or at least that someone can explain to me why it doesn't work. Many thanks again!
EDIT (because I have since fixed the problem):
Thanks to skirtle, who gave me the idea for the solution.
I wasn't controlling the asynchronous Mongoose searches, which generated multiple responses. As he suggested, I started using Promises to keep track of them; whenever a step gave me an array of ids (whether from User, Location or Product) I handled them together rather than one by one.
I also remembered that a Mongoose query can take a {$in: [array]} filter that returns all results matching any of those ids, so the code ended up looking like this:
function getNearbyProducts(req, res) {
var userId = req.user;
var promiseUser = User.findById(userId, {password: 0})
.populate('location')
.exec()
promiseUser
.then(function(result){
return result.location;
})
.then( function(resultUser){
return Location.geoNear(
{type:'Point', coordinates: [parseFloat(resultUser.coordinates[0]),parseFloat(resultUser.coordinates[1])]},
{maxDistance:100000, spherical: true}
).then(function(locsGeoNear){
var resultGeoNear = []
for(var i = locsGeoNear.length - 1; i >= 0; i--){
if(resultUser.id != locsGeoNear[i].obj.id){
resultGeoNear.push(locsGeoNear[i].obj.id)
}
}
return resultGeoNear
})
})
.then(function(resultSearchLocs){
var queryUsersByLocation = {'location': {$in: resultSearchLocs}}
return User.find(queryUsersByLocation, {password: 0})
.exec()
.then(function(usersSearchs){
var resultUsers = []
for(var i = usersSearchs.length - 1; i >= 0; i--){
if(userId != usersSearchs[i].id){
resultUsers.push(usersSearchs[i].id)
}
}
return resultUsers
})
})
.then(function(resultSearchUsers){
var limit;
if(req.query.limit) {
limit = parseInt(req.query.limit)
if(isNaN(limit)){
return next(new Error())
}
} else {
limit = 10;
}
var queryProductsByUsers = {'user': {$in: resultSearchUsers}}
//To get the page before a given id
if (req.query.before) {
queryProductsByUsers = {'user': {$in: resultSearchUsers}, "_id" : {$lt: req.query.before}};
//To get the page after a given id
} else if (req.query.after) {
queryProductsByUsers = {'user': {$in: resultSearchUsers}, "_id": {$gt: req.query.after}};
}
return Product.find(queryProductsByUsers)
.limit(limit)
.exec()
})
.then(function(resultSearchProducts){
if(resultSearchProducts.length > 0){
if(req.query.before){
resultSearchProducts.reverse();
}
var resultFinal = {
data: resultSearchProducts,
paging: {
cursors: {
before: resultSearchProducts[0].id,
after: resultSearchProducts[resultSearchProducts.length-1].id
},
previous: 'localhost:3000/api/products?before='+resultSearchProducts[0].id,
next: 'localhost:3000/api/products?after='+resultSearchProducts[resultSearchProducts.length-1].id,
},
links: {
self: 'localhost:3000/api/products',
users: 'localhost:3000/api/users'
}
}
} else {
var resultFinal = {
data: resultSearchProducts,
paging: {
cursors: {
before:undefined,
after:undefined
},
previous: undefined,
next: undefined
},
links: {
self: 'localhost:3000/api/products',
users: 'localhost:3000/api/users'
}
}
}
res.setHeader('Content-Type', 'application/json');
res.status(200).send(resultFinal);
})
.catch(function(err){
console.log(`${err}`)
})
}
Many thanks to the community but above all to skirtle who gave me the keys to reach my solution.
Greetings!
If you add the following logging before you call send:
console.log('sending response');
res.status(200).send(finalResult);
I believe you'll find that you're calling send multiple times on the same request, which isn't allowed. When you call send the first time the request/response is over and any attempt to send more data will result in an error.
I'm struggling to follow the code but I believe the cause is all that looping you're doing. You need to wait until all your DB queries are done and you've gathered your final data before you call send.
You may find Promises a useful way to reduce the complexity in products.js, but even if you don't fancy using them I highly recommend a bit of refactoring to make that file intelligible. As a general rule, the Pyramid of Doom is a sign that you've got problems: https://en.wikipedia.org/wiki/Pyramid_of_doom_(programming)
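As a rough illustration of that (using the question's model names but not its exact logic), the inner queries can be gathered with Promise.all so that send() runs exactly once, after everything has finished:
Location.geoNear(point, geoOptions, function (err, resultLocations) {
  if (err) return res.status(500).send({ error: err.message });

  // one users query per nearby location, collected into a single promise
  var userQueries = resultLocations.map(function (loc) {
    return User.find({ location: loc.obj.id }).exec();
  });

  Promise.all(userQueries)
    .then(function (usersPerLocation) {
      var userIds = [].concat.apply([], usersPerLocation).map(function (u) { return u.id; });
      return Product.find({ user: { $in: userIds } }).populate('user').exec();
    })
    .then(function (products) {
      // exactly one response, sent only after every query has resolved
      res.status(200).send({ data: products });
    })
    .catch(function (err) {
      res.status(500).send({ error: err.message });
    });
});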
How can I access a variable or set of variables inside app.get and make them accessible to the whole project? I am trying to capture the information sent from the SMS text, organize it into the "messageData" variable, then send it to the "MessageSchema", where it is then stored in the Mongo database. My issue is that I believe none of the variables (message, number, answer) are actually being found. How can I fix that?
app.js
app.get('/smssent', function(req, res, callback) {
var message = req.query.Body;
var number = req.query.From;
var twilioNumber = req.query.To;
var context = null;
var index = 0;
var contextIndex = 0;
contexts.forEach(function(value)
{
console.log(value.from);
if (value.from == number)
{
context = value.context;
contextIndex = index;
}
index = index + 1;
});
console.log('Received message from ' + number + ' saying \'' + message + '\'');
var conversation = new ConversationV1({
username: '',
password: '',
version_date: ConversationV1.VERSION_DATE_2016_09_20
});
//console.log(JSON.stringify(context));
//console.log(contexts.length);
conversation.message({
input: { text: message },
workspace_id: '',
context: context
}, function(err, response) {
if (err) {
console.error(err);
} else {
var answer = response.output.text[0];
if (context == null) {
contexts.push({'from': number, 'context': response.context});
} else {
contexts[contextIndex].context = response.context;
}
var intent = response.intents[0].intent;
console.log(intent);
if (intent == "done") {
//contexts.splice(contexts.indexOf({'from': number, 'context': response.context}),1);
contexts.splice(contextIndex,1);
// Call REST API here (order pizza, etc.)
}
var client = require('twilio')(
'',
''
);
client.messages.create({
from: twilioNumber,
to: number,
body: response.output.text[0]
}, function(err, message) {
if(err) {
console.error(err.message);
}
});
}
});
res.send('');
});
(ABOVE) This is what receives the SMS text and coordinates Twilio, Node, and IBM Bluemix.
index.js
router.post('/app', function(req, res, next) {
if (req.body.number &&
req.body.message &&
req.body.answer
) {
// create object with form input
var messageData = {
number: req.body.number,
message: req.body.message,
answer: req.body.answer
};
// use schema's `create` method to insert document into Mongo
Message.create(messageData, function (error, message) {
if (error) {
return next(error);
} else {
return res.redirect('/');
}
});
} else {
var err = new Error('All fields required.');
err.status = 400;
return next(err);
}
});
(ABOVE) This is the code that organizes and prepares everything for the "MessageSchema".
message.js
var mongoose = require('mongoose');
var bcrypt = require('bcryptjs');
var MessageSchema = new mongoose.Schema({
number: {
type: String,
required: true,
trim: true
},
message: {
type: String,
required: true,
trim: true
},
answer: {
type: String,
required: true,
trim: true
}
});
var Message = mongoose.model('Message', MessageSchema);
module.exports = Message;
Finally, this is what sends all the info to the DB.
If anyone finds anything or has any tips to improve this code, please let me know; I am open to all the input you can give me!
THANKS!
var messageData = {
number: req.body.number,
message: req.body.message,
answer: req.body.answer
};
This is OK. However, keep in mind that these variables are sent through the POST body, not query parameters like in your other route.
query: ?param=...
body: (like a POST form; hidden from the URL)
If you're unsure whether you are passing them correctly, you can test that easily with console.info(...) at a certain point, and use Postman to exercise the POST behavior.
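Not part of the answer above, but as a sketch of getting those variables into Mongo without the extra HTTP round-trip (assuming message.js sits next to app.js), you could require the Message model in app.js and save inside the Watson callback, where message, number and answer are all in scope:
var Message = require('./message'); // path is an assumption

// ...inside the conversation.message() callback, once `answer` is known:
Message.create({
  number: number,
  message: message,
  answer: answer
}, function (error, saved) {
  if (error) {
    console.error('Could not save message: ' + error);
  }
});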
I created a function to save data into MongoDB with the logic below, but I'm having real difficulty refactoring the code to make it cleaner; there is so much annoying code duplication. How can I apply the DRY principle?
Logic:
1. Pass in a flag to decide whether or not to close the DB connection at the end.
2. Create different MongoDB models according to the passed-in returnedArray and save them to the DB.
var saveArrayToDB = function(returnedArray, flagToCloseDBConnection) {
var objectToSave,
object,
type = returnedArray[0].type,
arrayToSave = [];
if (type === 'user') {
for (var i = 0; i < returnedArray.length; i++) {
object = returnedArray[i];
objectToSave = new User({
fullName: object['full_name'],
activatedAt: object['activated_at'],
location: object['location'],
timezone: object['timezone'],
imageURL: object['mugshot_url'],
stats: object['stats']
});
arrayToSave.push(objectToSave);
}
User.create(arrayToSave, function(err) {
if (err) {
console.log('err ' + err);
}
if(flagToCloseDBConnection) {
mongoose.connection.close();
}
});
} else if (type === 'group') {
for (var j = 0; j < returnedArray.length; j++) {
object = returnedArray[j];
objectToSave = new Group({
fullName: object['full_name'],
createdAt: object['created_at'],
stats: object['stats'],
url: object['web_url']
});
arrayToSave.push(objectToSave);
}
Group.create(arrayToSave, function(err) {
if (err) {
console.log('err ' + err);
}
if(flagToCloseDBConnection) {
mongoose.connection.close();
}
});
} else {
objectToSave = null;
console.log('ERROR: unrecognized type in data. Not saved.');
}
};
Just to add on to what #JohnnyHK commented on your question: it would be best to keep your mongoose connection open for the whole application lifecycle. Other than that, you can use JavaScript functions like map() to build the arrays, and define common helper functions that you can reuse in the map() and create() calls:
var saveArrayToDB = function(returnedArray, flagToCloseDBConnection) {
var type = returnedArray[0].type,
arrayToSave = [];
var callback = function(err) {
if (err) { console.log('err ' + err); }
};
var newUser = function(u){
return new User({
fullName: u['full_name'],
activatedAt: u['activated_at'],
location: u['location'],
timezone: u['timezone'],
imageURL: u['mugshot_url'],
stats: u['stats']
});
};
var newGroup = function(g){
return new Group({
fullName: g['full_name'],
createdAt: g['created_at'],
stats: g['stats'],
url: g['web_url']
});
};
if (type === 'user') {
arrayToSave = returnedArray.map(newUser);
User.create(arrayToSave, callback);
} else if (type === 'group') {
arrayToSave = returnedArray.map(newGroup);
Group.create(arrayToSave, callback);
} else {
console.log('ERROR: unrecognized type in data. Not saved.');
}
};
No need for closing the connection. Here's an already much improved version:
var factories = {
'user': {
method: function(object){
return {
fullName: object['full_name'],
activatedAt: object['activated_at'],
location: object['location'],
timezone: object['timezone'],
imageURL: object['mugshot_url'],
stats: object['stats']
};
},
model: User
},
'group': {
method: function(object){
return {
fullName: object['full_name'],
createdAt: object['created_at'],
stats: object['stats'],
url: object['web_url']
};
},
model: Group
}
}
var saveArrayToDB = function(returnedArray) {
var saveQueue=[],
factory = factories[returnedArray[0].type];
if(!factory){
return console.log('ERROR: unrecognized type in data. Not saved.');
}
returnedArray.forEach(function(item){
saveQueue.push(factory.method(item));
});
factory.model.create(saveQueue, function(err){
if(err){
console.log('err ' + err);
}
});
};
(You don't need to pass document instances, plain objects are good enough for Model.create)
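For completeness, a small usage sketch (sample values only; the field names come from the factories above, and the mongoose connection and User model are assumed to be set up already):
saveArrayToDB([
  {
    type: 'user',
    full_name: 'Jane Doe',
    activated_at: '2016-08-27',
    location: 'Oslo',
    timezone: 'CET',
    mugshot_url: 'img/jane.png',
    stats: {}
  }
]);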