I'm building my first app using MongoDB and Node.js and I've come across a brick wall. When I try to .update() or .updateMany(), my server comes back with:
UnhandledPromiseRejectionWarning: Unhandled promise rejection (rejection id: 2): MongoError: Wrong type for 'q'. Expected a object, got a array.
Now the thing is, my code worked before, and I have managed to insert an array of about 150 objects into my database. (When I first did this I used .insert() instead of .update(), but now neither works.)
Here's the exact code I'm using to first store an array of objects, and then .update() my database.
Store the array (I am using an external API for this):
var all = [];
$("#update").click(function(){
$.ajax({
type: "GET",
dataType: 'JSON',
url: 'some valid url',
success: function (result){
$.each(result, function(i, item) {
var obj = {
"name" : item['name'],
"id" : item['id'],
"count" : 0
};
all.push(obj);
});
}
});
});
Send to my router.js to be processed:
$('#sendupdate').click(function(){
var jsonstring = JSON.stringify(all);
console.log(jsonstring);
$.ajax({
type: "POST",
dataType: "JSON",
contentType: "application/json",
data: jsonstring,
url: "/s",
success: function(response) {
console.log('Successfully Updated');
}
});
});
and finally here's what my router.js does to the received data:
router.post('/s', function(req, res){
var MongoClient = mongodb.MongoClient;
var url = 'mongodb://localhost:27017/someDB';
MongoClient.connect(url, function(err, db){
if(err){
console.log('unable to connect to the server', err);
} else {
console.log('connected');
var collection = db.collection('someCollection');
var data = req.body;
console.log('data collected');
console.log(data);
collection.updateMany(data, data, {upsert:true});
}
});
});
Now I know I could loop through the array and make a request for each object, but that feels like it would use too many resources, especially since I will need a similar function later on for most users who visit my site.
What else could I do to update the database with so many new objects? I understand that MongoDB expects to receive an object, but I don't understand why it worked before, when the data received was always an array of objects.
Thank you in advance and I hope someone can shed some light on this for me.
________________________________________
EDIT: I have come up with a temporary solution. I'm still unsure how good it is, but if anyone has any ideas on how to improve it, please let me know.
Here's what I've changed, for anyone having a similar issue:
Instead of trying to update an array with an array, I use a for loop to iterate over the array and update each object individually. It happens server-side and is probably not the best solution, but it works for now.
Here's the changed code:
router.post('/s', function(req, res){
var MongoClient = mongodb.MongoClient;
var url = 'mongodb://localhost:27017/someDB';
MongoClient.connect(url, function(err, db){
if(err){
console.log('unable to connect to the server', err);
} else {
console.log('connected');
var collection = db.collection('someCollection');
var data = req.body;
console.log('data collected');
var i;
for(i = 0; i < data.length; ++i){
collection.update(data[i], {$set:data[i]}, {upsert:true});
}
}
});
});
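For what it's worth, the reason the original insert worked is that insert() accepts an array of documents, while update()'s first argument (the 'q' query in the error message) must be a single document. One way to avoid one update() round trip per object would be to batch everything into a single bulkWrite call. This is only a sketch, not tested against your setup, and the filter on id assumes that id uniquely identifies each object:
router.post('/s', function(req, res){
    var MongoClient = mongodb.MongoClient;
    var url = 'mongodb://localhost:27017/someDB';
    MongoClient.connect(url, function(err, db){
        if(err){ return res.status(500).send(err); }
        var collection = db.collection('someCollection');
        // build one upsert operation per incoming object
        var operations = req.body.map(function(item){
            return { updateOne: { filter: { id: item.id }, update: { $set: item }, upsert: true } };
        });
        // send all the upserts to MongoDB in a single round trip
        collection.bulkWrite(operations, { ordered: false }, function(err, result){
            if(err){ return res.status(500).send(err); }
            res.json({ matched: result.matchedCount, upserted: result.upsertedCount });
        });
    });
});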
Related
I have this front-end function:
var finalSelValue = ""
function changeFunc() {
var selectBox = document.getElementById("selectBox");
var selectedValue = selectBox.options[selectBox.selectedIndex].value;
finalSelValue = selectedValue
fetch('/adminFilter', {
method: 'POST',
headers: {'content-type': 'application/json'},
body: JSON.stringify({
finalSelValue
})
})
}
and then this on the backend:
router.post('/adminFilter', auth.ensureAuthenticated, auth.roleCheck('ADMIN'), (req, res) => {
console.log("filter " + req.body.finalSelValue)
var query = "select * from tkwdottawa where STATUS = '" + req.body.finalSelValue + "'"
ibmdb.open(DBCredentials.getDBCredentials(), function(err, conn) {
if (err) return console.log(err);
conn.query(query, function(err, rows) {
if (err) {
res.writeHead(404);
}
for (var i = 0; i < rows.length; i++) {
console.log(rows[i])
}
res.render('AdminDash', {
page_title: "AdminDash",
data: rows
});
conn.close(function() {
console.log('done adminFilter');
});
});
});
})
It is logging the new values properly where the loop is, but it is not re-rendering the page with the new data. How do I fix this?
The issue might be that you have not fully configured your Node.js application. Be aware that the render() method requires a templating engine that handles views to be available. You can install one of the templating engines below, i.e.
Pug
EJS
Handlebars
from the npm registry.
Just run npm install pug (or ejs); I suggest these two because they have a very easy learning curve.
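After installing one, Express also needs to be told which engine to use before render() can find your views. A minimal sketch, assuming EJS was chosen and the templates live in a views/ folder:
const express = require('express');
const path = require('path');
const app = express();

// tell Express which templating engine to use and where the view files live
app.set('view engine', 'ejs');
app.set('views', path.join(__dirname, 'views'));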
Another solution is to use the send() or sendFile() methods instead. These send the data back to the client (in this case the fetch request), so you can get the response in the .then callback and update the view from your front-end script.
Just a note: render() is used with a templating engine and is mostly a server-side-rendering oriented approach.
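A sketch of that send()-based approach, assuming the rows go back as JSON and get rendered into a container with id filterResults (the element id, the <li> markup, and the '?' parameter binding of the ibm_db driver are assumptions, not taken from the question):
router.post('/adminFilter', auth.ensureAuthenticated, auth.roleCheck('ADMIN'), (req, res) => {
    const query = "select * from tkwdottawa where STATUS = ?";   // bound parameter instead of string concatenation
    ibmdb.open(DBCredentials.getDBCredentials(), (err, conn) => {
        if (err) return res.status(500).send(err);
        conn.query(query, [req.body.finalSelValue], (err, rows) => {
            if (err) return res.status(500).send(err);
            res.send(rows);                                      // send the rows back instead of calling render()
            conn.close(() => console.log('done adminFilter'));
        });
    });
});
and on the front end, read the response and rebuild the relevant part of the page:
fetch('/adminFilter', {
    method: 'POST',
    headers: { 'content-type': 'application/json' },
    body: JSON.stringify({ finalSelValue })
})
    .then((res) => res.json())
    .then((rows) => {
        // replace the contents of a results container with the filtered rows
        const container = document.getElementById('filterResults');
        container.innerHTML = rows.map((row) => '<li>' + row.STATUS + '</li>').join('');
    });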
I'm facing this problem and I'm a newbie with JavaScript and Node.js. Below is the code at my route on /update/:id:
controller.save = (req, res) => {
const data = req.body
const name = req.body.name
const cpf = req.body.cpf
req.getConnection((err, connection) => {
const query = connection.query(
`INSERT INTO clientes(Nome, CPF)
VALUES('${name}','${cpf}')`,
data,
(err, clientes) => {
res.json(clientes)
}
)
})
}
and I have a form that has a button called "Update"; when I click it, the AJAX does this:
$(document).on("click", ".update", function() {
var user_id = $(this).attr("id")
$.ajax({
url: "/update/" + user_id,
method: "GET",
dataType: "json",
success: function(to) {
alert(to)
}
})
})
I receive an alert showing [object Object], and when I go to my Network response I have this:
[{"ID":5,"Nome":"tobiaas","CPF":"107"}]
When I change the alert to alert(to.Nome), I get an alert showing undefined.
I don't want to use .map, because I think there should be a simpler way to make this work.
You are receiving an array as the response; Array.Nome does not exist, so your output is normal.
You need to get the item with response[0] or response.pop() to access it:
success: function(response) {
const to = response[0]
alert(to.Nome)
}
For more info on JavaScript array access, see W3Schools.
I'm utilizing a MEAN stack and Socket.io to pull images from the real-time Instagram API. Everything is working great, but I now want to begin saving image data to a MongoDB database so I have a "history" of images from locations (rather than simply the most recent photos).
Below is the relevant (working) code I have so far:
Node server-side code to handle new photo updates from Instagram API and emit event to Angular controller:
// for each new post Instagram sends the data
app.post('/callback', function(req, res) {
var data = req.body;
// grab the object_id (as geo_id) of the subscription and send as an argument to the client side
data.forEach(function(data) {
var geo_id = data.object_id;
sendUpdate(geo_id);
});
res.end();
});
// send the url with the geo_id to the client side
// to do the ajax call
function sendUpdate(geo_id) {
io.sockets.emit('newImage', { geo_id: geo_id });
}
Angular controller code when 'newImage' event is received:
socket.on('newImage', function(geo_id) {
// pass geo_id into Instagram API call
Instagram.get(geo_id).success(function(response) {
instagramSuccess(response.geo_id, response);
});
// Instagram API callback
var instagramSuccess = function(scope,res) {
if (res.meta.code !== 200) {
scope.error = res.meta.error_type + ' | ' + res.meta.error_message;
return;
}
if (res.data.length > 0) {
$scope.items = res.data;
} else {
scope.error = "This location has returned no results";
}
};
});
Angular factory to handle calls to Instagram API:
angular.module('InstaFactory', []).factory('Instagram', function($http) {
var base = "https://api.instagram.com/v1";
var client_id = 'MY-CLIENT-ID';
return {
'get': function(geo_id) {
var request = '/geographies/' + geo_id.geo_id + '/media/recent?client_id=' + client_id;
var url = base + request;
var config = {
'params': {
'callback': 'JSON_CALLBACK'
}
};
return $http.jsonp(url, config);
}
};
});
I also have the following Angular Controller which currently GETS details of each location from my Stadia mongoDB model. This model also contains an (empty for now) 'photos' array that I want to PUSH photo details (url, username, user profile url, etc.) onto each time I receive them from Instagram:
angular.module('StadiaFactory', []).factory('Stadia', function($http) {
var base = "http://localhost:6060/api/stadia/";
return {
'get': function(id) {
var request = id;
var url = base + request;
var config = {
'params': {
'callback': 'JSON_CALLBACK'
}
};
return $http.jsonp(url, config);
}
};
});
This is where I get confused. Where do I fire off the PUT request to my Stadia API and does this Node route for my Stadia API look reasonable? Note: I omitted my GET route which works perfectly. PUT is just throwing me for a loop:
// add photos to stadium photos array
app.put('/api/stadia/:stadium_id', function(req, res) {
// use mongoose to get and update stadium
Stadium.findByIdAndUpdate(req.params.stadium_id,
{$push: {"photos": {img: ?, link: ?, username: ?, profile_picture: ?}}},
{safe: true, upsert: true},
function(err, stadium) {
// if there is an error retrieving, send the error. nothing after res.send(err) will execute
if (err)
res.send(err)
res.jsonp(stadium); // return stadium in JSON format
});
});
Well, there are a few problems with your current structure.
When your callback route is called, possibly with N objects in it, you trigger your socket event and retrieve all the latest photos of your geography each time. So if you have 3 new objects, you will make the same call 3 times to get the same data, which is a bit of a waste when you have the power of sockets.
You can also run into problems if you fetch the object data on the client side and PUT it to your server, since all your clients may receive the socket event and you could end up with duplicates. Not to mention that this is a lot of traffic for not much, it will burn your API quota, and it is not safe on the client side since everyone can see your key.
To me, a good way to get something working (even if I don't really know what your :stadium_id param stands for) is to fetch the info you want directly on the server side, in your callback, using the request module.
You should only fetch the pictures, because the subscription can also deliver users, tags or videos that you may not want. So you will have to listen for the image objects and nothing else.
You could have something like this:
var request = require('request');
var CLIENT_ID = 'yourId';
function newImage(data) {
io.sockets.emit('newImage', data);
}
app.post('/callback', function (req, res) {
//loop in all the new objects
req.body.forEach(function (data) {
if (data.type !== 'image') { return ; }
//BTW I think you should try with the id property instead of object_id
request('https://api.instagram.com/v1/media/' + data.object_id + '?access_token=' + CLIENT_ID,
function (error, response, body) {
if (error) { return ; }
//Here we have one JSON object with all the info about the image
var image = JSON.parse(body);
//Save the new object to your DB. (replace the STADIUM_ID)
Stadium.findByIdAndUpdate(STADIUM_ID, { $push: {'photos':
{ img: image.images.standard_resolution.url,
link: image.link,
username: image.user.username,
profile_picture: image.user.profile_picture
}}},
{ safe: true, upsert: true });
//Send a socket to your client with the new image
newImage({
id: image.id,
img: image.images.standard_resolution.url,
link: image.link,
username: image.user.username,
profile: image.user.profile_picture
});
});
});
res.end();
});
And then, in your client, you will only have to push the new images received in the newImage socket event into $scope.items.
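For completeness, a minimal sketch of that client-side handler, assuming the same socket service and $scope.items array from the question ($scope.$apply is needed because the socket callback runs outside Angular's digest cycle):
socket.on('newImage', function(image) {
    // image is the object emitted by newImage() on the server
    $scope.$apply(function() {
        $scope.items.unshift(image);   // show the newest photo first
    });
});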
I've never used Cloud Code or JavaScript before, and I am trying to write some Parse Cloud Code to find a user using an objectId passed in to the cloud function, then update that user's relation that holds friends, and finally save that user.
Below is the function I'm using:
Parse.Cloud.define("addFriendToFriendsRelation", function(request, response) {
Parse.Cloud.useMasterKey();
var fromUserObjectId = request.params.fromUserObjectId;
var acceptingUser = request.params.user;
var query = new Parse.Query(Parse.User);
// find the user the request was from using the objectId
query.get(fromUserObjectId, {
success: function(user) {
var fromUser = user
var relation = fromUser.relation("friends");
relation.add(acceptingUser);
fromUser.save({
success: function() {
response.success("Successfully saved the users relation")
},
error: function() {
response.error("Save failed");
}
});
},
error: function() {
response.error("Save failed");
}
});
});
I managed to piece this together using the Parse docs, but I'm really not following it too well; I've never used JavaScript and am finding the syntax confusing.
Then I'm calling the function with:
//fromUser is a PFUser object defined further up
[PFCloud callFunctionInBackground:#"addFriendToFriendsRelation" withParameters:#{#"fromUserObjectId" : fromUser.objectId} block:^(id object, NSError *error) {
}
However, whenever this function is called I get a "success/error was not called" error, even though I'm calling response.success and response.error in the function, so I don't know why that is. Can anyone lend a hand?
Edit: After doing some more searching it looks like response.success and response.error should only be called once each, so I modified my function to look like this:
Parse.Cloud.define("addFriendToFriendsRelation", function(request, response) {
Parse.Cloud.useMasterKey();
var fromUserId = request.params.fromUserObjectId;
console.log("fromUserId:");
console.log(fromUserId);
var acceptingUser = request.params.user;
console.log("acceptingUser:")
console.log(acceptingUser);
var query = new Parse.Query(Parse.User);
query.get(fromUserId, {
success: function(user) {
console.log("found user:");
console.log(user);
var fromUser = user;
var relation = fromUser.relation("friends");
relation.add(acceptingUser);
console.log("added accepting user to relation");
fromUser.save({
success: function() {
response.success("successfully saved user")
},
error: function() {
response.error("error saving user");
}
});
console.log("found a user");
},
error: function() {
console.log("error finding user");
}
});
});
An old question, but since it's been up-voted, maybe answering can help someone else :).
First off, there is an error in how you are saving fromUser.
fromUser.save({ success: ...
If you look at the api you can see that it should be of the form:
fromUser.save(null, { success: ...
But the larger problem that kept you from finding your bug is that errors are getting eaten, because you are using the old-style approach to dealing with async code instead of using promises.
Below, I have re-written to use promises. Note:
Always return promise-generating calls (there are other options for catching errors in async code, but start with this).
Put a .catch at the end. The .catch is effectively the same thing as .then(null, response.error), but either way it is imperative that there is a final backstop to catch errors. In your code above, the error was in a success block that was running async, so when it failed there was no one to hear it :).
Parse.Cloud.define("addFriendToFriendsRelation", (request, response) => {
const fromUserId = request.params.fromUserObjectId;
console.log("fromUserId:", fromUserId);
const acceptingUser = request.user;
console.log("acceptingUser:", acceptingUser)
new Parse.Query(Parse.User)
.get(fromUserId, { useMasterKey: true })
.then((fromUser) => {
console.log("found fromUser:", fromUser);
const relation = fromUser.relation("friends");
relation.add(acceptingUser);
console.log("added accepting user to relation");
return fromUser.save(null, { useMasterKey: true })
})
.then(response.success)
.catch(response.error);
});
I have no idea why I'm getting this error. Please help me out? It's happening at the queryUser.get line.
Parse.Cloud.define("getStacksForUser", function(request, response) {
console.log(request);
console.log(response);
var User = Parse.Object.extend("User")
var Stack = Parse.Object.extend("Stack")
var StackUser = Parse.Object.extend("StackUser")
var queryUser = new Parse.Query(User);
queryUser.get(request.object.get("user"), {
success: function(user) {
console.log(user);
},
error: function(object, error) {
response.error("Error retrieving User");
}
});
});
Thanks!
I found my mistake.
I should use:
request.params.user, not request.object.get("user")
Another thing I realized was that Parse sends the PFUser who is currently logged in along with the request.
Simply using request.user will give you the current user, so I'm going to do that instead of what I was doing in my question.
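For reference, a sketch of the fixed function with request.params.user in place of request.object.get("user") (response.success is added here so the cloud call completes; using request.user directly would avoid the query entirely):
Parse.Cloud.define("getStacksForUser", function(request, response) {
    var queryUser = new Parse.Query(Parse.User);
    // request.params.user is the id passed by the client; request.user is the logged-in user
    queryUser.get(request.params.user, {
        success: function(user) {
            console.log(user);
            response.success(user);
        },
        error: function(object, error) {
            response.error("Error retrieving User");
        }
    });
});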