Not rendering page on request (Node.js / JavaScript)

I have this front-end function:
var finalSelValue = ""
function changeFunc() {
  var selectBox = document.getElementById("selectBox");
  var selectedValue = selectBox.options[selectBox.selectedIndex].value;
  finalSelValue = selectedValue
  fetch('/adminFilter', {
    method: 'POST',
    headers: { 'content-type': 'application/json' },
    body: JSON.stringify({
      finalSelValue
    })
  })
}
and then this on the backend:
router.post('/adminFilter', auth.ensureAuthenticated, auth.roleCheck('ADMIN'), (req, res) => {
  console.log("filter " + req.body.finalSelValue)
  var query = "select * from tkwdottawa where STATUS = '" + req.body.finalSelValue + "'"
  ibmdb.open(DBCredentials.getDBCredentials(), function(err, conn) {
    if (err) return console.log(err);
    conn.query(query, function(err, rows) {
      if (err) {
        res.writeHead(404);
      }
      for (var i = 0; i < rows.length; i++) {
        console.log(rows[i])
      }
      res.render('AdminDash', {
        page_title: "AdminDash",
        data: rows
      });
      conn.close(function() {
        console.log('done adminFilter');
      });
    });
  });
})
It is logging the new values properly where the loop is, but it is not re-rendering the page with the new data. How do I fix this?

The issue might be that you have not fully configured your Node.js application. Be aware that the render() method requires a templating engine that handles views. You can install one of the following templating engines, e.g.
Pug
EJS
Handlebars
from the npm registry.
Just run npm install pug or npm install ejs; I suggest these two because they have a very easy learning curve.
Another solution is to use the send() or sendFile() methods instead. These send the data back to the client (in this case the fetch request), so you can read the response in the .then callback and update the view from the front-end script, as sketched below.
Just a note:
render() is used with a templating engine and is mostly a server-side-rendering-oriented approach.
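For example, here is a minimal sketch of the send() approach, assuming the same /adminFilter route, that the ibm_db driver accepts bind parameters as a second array argument, and a hypothetical <tbody id="resultsBody"> element in the admin page; the row markup is purely illustrative:
// Server: return the rows as JSON instead of re-rendering the page
router.post('/adminFilter', auth.ensureAuthenticated, auth.roleCheck('ADMIN'), (req, res) => {
  var query = "select * from tkwdottawa where STATUS = ?"; // parameterized to avoid SQL injection
  ibmdb.open(DBCredentials.getDBCredentials(), function(err, conn) {
    if (err) return res.status(500).json({ error: 'DB connection failed' });
    conn.query(query, [req.body.finalSelValue], function(err, rows) {
      conn.close(function() {});
      if (err) return res.status(500).json({ error: 'Query failed' });
      res.json(rows);
    });
  });
});

// Client: read the JSON response and update the page
fetch('/adminFilter', {
  method: 'POST',
  headers: { 'content-type': 'application/json' },
  body: JSON.stringify({ finalSelValue })
})
  .then(function(response) { return response.json(); })
  .then(function(rows) {
    var tbody = document.getElementById('resultsBody'); // hypothetical table body
    tbody.innerHTML = rows
      .map(function(row) { return '<tr><td>' + row.STATUS + '</td></tr>'; }) // illustrative columns
      .join('');
  });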

Related

I'm unable to send a response to my react.js using http.get in node

I'm trying to get the temperature data from my Node.js backend sent to React.js, but I keep getting "res.send is not a function".
Sample code here:
app.get("/gettemperature", (req, res) => {
const email = req.query.email;
let stmt = `SELECT * FROM users WHERE email=?`;
let todo = [email];
db.query(stmt, todo, (err, results, fields) => {
if (err) {
console.error(err.message);
}
if(results.length > 0 ){
let id = results[0].id;
let getID = `SELECT * FROM controlModules WHERE deviceowner=?`;
let getidData = [id];
db.query(getID, getidData, (err, resulta, fields) => {
if (err) {
console.error(err.message);
}
if(resulta.length > 0){
let lanip = resulta[0].ipaddress;
let url = "http://"+lanip+"/data";
http.get(url,(res) => {
let body = "";
res.on("data", (chunk) => {
body += chunk;
});
res.on("end", () => {
try {
let json = JSON.parse(body);
const temp_actual = json.temperature.value;
console.log(temp_actual);
res.setHeader('Content-Type', 'application/json');
res.end(
JSON.stringify({
value: temp_actual
})
);
} catch (error) {
console.error(error.message);
};
});
}).on("error", (error) => {
console.error(error.message);
});
}
});
}
});
});
I really need to return/send/respond the temperature data to my front end, but I'm getting said error. Is there a different way to return data?
It looks like you are mixing up an HTTP server you wrote in Node (although you haven't shown any relevant code) and an HTTP client you also wrote in Node.
res is an argument received by the callback you pass to http.get and contains data about the response received by your HTTP client.
Meanwhile, somewhere else (not shown) you have a different variable also called res which is the object your HTTP server uses to send its response to the browser running your React code.
You are calling res.send and wanting res to be the latter but it is really the former.
Since you haven't shown us the HTTP server code, it is hard to say where that res is, but there is a good chance you have shadowed it and can solve your problem by using different names (e.g. client_res and server_res).
That said, I strongly recommend avoiding the http module directly, as its API follows out-of-date design patterns and isn't very friendly. Consider using fetch or axios for making HTTP requests and Express.js for writing HTTP servers.
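As a rough sketch of the renaming fix (not the answerer's code): server_res is the Express response and client_res is the device response, so nothing is shadowed; lookupDeviceIp is a hypothetical helper standing in for the two db.query calls from the question:
app.get("/gettemperature", (req, server_res) => {
  // lookupDeviceIp(email, callback) is hypothetical: it wraps the users/controlModules queries
  lookupDeviceIp(req.query.email, (err, lanip) => {
    if (err || !lanip) { return server_res.sendStatus(404); }
    http.get("http://" + lanip + "/data", (client_res) => {
      let body = "";
      client_res.on("data", (chunk) => { body += chunk; });
      client_res.on("end", () => {
        try {
          const json = JSON.parse(body);
          // server_res is the Express response, so .json() / .send() work here
          server_res.json({ value: json.temperature.value });
        } catch (error) {
          console.error(error.message);
          server_res.sendStatus(502); // upstream device returned invalid JSON
        }
      });
    }).on("error", (error) => {
      console.error(error.message);
      server_res.sendStatus(502);
    });
  });
});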

Ajax DELETE request path inconsistency

I have set up an Express server to handle different requests, one of which is a DELETE request. It works sometimes and gives a 404 other times. I noticed that the URL it is sending is different. So if I change my server code to handle one path, it works until the client sends a different path. I am unable to understand why it is sending different URLs and is not consistent. I am very new to web programming; still a student. Maybe I am missing something very basic.
The request is being sent from
http://localhost:3000/notes
page.
Yesterday the request was sent with this path:
http://localhost:3000/api/notes/id
Today the request is:
http://localhost:3000/notes/api/notes/id
This is the client side request: (I have verified that its calling the delete with correct value)
var deleteNote = function(id) {
  return $.ajax({
    url: "api/notes/" + id,
    method: "DELETE"
  });
};
This is the server code:
app.delete("/api/notes/:id", (req, res) => {
  let chosenNoteToDelete = req.params.id;
  fs.readFile(__dirname + "/db/db.json", (err, data) => {
    if (err) {
      throw err;
    }
    let json = JSON.parse(data);
    for (let i = 0; i < json.length; i++) {
      if (json[i].id === chosenNoteToDelete) {
        json.splice(i, 1);
      }
    }
    fs.writeFile(__dirname + "/db/db.json", JSON.stringify(json), (err) => {
      if (err) {
        throw err;
      }
      res.send("Successfully deleted");
    })
  })
});
Can someone help me understand why its inconsistent? And how do I handle it on the server?
Change the client code from this:
var deleteNote = function(id) {
  return $.ajax({
    url: "api/notes/" + id,
    method: "DELETE"
  });
};
to this:
var deleteNote = function(id) {
  return $.ajax({
    url: "/api/notes/" + id,
    method: "DELETE"
  });
};
Your relative path tells jQuery to combine your path with the path from the page's URL. You don't want a relative path. You always want it to be /api/notes/id so you need the leading slash.
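To illustrate why the resolved URL varied, here is how relative and absolute paths resolve against the page URL, using the standard URL constructor; note that whether the page URL happens to end in a trailing slash changes the result for a relative path, which may explain the inconsistency:
// relative path: depends on the page URL it is resolved against
new URL("api/notes/1", "http://localhost:3000/notes").href
//=> "http://localhost:3000/api/notes/1"
new URL("api/notes/1", "http://localhost:3000/notes/").href
//=> "http://localhost:3000/notes/api/notes/1"

// absolute path (leading slash): always the same
new URL("/api/notes/1", "http://localhost:3000/notes/").href
//=> "http://localhost:3000/api/notes/1"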
Some other things to clean up in your server code:
Log all possible errors with console.log(err) or some similar logging mechanism.
NEVER, EVER write if (err) throw err inside an asynchronous callback on your server. That does you no good because nobody can catch that error. Instead, always log the error and then HANDLE it by sending an error response.
When parsing JSON from an external source that can throw an error, use try/catch around it.
When you .splice() an array that you are iterating, either stop the loop after processing the .splice(), or correct the loop index (because you just moved down the array elements after it, so you would miss the next item), or iterate the array backwards so a .splice() operation won't affect the iteration.
Here's a fixed version of your code:
app.delete("/api/notes/:id", (req, res) => {
  let chosenNoteToDelete = req.params.id;
  fs.readFile(__dirname + "/db/db.json", (err, data) => {
    if (err) {
      console.log(err);
      res.sendStatus(500);
      return;
    }
    let json;                    // declared outside the try block so it stays in scope below
    try {
      json = JSON.parse(data);
    } catch (e) {
      console.log(e);
      res.sendStatus(500);
      return;
    }
    for (let i = 0; i < json.length; i++) {
      if (json[i].id === chosenNoteToDelete) {
        json.splice(i, 1);
        break;                   // stop the loop; returning here would skip the writeFile below
      }
    }
    fs.writeFile(__dirname + "/db/db.json", JSON.stringify(json), (err) => {
      if (err) {
        console.log(err);
        res.sendStatus(500);
        return;
      }
      res.send("Successfully deleted");
    });
  });
});
And, here's a cleaner implementation using fs.promises and async/await with more centralized error handling and detection if the chosenNote is not found:
const fsp = require('fs').promises;
const path = require('path');

app.delete("/api/notes/:id", async (req, res) => {
  let chosenNoteToDelete = req.params.id;
  let dataFilename = path.join(__dirname, "/db/db.json");
  try {
    let data = await fsp.readFile(dataFilename);
    let dataArray = JSON.parse(data);
    // iterate array backwards so .splice() doesn't cause us to miss elements of the array
    let found = false;
    for (let i = dataArray.length - 1; i >= 0; i--) {
      if (dataArray[i].id === chosenNoteToDelete) {
        found = true;
        dataArray.splice(i, 1);
      }
    }
    if (found) {
      await fsp.writeFile(dataFilename, JSON.stringify(dataArray));
      res.send("Successfully deleted");
    } else {
      res.status(404).send(`Note id ${chosenNoteToDelete} not found.`);
    }
  } catch (e) {
    console.log(e);
    res.sendStatus(500);
  }
});

Client and server side both have the updated data but rendering doesn't show the updated data

The technologies I am using are: Nodejs, Express, MySQL, EJS.
What I am doing:
I have an app.get which gets the needed data (posts) from MySQL and then renders a file using that data:
app.get("/" + element.name, function(req, res){
connection.query(`SELECT * FROM sub ${element.name} posts`, function(error, result) {
if(error) {
console.log(`Nick error`)
} else {
console.log(`Works get !`)
posts = result;
}
});
res.render("sub" + element.name, {posts:posts}), console.log(posts + " Here")
When someone visits that URL, that data (posts) is rendered and a fetch POST request is made using that data (I am using the length of it):
// Before this, the data gets mapped into the variable called "final"
console.log("!!!###")
console.log(`Above the FETCH`)
console.log(`yes in normal position 1 `)
console.log(` BTW ${JSON.stringify(final)}`) // I CAN SEE THE NEW POST HERE #########################
fetch(`/ookook`, { method: 'POST', body: JSON.stringify(final), headers: new Headers({ "Content-Type": "application/json" }) })
  .then(res => res.json())
  .then(jsonRes => {
    console.log(`yes in normal position 2 `)
    console.log(`DRAGON BTW ${JSON.stringify(final)}`) // I CAN SEE THE NEW POST HERE ###############################
  })
The POST request then sees the length of the data (posts) and for each item (it's an array of arrays of objects, so it does this for each inner array) creates an app.get route; the URL is "/pagename/2" (the number gets incremented).
app.post("/" + element.name, function(req, res) {
for(let i = 0; i < req.body.length; i++) {
for(let b = 0; b < req.body[i].length; b++) {
console.log(req.body[i][b].id)
let posts = req.body[i];
console.log(posts) // I CAN SEE THE NEW POST HERE ###########################
console.log("in app.post friendo")
app.get(`/${element.name}/${i + 2}`, function(req, res) {
res.render("sub"+ element.name, { posts:posts})
})
}
}
res.status(200).json({voteNum: 5}) // Ignore the voteNum
});
The problem: When I create a new post, it is saved to my DB and I can see it on the client and server side using console.log, BUT it doesn't get rendered. For me to see the post I have to restart my server.
I wrote "// I CAN SEE THE NEW POST HERE #############" in the code where I can see the new post in the console.log (server/client side), BUT it doesn't get rendered.
When your start script runs and your server starts, your Express app and all of its middleware and routes are set up once.
Your change won't take effect unless you restart the server and reset the app.
I think your concept is not correct.
First
Maybe try something like this:
let list = [];
// list is the data in posts; you can initialize it by querying the DB.
app.get('/:elementname', () => {
  // check whether req.params.elementname matches an entry in list
  // if it matches, it is a valid url; otherwise 404 NOT FOUND => call next()
})
app.post('/newPostName', () => {
  // add the new post to the DB and update list.
})
Second
By the way, you had better check whether connection.query is an asynchronous function or not.
If it is, the code should be
connection.query('query...', () => {
  //...
  res.render()
})
not
connection.query('query...', () => {
  //...
})
res.render()
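As a rough sketch of that second point applied to the route from the question (the query string is kept exactly as in the question), res.render is moved inside the query callback so it only runs after the fresh rows are available:
app.get("/" + element.name, function(req, res) {
  connection.query(`SELECT * FROM sub ${element.name} posts`, function(error, result) {
    if (error) {
      console.log(error);
      return res.sendStatus(500);
    }
    // render only after the query has returned the up-to-date rows
    res.render("sub" + element.name, { posts: result });
  });
});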

Node JS, make HTTPS request synchronously from two links

I want to make an HTTPS request to an external link through Node.js. On my first call, I need to fetch user IDs by looping through several users. On my second call, I need to put each user ID in the URL and fetch that user's properties, repeating the process until I have gone through all users. The end goal is to store the data of every user in JSON format. There is no front end involved. Any direction/advice is much appreciated.
I can't share the actual link due to API keys, but here is a hypothetical scenario. I only show 2 users here; I have about 10,000 users in my actual data set.
Link 1
https://www.google.com/all_users
JSON Output
{
"name": "joe",
"uri": "/id/UserObject/User/1234-1234",
},
{
"name": "matt",
"uri": "/id/UserObject/User/5678-5678",
}
Link 2
https://www.google.com//id/UserObject/User/1234-1234
JSON Output
{
"name": "joe",
"uri": "/id/UserObject/User/1234-1234",
"Property Values": {
"height": "2",
"location": "canada"
},
"Other Values": {
"work": "google",
"occupation": "developer"
}
}
Nested JSON
{
"PropertySetClassChildrenResponse": {
"PropertySetClassChildren": {
"PropertySetInstances": {
"totalCount": "1",
"Elements": [
{
"name": "SystemObject",
"uri": "/type/PropertySetClasses/SystemObject"
}
]
}
}
}
}
Not tested, but this should point you in the right direction. It uses Promises and assumes an ES6 environment:
const rp = require('request-promise');
const Promise = require('bluebird');

fetchAllUsers()
  .then(extractUserUris)
  .then(extractUserIds)
  .then(buildUserDetailRequests)
  .then(requests => Promise.all(requests)) // run all the user detail requests in parallel
  .then(allUserData => {
    // allUserData is an array of all users' data
  });

function fetchAllUsers() {
  // json: true makes request-promise parse the response body for us
  return rp({ uri: 'https://api.whatever.com/all_users', json: true });
}

function extractUserUris(users) {
  return users.map(user => user.uri);
}

function extractUserIds(userUris) {
  return userUris.map(userUri => userUri.split('/').pop());
}

function buildUserDetailRequests(userIds) {
  return userIds.map(userId => rp({ uri: "https://api.whatever.com/user/" + userId, json: true }));
}
I'd suggest using the request package to make your HTTP requests easier.
> npm install request
Then you would obtain a list of all users with something like this:
var request = require('request');
request.get({url: "https://example.org/all_users"}, handleUsersResponse);
You'd handle the request response like this:
function handleUsersResponse(err, response, body) {
  if (!err && response.statusCode == 200) {
    // parse json (assuming array of users)
    var users = JSON.parse(body);
    // iterate through each user and obtain user info
    for (var i = 0; i < users.length; i++) {
      var userUri = users[i].uri;
      obtainUserInfo(userUri);
    }
  }
}
The obtainUserInfo function would be similar to the above code.
One important thing to keep in mind is that since the HTTP requests are being made asynchronously, when you make the requests in a loop, the next iteration of the loop does not wait until the work is finished before moving to the next iteration and starting the next request. So in effect, your loop would start all the HTTP requests nearly in parallel. This can easily overwhelm both your client and the server. One way to get around this is to use a worker queue to enqueue the work and ensure that only a maximum number of HTTP requests are being executed at any given time.
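As a rough sketch of that idea (not from the original answer), here is a minimal concurrency limiter that keeps at most maxConcurrent requests in flight at once; it assumes each task is a function returning a Promise, e.g. obtainUserInfo adapted to return a Promise:
function runWithConcurrencyLimit(tasks, maxConcurrent) {
  return new Promise(function(resolve, reject) {
    var results = [];
    var nextIndex = 0;
    var active = 0;
    var finished = 0;

    function startNext() {
      // start tasks until we hit the limit or run out of work
      while (active < maxConcurrent && nextIndex < tasks.length) {
        (function(i) {
          active++;
          nextIndex++;
          tasks[i]().then(function(result) {
            results[i] = result;
            active--;
            finished++;
            if (finished === tasks.length) {
              resolve(results);
            } else {
              startNext(); // a slot freed up, start the next queued task
            }
          }).catch(reject);
        })(nextIndex);
      }
    }

    if (tasks.length === 0) { return resolve(results); }
    startNext();
  });
}

// usage: wrap each user request in a function so it only starts when dequeued
// var tasks = userUris.map(function(uri) { return function() { return obtainUserInfo(uri); }; });
// runWithConcurrencyLimit(tasks, 5).then(function(allUserInfo) { /* ... */ });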
You don't want to do synchronous calls; it defeats the purpose of using Node. So by the Node powers invested in me by the State of Texas, I hereby cast that synchronous way of thinking out of you!
Just kidding :), but let's do this the Node way.
Install these two libraries:
sudo npm install promise
sudo npm install request
And set your code to look like:
var Promise = require('promise');
var request = require('request');
//Get your user data, and print the data in JSON:
getUserData()
  .then(function(userData) {
    console.log(JSON.stringify(userData));
  }).catch(function(err) {
    console.log('Error: ' + err);
  });
/**
 * Prepares an Object containing data for all users.
 * @return Promise - Contains object with all user data.
 */
function getUserData() {
  return new Promise(function(fulfill, reject) {
    // Make the first request to get the user IDs:
    var url1 = 'https://www.google.com/all_users';
    get(url1)
      .then(function(res) {
        res = JSON.parse(res);
        // Loop through the object to get what you need:
        // Set a counter though so we know once we are done.
        var counter = 0;
        var returnDataArr = []; // collect results outside the loop so the array isn't reset each iteration
        for (var x = 0; x < res.users.length; x++) {
          var url2 = 'https://www.google.com//id/UserObject/User/';
          url2 = url2 + res.users[x].id; // Wherever the individual ID is stored.
          get(url2)
            .then(function(res2) {
              // Get what you need from the response from the 2nd URL.
              returnDataArr.push(res2);
              counter++;
              if (counter === res.users.length) {
                fulfill({data: returnDataArr}); // Return/Fulfill an object containing an array of the user data.
              }
            }).catch(function(err) {
              // Catch any errors from the 2nd HTTPS request:
              reject('Error: ' + err);
            });
        }
      }).catch(function(err) {
        // Catch any errors from the 1st HTTPS request:
        reject('Error: ' + err);
      });
  });
}
/**
 * Your HTTPS GET Request Function
 * @param url - The url to GET
 * @return Promise - Promise containing the JSON response.
 */
function get(url) {
  return new Promise(function(fulfill, reject) {
    var options = {
      url: url,
      headers: {
        'Header Name': 'Header Value',
        'Accept': 'application/json',
        'Content-Type': 'application/json'
      }
    };
    request(options, function(err, res, body) {
      if (err) {
        reject(err);
      } else {
        fulfill(body);
      }
    });
  });
}
So what this Promise does is return the value once we actually have it. In the code above, we first get the list of users, and then as we parse through it, we make a new asynchronous HTTP request to get the additional data for each one. Once we get the user data, we push it to an array.
Finally, once our counter hits its endpoint, we know that we have gotten all the user data, and so we call fulfill which essentially means return, and it returns an object containing an array of the user data.
Let me know if this makes sense.
The answers above helped me go further with my solution and get the desired outcome. However, I spent a lot of time trying to understand Node, promises in Node, making an API call, etc. Hopefully, this will help a beginner-level Node developer.
NODE
Node.js® is a JavaScript runtime built on Chrome's V8 JavaScript engine. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient. Node.js' package ecosystem, npm, is the largest ecosystem of open source libraries in the world.
If you are a JavaScript developer, you would prefer to use Node as you wouldn't have to spend time learning a new language like Java or Python.
GOAL
Make an HTTPS call to an external link to fetch all server URIs. Pass each URI in as a param to build a second link and fetch that server's properties. Loop through all server URIs and properties. Refer to the original post at the top for the data structure. The external link also required basic auth and headers.
CODE
Install the npm modules request (HTTPS calls), bluebird (promises), lodash (utilities) and express (Node framework).
/********************** MODULES/DEPENDENCIES **********************/
var express = require('express');
var request = require('request');
var Promise = require('bluebird');
var _ = require("lodash");
/********************** INITIATE APP **********************/
var app = express();
console.log("Starting node server...");
/**
 * Your HTTPS GET Request Function
 * @param url - The url to GET
 * @return Promise - Promise containing the JSON response.
 */
function get(url) {
  return new Promise(function(resolve, reject) {
    // var auth = "Basic " + new Buffer(username + ':' + password).toString("base64");
    var options = {
      url: url,
      headers: {
        // 'Authorization': auth,
        'Content-Type': 'application/json',
        'Accept': 'application/json'
      }
    };
    console.log("Calling GET: ", url);
    if ('development' == app.get('env')) {
      console.log("Rejecting node tls");
      process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
    }
    request(options, function(error, response, body) {
      if (error) {
        reject(error);
      } else {
        // console.log("THIS IS BODY: ", body);
        resolve(body);
      }
    });
  });
}
/********************** GET DATA FUNCTION **********************/
function getServerData() {
  /********************** URI VARIABLES **********************/
  var username = 'username',
      password = 'password',
      role = 'Read-Only',
      url_host = 'https://link.com:10843';

  /********************** URL 1 **********************/
  var url1 = url_host + '/type/PropertySetClasses/SystemObject/Server/?maxResults=1000&username=' + username + '&password=' + password + '&role=' + role;
  console.log("Getting server data...", url1);

  /********************** GET REQUEST 1 **********************/
  return get(url1)
    .then(function(res) {
      console.log("Got response!");
      res = JSON.parse(res);
      res = res.PropertySetClassChildrenResponse.PropertySetClassChildren.PropertySetInstances.Elements;
      // console.log("THIS IS RES: ", res);

      /********************** FETCH URI FROM RES NESTED OBJECT **********************/
      var server_ids = _.map(res, function(server) {
        return server.uri;
      });
      console.log("Calling server urls", server_ids);

      // Loop through the object to get what you need:
      // Set a counter though so we know once we are done.
      return Promise.map(server_ids, function(id) {
        var url2 = url_host + id + '?username=' + username + '&password=' + password + '&role=' + role;
        console.log("Calling URL", url2);
        return get(url2)
          .then(function(res2) {
            res2 = JSON.parse(res2);
            var elements = res2.PropertySetInstanceResponse.PropertySetInstance.PropertyValues.Elements;
            console.log("Got second response", res2, elements);
            return elements;
          });
      })
      .then(function(allUrls) {
        console.log("Got all URLS", allUrls);
        return allUrls;
      });
    })
    .catch(function(err) {
      console.error(err);
      throw err;
    });
};
app.listen(8080, function() {
  console.log("Server listening and booted on: " + 8080);
  app.get("/serverInfo", function(req, res) {
    console.log("Calling server info");
    return getServerData()
      .then(function(userData) {
        var userData = JSON.stringify(userData, null, "\t");
        console.log("This is USERDATA Data: ", userData);
        res.send(userData);
      })
      .catch(function(err) {
        console.error(err);
        res.send({
          __error: err,
          message: err.message
        });
      });
  });
});

Saving to MongoDB when Socket.io event is emitted

I'm utilizing a MEAN stack and Socket.io to pull images from the real-time Instagram API. Everything is working great, but I now want to begin saving image data to a MongoDB database so I have a "history" of images from locations (rather than simply the most recent photos).
Below is the relevant (working) code I have so far:
Node server-side code to handle new photo updates from Instagram API and emit event to Angular controller:
// for each new post Instagram sends the data
app.post('/callback', function(req, res) {
  var data = req.body;
  // grab the object_id (as geo_id) of the subscription and send as an argument to the client side
  data.forEach(function(data) {
    var geo_id = data.object_id;
    sendUpdate(geo_id);
  });
  res.end();
});

// send the url with the geo_id to the client side
// to do the ajax call
function sendUpdate(geo_id) {
  io.sockets.emit('newImage', { geo_id: geo_id });
}
Angular controller code when 'newImage' event is received:
socket.on('newImage', function(geo_id) {
  // pass geo_id into Instagram API call
  Instagram.get(geo_id).success(function(response) {
    instagramSuccess(response.geo_id, response);
  });
  // Instagram API callback
  var instagramSuccess = function(scope, res) {
    if (res.meta.code !== 200) {
      scope.error = res.meta.error_type + ' | ' + res.meta.error_message;
      return;
    }
    if (res.data.length > 0) {
      $scope.items = res.data;
    } else {
      scope.error = "This location has returned no results";
    }
  };
});
Angular factory to handle calls to Instagram API:
angular.module('InstaFactory', []).factory('Instagram', function($http) {
  var base = "https://api.instagram.com/v1";
  var client_id = 'MY-CLIENT-ID';
  return {
    'get': function(geo_id) {
      var request = '/geographies/' + geo_id.geo_id + '/media/recent?client_id=' + client_id;
      var url = base + request;
      var config = {
        'params': {
          'callback': 'JSON_CALLBACK'
        }
      };
      return $http.jsonp(url, config);
    }
  };
});
I also have the following Angular Controller which currently GETS details of each location from my Stadia mongoDB model. This model also contains an (empty for now) 'photos' array that I want to PUSH photo details (url, username, user profile url, etc.) onto each time I receive them from Instagram:
angular.module('StadiaFactory', []).factory('Stadia', function($http) {
  var base = "http://localhost:6060/api/stadia/";
  return {
    'get': function(id) {
      var request = id;
      var url = base + request;
      var config = {
        'params': {
          'callback': 'JSON_CALLBACK'
        }
      };
      return $http.jsonp(url, config);
    }
  };
});
This is where I get confused. Where do I fire off the PUT request to my Stadia API and does this Node route for my Stadia API look reasonable? Note: I omitted my GET route which works perfectly. PUT is just throwing me for a loop:
// add photos to stadium photos array
app.put('/api/stadia/:stadium_id', function(req, res) {
  // use mongoose to get and update stadium
  Stadium.findByIdAndUpdate(req.params.stadium_id,
    {$push: {"photos": {img: ?, link: ?, username: ?, profile_picture: ?}}},
    {safe: true, upsert: true},
    function(err, stadium) {
      // if there is an error retrieving, send the error. nothing after res.send(err) will execute
      if (err)
        res.send(err)
      res.jsonp(stadium); // return stadium in JSON format
    });
});
Well, there are a few problems with your current structure.
When your callback route is called, with a possibility of N objects in it, you trigger your socket event and retrieve all the latest photos of your geography each time. So if you have 3 new objects, you will call the same thing 3 times to get the same data, which is a bit of a waste when you have the power of sockets.
You can also have problems if you try to get the object data from the client side and PUT it to your server, since all your clients may receive the socket and you could end up with duplicates, not to mention that this is a lot of traffic for not much, it will burn your API quota limit, and it is not safe on the client side since everyone can see your key.
To me, a good way to get something working (even if I don't really know what your :stadium_id param stands for) is to get the info you want directly on the server side, in your callback, using the request module.
You should only get the pictures, because you can retrieve a lot of things like users, tags or videos that you may not want. So you will have to listen for the image objects and nothing else.
You could have something like this:
var request = require('request');
var CLIENT_ID = 'yourId';

function newImage(data) {
  io.sockets.emit('newImage', data);
}

app.post('/callback', function (req, res) {
  // loop over all the new objects
  req.body.forEach(function (data) {
    if (data.type !== 'image') { return; }
    // BTW I think you should try with the id property instead of object_id
    request('https://api.instagram.com/v1/media/' + data.object_id + '?access_token=' + CLIENT_ID,
      function (error, response, body) {
        if (error) { return; }
        // Here we have one JSON object with all the info about the image
        var image = JSON.parse(body);
        // Save the new object to your DB. (replace the STADIUM_ID)
        Stadium.findByIdAndUpdate(STADIUM_ID, { $push: {'photos':
          { img: image.images.standard_resolution.url,
            link: image.link,
            username: image.user.username,
            profile_picture: image.user.profile_picture
          }}},
          { safe: true, upsert: true });
        // Send a socket to your client with the new image
        newImage({
          id: image.id,
          img: image.images.standard_resolution.url,
          link: image.link,
          username: image.user.username,
          profile: image.user.profile_picture
        });
      });
  });
  res.end();
});
And then in your client, you will only have to push the new images received in the newImage socket event into $scope.items.
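For instance, a rough sketch of that client-side handler (not from the original answer), assuming the same socket service and that the template reads the same fields as the Instagram media objects already in $scope.items:
socket.on('newImage', function(image) {
  // reshape the socket payload to roughly match the Instagram media objects in $scope.items
  // (the exact shape the template expects is an assumption)
  $scope.items.unshift({
    images: { standard_resolution: { url: image.img } },
    link: image.link,
    user: { username: image.username, profile_picture: image.profile }
  });
  // if the socket wrapper is not Angular-aware, wrap the update in $scope.$apply(...)
});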
