I'm a bit of a newbie to Node, so be gentle. I'm creating an app for my wedding which takes an uploaded guestlist (in Excel format) and turns it into a JSON array which I can then use to build profiles for each guest (dietary requirements, RSVP answer, etc.).
So far I've got a form on the homepage which allows the user to upload the .xlsx file, and when the form is submitted the user is redirected back to the homepage.
I've created the following route:
router.post('/',
  guestsController.upload,
  guestsController.getGuestlist,
  guestsController.displayGuestlist
);
and here's my guestsController:
const multer = require('multer');
const convertExcel = require('excel-as-json').processFile;

const storage = multer.diskStorage({ //multers disk storage settings
  destination: function (req, file, cb) {
    cb(null, './uploads/')
  },
  filename: function (req, file, cb) {
    var datetimestamp = Date.now();
    cb(null, file.fieldname + '-' + datetimestamp + '.' + file.originalname.split('.')[file.originalname.split('.').length - 1])
  }
});

exports.upload = multer({storage: storage}).single('file');

exports.getGuestlist = async (req, res, next) => {
  try {
    await convertExcel(req.file.path, null, null, (err, guestData) => {
      req.guestlist = guestData.map((guestObj) => Object.values(guestObj)[0]);
    });
    console.log(req.guestlist);
    next();
  } catch (e) {
    res.json({error_code: 1, err_desc: "Corrupted Excel file"});
    next();
  }
};

exports.displayGuestlist = (req, res) => {
  console.log(req.guestlist);
};
At the moment, because of the asynchronous nature of these functions, displayGuestlist is logging undefined to the console because convertExcel has not finished grabbing the data by the time it runs. You can see I have tried to use the new async/await syntax to resolve this, but it hasn't fixed it unfortunately.
I have also tried putting the log in displayGuestlist inside a timeout, which has proven that this is purely a timing issue.
Any help would be much appreciated.
It looks like convertExcel is not a Promise-returning function, but rather uses an old-school callback. await does not work with those, so it's instead awaiting Promise.resolve(undefined) since the function returns undefined, not a Promise. Thankfully, in Node 8 and later, there's a promisify utility to convert callback-style functions to Promise-returning functions so that await can be used.
const { promisify } = require('util');
const convertExcel = promisify(require('excel-as-json').processFile);
// ...
const guestData = await convertExcel(req.file.path, null, null);
req.guestlist = guestData.map((guestObj) => Object.values(guestObj)[0]);
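To put that in context, the whole getGuestlist middleware could end up looking roughly like this (just a sketch, assuming processFile follows the usual error-first callback convention that promisify expects):
const { promisify } = require('util');
const convertExcel = promisify(require('excel-as-json').processFile);

exports.getGuestlist = async (req, res, next) => {
  try {
    // convertExcel now returns a Promise, so await gives us the parsed rows directly
    const guestData = await convertExcel(req.file.path, null, null);
    req.guestlist = guestData.map((guestObj) => Object.values(guestObj)[0]);
    next();
  } catch (e) {
    res.json({ error_code: 1, err_desc: 'Corrupted Excel file' });
  }
};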
You can encapsulate your code in a promise and await this promise to resolve.
exports.getGuestlist = async (req, res, next) => {
  let promise = new Promise((resolve, reject) => {
    convertExcel(req.file.path, null, null, (err, guestData) => {
      if (err) reject(err);
      else resolve(guestData);
    });
  });
  try {
    let guestData = await promise;
    req.guestlist = guestData.map((guestObj) => Object.values(guestObj)[0]);
    console.log(req.guestlist);
    next();
  } catch (e) {
    res.json({error_code: 1, err_desc: "Corrupted Excel file"});
    next();
  }
};
Another developer and I are working on an API using Node.js, and we are not advanced coders yet. We ran into a problem, which I will try to explain here with sample references.
We have an API route that is called from the frontend. Please note this is just a sample and not the real code; the client said the code should be private. We want to stop the function when an error is detected in another function we call, and I am sure we are not doing it right. Here is the API route for this POST request, along with the other function that we exported and imported here.
We simply want the httpCreateHarsh function to end if there is an error in anotherFunction. With what we have, the error shows up in console.log when the user is not an admin, for example, but the httpCreateHarsh function keeps running until it reaches the last line. Is this possible? Or is there another way we can structure the code to achieve this?
Here is a shared sample of my code:
const callAnotherFunction = require("../anotherfunction")

const httpCreateHarsh = async (req, res) => {
  await callAnotherFunction(req, res);
  return res.status(200).json('created')
}

//This is the second function we called:
const obj = {
  status: 'success',
  code: '244'
}

const anotherFunction = async (req, res) => {
  if (req.body.user !== 'admin') {
    return res.status(401).json('Unauthorized')
  }
  return obj
}

module.exports = anotherFunction

//The route here:
const express = require('express');
const router = express.Router();
const httpCreateHarsh = require('../httpCreateHarsh');

router.post("/harsh", httpCreateHarsh)
You can't just return the res object from your second function and expect the calling function to stop.
To solve this problem you could throw exceptions and catch them in your handler function.
// request handler function
const httpCreateHarsh = async (req, res) => {
  try {
    await callAnotherFunction(req, res);
  } catch (e) {
    return res.status(401).json('Unauthorized')
  }
  return res.status(200).json('created')
}

const anotherFunction = async (req, res) => {
  if (req.body.user !== 'admin') {
    throw new Error('Unauthorized')
  }
  return obj
}
What you can do is wrap the code in httpCreateHarsh in a try...catch, so whenever there is an error inside it the catch block will be triggered and you exit the API.
const httpCreateHarsh = async (req, res) => {
  try {
    await callAnotherFunction(req, res);
    return res.status(200).json('created')
  } catch (err) {
    return res.status(401).json('Unauthorized')
  }
}
In addition, you can return a promise from anotherFunction so that the catch block will be triggered once the promise is rejected.
For example:
const anotherFunction = async (req, res) => {
  return new Promise(function (myResolve, myReject) {
    if (req.body.user !== 'admin') {
      myReject();
    }
    myResolve(obj);
  });
}
If the code runs as you want it to, it will generate the "Cannot set headers after they are sent to the client" error, because you will be returning two responses:
the first will be the "Unauthorized" response from anotherFunction, and then the other response, "created", from the current httpCreateHarsh function.
What you should do instead is call anotherFunction as a middleware before moving on to the httpCreateHarsh function.
It can be done this way:
// anotherfunction.js file containing the function you want to import
module.exports = async function anotherFunction(req, res, next) {
  if (req.body.user !== 'admin') {
    return res.status(401).json('Unauthorized')
  }
  // this way, you can access this object from the "httpCreateHarsh" function by using req.body.obj
  req.body.obj = {
    status: 'success',
    code: '244'
  }
  // calling next() indicates that there were no errors, and the next function will be called
  next();
}

const httpCreateHarsh = async (req, res) => {
  // do whatever you want here
  return res.status(200).json('created')
}
//The route here:
const express = require('express');
const router = express.Router();
const httpCreateHarsh = require('../httpCreateHarsh');
const callAnotherFunction = require("../anotherfunction")
router.post("/harsh", (req, res, next) => callAnotherFunction(req, res, next), httpCreateHarsh)
I have the following files:
My routes - where the orders_count route lives:
routes/index.js
const express = require('express');
const router = express.Router();
const transactionsController = require('../controllers/transactionsController');
const ordersController = require('../controllers/ordersController');
const ordersCountController = require('../controllers/ordersCountController');
router.get('/transactions', transactionsController);
router.get('/orders', ordersController);
router.get('/orders_count', ordersCountController);
module.exports = router;
I then have my orders count controller living in the controllers directory:
controllers/ordersCountController.js
const ordersCountService = require('../services/ordersCountService');

const ordersCountController = (req, res) => {
  ordersCountService((error, data) => {
    if (error) {
      return res.send({ error });
    }
    res.send({ data })
  });
};

module.exports = ordersCountController;
My controller then calls my order count service which fetches data from another API.
services/ordersCountService.js
const fetch = require('node-fetch');

// connect to api and make initial call
const ordersCountService = (req, res) => {
  const url = ...;
  const settings = { method: 'Get'};
  fetch(url, settings)
    .then(res => {
      if (res.ok) {
        res.json().then((data) => {
          return data;
        });
      } else {
        throw 'Unable to retrieve data';
      }
    }).catch(error => {
      console.log(error);
    });
}

module.exports = ordersCountService;
I'm trying to return the JSON response. I initially had it set up with the request package, but looking at the npm site it appears that it's deprecated, so I have been digging through how to use node-fetch.
I have tried both return data and res.send({ data }), but neither is solving the problem.
I am still new to this, so I am likely missing something very obvious, but how come I am not sending the JSON back through so that it displays at the /api/orders_count endpoint?
I keep thinking I messed something up in my controller but have been looking at it for so long and can't seem to figure it out.
Any help would be greatly appreciated and if there is anything I can add for clarity, please don't hesitate to ask.
Best.
Please learn promises and await syntax; life will be easier.
Never throw a string. Always prefer a real error object, like this: throw new Error('xxx'). That way you will always get a stack, which is way easier to debug (see the short sketch below).
Avoid callback hell: http://callbackhell.com/
You need to decide whether you want to catch the error in the controller or in the service; there is no need to do it in both.
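For example, a quick illustration of the string vs. Error difference (nothing project-specific here, just plain Node):
try {
  throw new Error('Unable to retrieve data'); // an Error carries a stack trace
} catch (err) {
  console.log(err.message); // 'Unable to retrieve data'
  console.log(err.stack);   // shows exactly where it was thrown
}
// By contrast, `throw 'Unable to retrieve data'` gives you only the bare string, with no stack.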
In the controller you call the service like this:
ordersCountService((error, data) => {
but you declare it like this:
const ordersCountService = (req, res) => {
which is not compatible. If you work with callback style, it should look like this:
const ordersCountService = (callback) => {
  ...
  if (error) return callback(error)
  ...
  callback(null, gooddata);
}
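For completeness, here is a sketch of what the full callback-style service could look like (the url here is only a placeholder, not the real endpoint from the question); the controller from the question could then call it unchanged:
const fetch = require('node-fetch');

const ordersCountService = (callback) => {
  const url = 'https://example.com/api/orders/count'; // placeholder
  const settings = { method: 'GET' };
  fetch(url, settings)
    .then(res => {
      if (!res.ok) throw new Error('Unable to retrieve data');
      return res.json();
    })
    .then(data => callback(null, data))   // success: error-first callback gets null error
    .catch(error => callback(error));     // failure: pass the error along
};

module.exports = ordersCountService;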
Here is an example of flattening your ordersCountService function to await syntax, which allows the "return data" you were trying to do:
const fetch = require('node-fetch');

// connect to api and make initial call
const ordersCountService = async (req, res) => {
  const url = ...;
  const settings = { method: 'Get'};
  try {
    const response = await fetch(url, settings);
    if (!response.ok) throw new Error('Unable to retrieve data');
    return await response.json();
  } catch (error) {
    console.log(error);
  }
}

module.exports = ordersCountService;
In fact I would prefer to handle errors in the controller. Then this would be sufficient as a service:
const fetch = require('node-fetch');

// connect to api and make initial call
const ordersCountService = async () => {
  const url = ...;
  const settings = { method: 'Get'};
  const res = await fetch(url, settings);
  if (!res.ok) throw new Error('Unable to retrieve data');
  return await res.json();
}

module.exports = ordersCountService;
Then you can call this function like this:
try {
  const data = await ordersCountService();
} catch (err) {
  console.log(err);
}

// or
ordersCountService().then((data) => console.log(data)).catch((err) => console.error(err));
I'm getting an empty response ({}), whereas my expected response is of format:
{
  locationResponse: "location foo",
  forecastResponse: "forecast bar"
}
In my index file I have:
const {getCity} = require('./routes/city');
const {getForecasts} = require('./routes/forecast');
app.get('/forecasts', function (req, res) {
  var location = getCity(req, res);
  var forecast = getForecasts(req, res);
  //these are logged as undefined
  console.log("Inside index.js");
  console.log(location);
  console.log(forecast);
  res.send({locationResponse: location, forecastResponse: forecast});
});
Inside the forecast file I have the following, and a similar function is in the city file:
module.exports = {
  getForecasts: (req, res) => {
    var result = //mySQL DB calls and processing
    console.log("Inside getForecasts");
    console.log(result); //actual result printed
    return "Forecast";
  }
};
UPDATE: I added some logs right before each call's return statement and figured out that the logs are printed in the following order, which means it is not working as expected because I had not considered the fact that these are asynchronous calls.
Inside index.js
undefined
undefined
Inside getForecasts
{result}
The problem here is that in your ./routes/forecast getForecasts method, you're telling the response to send with the data "Forecast". You should only ever use res.send once per request, as this will resolve the response and return it to the client.
Instead, your getForecasts method should just return whatever data you need, and your index file should handle the response. If you need getForecasts to handle a response too, perhaps because you're sending requests directly to a forecasts endpoint that doesn't require location data, then you can refactor your code so that both index and forecasts make a call to get the data you need. For example:
/* index.js */
const {getCity} = require('./data/city');
const {getForecasts} = require('./data/forecast');
app.get('/forecasts', function (req, res) {
  var location = getCity();
  var forecast = getForecasts();
  res.send({locationResponse: location, forecastResponse: forecast});
});

/* data/forecast.js */
module.exports = {
  getForecasts: () => {
    return "Forecast";
  }
};

/* data/city.js */
module.exports = {
  getCity: () => {
    return "City";
  }
};
Then you can also have:
/* routes/forecast.js */
const {getForecasts} = require('../data/forecast');
module.exports = {
  getForecasts: (req, res) => {
    res.send(getForecasts());
  }
};
The above may be overcomplicating things, but I made the assumption that if you're using a routes directory, you probably want route handlers to be stored there. Hope this helps.
It seems both of your getCity() and getForecasts() functions are async. These asynchronous functions return a promise rather than the actual response.
So you can use simple async/await or Promise.all in JS to solve the issue.
Option 1: Use await to wait for each promise to resolve before logging the values to the console:
app.get('/forecasts', async function (req, res) {
  var location = await getCity(req, res);
  var forecast = await getForecasts(req, res);
  // these now log the resolved values instead of undefined
  console.log("Inside index.js");
  console.log(location);
  console.log(forecast);
  res.send({locationResponse: location, forecastResponse: forecast});
});
Option 2: Use Promise.all() to wait for all the promises to be fulfilled.
app.get('/forecasts', async function (req, res) {
  var list = await Promise.all([getCity(req, res), getForecasts(req, res)]);
  // these now log the resolved values instead of undefined
  console.log("Inside index.js");
  console.log(list[0]);
  console.log(list[1]);
  res.send({locationResponse: list[0], forecastResponse: list[1]});
});
You can make use of async/await syntax.
app.get('/forecasts', async function (req, res) {
  var location = await getCity(req, res);
  var forecast = await getForecasts(req, res);
  // these now log the resolved values instead of undefined
  console.log("Inside index.js");
  console.log(location);
  console.log(forecast);
  res.send({locationResponse: location, forecastResponse: forecast});
});
I am working on a POST method on the server side to retrieve all files inside the requested directory (not recursive), and below is my code.
I am having difficulty sending the response back (res.json(pathContent);) with the updated pathContent without using the setTimeout.
I understand that this is due to the asynchronous behavior of the file system methods used (readdir and stat) and need to use some sort of callback, async, or promise technique.
I tried to use the async.waterfall with the entire body of readdir as one function and the res.json(pathContent) as the other, but it didn't send the updated array to the client side.
I know that there have been thousands of questions regarding this kind of asynchronous operation, but I could not figure out how to solve my case after reading a number of posts.
Any comments would be appreciated. Thanks.
const express = require('express');
const bodyParser = require('body-parser');
const fs = require('fs');
const path = require('path');

const app = express();
app.use(bodyParser.json());

var pathName = '';
const pathContent = [];

app.post('/api/files', (req, res) => {
  const newPath = req.body.path;
  fs.readdir(newPath, (err, files) => {
    if (err) {
      res.status(422).json({ message: `${err}` });
      return;
    }
    // set the pathName and empty pathContent
    pathName = newPath;
    pathContent.length = 0;
    // iterate each file
    const absPath = path.resolve(pathName);
    files.forEach(file => {
      // get file info and store in pathContent
      fs.stat(absPath + '/' + file, (err, stats) => {
        if (err) {
          console.log(`${err}`);
          return;
        }
        if (stats.isFile()) {
          pathContent.push({
            path: pathName,
            name: file.substring(0, file.lastIndexOf('.')),
            type: file.substring(file.lastIndexOf('.') + 1).concat(' File'),
          })
        } else if (stats.isDirectory()) {
          pathContent.push({
            path: pathName,
            name: file,
            type: 'Directory',
          });
        }
      });
    });
  });
  setTimeout(() => { res.json(pathContent); }, 100);
});
The easiest and cleanest way would be to use async/await; that way you can make use of promises and the code will look almost like synchronous code.
You therefore need promisified versions of readdir and stat, which can be created with promisify from the util core lib.
const { promisify } = require('util')
const path = require('path')

const readdir = promisify(require('fs').readdir)
const stat = promisify(require('fs').stat)

async function getPathContent(newPath) {
  // move pathContent here, otherwise you can have conflicts with concurrent requests
  const pathContent = [];
  let files = await readdir(newPath)
  let pathName = newPath;
  // pathContent.length = 0; // not needed anymore because pathContent is new for each request
  const absPath = path.resolve(pathName);
  // iterate each file
  // replace forEach with (for ... of) because this makes it easier
  // to work with "async"
  // otherwise you would need to use files.map and Promise.all
  for (let file of files) {
    // get file info and store in pathContent
    try {
      let stats = await stat(absPath + '/' + file)
      if (stats.isFile()) {
        pathContent.push({
          path: pathName,
          name: file.substring(0, file.lastIndexOf('.')),
          type: file.substring(file.lastIndexOf('.') + 1).concat(' File'),
        })
      } else if (stats.isDirectory()) {
        pathContent.push({
          path: pathName,
          name: file,
          type: 'Directory',
        });
      }
    } catch (err) {
      console.log(`${err}`);
    }
  }
  return pathContent;
}

app.post('/api/files', (req, res, next) => {
  const newPath = req.body.path;
  getPathContent(newPath).then((pathContent) => {
    res.json(pathContent);
  }, (err) => {
    res.status(422).json({
      message: `${err}`
    });
  })
})
And you should not concatenate paths using + (absPath + '/' + file); use path.join(absPath, file) or path.resolve(absPath, file) instead.
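A quick sketch of the difference (the values here are only examples, not from the question):
const path = require('path');

const absPath = '/uploads/guests'; // example values
const file = 'list.xlsx';

// String concatenation hard-codes the separator and is easy to get wrong:
console.log(absPath + '/' + file);     // '/uploads/guests/list.xlsx'

// path.join uses the right separator for the platform and normalizes the result:
console.log(path.join(absPath, file)); // '/uploads/guests/list.xlsx' (backslashes on Windows)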
And you should never write your code in such a way that the code executed for a request relies on global variables like var pathName = ''; and const pathContent = [];. This might work in your testing environment, but it will for sure lead to problems in production, where two requests can work on those variables at the "same time".
Based on the initial comment I received and the reference, I used readdirSync and statSync instead and was able to make it work. I will review other answers as well and learn about other ways to implement this.
Thank you all for your kind inputs.
Here is my solution.
const express = require('express');
const bodyParser = require('body-parser');
const fs = require('fs');
const path = require('path');

const app = express();
app.use(bodyParser.json());

var pathName = '';
const pathContent = [];

app.post('/api/files', (req, res) => {
  const newPath = req.body.path;
  // validate path
  let files;
  try {
    files = fs.readdirSync(newPath);
  } catch (err) {
    res.status(422).json({ message: `${err}` });
    return;
  }
  // set the pathName and empty pathContent
  pathName = newPath;
  pathContent.length = 0;
  // iterate each file
  let absPath = path.resolve(pathName);
  files.forEach(file => {
    // get file info and store in pathContent
    let fileStat = fs.statSync(absPath + '/' + file);
    if (fileStat.isFile()) {
      pathContent.push({
        path: pathName,
        name: file.substring(0, file.lastIndexOf('.')),
        type: file.substring(file.lastIndexOf('.') + 1).concat(' File'),
      })
    } else if (fileStat.isDirectory()) {
      pathContent.push({
        path: pathName,
        name: file,
        type: 'Directory',
      });
    }
  });
  res.json(pathContent);
});
There are different ways to do it:
You can first promisify the function using new Promise(), then use async/await or .then().
You can use the promisifyAll() function of the Bluebird package (https://www.npmjs.com/package/bluebird); see the sketch after this list.
You can use the synchronous versions of the fs functions:
https://nodejs.org/api/fs.html#fs_fs_readdirsync_path_options
https://nodejs.org/api/fs.html#fs_fs_statsync_path_options
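A minimal sketch of the Bluebird option, assuming bluebird is installed (promisifyAll adds promise-returning *Async variants alongside the callback versions):
const Promise = require('bluebird');
const path = require('path');
const fs = Promise.promisifyAll(require('fs'));

async function listDirectory(dirPath) {
  const files = await fs.readdirAsync(dirPath);
  const stats = await Promise.all(
    files.map(file => fs.statAsync(path.join(dirPath, file)))
  );
  // pair each name with whether it is a file or a directory
  return files.map((file, i) => ({
    name: file,
    type: stats[i].isDirectory() ? 'Directory' : 'File',
  }));
}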
Here's some options:
Use the synchronous file methods (check the docs, but they usually end with Sync). Slower, but a fairly simple code change, and very easy to understand.
Use promises (or util.promisify) to create a promise for each stat, and Promise.all to wait for all the stats to complete. After that, you can use async functions and await as well for easier to read code and simpler error handling. (Probably the largest code change, but it will make the async code easier to follow)
Keep a counter of the number of stats you have done, and if that number is the size you expect, then call res.json from inside the stat callback (smallest code change, but very error prone; sketched below).
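For illustration, a rough sketch of that counter approach, reusing newPath, absPath, and res from the question's route handler (and simplifying the type field):
fs.readdir(newPath, (err, files) => {
  if (err) return res.status(422).json({ message: `${err}` });
  const pathContent = [];
  let remaining = files.length;
  if (remaining === 0) return res.json(pathContent);
  files.forEach(file => {
    fs.stat(path.join(absPath, file), (err, stats) => {
      if (!err) {
        pathContent.push({
          path: newPath,
          name: file,
          type: stats.isDirectory() ? 'Directory' : 'File',
        });
      }
      // only respond once every stat callback has fired
      if (--remaining === 0) res.json(pathContent);
    });
  });
});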
I'm trying to use a promise in this code:
// Listing files
app.post('/readList', function(req, res) {
  var cleared = false
  var readList = new Promise(function(resolve, reject) {
    fs.readdir(req.body.path, (err, files) => {
      files.forEach(file => {
        console.log(file)
        var fileDetail = {
          name: '',
          local: true,
          filetype: 'fas fa-folder-open',
          filepath: '',
          isFile: false
        }
        if (!cleared) {
          listedFiles = []
          cleared = true
        }
        fileDetail.name = file
        fileDetail.filepath = req.body.path + file
        fs.stat(req.body.path + file, function(err, stats) {
          fileDetail.isFile = stats.isFile()
          if (stats.isFile()) fileDetail.filetype = 'far fa-file-alt'
          else fileDetail.filetype = 'fas fa-folder-open'
        })
        listedFiles.push(fileDetail)
      })
    })
  })
  readList.then(
    console.log('vorta'),
    res.end(JSON.stringify(listedFiles))
  )
})
I've put this line in to show the items being listed:
console.log(file)
And put this line in to execute after the promise:
readList.then(
  console.log('vorta'),
  res.end(JSON.stringify(listedFiles))
)
I don't know where the mistake is, but the console is showing 'vorta' before the file names.
What am I doing wrong?
Here you're passing two params:
readList.then(
  // #1 In this case you're executing the log function immediately, which is why the message is printed.
  console.log('vorta'),
  res.end(JSON.stringify(listedFiles)) // #2
)
So, you need to pass a function
readList.then(function() {
  console.log('vorta');
  res.end(JSON.stringify(listedFiles));
})
Further, you need to call the resolve function within the async logic.
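For instance, the resolve call could go at the end of the readdir callback, roughly like this (a sketch; note that the fs.stat calls inside the loop are themselves asynchronous and need the same treatment):
var readList = new Promise(function (resolve, reject) {
  fs.readdir(req.body.path, (err, files) => {
    if (err) return reject(err);
    files.forEach(file => {
      // ... build fileDetail and push it into listedFiles as before ...
    });
    resolve(listedFiles); // this is what makes .then() fire with the list
  });
});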
As I said in my earlier comment, there are at least four problems here:
You aren't calling resolve(listedFiles) to resolve the promise so its .then() handler is never called
You need to pass a single function to .then()
You have no error handling for your async operations
You seem to be assuming that fs.stat() is synchronous when it is not
The best way to attack this problem is to promisify all your asynchronous functions and then use promises for controlling the flow and the error handling. Here's a way to fix all of these issues:
const util = require('util');
const fs = require('fs');
const readdirAsync = util.promisify(fs.readdir);
const statAsync = util.promisify(fs.stat);

// Listing files
app.post('/readList', function(req, res) {
  // add code here to sanitize req.body.path so it can only
  // point to a specific sub-directory that is intended for public consumption
  readdirAsync(req.body.path).then(files => {
    return Promise.all(files.map(file => {
      let fileDetail = {
        name: file,
        local: true,
        filepath: req.body.path + file
      };
      return statAsync(fileDetail.filepath).then(stats => {
        fileDetail.isFile = stats.isFile();
        fileDetail.filetype = fileDetail.isFile ? 'far fa-file-alt' : 'fas fa-folder-open';
        return fileDetail;
      });
    }));
  }).then(listedFiles => {
    res.json(listedFiles);
  }).catch(err => {
    console.log(err);
    res.sendStatus(500);
  });
});
FYI, this is kind of a dangerous implementation because it lists files on ANY path that the user passes in, so any outsider can see the entire file listing on your server's hard drive. It could even list network-attached drives.
You should be limiting the scope of req.body.path to a specific file hierarchy that is intended for public consumption.
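One possible way to do that, as a sketch (PUBLIC_ROOT is a hypothetical directory you choose to expose; it is not part of the original code):
const path = require('path');

// Hypothetical base directory that is safe to expose
const PUBLIC_ROOT = path.resolve(__dirname, 'public-files');

function resolvePublicPath(requestedPath) {
  // resolve the requested path relative to the public root
  const resolved = path.resolve(PUBLIC_ROOT, requestedPath);
  // reject anything that escapes the public root (e.g. via "..")
  if (resolved !== PUBLIC_ROOT && !resolved.startsWith(PUBLIC_ROOT + path.sep)) {
    throw new Error('Path outside of the public directory');
  }
  return resolved;
}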
You need to pass a function to then.
As it stands, you are calling log and end immediately and passing their return values.
Here is a working copy of your code. I have made a few changes that you can omit, since they are only there to give you working code:
var express = require('express');
var fs = require('fs');
var app = express();

app.post('/readList', function(req, res) {
  //Assuming sample data coming in req.body
  //Remove this when you run it at your side
  req.body = {
    path: 'temp_dir'
  };
  var cleared = false;
  var listedFiles = [];

  //Promising readList :)
  function readList() {
    return new Promise(function (resolve, reject) {
      // Suppose req.body.path is 'temp_dir' and it has 2 files
      fs.readdir(req.body.path, (err, files) => {
        console.log(files);
        //in the following commented code you can do what you need
        /*files.forEach(file => {
          console.log(file);
          var fileDetail = {
            name: '',
            local: true,
            filetype: 'fas fa-folder-open',
            filepath: '',
            isFile: false
          }
          if (!cleared) {
            listedFiles = [];
            cleared = true;
          }
          // really? you can think of it later!!
          fileDetail.name = file;
          fileDetail.filepath = req.body.path + file
          // I changed below to avoid surprises for you in data!
          const stats = fs.statSync(req.body.path + file);
          fileDetail.isFile = stats.isFile();
          if (stats.isFile())
            fileDetail.filetype = 'far fa-file-alt';
          else
            fileDetail.filetype = 'fas fa-folder-open'
          listedFiles.push(fileDetail);
        });*/
        resolve(listedFiles);
      });
    });
  }

  readList().then((data) => {
    console.log('vorta');
    // data here will contain the same data as listedFiles, so choose your way; I would recommend going with data
    res.send(JSON.stringify(listedFiles)); // fine
    // res.send(JSON.stringify(data)); // better and recommended
  });
})

app.listen(process.env.PORT || 3000);
console.log('Listening on port 3000');