I have a Node.js server file (index.js) and a client file (orderlist.js).
In index.js I am getting a promise object, like this:
// assuming `order` is declared elsewhere in index.js, something like:
const order = { id: [], wavetype: [], color: [], thick: [], readydate: [], createdate: [], manager: [] };
function returnOrderArray() {
var i = 0;
const promise = new Promise((resolve, reject) => {
connection.query('SELECT * FROM orders', function(error, results) {
while (i < results.length) {
order.id[i] = results[i].id;
order.wavetype[i] = results[i].wavetype;
order.color[i] = results[i].color;
order.thick[i] = results[i].thick;
order.readydate[i] = results[i].readydate;
order.createdate[i] = results[i].createdate;
order.manager[i] = results[i].manager;
i++;
}
resolve(order);
// console.log(order);
});
});
return promise;
}
Then I want to pass it to another JS file.
I tried to do that with module.exports:
app.get('/orderlist', checkUserSession, async function(request, response) {
returnOrderArray().catch(error => console.log(error)).then((() => {
module.exports.order = order;
response.render("orderlist.ejs", { username: request.session.username });
})).catch(error => console.log(error));
});
and then import it in orderlist.js
var ind = require('../../index')
function asd() {
alert(ind.order);
}
but it doesn't seem to work.
What am I doing wrong, and what's the best way to pass objects to other files in JS?
You need to export your module like so: module.exports = returnOrderArray.
Try this:
orderlist.js
const returnOrderArray = () => {...some code..}
module.exports = returnOrderArray
index.js
const returnOrderArray = require('./path/to/orderlist.js')
const run = async() => {
const orderlist = await returnOrderArray() // use await or then as you prefer
}
run()
Here's an async/await link if curious!
Hope this will work :)
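To connect this back to the original route: instead of stashing the result on module.exports, you can await the exported function inside the handler and pass the data straight to the template. A minimal sketch, assuming your EJS template reads an orders variable (that variable name is my invention):

const returnOrderArray = require('./path/to/orderlist.js')

app.get('/orderlist', checkUserSession, async function(request, response) {
  try {
    // wait for the query to finish, then hand the result to the view
    const orders = await returnOrderArray()
    response.render('orderlist.ejs', {
      username: request.session.username,
      orders: orders
    })
  } catch (error) {
    console.log(error)
    response.status(500).send('Could not load orders')
  }
})

Also note that a browser-side script like your client orderlist.js cannot require() server files at all; data has to reach the browser through the rendered template or a separate HTTP request.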
I'm creating a web scraper using Node, Cheerio, and Axios (async/await), and I want the function to run every 30 seconds. I tried using setTimeout and setInterval but did not get the expected result; instead I got a heap out of memory error. I want to run the mvcAppointmentSearch function in the while loop every 30 seconds. Following is the code; I'm also attaching the CodePen link for better readability.
Code pen link
const express = require('express');
const request = require('request-promise');
const cheerio = require('cheerio');
const axios = require('axios');
const cssSelect = require('css-select');
const open = require('open');
// const mvcUrl = 'https://telegov.njportal.com/njmvc/AppointmentWizard/17/';
const mvcUrl = 'https://telegov.njportal.com/njmvc/AppointmentWizard/14/';
const mvcLocation = ['Edison', 'Rahway', 'SouthPlainfield'];
// const mvcLocationNumber = ['240', '252', '239'];
const mvcLocationNumber = ['163'];
const requiredMonths = ['September', 'October'];
const callUrl = async (url, locationNumberIndex) => {
try {
const response = await axios.get(url);
//console.log('call url', response.data);
getData(response.data, locationNumberIndex);
} catch (err) {
console.log(err);
}
};
const mvcAppointmentSearch = () => {
for (let i = 0; i < mvcLocationNumber.length; i++) {
const currentUrl = mvcUrl + mvcLocationNumber[i];
console.log(mvcLocationNumber[i]);
callUrl(currentUrl, i);
}
};
const getData = (html, locationNumberIndex) => {
let data = [];
let $ = cheerio.load(html);
console.log('datais ', $);
$.prototype.exists = function (selector) {
return this.find(selector).length > 0;
};
const checkerLength = $('div').exists('.alert-danger');
console.log(checkerLength);
if (checkerLength) {
console.log(
`No appointment available in ${mvcLocation[locationNumberIndex]}`
);
} else {
const dateString = $('.control-label').text();
const availableMonth = dateString.trim().split(' ')[7];
const exactDateAvailability = dateString.slice(24, -1);
console.log(availableMonth);
if (requiredMonths.includes(availableMonth)) {
console.log('Hurray there is an appointment available');
const message = `Appointment available for the location ${mvcLocation[locationNumberIndex]} on ${exactDateAvailability}`;
open(`${mvcUrl + mvcLocationNumber[locationNumberIndex]}`);
console.log(message);
} else {
console.log('required Month is not available still searching');
}
}
};
while (true) {
try {
// mvcAppointmentSearch();
// want to run the following function for every 30 seconds.
mvcAppointmentSearch();
} catch (err) {
console.log(`Error has Occured ${err}`);
}
}
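For what it's worth, a synchronous while (true) loop never yields back to Node's event loop, so the axios calls queued inside mvcAppointmentSearch can never resolve and the backlog grows until memory runs out. A minimal sketch of the 30-second scheduling described above, using setInterval in place of the while loop (everything else stays the same):

const THIRTY_SECONDS = 30 * 1000;

// run once immediately, then repeat every 30 seconds
mvcAppointmentSearch();
const timer = setInterval(() => {
  try {
    mvcAppointmentSearch();
  } catch (err) {
    console.log(`Error has occurred ${err}`);
  }
}, THIRTY_SECONDS);

// call clearInterval(timer) whenever you want to stop polling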
I can't find what I am doing wrong here. I'm trying to unzip the files and then go through them, creating a new HTML file in each folder and also renaming the image files.
I tested it, and the script is waiting for the unzip to finish before going through the files, but I'm always getting the following error:
(node:3656) UnhandledPromiseRejectionWarning: Error: ENOENT: no such file or directory, scandir 'D:\Sites\rename-files\files\cloud'
[0] at Object.readdirSync (fs.js:955:3)
[0] at processDirectories (D:\Sites\rename-files\src\controllers\goDirs.js:8:12)
[0] at goDirs (D:\Sites\rename-files\src\controllers\goDirs.js:31:5)
[0] at createBanners (D:\Sites\rename-files\src\controllers\RenameController.js:18:33)
[0] at processTicksAndRejections (internal/process/task_queues.js:97:5)
Here are my files:
RenameController.js
const unzipFiles = require('./unzipFiles')
const goDirs = require('./goDirs')
const RenameController = {
async root(req, res) {
res.send('hello root!');
},
async createBanners(req, res) {
const { name, link, template } = req.body
const { filename } = req.file
const projectName = name.replace(' ','-')
try{
const unzipPath = await unzipFiles(projectName, filename)
const files = await goDirs(
unzipPath,
projectName,
template,
link
)
return res.json(JSON.stringify(files))
} catch(err){
return res.json(err)
}
}
}
module.exports = RenameController
unzipFiles.js
const fs = require('fs')
const path = require('path')
const unzipper = require('unzipper')
const unzipFiles = (projectName, filename) => {
const zipFile = path.join(__dirname, `../../files/${filename}`)
const unzipPath = path.join(__dirname, `../../files/${projectName}`)
return new Promise( (resolve, reject) => {
fs.createReadStream(zipFile)
.pipe(unzipper.Extract({ path: unzipPath }))
.on('close', resolve(unzipPath))
})
}
module.exports = unzipFiles
goDirs.js
const fs = require('fs')
const path = require('path')
const createHtml = require('./createHtml')
let bannerFiles = []
const goDirs = (directory, projectName, template, link) => {
const processDirectories = async (directory, projectName, template, link) => {
fs.readdirSync(directory).forEach(function(file){
const absolute = path.join(directory, file)
let filename = ''
if(fs.lstatSync(absolute).isDirectory()){
createHtml(file, projectName, template, link)
return processDirectories(absolute, projectName, template, link)
} else {
if (file.indexOf('background') >= 0) filename = 'background.jpg'
else if (file.indexOf('copy') >= 0) filename = 'copy.png'
else if (file.indexOf('cta') >= 0) filename = 'cta.png'
else if (file.indexOf('logo') >= 0) filename = 'logo.png'
fs.rename(
absolute,
absolute.replace(file, filename),
() => {}
)
bannerFiles.push(absolute)
}
})
}
processDirectories(directory, projectName, template, link)
return new Promise((resolve, reject) => {
bannerFiles.length != 0 ? resolve(bannerFiles) : reject()
})
}
module.exports = goDirs
Thanks!!
Apparently, you need to handle the case where the processDirectories promise rejects in goDirs.js:
processDirectories(directory, projectName, template, link)
Try to .catch(...) this ☝️ call to your async method, like this:
processDirectories(directory, projectName, template, link).catch(/* your error treatment code */)
Are you sure the path D:\Sites\rename-files\files\cloud exists before you try to read that directory? If not, you should create it first, either manually or with fs.mkdir.
P.S. If you are using an up-to-date version of Node, you can use the 'fs/promises' API instead of promisifying fs methods or using the Sync ones. https://nodejs.org/api/fs.html
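To make that concrete, here is a rough sketch of what the directory walk in goDirs.js could look like on top of fs/promises (available as require('fs/promises') on current Node, or require('fs').promises on older releases). This illustrates the API rather than being a drop-in rewrite of the whole file:

const fsp = require('fs/promises')
const path = require('path')

async function processDirectories(directory) {
  // create the directory first if it might not exist yet
  await fsp.mkdir(directory, { recursive: true })

  const files = await fsp.readdir(directory)
  for (const file of files) {
    const absolute = path.join(directory, file)
    const stats = await fsp.lstat(absolute)
    if (stats.isDirectory()) {
      await processDirectories(absolute) // recurse into subfolders
    }
    // ...create the HTML and rename files here, as in the original...
  }
}

Because every step is awaited, any failure propagates to the caller, where a single try/catch (or .catch) can handle it instead of it becoming an UnhandledPromiseRejectionWarning.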
I have written a POST endpoint in Express with Node. When I make a call to the API, it runs a utility with setInterval(), and I want to send the API response after the utility executes clearInterval().
How can I wait and send the response after the utility execution is finished?
Please see the code below
REST API code:
const express = require('express');
const router = express.Router();
const multer= require('multer');
const {readCSVFile}= require('../util/index');
var storage = multer.diskStorage({
destination: (req, file, cb) => {
cb(null, 'uploads');
},
filename: (req, file, cb) => {
cb(null, file.fieldname + '-' + Date.now()+'.xlsx');
}
});
var upload = multer({storage: storage});
router.post('/fileUpload', upload.single('filename'), async (req, res) => {
readCSVFile();
res.status(201).json({id:1});
});
router.get('/',(req,res)=>{
res.sendFile(__dirname+'/index.html');
});
module.exports=router;
Utility code
const config = require('config')
const excelToJson = require('convert-excel-to-json')
const HttpsProxyAgent = require('https-proxy-agent')
const AWS = require('aws-sdk')
const json2xls = require('json2xls')
const fs = require('fs')
const awsConfig = {
httpOptions: {
agent: new HttpsProxyAgent(
config.get('aws.proxy')
),
}
}
AWS.config.credentials = new AWS.SharedIniFileCredentials({
profile: config.get('aws.profile'),
})
AWS.config.update(awsConfig)
let uuidv4 = require('uuid/v4')
let csv = [];
const lexRunTime = new AWS.LexRuntime({
region: config.get('aws.region'),
})
let refreshId
const readCSVFile = () => {
const csvSheet = excelToJson({
sourceFile: './Test.xlsx',
})
csvSheet.Sheet1.forEach(element => {
csv.push((element.A.slice(0, element.A.length)))
})
runTask()
refreshId = setInterval(runTask, 1000)
}
let botParams = {
botAlias: config.get('bot.alias'),
botName: config.get('bot.name'),
sessionAttributes: {},
}
const missedUtterancesArray = []
const matchedUtterancesArray = []
let start = 0
let end = 50
let count = 50
const runTask = () => {
let itemsProcessed = 0
console.log('executing...')
const arrayChunks = csv.slice(start, end)
arrayChunks.forEach((element) => {
botParams.inputText = element
botParams.userId = `${uuidv4()}`
lexRunTime.postText(botParams, function (err, data) {
itemsProcessed++
if (err) console.log(err, err.stack)
else {
if (data.intentName === null) {
missedUtterancesArray.push({
Utterance: element,
})
}
else{
matchedUtterancesArray.push({
Utterance: element,
})
}
}
if (itemsProcessed === arrayChunks.length) {
start = csv.indexOf(csv[end])
end = start + count
}
if (start === -1) {
let xls = json2xls(missedUtterancesArray)
fs.writeFileSync('./MissedUtterances.xlsx', xls, 'binary')
let matchedXls = json2xls(matchedUtterancesArray)
fs.writeFileSync('./MatchedUtterances.xlsx', matchedXls, 'binary')
console.log('File saved successfully!! ')
console.log('Total Matched utterances count: ',csv.length-missedUtterancesArray.length)
console.log('Total Missed utterances count: ',missedUtterancesArray.length)
console.log('Total Utterances count: ',csv.length)
clearInterval(refreshId)
}
})
})
}
I would have needed a few more details to answer this, but pardon my attempt if it does not work.
The setInterval call in readCSVFile is the reason: being asynchronous, it does not stop the code from progressing.
lexRunTime.postText also looks asynchronous. I think you'd be better off using promises when responding to the client.
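To sketch that idea: have the utility return a promise that resolves at the moment clearInterval fires, and await it in the route before sending the response. Below is a stripped-down sketch of the pattern (router and upload as defined in your snippet; a three-tick counter stands in for the real start === -1 end condition, just to keep the example self-contained):

// utility — resolves once the interval clears itself
function readCSVFile() {
  return new Promise((resolve, reject) => {
    let batchesLeft = 3; // stand-in for the real end condition
    const refreshId = setInterval(() => {
      console.log('executing...');
      batchesLeft--;
      if (batchesLeft === 0) {
        clearInterval(refreshId);
        resolve('done'); // this is what lets the caller await
      }
    }, 1000);
  });
}

// route — wait for the utility before answering
router.post('/fileUpload', upload.single('filename'), async (req, res) => {
  try {
    await readCSVFile();
    res.status(201).json({ id: 1 });
  } catch (err) {
    res.status(500).json({ error: String(err) });
  }
});

In the real utility, the resolve call would go right next to the existing clearInterval(refreshId) inside runTask.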
I am new to Node.js as well as async programming, and I am building a shopping project. Even though my program works just fine, I want to clarify whether my approach is correct or not.
controllers file
exports.posteditproduct = (req, res, next) => {
const upprodid = req.body.productid;
const upprodtitle = req.body.title;
const upprodprice = req.body.price;
const upprodimg = req.body.imageurl;
const upproddesc = req.body.description;
Product(
upprodid,
upprodtitle,
upprodimg,
upprodprice,
upproddesc
);
Product.updateproduct(
upprodid,
upprodtitle,
upprodprice,
upprodimg,
upproddesc
).then(function(value){
console.log(value)
fs.writeFile(p,JSON.stringify(value),err=>{
console.log(err)
})
})
res.redirect('/')
}
models
static updateproduct(
upprodid,
upprodtitle,
upprodprice,
upprodimg,
upproddesc
) {
const p = path.join(
path.dirname(process.mainModule.filename),
'data',
'products.json'
)
return new Promise((resolve, reject) => {
fs.readFile(p, (err, data) => {
const allproducts = JSON.parse(data)
const existproductid = allproducts.findIndex(prod => prod.id === upprodid)
const upproduct = [...allproducts]
const spreadall = upproduct[existproductid]
spreadall.id = upprodid
spreadall.title = upprodtitle
spreadall.imageurl = upprodimg
spreadall.price = upprodprice
spreadall.description = upproddesc
upproduct[existproductid] = spreadall
return resolve(upproduct)
})
})
}
So is it okay if, in my models file, I do all the work and then return the final variable within resolve(), and then in my controllers file I just use
.then(function(value) {
  // and then write to the file here
})
Would this approach be okay if I have a very large file with n entries?
Or should I put the writeFile call inside the Promise in the updateproduct method too?
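For comparison, here is a rough sketch of that second option, with the write folded into updateproduct so the promise only resolves once the file is actually saved (error handling added; otherwise the logic mirrors the code above):

static updateproduct(upprodid, upprodtitle, upprodprice, upprodimg, upproddesc) {
  const p = path.join(
    path.dirname(process.mainModule.filename),
    'data',
    'products.json'
  )
  return new Promise((resolve, reject) => {
    fs.readFile(p, (err, data) => {
      if (err) return reject(err)
      const allproducts = JSON.parse(data)
      const existproductid = allproducts.findIndex(prod => prod.id === upprodid)
      allproducts[existproductid] = {
        id: upprodid,
        title: upprodtitle,
        imageurl: upprodimg,
        price: upprodprice,
        description: upproddesc
      }
      // resolve only after the file is actually on disk
      fs.writeFile(p, JSON.stringify(allproducts), err => {
        if (err) return reject(err)
        resolve(allproducts)
      })
    })
  })
}

Either placement works, but for a very large file the real cost is reading and rewriting the entire JSON document on every update; past a certain size, a database serves better than either variant.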
I have 9,577 unique records in a CSV file.
This code inserts 9,800 records, and not all of them are unique: some of the records get duplicated. Any idea why it does not insert exactly the 9,577 unique records, and why it duplicates some of them? Below I also paste the rest of the code so you get the whole picture.
function bulkImportToMongo(arrayToImport, mongooseModel) {
const Model = require(`../../../models/${mongooseModel}`);
let batchCount = Math.ceil(arrayToImport.length / 100);
console.log(arrayToImport.length);
let ops = [];
for (let i = 0; i < batchCount; i++) {
// console.log(i);
let batch = arrayToImport.slice(i, i + 100);
console.log(batch.length);
ops.push(Model.insertMany(batch));
}
return ops;
return Promise.all(ops).then(results => {
// results is an array of results for each batch
console.log("results: ", results);
});
}
I parse the CSV file like this:
const Promise = require("bluebird");
const csv = require("fast-csv");
const path = require("path");
const fs = Promise.promisifyAll(require("fs"));
const promiseCSV = Promise.method((filePath, options) => {
return new Promise((resolve, reject) => {
var records = [];
csv
.fromPath(filePath, options)
.on("data", record => {
records.push(record);
})
.on("end", () => {
// console.log(records);
resolve(records);
});
});
});
And here is the script that connects it all together:
const path = require("path");
const promiseCSV = require("./helpers/ImportCSVFiles");
const {
connectToMongo,
bulkImportToMongo
} = require("./helpers/mongoOperations");
const filePath = path.join(__dirname, "../../data/parts.csv");
const options = {
delimiter: ";",
noheader: true,
headers: [
"facility",
"partNumber",
"partName",
"partDescription",
"netWeight",
"customsTariff"
]
};
connectToMongo("autoMDM");
promiseCSV(filePath, options).then(records => {
bulkImportToMongo(records, "parts.js");
});
It looks like your issue is simply i++: with slice(i, i + 100), consecutive values of i produce overlapping batches, so most records get inserted more than once. Step by the batch size instead, and bound the loop by the array length rather than by batchCount:
for (let i = 0; i < arrayToImport.length; i += 100 /* NOT i++ */) {
let batch = arrayToImport.slice(i, i + 100);
//...
}
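To see the overlap in miniature, take a hypothetical four-element array with a batch size of 2:

const arr = ['a', 'b', 'c', 'd'];
// with i++:    arr.slice(0, 2) -> ['a','b'], arr.slice(1, 3) -> ['b','c']  (overlap)
// with i += 2: arr.slice(0, 2) -> ['a','b'], arr.slice(2, 4) -> ['c','d']  (clean batches)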
I solved it.
I hope this helps others... :-)
I had two errors: one in the function promiseCSV (renamed to parseCSV), and second, bad logic in bulkImportToMongo.
Complete solution:
I parsed and imported 602,198 objects, and here is how long it took using node --max_old_space_size=8000 on a MacBook Pro with 8 GB of RAM.
console
➜ database git:(master) ✗ node --max_old_space_size=8000 partImport.js
Connected to db!
Time to parse file: : 5209.325ms
Disconnected from db!
Time to import parsed objects to db: : 153606.545ms
➜ database git:(master) ✗
parseCSV.js
const csv = require("fast-csv");
function promiseCSV(filePath, options) {
return new Promise((resolve, reject) => {
console.time("Time to parse file");
var records = [];
csv
.fromPath(filePath, options)
.on("data", record => {
records.push(record);
})
.on("end", () => {
console.timeEnd("Time to parse file");
resolve(records);
});
});
}
module.exports = promiseCSV;
mongodb.js
const mongoose = require("mongoose");
mongoose.Promise = global.Promise;
function connectToMongo(databaseName) {
mongoose.connect(`mongodb://localhost:27017/${databaseName}`, {
keepAlive: true,
reconnectTries: Number.MAX_VALUE,
useMongoClient: true
});
console.log("Connected to db!");
}
function disconnectFromMongo() {
mongoose.disconnect();
console.log("Disconnected from db!");
}
function bulkImportToMongo(arrayToImport, mongooseModel) {
const Model = require(`../../../models/${mongooseModel}`);
const batchSize = 100;
let batchCount = Math.ceil(arrayToImport.length / batchSize);
let recordsLeft = arrayToImport.length;
let ops = [];
let counter = 0;
for (let i = 0; i < batchCount; i++) {
let batch = arrayToImport.slice(counter, counter + batchSize);
counter += batchSize;
ops.push(Model.insertMany(batch));
}
return Promise.all(ops);
}
module.exports.bulkImportToMongo = bulkImportToMongo;
module.exports.connectToMongo = connectToMongo;
module.exports.disconnectFromMongo = disconnectFromMongo;
partImport.js
const path = require("path");
const parseCSV = require("./helpers/parseCSV");
const {
connectToMongo,
disconnectFromMongo,
bulkImportToMongo
} = require("./helpers/mongodb");
const filePath = path.join(__dirname, "../../data/parts.csv");
const options = {
delimiter: ";",
noheader: true,
headers: [
"facility",
"partNumber",
"partName",
"partDescription",
"netWeight",
"customsTariff"
]
};
connectToMongo("autoMDM");
parseCSV(filePath, options)
.then(records => {
console.time("Time to import parsed objects to db");
return bulkImportToMongo(records, "parts.js");
})
/* .then(result =>
console.log("Total batches inserted: ", result, result.length)
) */
.then(() => {
disconnectFromMongo();
console.timeEnd("Time to import parsed objects to db");
})
.catch(error => console.log(error));