Parallel query in Node.js - javascript

The function works and eventually gives me the required object, but only on the second request to the server. The first request returns an empty array; the array is then filled asynchronously, so the second request returns the previously accumulated result.
'use strict';
const user2 = require('../models/base');
// BUG: module-level mutable state — `array` and `i` persist between requests,
// so the first call resolves with an empty array and later calls see data
// accumulated by earlier ones.
var array = [];
var i =0;
// Picks random documents until their summed `calories` exceed the 2500
// budget, accumulating results into the shared `array`.
exports.getEda = email =>
new Promise((resolve,reject) => {
user2.count().exec(function(err, count){
// NOTE(review): `err` from count() is ignored here.
var random = Math.floor(Math.random() * count);
var calories = 2500;
test(calories);
// Recursively fetches one random document per call until the budget runs out.
// NOTE(review): the first call above passes no `random`, so `skip(undefined)`
// is used for the first findOne().
function test(calories, random) {
user2.findOne().skip(random).exec(
function (err, result) {
random = Math.floor(Math.random() * count);
var stringify = JSON.stringify(result);
var jsonContent = JSON.parse(stringify);
calories = calories - jsonContent.calories;
console.log(calories);
if (calories > 0){
test(calories, random);
}
array[i] = result;
i++;
});
}
// Logged synchronously — no findOne() callback has run yet, so this is
// empty on the first request.
console.log(array);
})
// BUG: resolves as soon as the count() query settles, before the async
// test() recursion has filled `array` — hence the empty first response.
.then(eda => resolve(array))
.catch(err => reject({ status: 500, message: 'Internal Server Error !' }))
});

You forgot about the return:
...
return new Promise((resolve,reject) => {
...

Related

Empty array is returned outside mongoose function

I'm trying to take some random values from Mongo using mongoose and push it to an array.
But the array is empty outside the function:
// Question code: tries to collect 8 random documents, but sends the
// response before any query callback has fired.
exports.Run = (req, res) => {
var response = {}
var you = "you"
response[you] = [];
Model.estimatedDocumentCount().exec(function (err, count) {
for (let i = 0; i < 8; i++) {
let random = Math.floor(Math.random() * count)
Model.findOne()
.skip(random)
.exec( function (err, result) {
response[you].push(result);
console.log(response); // Array is increased each iteration
})
}
})
// BUG: these two lines run synchronously, before any of the exec()
// callbacks above — which is why the response is sent while still empty.
console.log(response); // Array is empty here
res.status(200).send(response);
};
Please, how to fix that?
Thanks in advance.
Try it this way; hopefully it will work:
// NOTE(review): this suggestion does not run as written — the `await` on
// `await Model.findOne()` sits inside a plain (non-async) callback, which
// is a SyntaxError. Also, `return response` inside the exec() callback does
// not become the resolved value of the awaited expression, so `result`
// would still not hold the populated array.
exports.Run =async (req, res) => {
var response = {}
var you = "you"
response[you] = [];
var result = await Model.estimatedDocumentCount().exec(function (err, count) {
for (let i = 0; i < 8; i++) {
let random = Math.floor(Math.random() * count)
await Model.findOne()
.skip(random)
.exec( function (err, result) {
response[you].push(result);
console.log(response);
})
}
return response;
})
console.log(result); // Array is empty here
res.status(200).send(result);
};
I found the solution and it works for me:
exports.Run = (req, res) => {
ex1( function(rp){
console.log(rp);
res.status(200).send(rp);
});
}
// Counts the documents, then sequentially fetches 8 random ones (awaiting
// each findOne) and hands the populated response object to `callback`.
function ex1(callback) {
var response = {}
var you = "you"
response[you] = [];
// The exec() callback is declared async, so `await` is legal inside it and
// the loop only advances once each findOne() has resolved.
Model.estimatedDocumentCount().exec(async function (err, count) {
// NOTE(review): `err` is ignored — if count() fails, `count` is undefined
// and every random index becomes NaN; confirm upstream error handling.
for (let i = 0; i < 8; i++) {
let random = Math.floor(Math.random() * count)
var doc = await Model.findOne().skip(random).exec();
response[you].push(doc);
}
callback(response);
})
}

nodejs get video dureation

I've been trying this for ages now and I'm not making any progress.
I found this on google https://gist.github.com/Elements-/cf063254730cd754599e
and it's running but when I put that in a function and try to use it with my code its not running.
Code:
// Question code: scans ./data for .mp4 files and builds a Group per file.
fs.readdir(`${__dirname}/data`, (err, files) => {
if (err) return console.error(`[ERROR] ${err}`);
files.forEach(file => {
if (file.endsWith(".mp4")) {
// getVideoDuration(`${__dirname}/data/${file}`)
// BUG (discussed below): getVideoDuration() returns undefined — its result
// is produced inside an fs callback and never propagated — so _maxTime is
// always undefined in the logged Groups.
// NOTE(review): `group` has no declaration here, so it becomes an implicit
// global unless declared elsewhere.
group = new Group(file.split(".")[0], file, null, getVideoDuration(`${__dirname}/data/${file}`), 0);
groups.push(group);
}
});
console.log(groups);
});
// Reads the first 100 bytes of an MP4 and logs the duration encoded in the
// 'mvhd' movie-header atom.
// BUG: `return movieLength` below returns from the innermost fs.read
// callback, not from getVideoDuration — the function itself always returns
// undefined (hence `_maxTime: undefined` in the output shown later).
function getVideoDuration(video) {
var buff = new Buffer.alloc(100);
fs.open(video, 'r', function (err, fd) {
fs.read(fd, buff, 0, 100, 0, function (err, bytesRead, buffer) {
// Time scale and duration are 32-bit big-endian ints 17 and 21 bytes
// past the start of the 'mvhd' tag.
var start = buffer.indexOf(new Buffer.from('mvhd')) + 17;
var timeScale = buffer.readUInt32BE(start, 4);
var duration = buffer.readUInt32BE(start + 4, 4);
var movieLength = Math.floor(duration / timeScale);
console.log('time scale: ' + timeScale);
console.log('duration: ' + duration);
console.log('movie length: ' + movieLength + ' seconds');
return movieLength;
});
});
}
Output:
[
Group {
_name: 'vid',
_video: 'vid.mp4',
_master: null,
_maxTime: undefined,
_currentTime: 0
},
Group {
_name: 'vid2',
_video: 'vid2.mp4',
_master: null,
_maxTime: undefined,
_currentTime: 0
}
]
time scale: 153600
duration: 4636416
movie length: 30 seconds
time scale: 153600
duration: 4636416
movie length: 30 seconds
it's logging the information correctly, but the function is returning undefined
This seems like a lot of extra work for little benefit, so I'm going to refer to get-video-duration https://www.npmjs.com/package/get-video-duration which does a great job of getting durations of any video file in seconds minutes and hours
Copying the last comment of the gist you sent, I came up with this:
const fs = require("fs").promises;
// Value object describing one video group and which connection currently
// "masters" (drives) it.
class Group {
  constructor(name, video, master, maxTime, currentTime) {
    this._name = name;
    this._video = video;
    this._master = master;
    this._maxTime = maxTime;
    this._currentTime = currentTime;
  }

  // Hand mastership to `master`: notify the outgoing master (if any),
  // then notify the incoming one.
  setMaster(master) {
    const previous = this._master;
    if (previous != null) {
      previous.emit('master');
    }
    this._master = master;
    master.emit('master');
  }
};
// Sequentially await `callback(item, index, array)` for each element —
// unlike Array.prototype.forEach, which ignores returned promises.
const asyncForEach = async (array, callback) => {
  for (const [index, item] of array.entries()) {
    await callback(item, index, array);
  }
};
// Scan ./data for .mp4 files, measure each one's duration, and log the
// resulting Group objects. Files are processed one at a time so every
// Group has its duration before it is pushed.
async function loadGroups() {
  const files = await fs.readdir(`${__dirname}/data`);
  const groups = [];
  for (const file of files) {
    if (!file.endsWith(".mp4")) {
      continue;
    }
    const duration = await getVideoDuration(`${__dirname}/data/${file}`);
    groups.push(new Group(file.split(".")[0], file, null, duration, 0));
  }
  console.log(groups);
}
// Read the first 100 bytes of an MP4 file and derive its duration (seconds,
// millisecond precision) from the `mvhd` movie-header atom.
//
// @param {string} video - path to the .mp4 file
// @returns {Promise<number>} duration in seconds
// @throws {Error} if the mvhd atom is not within the first 100 bytes
async function getVideoDuration(video) {
  const buff = Buffer.alloc(100);
  const header = Buffer.from("mvhd");
  const file = await fs.open(video, "r");
  let buffer;
  try {
    ({ buffer } = await file.read(buff, 0, 100, 0));
  } finally {
    // FIX: close the descriptor even if read() rejects (was leaked on error).
    await file.close();
  }
  const headerIndex = buffer.indexOf(header);
  if (headerIndex === -1) {
    // FIX: previously indexOf() === -1 silently produced garbage offsets
    // (start = 16) and a nonsense duration; fail loudly instead.
    throw new Error(`mvhd atom not found in first 100 bytes of ${video}`);
  }
  // mvhd layout: 17 bytes past the atom tag come the 32-bit big-endian
  // time scale, immediately followed by the 32-bit duration.
  const start = headerIndex + 17;
  const timeScale = buffer.readUInt32BE(start);
  const duration = buffer.readUInt32BE(start + 4);
  // Truncate to millisecond precision.
  return Math.floor((duration / timeScale) * 1000) / 1000;
}
// Kick off the directory scan at module load time.
loadGroups();
As to why your original code wasn't working, my guess is that returning inside the callback to fs.open or fs.read doesn't return for getVideoDuration. I couldn't easily figure out a way from the fs docs to figure out how to return the value of the callback, so I just switched over to promises and async/await, which will essentially run the code synchronously. This way you can save the output of fs.open and fs.read and use them to return a value in the scope of getVideoDuration.
I've figured out a work-around for this problem.
// Quick manual check: probe one sample video and print its duration.
async function test() {
  const length = await getDuration(`${__dirname}/data/vid.mp4`);
  console.log(length);
}
test();
// Ask ffprobe for the container duration of `file`, in seconds.
//
// @param {string} file - path to the media file
// @returns {Promise<string>} duration exactly as printed by ffprobe
// @throws (rejects) with the exec error if ffprobe fails or is missing
function getDuration(file) {
  return new Promise((resolve, reject) => {
    // NOTE(review): `file` is interpolated into a shell command unquoted —
    // paths with spaces or shell metacharacters will break the command (or
    // allow injection). Prefer execFile with an argument array.
    exec(`ffprobe -v error -show_entries format=duration -of default=noprint_wrappers=1:nokey=1 ${file}`, (err, stdout, stderr) => {
      // FIX: reject instead of `return console.error(err)` — the original
      // left the promise pending forever on error, hanging any awaiter.
      if (err) return reject(err);
      resolve(stdout ? stdout : stderr);
    });
  });
}
I only tested it on Linux, so I don't know whether it will work on Windows.

Why is my code not waiting for the completion of the function?

I am trying to read some data from a file and store it in a database.
This is part of a larger transaction and I need the returned ids for further steps.
// Question code: reads a CSV line by line, resolves the parameter ID for
// each variable row via getParamID(), and resolves with the collected IDs.
async parseHeaders(mysqlCon, ghID, csv) {
var self = this;
var hIDs = [];
var skip = true;
var idx = 0;
console.log("Parsing headers");
// NOTE(review): an async executor means rejections thrown inside it are
// not routed to reject() — generally an anti-pattern.
return new Promise(async function(resolve, reject) {
try {
var lineReader = require('readline').createInterface({
input: require('fs').createReadStream(csv)
});
// NOTE(review): `await` on .on() is a no-op — on() returns the readline
// Interface, not a promise.
// BUG (the reported symptom): 'close' fires when the input is consumed,
// but the async 'line' handlers below may still be awaiting getParamID(),
// so resolve(hIDs) can run before the IDs have been stored — the caller
// then logs an empty array.
await lineReader.on('close', async function () {
console.log("done: ", JSON.stringify(hIDs));
resolve(hIDs);
});
await lineReader.on('line', async function (line) {
line = line.replace(/\"/g, '');
if (line.startsWith("Variable")) { //Variable,Statistics,Category,Control
console.log("found variables");
skip = false; //Ignore all data and skip to the parameter description.
return; //Skip also the header line.
}
if (!skip) {
var data = line.split(",");
if (data.length < 2) { //Variable section done return results.
console.log("Found sub?",line);
return lineReader.close();
}
var v = data[0];
var bidx = data[0].indexOf(" [");
if (bidx > 0)
v = data[0].substring(0, bidx); //[] are disturbing mysql (E.g.; Air temperature [�C])
var c = data[2];
hIDs[idx++] = await self.getParamID(mysqlCon, ghID, v, c, data);//, function(hID,sidx) { //add data in case the parameter is not in DB, yet.
}
});
} catch(e) {
console.log(JSON.stringify(e));
reject("some error occured: " + e);
}
});
}
// Looks up the Parameter row ID for (ghID, variable, category); inserts the
// row first if it does not exist yet, resolving with the generated ID.
// NOTE(review): SQL is built by string concatenation from caller-supplied
// values — SQL-injection risk; use parameterized queries (`?` placeholders).
async getParamID(mysqlCon,ghID,variable,category,data) {
return new Promise(function(resolve, reject) {
var sql = "SELECT ID FROM Parameter WHERE GreenHouseID="+ghID+" AND Variable = '" + variable + "' AND Category='" + category + "'";
mysqlCon.query(sql, function (err, result, fields) {
// NOTE(review): when `err` is set, `result` may be undefined, so
// `result.length` would throw before `err` is even considered — check
// `err` first.
if(result.length === 0 || err) { //apparently not in DB, yet ... add it (Acronym and Machine need to be set manually).
sql = "INSERT INTO Parameter (GreenHouseID,Variable,Category,Control) VALUES ("+ghID+",'"+variable+"','"+category+"','"+data[3]+"')";
mysqlCon.query(sql, function (err, result) {
if(err) {
console.log(result,err,this.sql);
reject(err);
} else {
console.log("Inserting ",variable," into DB: ",JSON.stringify(result));
resolve(result.insertId); //added, return generated ID.
}
});
} else {
resolve(result[0].ID); //found in DB .. return ID.
}
});
});
}
The functions above are in the base class and called by the following code:
// Caller: awaits parseHeaders, but (per the bug above) the promise resolves
// before the per-line awaits finish, so headerIDs prints empty.
let headerIDs = await self.parseHeaders(mysqlCon, ghID, filePath);
console.log("headers:",JSON.stringify(headerIDs));
The sequence of events is that everything in parseHeaders completes except for the call to self.getParamID and control returns to the calling function which prints an empty array for headerIDs.
The console.log statements in self.getParamID are then printed afterward.
What am I missing?
Thank you
As you want to execute an asynchronous action for every line we could define a handler to do right that:
// Resolve the next time `target` emits `evt`.
const once = (target, evt) => new Promise((resolve) => target.on(evt, resolve));

// Run `action(line, index)` for every 'line' event on `reader`, keeping the
// (possibly pending) results, and resolve with all awaited results once the
// reader emits 'close'.
function mapLines(reader, action) {
  const pending = [];
  let lineNo = 0;
  reader.on("line", (line) => {
    pending.push(action(line, lineNo));
    lineNo += 1;
  });
  return once(reader, "close").then(() => Promise.all(pending));
}
So now you can solve that easily:
// Fragment meant to replace the 'line'/'close' wiring inside parseHeaders:
// mapLines() collects every per-line promise and resolves only after all of
// them have settled, so hIDs is complete when the await returns.
// NOTE(review): `skip` starts false here, whereas the original started with
// true (data before the "Variable" header was ignored) — presumably it
// should still start true; verify against the CSV layout.
let skip = false;
const hIDs = [];
await mapLines(lineReader, async function (line, idx) {
line = line.replace(/\"/g, '');
if (line.startsWith("Variable")) { //Variable,Statistics,Category,Control
console.log("found variables");
skip = false; //Ignore all data and skip to the parameter description.
return; //Skip also the header line.
}
if (!skip) {
var data = line.split(",");
if (data.length < 2) { //Variable section done return results.
console.log("Found sub?",line);
return lineReader.close();
}
var v = data[0];
var bidx = data[0].indexOf(" [");
if (bidx > 0)
v = data[0].substring(0, bidx); //[] are disturbing mysql (E.g.; Air temperature [�C])
var c = data[2];
hIDs[idx] = await self.getParamID(mysqlCon, ghID, v, c, data);
}
});

Order of Promise Resolution Not As Expected in Promise-Containing For-Loop

I need to perform a rather complicated chain of promise resolutions to get and save data that is located in PDFs that are uploaded by a user. Everything works fine for single PDFs, but it breaks as soon as I try to upload multiple PDFs because the order in which the resolution proceeds isn't as I expect.
I call the function saveDoc on each file in an array:
// Question code (Vue method): kicks off a save pipeline for every uploaded
// PDF. The IIFE captures per-file variables, but it does NOT serialize the
// async chains — every file's chain starts immediately, so the numbered
// console.log steps from different files interleave (the reported symptom).
// NOTE(review): `promises` is declared but never used — the chain promises
// are never collected or awaited; `var file` is also declared twice.
saveDoc: function(){
var files = this.$refs.upload.uploadFiles
var self=this;
var promises = []
for (var i=0; i<files.length; i++){
(function(){
var file = files[i]
var name = file['name']
if (!(/\.pdf/i.test(name))){
name+='.pdf'
}
var type = mime.lookup(name)
var file = file['raw'];
var beforeUrl = self.selectedCategories.join('&&&')
console.log('going to save: ' + name) // Because of the IIFE, I'm not expecting this to log only after all steps 1 - 20 have been completed for each file
Store.getFileData(file).then(function(data){
console.log(2)
return Store.saveDoc(name, type, data, beforeUrl).then(url => {
console.log(8)
return Text.getText(url).then(text => {
console.log(12)
return Text.getMetadata(text, url).then(metadata => {
console.log(20)
if (metadata.length){
return Store.saveMetadata(beforeUrl, metadata, name)
}
return Store.createCategory(name, self.selectedCategories, '')
})
})
})
})
})()
}
},
I'm sure the promises could use some work, but what seems to be the problem is that the line console.log('going to save: ' + name) is called twice before the entire sequence 1-20 is carried out for one file (see error message of number sequence at bottom of this post). I tried to prevent this using an IIFE, but I guess I didn't do this right.
store.js
// Fetches a document body from the `documentation` store by URL.
// NOTE(review): the error branch neither rejects nor resolves — on err the
// returned promise stays pending forever.
getData: function(url){
return new Promise(function(accept, reject){
console.log(4)
documentation.get(url, function(err, body) {
console.log(5)
if (!err){
console.log(body);
accept(body)
}
});
})
},
// Reads a browser File into a Uint8Array via FileReader.
// NOTE(review): reader.onerror is not handled — a failed read leaves the
// promise pending forever.
getFileData: function(file){
var reader = new FileReader();
return new Promise(function(accept, reject){
console.log(1)
reader.onload = (e) => {
// var data = e.target.result.replace(/^data:[A-Za-z]+\/[A-Za-z]+;base64,/, '')
// console.log('base 64: ' + data)
var res = new Uint8Array(e.target.result)
accept(res)
};
reader.readAsArrayBuffer(file);
})
},
// Attaches `filedata` to the document at `url` and resolves with the
// attachment's full URL.
// NOTE(review): wraps an existing promise chain in `new Promise` (explicit
// construction anti-pattern), and the insert error branch only logs — on
// error the returned promise never settles.
saveDoc: function(name, type, filedata, url){
console.log(3)
var self=this
return new Promise(function(accept, reject){
return self.getData(url).then(data => {
console.log(6)
var rev = data['_rev']
return documentation.attachment.insert(url, name, filedata, type,
{ rev: rev }, function(err, body) {
if (!err){
console.log(7)
var fullUrl = 'http://dev04/documentation/'+url+'/'+name
accept(fullUrl)
}
else {
console.log(err)
}
})
}).catch(err => {
console.log(err)
})
})
},
// Stores `metadata` under the file's name on the document at `url`.
// BUG: neither accept() nor reject() is ever called — the returned promise
// never settles, so anything chained onto Store.saveMetadata hangs.
// NOTE(review): `meta` and `datastring` are computed/declared but unused.
saveMetadata: function(url, metadata, name){
var fileName = path.basename(name)
var self=this
return new Promise(function(accept, reject){
self.getData(url).then(data => {
var meta
var rev = data['_rev']
if(!data['metadata']){
data['metadata'] = {}
}
data['metadata'][fileName] = metadata
var datastring = JSON.stringify(data)
documentation.insert(data, url, function(err, body, header) {
if (err) {
console.log(err.message);
return;
}
});
}).catch(err => {
console.log(err)
})
})
},
text.js
export default {
// Extracts the text of every page of the PDF at `url`, joined with '&&&'
// separators, and resolves with the combined string.
// NOTE(review): the document is loaded twice — the `pdf` from the outer
// getDocument() is immediately shadowed/discarded and fetched again; also
// `result` and `txt` are populated but never used for the resolved value,
// and there is no rejection path if pdfjs fails.
getText: function(url){
console.log(9)
var result = []
return new Promise(function(accept, reject){
console.log(10)
return pdfjs.getDocument(url).then(pdf => {
console.log(11)
var pdf = pdfjs.getDocument(url);
return pdf.then(function(pdf) { // get all pages text
var maxPages = pdf.pdfInfo.numPages;
var countPromises = []; // collecting all page promises
for (var j = 1; j <= maxPages; j++) {
var page = pdf.getPage(j);
var txt = "";
countPromises.push(page.then(function(page) { // add page promise
var textContent = page.getTextContent();
// console.log('the content is ' + textContent)
return textContent.then(function(text){ // return content promise
var val = text.items.map(function (s) { return s.str; }).join('&&&')+'&&&'; // value page text
result.push(val)
// console.log(val + ' should be one page of text')
return val
});
}));
}
// Wait for all pages and join text
return Promise.all(countPromises).then(function (texts) {
accept(texts.join(''))
});
});
});
})
},
// Parses product metadata entries out of the extracted PDF text using the
// module-level regex `rx` (declared elsewhere in this file), annotating each
// entry with the page count from getpageno().
// NOTE(review): the catch block resolves with whatever partial `result` was
// built — parse failures are silently swallowed; the promise never rejects.
getMetadata: function(text, url){
console.log(13)
var result = []
var self = this
return new Promise(function(accept, reject){
self.getpageno(url).then(function(pagecount){
console.log(19)
try {
var dataMatch = rx.exec(text)
var produktDaten = dataMatch[1].split("&&&").filter(Boolean);
console.log(produktDaten)
var dokuArr = dataMatch[2].split("&&&").filter(Boolean);
// Product data comes in groups of 4 fields: kks, hersteller, typ, artikelNummer.
for (var i=0; i<produktDaten.length; i+=4){
var entry = {}
entry.pagecount = pagecount
entry.kks = {}
entry.kks.pages = {}
var kksNummer = produktDaten[i];
entry.kks.nummer = kksNummer;
// Documentation rows come in groups of 3: number, description, page.
for(var j=0; j<dokuArr.length; j+=3){
var nummer = dokuArr[j];
var beschreibung = dokuArr[j+1];
var seite = dokuArr[j+2];
// make sure seite is a digit
// NOTE(review): `err` is undefined here — `throw err` raises a
// ReferenceError, which the catch below turns into a silent
// partial-result accept.
if (!(/^\d+$/.test(seite))){
console.log(seite + ' was not a valid page number')
throw err
}
if (/(A|a)lle?/i.test(nummer)){
entry.kks.pages[beschreibung] = seite;
// self.tableEntry.kks.url = url;
// self.tableEntry.fileName = name;
///// kksNummern.forEach(function(kks){
// self.tableEntry.kks;
// })
}
else if (nummer === kksNummer) {
entry.kks.pages[beschreibung] = seite;
// entry.kks.url = url;
// entry.fileName = name
}
}
entry.hersteller = produktDaten[i+1]
entry.typ = produktDaten[i+2]
entry.artikelNummer = produktDaten[i+3]
result.push(entry)
}
}
catch(e){
return accept(result)
}
return accept(result)
/* if (result.length>0){
console.log('accepting the result')
}
reject()*/
}).catch(err => {
console.log(err)
})
})
},
// Loads the PDF at `url`, extracts each page's text, and counts how many
// pages contribute to the `rx` match in the combined text.
// NOTE(review): if rx.exec(fulltext) matches nothing, the TypeError is
// caught and logged, leaving `count` undefined — the promise then resolves
// with undefined.
getpageno: function(url){
console.log(14)
var self=this
var pdf = pdfjs.getDocument(url);
return pdf.then(function(pdf){
console.log(15)
var maxPages = pdf.pdfInfo.numPages;
var countPromises = []; // collecting all page promises
for (var j = 1; j <= maxPages; j++) {
try {
var page = pdf.getPage(j);
var txt = "";
countPromises.push(page.then(function(page) { // add page promise
var textContent = page.getTextContent();
return textContent.then(function(text){ // return content promise
console.log(16)
return text.items.map(function (s) { return s.str; }).join('&&&'); // value page text
});
}));
}
catch(e){
console.log(e)
}
}
// Wait for all pages and join text
return Promise.all(countPromises).then(function (texts) {
// since doumentation pages do not add the final '&&&', must add one manually (only after rx has been found)
console.log(17)
var fulltext = texts.reduce(function(full, text){
if (rx.test(full)){
var next = '&&&'+text
return full+=next
}
return full+=text
}, '')
return [fulltext, texts]
});
}).then(function(textarr){
console.log(18)
var fulltext = textarr[0]
self.fulltext = fulltext;
var texts = textarr[1]
try {
var partialmatch = rx.exec(fulltext)[0]
// Count the pages whose (prefix-stripped) text occurs in the match.
var count = texts.reduce(function(pageno, text){
var tomatch = text.replace(/.*Typ&&&/, '')
if (tomatch.length>0 && partialmatch.indexOf(tomatch) > -1){
pageno++
}
return pageno;
}, 0)
}
catch(e){
console.log(e)
}
return count;
}).catch(err => {console.log(err)})
}
}
I use the console to log the order I am expecting for the functions. I am expecting the numbers 1 - 20 in order, but I get the following.
going to save: 03_.pdf selector.js:211:6
1 store.js:227:4
going to save: 2017.07.05_0016 E161206.pdf selector.js:211:6
1 store.js:227:4
2 selector.js:213:7
3 store.js:239:3
4 store.js:213:4
2 selector.js:213:7
3 store.js:239:3
4 store.js:213:4
5 store.js:215:5
Object { _id: "Test", _rev: "36-85a08c0852ccab78c0b4c10369e83fb2", rank: 7, icon: "wrench", metadata: Object, _attachments: Object } store.js:217:6
6 store.js:243:5
5 store.js:215:5
Object { _id: "Test", _rev: "36-85a08c0852ccab78c0b4c10369e83fb2", rank: 7, icon: "wrench", metadata: Object, _attachments: Object } store.js:217:6
6 store.js:243:5
7 store.js:247:7
8 selector.js:215:8
9 text.js:12:3
10
11 text.js:17:5
12 selector.js:217:9
13 text.js:50:3
14 text.js:112:3
15 text.js:116:4
16 text.js:129:8
17 text.js:142:5
18 text.js:153:4
19 text.js:57:5
20
Can anyone offer any advice on how to get this order correct? Thank you.

NodeJS Loop issue due to async/synchronicity issues

I am porting an old ruby script over to use javascript setting the function as a cron instance so it will run on schedule. The function queries our mysql database and retrieves inventory information for our products and then sends requests to a trading partners api to update our inventory on their site.
Due to nodes a-synchronicity I am running into issues. We need to chunk requests into 1000 items per request, and we are sending 10k products. The issue is each request is just sending the last 1000 items each time. The for loop that is inside the while loop is moving forward before it finishes crafting the json request body. I tried creating anon setTimeout functions in the while loop to try and handle it, as well as creating an object with the request function and the variables to be passed and stuffing it into an array to iterate over once the while loop completes but I am getting the same result. Not sure whats the best way to handle it so that each requests gets the correct batch of items. I also need to wait 3 minutes between each request of 1000 items to not hit the request cap.
// Question code: chunks the inventory rows and schedules one API request
// per chunk, 3 minutes apart.
// ROOT CAUSE (confirmed by the author's own resolution below): the single
// `postOptions` object is mutated and pushed by reference on every loop
// iteration — every funcArray entry points at the SAME object, whose body
// ends up being the LAST chunk. Each request therefore sends the last 1000
// items. A fresh options object per chunk fixes it.
query.on('end',()=>{
connection.release();
writeArray = itemArray.slice(0),
alteredArray = [];
var csv = json2csv({data: writeArray,fields:fields}),
timestamp = new Date(Date.now());
timestamp = timestamp.getFullYear() + '-' +(timestamp.getMonth() + 1) + '-' + timestamp.getDate()+ ' '+timestamp.getHours() +':'+timestamp.getMinutes()+':'+timestamp.getSeconds();
let fpath = './public/assets/archives/opalEdiInventory-'+timestamp+'.csv';
while(itemArray.length > 0){
// NOTE(review): splice(0,999) removes 999 items per chunk, not the 1000
// stated in the requirements.
alteredArray = itemArray.splice(0,999);
for(let i = 0; i < alteredArray.length; i++){
jsonObjectArray.push({
sku: alteredArray[i]['sku'],
quantity: alteredArray[i]["quantity"],
overstockquantity: alteredArray[i]["osInv"],
warehouse: warehouse,
isdiscontinued: alteredArray[i]["disc"],
backorderdate: alteredArray[i]["etd"],
backorderavailability: alteredArray[i]["boq"]
});
}
var jsonObject = {
login: user,
password: password,
items: jsonObjectArray
};
postOptions.url = endpoint;
postOptions.body = JSON.stringify(jsonObject);
funcArray.push({func:function(postOptions){request(postOptions,(err,res,body)=>{if(err){console.error(err);throw err;}console.log(body);})},vars:postOptions});
jsonObjectArray.length = 0;
}
var mili = 180000;
// Stagger the (currently commented-out) requests 3 minutes apart.
for(let i = 0;i < funcArray.length; i++){
setTimeout(()=>{
var d = JSON.parse(funcArray[i]['vars'].body);
console.log(d);
console.log('request '+ i);
//funcArray[i]['func'](funcArray[i]['vars']);
}, mili * i);
}
});
});
You would need async/await or Promise to handle async actions in node js.
I am not sure if you have node version which supports Async/await so i have tried a promise based solution.
// Answer code: builds a FRESH postOptions object per 1000-item chunk (this
// is what avoids the shared-reference problem of the original), then sends
// the chunks sequentially with a delay via sequenceBatch().
query.on('end', () => {
connection.release();
writeArray = itemArray.slice(0),
alteredArray = [];
var csv = json2csv({ data: writeArray, fields: fields }),
timestamp = new Date(Date.now());
timestamp = timestamp.getFullYear() + '-' + (timestamp.getMonth() + 1) + '-' + timestamp.getDate() + ' ' + timestamp.getHours() + ':' + timestamp.getMinutes() + ':' + timestamp.getSeconds();
let fpath = './public/assets/archives/opalEdiInventory-' + timestamp + '.csv';
var calls = chunk(itemArray, 1000)
.map(function(chunk) {
var renameditemsArray = chunk.map((item) => new renamedItem(item, warehouse));
var postOptions = {};
postOptions.url = endpoint;
postOptions.body = JSON.stringify({
login: user,
password: password,
items: renameditemsArray
});
return postOptions;
});
sequenceBatch(calls, makeRequest)
.then(function() {
console.log('done');
})
.catch(function(err) {
console.log('failed', err)
});
// Runs cb(callOptions) for each element of `calls` strictly one after
// another, waiting 180 s before each call (including the first).
// NOTE(review): `count` starts at 1 and is incremented before use, so the
// labels start at "callsequence2". The try/catch only catches synchronous
// throws from cb(); errors thrown later inside request's own callback (as
// makeRequest does) are NOT captured here.
function sequenceBatch (calls, cb) {
var sequence = Promise.resolve();
var count = 1;
calls.forEach(function (callOptions) {
count++;
sequence = sequence.then(()=> {
return new Promise(function (resolve, reject){
setTimeout(function () {
try {
cb(callOptions);
resolve(`callsequence${count} done`);
}
catch(err) {
reject(`callsequence ${count} failed`);
}
}, 180000);
});
})
});
return sequence;
}
// Fires one HTTP request with the prepared options and logs the body.
// NOTE(review): throwing inside the request callback cannot be caught by
// sequenceBatch's try/catch — it becomes an uncaught exception.
function makeRequest(postOptions) {
request(postOptions, (err, res, body) => {
if (err) {
console.error(err);
throw err;
}
console.log(body)
});
}
// Split `arr` into consecutive slices of at most `len` elements each.
function chunk(arr, len) {
  const pieces = [];
  for (let start = 0; start < arr.length; start += len) {
    pieces.push(arr.slice(start, start + len));
  }
  return pieces;
}
// Map a raw DB inventory row onto the field names the trading-partner API
// expects. Used as `new renamedItem(row, warehouse)`.
function renamedItem(item, warehouse) {
  this.sku = item.sku;
  this.quantity = item.quantity;
  this.overstockquantity = item.osInv;
  this.warehouse = warehouse;
  this.isdiscontinued = item.disc;
  this.backorderdate = item.etd;
  this.backorderavailability = item.boq;
}
});
Could you please try this snippet and let me know if it works? I couldn't test it, since I made it up on the fly. The core logic is in the sequenceBatch function. The answer is based on another question which explains how timeouts and promises work together.
Turns out this wasn't a closure or async issues at all, the request object I was building was using references to objects instead of shallow copies resulting in the data all being linked to the same object ref in the ending array.

Categories

Resources