Best practice to update a database during an event flow - JavaScript

I'm using imagemin to compress files as they are being uploaded to our Node.js server. The snippet below is a short version of the logic, to explain what is happening. All of this works, but I'm trying to figure out a more efficient way/best practice to make it more elegant.
What I'm looking for is a way to call the TaskModel query only once and, when imagemin.run completes, update the database with the generated base64 images. I've been playing with some async tasks but would like to get the basics down first!
Imagemin Link: https://github.com/imagemin/imagemin
var fileList = [].concat(req.files.userFile);
for (var x = 0; x < fileList.length; x++) {
    var fileItem = fileList[x];
    var imagemin = new Imagemin()
        .src(fileItem.path)
        .use(Imagemin.jpegtran({progressive: true}))
        .use(Imagemin.pngquant());
    imagemin.run(function (err, files) {
        if (err) {
            console.log('Error on optimization!' + err);
        }
        files.forEach(function (tmpFile) {
            var base64Image = new Buffer(tmpFile.contents).toString('base64');
            TaskModel.findById(taskId, function (err, tmpTask) {
                tmpTask.imgs.push({
                    bin: base64Image
                });
                tmpTask.save(function (err) {
                    if (!err) {
                        console.log("Image compressed and task updated");
                    } else {
                        console.log(err);
                    }
                });
            });
        });
    });
}

Use the async library: https://github.com/caolan/async
var fileList = [].concat(req.files.userFile);
var minified = [];
async.each(fileList, function (fileItem, done) {
    var imagemin = new Imagemin()
        .src(fileItem.path)
        .use(Imagemin.jpegtran({progressive: true}))
        .use(Imagemin.pngquant());
    imagemin.run(function (err, files) {
        if (err) {
            console.log('Error on optimization!' + err);
            return done(err);
        }
        if (files && files.length) {
            minified.push(files[0]);
        }
        done();
    });
}, function (err) {
    if (err) {
        console.log('error during minification', err);
        return next(err); // assumes you're using Express with a next parameter
    }
    TaskModel.findById(taskId, function (err, tmpTask) {
        if (err) {
            console.log('unknown task', err);
            return next(err);
        }
        for (var x = 0; x < minified.length; x++) {
            var f = minified[x];
            var base64Image = new Buffer(f.contents).toString('base64');
            tmpTask.imgs.push({bin: base64Image});
        }
        tmpTask.save(function (err) {
            if (!err) {
                console.log("Image compressed and task updated");
            } else {
                console.log(err);
                return next(err);
            }
        });
    });
});
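For completeness, the same flow can be expressed with plain Promises instead of the async library: minify everything first, then touch the database exactly once. This is only a sketch built on the callback-style Imagemin API shown above, untested:

function minifyFile(fileItem) {
    return new Promise(function (resolve, reject) {
        new Imagemin()
            .src(fileItem.path)
            .use(Imagemin.jpegtran({progressive: true}))
            .use(Imagemin.pngquant())
            .run(function (err, files) {
                if (err) return reject(err);
                resolve(files[0]); // keep the first output file, as above
            });
    });
}

Promise.all(fileList.map(minifyFile)).then(function (files) {
    // one findById, one save, exactly as in the async.each version
    TaskModel.findById(taskId, function (err, tmpTask) {
        if (err) return next(err);
        files.forEach(function (f) {
            tmpTask.imgs.push({bin: new Buffer(f.contents).toString('base64')});
        });
        tmpTask.save(function (err) {
            if (err) return next(err);
            console.log("Images compressed and task updated");
        });
    });
}, next);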

Related

NodeJS html-pdf: fs.readFileSync - how to async/await

I have a problem with my html-pdf document creation. The problem is that the code often runs too fast for the PDF document creation process to complete. The process consists of building an HTML string by replacing placeholders in an HTML file. Below you can see the code for what happens afterwards.
Object.keys(setter).forEach(function (element, key, _array) {
    var regex = new RegExp(element, "g");
    data = data.replace(regex, setter[element]);
});
var result = data;
fs.writeFile(mergeFileRes, result, 'utf8', function (err) {
    if (err) {
        console.log(err);
        return;
    } else {
        let html2 = fs.readFileSync(mergeFileRes, 'utf8');
        let options = {
            format: 'a4',
            "directory": "/tmp",
        };
        if (html2) {
            pdf.create(html2, options).toStream(function (err, stream2) {
                if (err) console.log(err);
                stream2.pipe(res);
                stream2.on('end', function () {
                    try {
                        fs.unlink(mergeFileRes);
                        console.log(3090, "deleted file");
                    } catch (err) {
                        console.log(3090, "Did not delete file");
                    }
                });
            });
        } else {
        }
    }
});
My problem is that in many cases the html2 variable is not yet created before the pdf.create process starts. This is probably because readFileSync takes too long to finish.
I was wondering how I can fix this. How can I make pdf.create wait for readFileSync to finish and for the html2 variable to be filled?
You can use fs.readFile to read the file asynchronously and html2 will be available within the callback function.
Object.keys(setter).forEach(function (element, key, _array) {
    var regex = new RegExp(element, "g");
    data = data.replace(regex, setter[element]);
});
var result = data;
fs.writeFile(mergeFileRes, result, 'utf8', function (err) {
    if (err) {
        console.log(err);
        return;
    } else {
        fs.readFile(mergeFileRes, 'utf8', function (err, html2) {
            if (err) throw err;
            let options = {
                format: 'a4',
                "directory": "/tmp",
            };
            pdf.create(html2, options).toStream(function (err, stream2) {
                if (err) console.log(err);
                stream2.pipe(res);
                stream2.on('end', function () {
                    try {
                        fs.unlink(mergeFileRes);
                        console.log(3090, "deleted file");
                    } catch (err) {
                        console.log(3090, "Did not delete file");
                    }
                });
            });
        });
    }
});
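Incidentally, the round trip through the file system can be avoided altogether here: result already holds the exact HTML string that was just written to mergeFileRes, so it can be handed straight to pdf.create. A minimal sketch under the same assumptions as above:

fs.writeFile(mergeFileRes, result, 'utf8', function (err) {
    if (err) {
        return console.log(err);
    }
    // `result` is the same string that was written to disk, so there
    // is no need to read it back before creating the PDF
    pdf.create(result, { format: 'a4', "directory": "/tmp" }).toStream(function (err, stream2) {
        if (err) return console.log(err);
        stream2.pipe(res);
    });
});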

Node.js: Async fs.writeFile queue is creating race condition?

I am trying to use async with node.js to handle multiple incoming POST requests to edit a JSON file. No matter how I refactor it, it will always make one of the edits and not the other. I thought that using async.queue would force the operations to be handled sequentially? What am I doing wrong?
My code:
var editHandler = function (task, done) {
    var req = task.req;
    var res = task.res;
    fs.stat("./app//public/json/" + "data.json", function (err, stat) {
        if (err == null) {
            console.log('File exists');
        } else if (err.code == 'ENOENT') {
            console.log("Error");
        } else {
            console.log('Some other error: ', err.code);
        }
    });
    console.log(req.params.id);
    console.log(req.body);
    fs.readFile("./app//public/json/" + "data.json", 'utf8', function (err, data) {
        data = JSON.parse(data);
        data[req.params.id] = req.body.school;
        //console.log( data );
        fs.writeFile("./app//public/json/" + "data.json", JSON.stringify(data), function (err) {
            if (err) {
                return console.log(err);
            }
        });
        res.redirect('/');
    });
};
//Make a queue for the services
var serviceQ = async.queue(editHandler, 20);
serviceQ.drain = function () {
    console.log('all services have been processed');
};
app.post('/edit_school/:id', function (req, res) {
    serviceQ.push({req: req, res: res});
});
Thanks in advance for any insights! I am really new to using node.js for anything other than npm/webpack.
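For what it's worth, two things stand out in the snippet above: editHandler never calls done, so the queue has no way to know when a task has finished, and a concurrency of 20 lets up to 20 handlers interleave their read-modify-write cycles anyway. A minimal sketch of a serialized version (same Express setup assumed, untested):

var editHandler = function (task, done) {
    var req = task.req;
    var res = task.res;
    fs.readFile("./app//public/json/" + "data.json", 'utf8', function (err, data) {
        if (err) return done(err);
        data = JSON.parse(data);
        data[req.params.id] = req.body.school;
        fs.writeFile("./app//public/json/" + "data.json", JSON.stringify(data), function (err) {
            res.redirect('/');
            done(err); // signal completion only after the write has finished
        });
    });
};
// a concurrency of 1 forces the read-modify-write cycles to run one at a time
var serviceQ = async.queue(editHandler, 1);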

How to make sure call is asynchronous?

I have a program where the user first creates a file; once the file is created, I append data to it as it comes in steadily from the client. The code below works as expected. I am new to Node.js, so I just want an expert opinion: in the case where multiple users are creating and recording files on their machines at the same time, will this work asynchronously, or do I need to make some changes to the code?
io.js
socket.on('createlogfile', function () {
    logsRecording.userLogs(function (filename) {
        socket.emit('filename', filename);
    });
});
socket.on('startrecording', function (obj) {
    logsRecording.recordLogs(obj);
});
server.js
userLogs: function (callback) {
    var filename = uuid.v4() + '.log';
    var file = filePath + '/' + filename;
    fs.openSync(file, 'a', function () {
        console.log('file created');
    });
    console.log('userLogs');
    callback(filename);
},
recordLogs: function (obj) {
    var dir = './app/records/templogs';
    var fileAppend = dir + '/' + obj.file;
    console.log('data from recording', obj.data);
    fs.readdir(dir, function (err, items) {
        items.forEach(function (file) {
            if (obj.file === file) {
                fs.appendFile(fileAppend, obj.data + "\r\n", null, 'utf8', function (err) {
                    if (err) throw err;
                });
                console.log('filename in records', obj.file);
            }
        });
    });
}
You are using fs.openSync, which is synchronous and as such can hang the event loop.
You should be using fs.open, and call your callback inside its own callback:
userLogs: function (callback) {
    var filename = uuid.v4() + '.log';
    var file = filePath + '/' + filename;
    fs.open(file, 'a', function (err) {
        console.log('file created');
        console.log('userLogs');
        callback(err, filename);
    });
},
And you can flatten recordLogs using async.
Also, it is bad practice to throw an error inside an asynchronous callback; you should be passing the error to the callback instead.
As a last tip, Array.forEach is synchronous and can hang the process, so you should be using async.each:
recordLogs: function (obj, callback) {
    var dir = './app/records/templogs';
    var fileAppend = dir + '/' + obj.file;
    console.log('data from recording', obj.data);
    async.waterfall([
        (callback) => {
            fs.readdir(dir, (err, items) => {
                callback(err, items);
            });
        },
        (items, callback) => {
            async.each(items, (file, callback) => {
                if (obj.file === file) {
                    fs.appendFile(fileAppend, obj.data + "\r\n", 'utf8', function (err) {
                        callback(err);
                    });
                    console.log('filename in records', obj.file);
                } else {
                    callback();
                }
            }, (err) => {
                callback(err);
            });
        }
    ], (err) => {
        if (callback) {
            callback(err);
        }
    });
}
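A further simplification, if it fits the use case: the directory scan exists only to check that the file is present, and fs.stat can probe that one path directly instead. A sketch, untested:

recordLogs: function (obj, callback) {
    var fileAppend = './app/records/templogs/' + obj.file;
    fs.stat(fileAppend, function (err) {
        if (err) return callback(err); // file missing or unreadable
        fs.appendFile(fileAppend, obj.data + "\r\n", 'utf8', callback);
    });
}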

Node.js async.each - "callback was already called"

I am a node.js noob and am trying to do some file processing. I'm using async to process an array of files, but the final callback function is never called. I believe this is due to calling the next() function twice, but I can't see where I'm doing this. If I comment out the last "return next()", I finish with no errors but the final callback doesn't execute. If I uncomment this line, I get the error message "callback was already called". Any help would be greatly appreciated. Here is the code:
/*jslint node: true */
"use strict";
var fs = require('fs'),
    dive = require('dive'),
    subdirs = require('subdirs'),
    async = require('async'),
    currentYear = new Date().getFullYear(),
    cpFile = __dirname + "/" + "header.txt",
    noCopy = __dirname + "/" + "noCopyright.txt",
    currentHeads = __dirname + "/" + "currentHeaders.txt",
    reYear = /\s(\d{4})[-\s]/i, // matches first 4 digit year
    reComment = /(\/\*(?:(?!\*\/).|[\n\r])*\*\/)/, // matches first multi-line comment
    allHeaders = {},
    stringObj,
    year,
    top;

function needsHeader(file) {
    if ((file.match(/.*\.js$/) || file.match(/.*\.less$/) || file.match(/.*\.groovy$/) || file.match(/.*\.java$/) || file.match(/.*\.template$/) || file.match(/.*\.html$/))) {
        fs.appendFile(noCopy, file + "\n", function (err) {
            if (err) {
                return console.log(err);
            }
        });
    }
}

fs.readFile(cpFile, 'utf8', function (err, copyRight) {
    if (err) {
        return console.log(err);
    }
    subdirs(__dirname, 4, function (err, dirs) {
        if (err) {
            return console.log(err);
        }
        async.each(dirs, function (dir, next) {
            if (!dir.match(/.*\/src$/)) {
                return next();
            } else {
                dive(dir, {all: false}, function (err, file) {
                    if (err) {
                        return next(err);
                    } else {
                        fs.readFile(file, 'utf8', function (err, data) {
                            if (err) {
                                return next(err);
                            } else {
                                if (data.match(reComment) && (file.match(/.*\.js$/) || file.match(/.*\.less$/) || file.match(/.*\.groovy$/) || file.match(/.*\.java$/) || file.match(/.*\.template$/))) {
                                    top = data.match(reComment)[0];
                                    if (top.match(reYear)) {
                                        year = top.match(reYear)[1];
                                        if (allHeaders[year]) {
                                            allHeaders[year].push(file);
                                        } else {
                                            allHeaders[year] = [file];
                                        }
                                    } else {
                                        needsHeader(file);
                                    }
                                } else {
                                    needsHeader(file);
                                }
                                return next();
                            }
                        });
                    }
                });
            }
        }, function (err) {
            if (err) {
                console.log(err);
            }
            stringObj = JSON.stringify(allHeaders, null, 4);
            fs.writeFile(currentHeads, stringObj, function (err) {
                if (err) {
                    return console.log(err);
                }
            });
        });
    });
});
async.each expects you to call next() once per directory, but you are calling it for each file found in the directory. So as soon as some directory contains 2 or more files, you get the error.
To fix it, try calling next() when dive completes. See the dive documentation:
complete [optional] may define a second callback, that is called,
when all files have been processed. It takes no arguments.
dive(dir, {all: false}, function (err, file) {
    if (err) {
        return next(err);
    } else {
        // your file handling code here
    }
}, function complete() {
    next();
});
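One caveat with this shape, noted only as a sketch: if the per-file callback can call next(err) and the complete callback also calls next(), an error in any one file would make next fire twice again. A simple guard (the finish helper here is hypothetical) avoids that:

var finished = false;
function finish(err) {
    if (finished) return; // ensure next() fires at most once
    finished = true;
    next(err);
}
dive(dir, {all: false}, function (err, file) {
    if (err) return finish(err);
    // your file handling code here
}, function complete() {
    finish();
});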

Make forEach asynchronous in JavaScript

I'm trying to understand asynchronous programming in Node.js, but I'm stalled on this code.
This function returns, via its callback, an array of files in a directory:
function openDir(path, callback) {
    path = __dirname + path;
    fs.exists(path, function (exists) {
        if (exists) {
            fs.readdir(path, function (err, files) {
                if (err) {
                    throw err;
                }
                var result = [];
                files.forEach(function (filename, index) {
                    result[index] = filename;
                });
                return callback(result);
            });
        }
    });
}
But when I use asynchronous code inside .forEach, it returns nothing:
function openDir(path, callback) {
    path = __dirname + path;
    fs.exists(path, function (exists) {
        if (exists) {
            fs.readdir(path, function (err, files) {
                if (err) {
                    throw err;
                }
                var result = [];
                files.forEach(function (filename, index) {
                    fs.stat(path + filename, function (err, stats) {
                        if (err) {
                            throw err;
                        }
                        result[index] = filename;
                    });
                });
                return callback(result);
            });
        }
    });
}
I understand why this happens, but I don't understand how to write the correct code.
The issue is that fs.stat is also async, but you could probably do something like:
var result = [],
    expectedLoadCount = files.length,
    loadCount = 0;

files.forEach(function (filename, index) {
    fs.stat(path + filename, function (err, stats) {
        if (err) {
            throw err;
        }
        result[index] = filename;
        if (++loadCount === expectedLoadCount) callback(result);
    });
});
The other answers may work well, but they are currently quite different semantically from the original code: they both execute the stats in parallel rather than sequentially. The forEach will initiate as many asynchronous stat operations as there are files in the list, and the completion order of those operations may well differ from the original order of the list. This may substantially affect the error handling logic.
The following approach implements a state machine, which executes the stats asynchronously yet sequentially (untested):
function openDir(path, callback) {
    path = __dirname + path;
    fs.exists(path, function (exists) {
        if (!exists)
            callback(null, null); // node (err, result) convention
        else {
            fs.readdir(path, function (err, files) {
                if (err)
                    callback(err, null); // node (err, result) convention
                else {
                    var results = [];
                    var i = 0;
                    nextStep(); // process the first file (the first step)
                    function nextStep() {
                        if (i >= files.length) // no more files?
                            callback(null, results); // node (err, result) convention
                        else {
                            fs.stat(path + files[i], function (err, stats) {
                                if (err)
                                    callback(err, null); // node (err, result) convention
                                else {
                                    results[i++] = stats;
                                    // proceed to the next file
                                    nextStep();
                                }
                            });
                        }
                    }
                }
            });
        }
    });
}
Promises may help to reduce the nesting level of the famous "pyramid of doom" seen above.
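As an illustration only, a sequential version with promises might look like this (assumes a Node version that provides util.promisify; untested):

var util = require('util');
var fs = require('fs');
var stat = util.promisify(fs.stat);
var readdir = util.promisify(fs.readdir);

function openDir(path) {
    path = __dirname + path;
    return readdir(path).then(function (files) {
        // reduce() chains the stat calls so they still run one at a time
        return files.reduce(function (chain, filename) {
            return chain.then(function (results) {
                return stat(path + filename).then(function (stats) {
                    results.push(stats);
                    return results;
                });
            });
        }, Promise.resolve([]));
    });
}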
try this:
function openDir(path, callback) {
    path = __dirname + path;
    fs.exists(path, function (exists) {
        var totalFiles = 0;
        if (exists) {
            fs.readdir(path, function (err, files) {
                if (err) {
                    throw err;
                }
                var result = [];
                files.forEach(function (filename, index) {
                    fs.stat(path + filename, function (err, stats) {
                        if (err) {
                            throw err;
                        }
                        result[index] = filename;
                        totalFiles++;
                        if (totalFiles === files.length) {
                            callback(result);
                        }
                    });
                });
            });
        }
    });
}
You can also use the async module to help in these kinds of situations.
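For example, async.map handles the completion counting for you; this sketch has the same parallel semantics as the counter approach above:

async.map(files, function (filename, cb) {
    fs.stat(path + filename, function (err, stats) {
        cb(err, filename); // the original code collects filenames, not stats
    });
}, function (err, result) {
    if (err) throw err;
    callback(result);
});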
