Checking if writeFileSync successfully wrote the file - javascript

I have a simple route defined with express.js:
// Express route handler: overwrite an existing file with the posted content.
// NOTE(review): fs.writeFileSync returns undefined, so `saved` can never
// become true here — success must be detected with try/catch instead.
exports.save = function (request, response)
{
// Path and contents come straight from the request body — assumed validated
// upstream; TODO confirm (writing a client-supplied path is a traversal risk).
var file = request.body.file;
var content = request.body.content;
var saved = false;
if (fs.existsSync( file ))
{
// writeFileSync throws on failure and returns undefined on success, so
// this assignment always leaves `saved` undefined.
saved = fs.writeFileSync(file, content, 'utf8');
}
console.log( saved ); // undefined or false, never true
response.send(saved ? 200 : 500, saved ? 'saved' : 'error'); // 500, error
};
Is if (typeof saved === 'undefined') saved = true; the only option? Feels hacky.

According to node.js source-code fs.writeFileSync doesn't return anything.
It throws an Error object if something goes wrong. So you should write fs.writeFileSync(file, content, 'utf8'); within a try-catch block.

fs.writeFileSync does not return any value; if no exception is thrown, the save succeeded — otherwise it failed.
You may also want to try the async versions of the file operations:
// Async variant of the save route: check that the file exists, then
// overwrite it and reply only after the write has actually completed.
// NOTE: fs.exists is deprecated in current Node; fs.access/fs.stat are the
// modern alternatives.
fs.exists(file, function (exists) {
    if (exists) {
        // Fix: the API is fs.writeFile ("writeFiles" does not exist), and the
        // original snippet was missing its closing braces/parentheses.
        fs.writeFile(file, content, 'utf-8', function (err) {
            if (err) {
                response.send("failed to save");
            } else {
                response.send("succeeded in saving");
            }
        });
    } else {
        console.log('file does not exists');
    }
});

// Same flow with explicit HTTP status codes and JSON bodies.
// Fix: fs.writeFile (not "writeFiles"), plus the missing closing
// braces/parentheses so the snippet actually parses.
fs.exists(file, function (exists) {
    if (exists) {
        fs.writeFile(file, content, err => {
            if (err) res.status(500).send({error: "failed to save"});
            else res.status(200).send({message : "succeeded in saving"});
        });
    } else {
        res.status(404).send({error: "file not exists"})
    }
});
Use async instead of sync. This will work.

Related

Wait for function to finish to do something else node.js formidable

So my issue here is that the console.log() at the end of the code always prints before the parsing of the formidable form is finished, and I don't know how to avoid this:
// Express route: accepts a multipart upload via formidable, validates it,
// and copies the uploaded CSV into ../src/files/csv.
exports.scrape = (req,res) => {
let form = new formidable.IncomingForm();
form.keepExtensions = true;
// form.parse is asynchronous: the callback below runs only after the whole
// request body has been parsed — long after this function has returned.
form.parse(req, (err, fields, files) => {
if (err) {
return res.status(400).json({
error: 'Image could not be uploaded'
});
}
const { endpoint } = fields;
if (!endpoint ) {
return res.status(400).json({
error: 'All fields are required'
});
}
if (files.file) {
if (files.file.size > 1000000) {
return res.status(400).json({
error: 'Image should be less than 1mb in size'
});
}
var oldPath = files.file.path;
console.log(oldPath);
var newPath = path.join(__dirname, '../src/files/csv')
+ '/'+files.file.name
// NOTE(review): readFileSync blocks the event loop; fs.copyFile (or the
// async fs.readFile) would be preferable here.
var rawData = fs.readFileSync(oldPath)
fs.writeFile(newPath, rawData, function(err){
if(err) console.log(err)
// Any "parse is finished" follow-up work belongs here (or in a callback
// / Promise resolved here), not after form.parse below.
return console.log(newPath);
})
}
})
// NOTE(review): this runs immediately after form.parse() is *started*,
// before the callback above fires — which is why "test!" prints first.
console.log("test!");
}
And then, when the parse is complete, I want to perform another action, because I need the created file later in another script that will be initialized once the parse is over — that would be where the console.log("test!") is.
Get a basic understanding of how callbacks work. Then you realize that putting console.log at the bottom is not chronologically the end.

Using "fs" to read JSON file throws "invalid json" Node.js

This makes me crazy. I don't know why but when I use the filesystem require('fs') to read valid Json file I get error: Invalid json. Here it is the code:
var fs = require('fs');
const FILE_NAME = './json.json'
/*
// This way it works!
var jsonData = require('./json.json')
console.log(jsonData);
*/
/**
 * Promise wrapper around fs.readFile.
 * @param {string} filePath - path of the file to read as UTF-8 text.
 * @returns {Promise<string>} resolves with the file contents; rejects with
 *   the underlying fs error (after logging it).
 * Note: the `async` keyword is redundant — the function already returns a
 * Promise explicitly.
 */
async function readFile(filePath){
return new Promise(function(resolve, reject){
fs.readFile(filePath, 'utf8', function(err, contents) {
if(err){
console.log("Cant read the file " + err);
reject(err)
}
else{
resolve(contents)
}
})
})
}
// Reads the JSON file and logs the parsed object.
// NOTE(review): the promise chain below is not returned, so awaiting
// getNames() resolves before the file is read; errors only surface via the
// .catch logging. Also, JSON.parse fails here because the file starts with
// a UTF-8 byte order mark (BOM), which require() strips but readFile keeps.
async function getNames(fileName){
readFile(fileName).then(function(data){
try {
console.log(`Type of data: ` + typeof data);
console.log("File data: " + data);
return JSON.parse(data);
} catch (error) {
throw new Error( "Invalid JSON: " + error);
}
}).then(function(data){
console.log(`FILE OBJECT: ` + data);
}).catch(function(err){
console.error(err);
})
}
getNames(FILE_NAME)
This is the file content:
{
"name": "riko"
}
This is the console output:
Type of data: string
File data: {
"name": "riko"
}
Error: Invalid JSON: SyntaxError: Unexpected token in JSON at position 0
at C:\Users\rojer\Desktop\Node\test\main.js:31:13
I know I could use var jsonData = require('./json.json'), but
I want to know the reason it doesn't work.
What if the JSON data is embedded somewhere in a regular text file.
There seems to be some garbage.
Please help.
This:
Error: Invalid JSON: SyntaxError: Unexpected token in JSON at position 0
at C:\Users\rojer\Desktop\Node\test\main.js:31:13
tells us that there's an invisible character at the beginning of the file, probably a byte order mark (BOM), that require is handling but your code isn't. If the file is really in UTF-8, that BOM will be \xEF\xBB\xBF. Once read as UTF-8 into a JavaScript string, that will be the code point \u{FEFF} (because JavaScript strings are UTF-16 [but tolerate invalid surrogate pairs]). Update: Your binary listing of it confirms that.
I can confirm that if I have a UTF-8 JSON file with a BOM, require reads it and handles the BOM, but readFile returns the contents of the file with the BOM intact, which trips up JSON.parse.
You can check for the BOM and strip it off, see *** lines:
const UTF8_BOM = "\u{FEFF}"; // ***
// Strips a leading UTF-8 BOM (which require() handles but fs.readFile does
// not) before JSON.parse.
// Fix: return the promise chain so callers of getNames() can await it;
// previously the chain was floating and the async function resolved
// immediately with undefined, before the file was even read.
async function getNames(fileName){
    return readFile(fileName).then(function(data){
        try {
            console.log(`Type of data: ` + typeof data);
            if (data.startsWith(UTF8_BOM)) { // ***
                data = data.substring(UTF8_BOM.length); // ***
            }
            console.log("File data: " + data);
            return JSON.parse(data);
        } catch (error) {
            throw new Error( "Invalid JSON: " + error);
        }
    }).then(function(data){
        console.log(`FILE OBJECT: ` + data);
    }).catch(function(err){
        console.error(err);
    })
}
Alternately, if you don't want the BOM there, here's a quick and dirty tool to add/remove BOMs on UTF-8 files:
const fs = require("fs");
const UTF8_BOM = "\u{FEFF}";
// Dispatch table: CLI option flag -> handler. The handlers are function
// declarations below, so they are hoisted and available here.
const actions = new Map();
actions.set("-a", addBOM);
actions.set("-r", removeBOM);
actions.set("-t", toggleBOM);
main();
// Entry point: parse CLI args (node utf8bomtoggle <file> [-a|-r|-t]), read
// the target file as UTF-8, and dispatch to the selected BOM action.
function main() {
    const filename = process.argv[2];
    const option = process.argv[3] || "-t"; // default: toggle
    const action = actions.get(option);
    if (!filename) {
        help();
        return;
    }
    if (!action) {
        console.error(`Invalid option ${option}`);
        help();
        return;
    }
    fs.readFile(filename, 'utf-8', (err, data) => {
        if (err) {
            // Fix: interpolate the filename — the original printed the
            // literal text "$(unknown)".
            console.error(`${filename}: Error reading file: ${err}`);
            return;
        }
        const hasBOM = data.startsWith(UTF8_BOM);
        action(filename, data, hasBOM);
    });
}
// Writes `data` back to `filename` as UTF-8 and logs the outcome; `toggle`
// marks the success message when the call came from toggleBOM.
function writeResult(filename, data, toggle, successMessage, failMessage) {
    fs.writeFile(filename, data, "utf-8", (err) => {
        if (err) {
            // Fix: interpolate the filename — the original printed the
            // literal text "$(unknown)".
            console.error(`${filename}: ${failMessage}: ${err}`);
        } else {
            console.log(`${filename}: ${successMessage}${toggle ? " (toggled)" : ""}`);
        }
    });
}
// Prepends a UTF-8 BOM to the file unless one is already present.
function addBOM(filename, data, hasBOM, toggle) {
    if (hasBOM) {
        // Fix: interpolate the filename — the original printed the literal
        // text "$(unknown)".
        console.log(`${filename}: Already has a BOM`);
    } else {
        writeResult(filename, UTF8_BOM + data, toggle, "Added BOM", "Error adding BOM");
    }
}
// Strips the leading UTF-8 BOM from the file if one is present.
function removeBOM(filename, data, hasBOM, toggle) {
    if (!hasBOM) {
        // Fix: interpolate the filename — the original printed the literal
        // text "$(unknown)".
        console.log(`${filename}: Already doesn't have a BOM`);
    } else {
        writeResult(filename, data.substring(UTF8_BOM.length), toggle, "Removed BOM", "Error removing BOM");
    }
}
// Adds the BOM when absent, removes it when present; the trailing `true`
// flags the resulting log message as "(toggled)".
function toggleBOM(filename, data, hasBOM) {
    const action = hasBOM ? removeBOM : addBOM;
    action(filename, data, hasBOM, true);
}
// Prints CLI usage for the BOM add/remove/toggle tool.
function help() {
    const lines = [
        "Usage: node utf8bomtoggle [filename] {options}",
        "{options} can be:",
        " -t Toggle a BOM [default]",
        " -a Add a BOM if not present",
        " -r Remove a BOM if present"
    ];
    for (const line of lines) {
        console.log(line);
    }
}

NodeJS Html-pdf: fs.readfilesync how to async/await

I have a problem with my html-pdf document creation. The problem is that the code often runs too fast to complete the process of PDF document creation. The process consists of building an HTML string by replacing placeholders in an HTML file. Below you see the code for what happens afterwards.
// Fill the template: replace each placeholder (a key of `setter`) with its
// value throughout `data`.
// NOTE(review): keys are used as regex sources unescaped — keys containing
// regex metacharacters would misbehave; confirm keys are plain words.
Object.keys(setter).forEach(function(element, key, _array) {
var regex = new RegExp(element, "g");
data = data.replace(regex, setter[element])
})
var result = data;
// Persist the merged HTML, then read it back and stream a PDF to the client.
fs.writeFile(mergeFileRes, result, 'utf8', function (err) {
if(err) {
console.log(err);
return;
} else {
// readFileSync is synchronous: html2 IS fully populated before pdf.create
// runs. (The question's suspicion that this races is mistaken; see the
// async rewrite in the answer below.)
let html2 = fs.readFileSync(mergeFileRes, 'utf8');
let options = {
format: 'a4' ,
"directory" : "/tmp",
};
if(html2){
pdf.create(html2, options).toStream(function(err, stream2){
if(err) console.log(err);
stream2.pipe(res);
stream2.on('end', function () {
try{
// NOTE(review): fs.unlink is async and (since Node 10) requires a
// callback — this call throws, and try/catch cannot observe async
// unlink errors anyway.
fs.unlink(mergeFileRes)
console.log(3090, "deleted file");
}
catch (err){
console.log(3090, "Did not delete file");
}
});
});
} else {
}
}
});
My problem is that in many cases the html2 variable is not yet created before the pdf.create process starts. This is probably because the readFileSync takes too long to finish.
I was wondering, how can I fix this. How can I make the pdf.create wait for the readFileSync to finish and the html2 variable to be filled.
You can use fs.readFile to read the file asynchronously and html2 will be available within the callback function.
// Fill the template, write it to disk, then read it back asynchronously and
// stream the generated PDF to the client.
Object.keys(setter).forEach(function(element, key, _array) {
    var regex = new RegExp(element, "g");
    data = data.replace(regex, setter[element])
})
var result = data;
fs.writeFile(mergeFileRes, result, 'utf8', function (err) {
    if(err) {
        console.log(err);
        return;
    } else {
        // Async read: html2 is guaranteed to be populated inside this callback.
        fs.readFile(mergeFileRes, 'utf8', function(err, html2){
            if (err) throw err;
            let options = {
                format: 'a4' ,
                "directory" : "/tmp",
            };
            pdf.create(html2, options).toStream(function(err, stream2){
                if(err) console.log(err);
                stream2.pipe(res);
                stream2.on('end', function () {
                    // Fix: fs.unlink is asynchronous and requires a callback
                    // (mandatory since Node 10); a try/catch around it could
                    // never observe the error anyway.
                    fs.unlink(mergeFileRes, function (err) {
                        if (err) {
                            console.log(3090, "Did not delete file");
                        } else {
                            console.log(3090, "deleted file");
                        }
                    });
                });
            });
        });
    }
});

node.js ignores awaitZip building with express

I want to fetch icon PNGS from gridfs out of our mongodb database with mongoose. These icons then should be zipped and served at a specific route.
My current code is as follows:
var zip = require("node-native-zip");
// Builds a zip of category icon PNGs fetched from GridFS.
// NOTE(review): the `return` statements below are inside the exec()
// callback, so their values are discarded — getZipFile() itself resolves to
// undefined immediately. Wrap the exec call in a Promise (see the answer).
async function getZipFile() {
//get the events out of the DB
db.Category.find({}).populate('icons.file').exec(async function (err, cats) {
if (err) {
//oh oh something went wrong, better pass the error along
return ({
"success": "false",
message: err
});
}
else {
//all good, build the message and return
try {
const result = await buildZip(cats);
return ({
"success": "true",
message: result
});
}
catch (err) {
console.log("ZIP Build Failed")
}
}
});
}
// Adds every 3x icon to the archive, then returns the zip buffer.
// NOTE(review): db.Attachment.readById is asynchronous and never awaited,
// so archive.toBuffer() runs before any buffer has been added. The
// `if (i === cats.length - 1)` check also sits inside the for loop, so the
// function simply returns on the last iteration regardless of completion.
async function buildZip(cats) {
let archive = new zip();
for (let i = 0; i < cats.length; i++) {
cats[i].icons.forEach(function (icon) {
if (icon.size === "3x") {
db.Attachment.readById(icon.file._id, function (err, buffer) {
if (err)
return;
archive.add(cats[i]._id + ".png", buffer);
});
}
});
//return when everything is done
if (i === cats.length - 1) {
return archive.toBuffer();
}
}
}
// Public API: expose the zip builder under the name `run`.
module.exports = { run: getZipFile };
I don't want to build the zip before runtime, as I want to rename the icons according to the category ID. I tried going for an async/await structure, but my callback is being returned before the building of the zip file even started.
I'm calling the function with
case 'categoryZip':
categoryHelper.getZipFile.run().then((result) => {
callback(result);
});
break;
This should (as far as I understood it) fire the callback when the zipping is done, but I think I'm missing something essential here.
I wrapped both your callback methods into promises, and also awaited your double for-loop of callbacks in parallel using Promise.all() since they don't rely on each other and I assume they don't need to be in any particular order in the zip file:
// Fetches all categories (icon files populated), zips their 3x icons, and
// resolves/rejects with a {success, message} result object. The
// callback-style Mongoose exec() is adapted into a Promise here.
async function getZipFile() {
    return new Promise((resolve, reject) => {
        db.Category.find({}).populate('icons.file').exec(async (err, cats) => {
            if (err) {
                // DB lookup failed — hand the error back to the caller.
                reject({
                    success: false,
                    message: err
                });
                return;
            }
            // Lookup succeeded: build the archive and resolve with its buffer.
            try {
                const result = await buildZip(cats);
                resolve({
                    success: true,
                    message: result
                });
            } catch (err) {
                console.log("ZIP Build Failed")
                reject({
                    success: false,
                    message: err
                });
            }
        });
    });
}
// Reads every 3x icon buffer in parallel and adds each to the archive;
// resolves with the zip buffer once all reads have completed. Archive order
// is not guaranteed (and not required).
async function buildZip(cats) {
    const archive = new zip();
    const pending = cats.map((cat) =>
        Promise.all(
            cat.icons
                .filter((icon) => icon.size === '3x')
                .map((icon) =>
                    new Promise((resolve, reject) => {
                        db.Attachment.readById(icon.file._id, (err, buffer) => {
                            if (err) return reject(err);
                            archive.add(cat._id + ".png", buffer);
                            resolve();
                        });
                    })
                )
        )
    );
    await Promise.all(pending);
    return archive.toBuffer()
}

Node.js fires callback twice, why?

In the following code, why does the createFile callback fire twice? This only happens when the server (below) is processing two or more requests at the same time, not if only one request has been made. Output at the bottom of the post. The client making the request is not a browser, but another node.js script iterating through a directory and sending a http post request with the file to the server. The request is created like this:
fs.createReadStream(fileName).pipe(httprequest(options, function(error, response, body) { }));
// HTTP handler: streams the request body to a <basename>.temp file, then
// moves it into place under /home/pi/upload, mirroring the original path.
// NOTE(review): newFilePath, tempFileName and dirPath are implicit globals
// (declared without var/let), so two concurrent requests clobber each
// other's values — which is why the logs and callbacks appear doubled.
function myRequest(request, response) {
function writeFile(filePath, request, callback) {
newFilePath = "/home/pi/upload"+filePath; //filePath looks like this: /home/blah/file.txt, the code below creates this structure under another directory, so newFilePath becomes /home/pi/upload/home/blah/file.txt
tempFileName = path.basename(filePath)+".temp";
console.log("Processing "+filePath+"->"+newFilePath+" with tempname " +tempFileName);
var createFile = request.pipe(fs.createWriteStream(tempFileName));
createFile.on("finish", function(error) { //Why does it fire the callback twice?
// NOTE(review): the stream "finish" event passes no arguments, so `error`
// is always undefined and the else branch always runs.
if(error) {
throw error;
} else {
moveFile(tempFileName, newFilePath, function(error) {
if(error) {
throw error;
} else {
console.log("OK");
}
});
}
});
}
function moveFile(tempFileName, newFilePath, callback) {
dirPath = path.dirname(newFilePath);
fs.stat(dirPath, function(error, stats) { //check if dir exists
if(error == null) {
console.log(dirPath+" already exists");
fs.stat(tempFileName, function(error, stats) { //check if file exists
if(error == null) {
console.log("OK, writing "+newFilePath);
fs.rename(tempFileName, newFilePath, function(error) {
if(error) { //Error on the second run, because the file has been moved in the first run, shouldn't happen?
throw error;
} else {
var myCB = JSON.stringify({fMove: "OK"});
// NOTE(review): the payload is passed in the error position; the
// caller's `function(error)` treats this success value as an error.
callback(myCB);
}
});
} else {
console.log("File exists");
}
});
}
});
}
writeFile(fileName, request, function() {
//Do some stuff
});
// NOTE(review): a closing parenthesis for request.on(...) appears to be
// missing in this transcription.
request.on("end", function() {
//Do other stuff
}
});
http.createServer(myRequest).listen(8888);
Output from my script
Processing /home/pi/app/temp/client.js->/home/pi/upload/home/pi/app/temp/client.js with tempname client.js.temp
/home/pi/upload/home/pi/app/temp already exists
/home/pi/upload/home/pi/app/temp already exists
OK, Writing /home/pi/upload/home/pi/app/temp/client.js
OK, Writing /home/pi/upload/home/pi/app/temp/client.js
/home/pi/app/server.js:67
throw error;
^
{"fMove":"OK"}
Incorrect error handling made the script faulty.
In the moveFile function this part was wrong:
fs.rename(tempFileName, newFilePath, function(error) {
if(error) {
throw error;
} else {
var myCB = JSON.stringify({fMove: "OK"});
callback(myCB); // <-- Malformatted callback! Should be callback(null, myCB);
}
Which made this part of writeFile trigger on error and for some reason run twice:
moveFile(tempFileName, newFilePath, function(error) { //Should be moveFile(tempFileName, newFilePath, function(error, status) {
if(error) {
throw error;
} else {
console.log("OK");
}
});
When I fixed my code so it handles the error correctly, it works as intended!

Categories

Resources