I am trying to store the path of the file I create after using SaveFileDialog. Here is my code:
let path;
dialog.showSaveDialog((fileName) => {
  if (fileName === undefined) {
    console.log("You didn't save the file");
    return;
  }
  fs.writeFile(fileName, text, (err) => {
    if (err) {
      alert("An error occurred creating the file " + err.message);
    }
    alert("The file has been successfully saved");
  });
});
What I want is for the file's path to end up in the variable path after the user saves the file. Is that even possible?
You can use the sync version of dialog.showSaveDialog. With the sync version there is no need to declare the path variable without initializing it:
let path = dialog.showSaveDialog({
  title: "Save file",
  // what kind of files the dialog shows (users will be able to save this kind of file)
  filters: [ { name: "png pictures", extensions: [ "png" ] } ],
  defaultPath: app.getPath("documents") // the default path to save the file
});

if (!path) {
  // path is undefined, the dialog was cancelled
  return;
}

fs.writeFile(path, text, (err) => {
  if (err)
    return alert("not saved");
  return alert("saved");
});
Or the async (callback) version:
dialog.showSaveDialog({
  title: "Save file",
  // what kind of files the dialog shows (users will be able to save this kind of file)
  filters: [ { name: "png pictures", extensions: [ "png" ] } ],
  defaultPath: app.getPath("documents") // the default path to save the file
}, (filePath) => {
  if (!filePath) {
    // the dialog was cancelled
    return;
  }
  path = filePath;
  fs.writeFile(path, text, (err) => {
    if (err)
      return alert("not saved");
    return alert("saved");
  });
});
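Note that in current Electron (6 and later) the callback form above was removed: dialog.showSaveDialog returns a Promise (as the next answer shows) and the synchronous call is named dialog.showSaveDialogSync. A minimal sketch of the sync variant under that assumption, where text is assumed to hold the content to write:
const { app, dialog } = require('electron');
const fs = require('fs');

// Electron >= 6: returns the chosen path, or undefined if the user cancelled.
const path = dialog.showSaveDialogSync({
  title: "Save file",
  filters: [ { name: "png pictures", extensions: [ "png" ] } ],
  defaultPath: app.getPath("documents")
});

if (path) {
  fs.writeFileSync(path, text); // `text` is assumed to be defined elsewhere
}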
This worked for me:
const { dialog } = require('electron');

dialog.showSaveDialog({
  title: "Save file"
}).then((filePath_obj) => {
  if (filePath_obj.canceled)
    console.log("canceled");
  else
    console.log('absolute path: ', filePath_obj.filePath);
});
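To tie this back to the question, you can write the file once the promise resolves and keep the path around; a rough sketch using fs.promises, assuming text holds the content to save:
const { dialog } = require('electron');
const fs = require('fs').promises;

async function saveAs(text) {
  const result = await dialog.showSaveDialog({ title: "Save file" });
  if (result.canceled) return undefined;

  await fs.writeFile(result.filePath, text, 'utf8');
  return result.filePath; // the saved file's absolute path
}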
You are almost there. You need to wait for the result of the dialog through the callback. Check the docs.
So something like:
let path;

function saveProjectAs(text) {
  var options = {
    title: "Save project as ",
    message: "Save project as ",
    nameFieldLabel: "Project Name:",
    // defaultPath: directory to show (optional)
  };

  dialog.showSaveDialog(mainWindow, options, saveProjectAsCallback);

  function saveProjectAsCallback(filePath) {
    // if the user pressed "cancel" then `filePath` will be undefined
    if (filePath) {
      // check for the extension; optional. upath is a node package.
      if (upath.toUnix(upath.extname(filePath)).toLowerCase() != ".json") {
        filePath = filePath + ".json";
      }
      path = filePath;
      fs.writeFile(path, text, (err) => {
        if (err) {
          alert("An error occurred creating the file " + err.message);
          return;
        }
        alert("The file has been successfully saved");
      });
    }
  }
}
How do I zip and download a folder from the D:/downloads path? As a first step, I was able to create a folder inside 'downloads' with dummy content. As a next step, I want to zip and download that folder.
async downloadFolder(selectedProduct) {
  try {
    let completeZip = await this.jobService.zipBlobs(selectedProduct.path, this.role).toPromise();
    if (completeZip['status'] == 'success') {
      let download = await this.jobService.downloadBlobs(selectedProduct.path, this.role).toPromise();
      console.log(download);
    }
  } catch (error) {
    console.log(error);
  }
}
API:
Once the file is written, I want to zip that folder and download it locally, but nothing happens.
exports.zipBlobs = async function (req, res) {
  var userrole = req.body.userrole;
  var path = req.body.path;
  fileUploadPath = "d:/downloads";
  blobService.listBlobsSegmentedWithPrefix(containerName, path, null, (err, data) => {
    if (err) {
      reject(err);
    } else {
      data.entries.forEach(entry => {
        console.log(entry.name); // '155ce0e4-d763-4153-909a-407dc4e328d0/63690689-e183-46ae-abbe-bb4ba5507f1a_MULTI_0_3/output/res2/res2.fcs'
        if (fs.existsSync(fileUploadPath)) {
          var sourceFilePath = fileUploadPath + '/' + entry.name;
          if (!fs.existsSync(sourceFilePath)) {
            fs.mkdir(require('path').dirname(sourceFilePath), { recursive: true }, (err) => {
              if (err) {
                console.log("Failed :" + err);
              } else {
                console.log('folder created, create file');
                const fstream = fs.createWriteStream(sourceFilePath);
                fstream.write('fileContent');
                fstream.end();
                fstream.on("finish", f => {
                  console.log('finish', f);
                });
                fstream.on("error", e => {
                  console.log('error', e);
                });
              }
            });
          } else {
            console.log('folder already exists, create file');
            const fstream = fs.createWriteStream(sourceFilePath);
            fstream.write('fileContent');
            fstream.end();
            fstream.on("finish", f => {
              console.log('finish', f);
            });
            fstream.on("error", e => {
              console.log('error', e);
            });
          }
        } else {
          console.log('downloads folder does not exist!');
        }
      });
    }
  });
};
API to zip and download the folder:
exports.downloadFolders = async function (req, res) {
  var userrole = req.body.userrole;
  var path = req.body.path;
  try {
    const folderpath = 'D:\downloads\622b6a148a813f18b8b2de81';
    require('child_process').execSync(`zip -r archive *`, {
      cwd: folderpath
    });
    // does not create the zip, nor download it
    res.download(folderpath + '/archive.zip');
    return;
  } catch (error_1) {
    res.status(200).json({
      status: error_1
    });
    return;
  }
}
In JavaScript strings, backslashes must be doubled:
const folderpath = 'D:\\downloads\\622b6a148a813f18b8b2de81';
Without doubling them, you effectively get
const folderpath = 'D:downloads22b6a148a813f18b8b2de81'
because '\d' === 'd' and '\6' is a non-printable character.
You can also write the result of zip to the standard output and pipe it into the response object:
res.set("Content-Disposition", "attachment;filename=archive.zip");
require("child_process").exec("zip -r - *", {
  cwd: folderpath
}).stdout.pipe(res);
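Also note that child_process.exec buffers the child's output in memory (up to maxBuffer), and the zip CLI is often not installed on Windows, which the D: path suggests is the environment here. A rough alternative sketch using the archiver npm package (an assumption, not part of the original setup), which streams the archive straight into the response without shelling out:
const archiver = require('archiver');

exports.downloadFolders = function (req, res) {
  const folderpath = 'D:\\downloads\\622b6a148a813f18b8b2de81'; // backslashes doubled, as above

  res.set('Content-Type', 'application/zip');
  res.set('Content-Disposition', 'attachment; filename=archive.zip');

  const archive = archiver('zip', { zlib: { level: 9 } });
  archive.on('error', (err) => res.status(500).end());
  archive.pipe(res);                    // stream zip entries directly into the HTTP response
  archive.directory(folderpath, false); // add the folder's contents at the root of the zip
  archive.finalize();
};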
This is something I used in one of my projects where I needed the whole directory downloaded as a zip.
Require the following library:
const zipdir = require('zip-dir')
Then, when you need to download the zip, call it as follows:
zipdir(
  'D:/downloads/622b6a148a813f18b8b2de81',
  { saveTo: 'D:/downloads/622b6a148a813f18b8b2de81/archive.zip' },
  (err, buffer) => {
    if (err) throw err;
    console.log('New zip file created!');
  }
);
Following is the API signature:
app.get('/api/zip', function (req, res) {
  // create new zip
  zipdir(
    'D:/downloads/622b6a148a813f18b8b2de81',
    { saveTo: 'D:/downloads/622b6a148a813f18b8b2de81/archive.zip' },
    (err, buffer) => {
      if (err) throw err;
      console.log('New zip file created!');
      res.download('D:/downloads/622b6a148a813f18b8b2de81/archive.zip');
    }
  );
});
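One thing to watch out for: saveTo points inside the folder being zipped, so on a second run the previous archive.zip would end up inside the new archive. zip-dir accepts a filter callback that can exclude it; a small sketch (the filter pattern is illustrative):
zipdir(
  'D:/downloads/622b6a148a813f18b8b2de81',
  {
    saveTo: 'D:/downloads/622b6a148a813f18b8b2de81/archive.zip',
    // skip any previously generated zip files
    filter: (fullPath, stat) => !/\.zip$/.test(fullPath)
  },
  (err, buffer) => {
    if (err) throw err;
    res.download('D:/downloads/622b6a148a813f18b8b2de81/archive.zip');
  }
);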
I'm using the npm package formidable to:
Check the content of a file
If the content satisfies the condition, the file upload can be done; otherwise, reject the request with an error message
Here's the layout of my code:
import Formidable from 'formidable';

const form = new Formidable({
  uploadDir: 'myFolder',
  allowEmptyFiles: false,
  keepExtensions: true,
});

form.on('fileBegin', (formname, file) => {
  // Changing the filename
});

let canUploadFile = false;

form.onPart = (part) => {
  // Checking if the filename is mentioned in the file content
  // If so, upload the file
  // Else, reject and send a 400 HTTP response
  const dmnFileName = path.basename(part?.filename ?? '', '.dmn');
  part.on('data', (buffer) => {
    // do whatever you want here
    const bufferData = Buffer.from(buffer).toString();
    const parser = new htmlparser2.Parser({
      onopentag(name: any, attributes: any) {
        // check is done here by setting canUploadFile
      },
    });
    parser.write(bufferData);
    parser.end();
  });
};

form.parse(req, (err) => {
  if (err) {
    next(err);
    return;
  }
  logger.info(`File is being parsed...`);
});

form.on('end', async function () {
  if (canUploadFile) {
    res.send({ message: 'File uploaded!' });
  } else {
    res.status(400).send({
      message: 'The file cannot be uploaded',
    });
  }
});
When I execute this code, the file doesn't get uploaded, but when I remove the form.onPart handler, the file gets uploaded!
What am I missing?
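For what it's worth, overriding form.onPart replaces formidable's built-in part handling, so nothing is written to uploadDir unless the part is handed back to formidable. The pattern documented in formidable's README is to call form._handlePart(part) from your own handler; a rough sketch (verify against the formidable version in use):
form.onPart = (part) => {
  // run the custom check here, e.g. inspect 'data' events with htmlparser2
  // and set canUploadFile accordingly ...

  // ... then hand the part back so formidable still writes the file to uploadDir
  form._handlePart(part);
};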
This is driving me crazy. I don't know why, but when I use the filesystem module (require('fs')) to read a valid JSON file, I get the error "Invalid JSON". Here is the code:
var fs = require('fs');
const FILE_NAME = './json.json';

/*
// This way it works!
var jsonData = require('./json.json');
console.log(jsonData);
*/

async function readFile(filePath) {
  return new Promise(function (resolve, reject) {
    fs.readFile(filePath, 'utf8', function (err, contents) {
      if (err) {
        console.log("Can't read the file " + err);
        reject(err);
      } else {
        resolve(contents);
      }
    });
  });
}

async function getNames(fileName) {
  readFile(fileName).then(function (data) {
    try {
      console.log(`Type of data: ` + typeof data);
      console.log("File data: " + data);
      return JSON.parse(data);
    } catch (error) {
      throw new Error("Invalid JSON: " + error);
    }
  }).then(function (data) {
    console.log(`FILE OBJECT: ` + data);
  }).catch(function (err) {
    console.error(err);
  });
}

getNames(FILE_NAME);
This is the file content:
{
"name": "riko"
}
This is the console output:
Type of data: string
File data: {
"name": "riko"
}
Error: Invalid JSON: SyntaxError: Unexpected token in JSON at position 0
at C:\Users\rojer\Desktop\Node\test\main.js:31:13
I know I could use var jsonData = require('./json.json'), but:
I want to know the reason it doesn't work.
What if the JSON data is embedded somewhere in a regular text file?
There seems to be some garbage in the file.
Please help.
This:
Error: Invalid JSON: SyntaxError: Unexpected token in JSON at position 0
at C:\Users\rojer\Desktop\Node\test\main.js:31:13
tells us that there's an invisible character at the beginning of the file, probably a byte order mark (BOM), that require is handling but your code isn't. If the file is really in UTF-8, that BOM will be \xEF\xBB\xBF. Once read as UTF-8 into a JavaScript string, that will be the code point \u{FEFF} (because JavaScript strings are UTF-16 [but tolerate invalid surrogate pairs]). Update: Your binary listing of it confirms that.
I can confirm that if I have a UTF-8 JSON file with a BOM, require reads it and handles the BOM, but readFile returns the contents of the file with the BOM intact, which trips up JSON.parse.
You can check for the BOM and strip it off; see the *** lines:
const UTF8_BOM = "\u{FEFF}"; // ***

async function getNames(fileName) {
  readFile(fileName).then(function (data) {
    try {
      console.log(`Type of data: ` + typeof data);
      if (data.startsWith(UTF8_BOM)) {            // ***
        data = data.substring(UTF8_BOM.length);   // ***
      }
      console.log("File data: " + data);
      return JSON.parse(data);
    } catch (error) {
      throw new Error("Invalid JSON: " + error);
    }
  }).then(function (data) {
    console.log(`FILE OBJECT: ` + data);
  }).catch(function (err) {
    console.error(err);
  });
}
Alternatively, if you don't want the BOM there, here's a quick-and-dirty tool to add/remove BOMs on UTF-8 files:
const fs = require("fs");

const UTF8_BOM = "\u{FEFF}";

const actions = new Map([
  ["-a", addBOM],
  ["-r", removeBOM],
  ["-t", toggleBOM]
]);

main();

function main() {
  const filename = process.argv[2];
  const option = process.argv[3] || "-t";
  const action = actions.get(option);
  if (!filename) {
    help();
    return;
  }
  if (!action) {
    console.error(`Invalid option ${option}`);
    help();
    return;
  }
  fs.readFile(filename, 'utf-8', (err, data) => {
    if (err) {
      console.error(`${filename}: Error reading file: ${err}`);
      return;
    }
    const hasBOM = data.startsWith(UTF8_BOM);
    action(filename, data, hasBOM);
  });
}

function writeResult(filename, data, toggle, successMessage, failMessage) {
  fs.writeFile(filename, data, "utf-8", (err) => {
    if (err) {
      console.error(`${filename}: ${failMessage}: ${err}`);
    } else {
      console.log(`${filename}: ${successMessage}${toggle ? " (toggled)" : ""}`);
    }
  });
}

function addBOM(filename, data, hasBOM, toggle) {
  if (hasBOM) {
    console.log(`${filename}: Already has a BOM`);
  } else {
    writeResult(filename, UTF8_BOM + data, toggle, "Added BOM", "Error adding BOM");
  }
}

function removeBOM(filename, data, hasBOM, toggle) {
  if (!hasBOM) {
    console.log(`${filename}: Already doesn't have a BOM`);
  } else {
    writeResult(filename, data.substring(UTF8_BOM.length), toggle, "Removed BOM", "Error removing BOM");
  }
}

function toggleBOM(filename, data, hasBOM) {
  if (hasBOM) {
    removeBOM(filename, data, hasBOM, true);
  } else {
    addBOM(filename, data, hasBOM, true);
  }
}

function help() {
  console.log("Usage: node utf8bomtoggle [filename] {options}");
  console.log("{options} can be:");
  console.log("  -t   Toggle a BOM [default]");
  console.log("  -a   Add a BOM if not present");
  console.log("  -r   Remove a BOM if present");
}
I am upgrading to Sails.js version 0.10 and now need to use Skipper to manage my file uploads.
When I upload a file I generate a new name for it using a UUID, and save it in the public/files/ folder (this will change when I've got this all working but it's good for testing right now)
I save the original name, and the uploaded name + path into a Mongo database.
This was all quite straightforward under Sails v0.9.x, but using Skipper I can't figure out how to read the new file name and path. (Obviously if I could read the name I could construct the path, so it's really only the name I need.)
My Controller looks like this
var uuid = require('node-uuid'),
    path = require('path'),
    blobAdapter = require('skipper-disk');

module.exports = {
  upload: function(req, res) {
    var receiver = blobAdapter().receive({
          dirname: sails.config.appPath + "/public/files/",
          saveAs: function(file) {
            var filename = file.filename,
                newName = uuid.v4() + path.extname(filename);
            return newName;
          }
        }),
        results = [];

    req.file('docs').upload(receiver, function (err, files) {
      if (err) return res.serverError(err);

      async.forEach(files, function(file, next) {
        Document.create({
          name: file.filename,
          size: file.size,
          localName: // ***** how do I get the `saveAs()` value from the uploaded file *****,
          path: // *** and likewise how do I get the path ******
        }).exec(function(err, savedFile){
          if (err) {
            next(err);
          } else {
            results.push({
              id: savedFile.id,
              url: '/files/' + savedFile.localName
            });
            next();
          }
        });
      }, function(err){
        if (err) {
          sails.log.error('caught error', err);
          return res.serverError({error: err});
        } else {
          return res.json({ files: results });
        }
      });
    });
  },

  _config: {}
};
How do I do this?
I've worked this out now and thought I'd share my solution for the benefit of others struggling with similar issues.
The solution was to not use skipper-disk at all but to write my own custom receiver. I've created this as a Sails Service object.
So in file api/services/Uploader.js
// Uploader utilities and helper methods
// designed to be relatively generic.

var fs = require('fs'),
    Writable = require('stream').Writable;

exports.documentReceiverStream = function(options) {
  var defaults = {
    dirname: '/dev/null',
    saveAs: function(file){
      return file.filename;
    },
    completed: function(file, done){
      done();
    }
  };

  // I don't have access to jQuery here so this is the simplest way I
  // could think of to merge the options.
  var opts = defaults;
  if (options.dirname) opts.dirname = options.dirname;
  if (options.saveAs) opts.saveAs = options.saveAs;
  if (options.completed) opts.completed = options.completed;

  var documentReceiver = Writable({objectMode: true});

  // This `_write` method is invoked each time a new file is received
  // from the Readable stream (Upstream) which is pumping filestreams
  // into this receiver. (filename === `file.filename`).
  documentReceiver._write = function onFile(file, encoding, done) {
    var newFilename = opts.saveAs(file),
        fileSavePath = opts.dirname + newFilename,
        outputs = fs.createWriteStream(fileSavePath, encoding);
    file.pipe(outputs);

    // Garbage-collect the bytes that were already written for this file.
    // (called when a read or write error occurs)
    function gc(err) {
      sails.log.debug("Garbage collecting file '" + file.filename + "' located at '" + fileSavePath + "'");
      fs.unlink(fileSavePath, function (gcErr) {
        if (gcErr) {
          return done([err].concat([gcErr]));
        } else {
          return done(err);
        }
      });
    }

    file.on('error', function (err) {
      sails.log.error('READ error on file ' + file.filename, '::', err);
    });

    outputs.on('error', function failedToWriteFile (err) {
      sails.log.error('failed to write file', file.filename, 'with encoding', encoding, ': done =', done);
      gc(err);
    });

    outputs.on('finish', function successfullyWroteFile () {
      sails.log.debug("file uploaded");
      opts.completed({
        name: file.filename,
        size: file.size,
        localName: newFilename,
        path: fileSavePath
      }, done);
    });
  };

  return documentReceiver;
};
and then my controller just became (in api/controllers/DocumentController.js)
var uuid = require('node-uuid'),
    path = require('path');

module.exports = {
  upload: function(req, res) {
    var results = [],
        streamOptions = {
          dirname: sails.config.appPath + "/public/files/",
          saveAs: function(file) {
            var filename = file.filename,
                newName = uuid.v4() + path.extname(filename);
            return newName;
          },
          completed: function(fileData, next) {
            Document.create(fileData).exec(function(err, savedFile){
              if (err) {
                next(err);
              } else {
                results.push({
                  id: savedFile.id,
                  url: '/files/' + savedFile.localName
                });
                next();
              }
            });
          }
        };

    req.file('docs').upload(Uploader.documentReceiverStream(streamOptions),
      function (err, files) {
        if (err) return res.serverError(err);
        res.json({
          message: files.length + ' file(s) uploaded successfully!',
          files: results
        });
      }
    );
  },

  _config: {}
};
I'm sure it can be improved further but this works perfectly for me.
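One small improvement, for instance: on Node versions that support it, the manual option merging in the service could be done with Object.assign, which performs the same shallow merge without touching the defaults object; a minimal sketch:
// Shallow-merge caller-supplied options over the defaults into a fresh object.
var opts = Object.assign({}, defaults, options);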
The uploaded file object contains all data you need:
req.file('fileTest').upload({
  // You can apply a file upload limit (in bytes)
  maxBytes: maxUpload,
  adapter: require('skipper-disk')
}, function whenDone(err, uploadedFiles) {
  if (err) {
    var error = { "status": 500, "error": err };
    res.status(500);
    return res.json(error);
  } else {
    for (var u in uploadedFiles) {
      // "fd" contains the actual file path (and name) of your file on disk
      fileOnDisk = uploadedFiles[u].fd;
      // I suggest you stringify the object to see what it contains and might be useful to you
      console.log(JSON.stringify(uploadedFiles[u]));
    }
  }
});
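Tying this back to the original question: since fd is the full path of the file on disk, both the stored name and the path for the Document record can be derived from it. A rough sketch (field names follow the question's model; filename and size are the other properties skipper reports per file):
var path = require('path');

uploadedFiles.forEach(function (file) {
  Document.create({
    name: file.filename,               // original name sent by the client
    size: file.size,
    localName: path.basename(file.fd), // the generated name on disk
    path: file.fd                      // full path skipper-disk wrote to
  }).exec(function (err, savedFile) { /* ... */ });
});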
I have a simple route defined with express.js:
exports.save = function (request, response)
{
  var file = request.body.file;
  var content = request.body.content;
  var saved = false;

  if (fs.existsSync( file ))
  {
    saved = fs.writeFileSync(file, content, 'utf8');
  }

  console.log( saved ); // undefined or false, never true
  response.send(saved ? 200 : 500, saved ? 'saved' : 'error'); // 500, error
};
Is if (typeof saved === 'undefined') saved = true; the only option? Feels hacky.
According to the Node.js source code, fs.writeFileSync doesn't return anything.
It throws an Error object if something goes wrong, so you should call fs.writeFileSync(file, content, 'utf8'); within a try-catch block.
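A minimal sketch of the route with that change (the status codes mirror the original intent):
exports.save = function (request, response) {
  var file = request.body.file;
  var content = request.body.content;

  try {
    fs.writeFileSync(file, content, 'utf8'); // throws on failure, returns undefined on success
    response.status(200).send('saved');
  } catch (err) {
    response.status(500).send('error');
  }
};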
fs.writeFileSync does not return any value; if no exception is thrown, the save succeeded, otherwise it failed.
You may want to try the async versions of the file APIs:
fs.exists(file, function (exists) {
  if (exists) {
    fs.writeFile(file, content, 'utf-8', function (err) {
      if (err) {
        response.send("failed to save");
      } else {
        response.send("succeeded in saving");
      }
    });
  } else {
    console.log('file does not exist');
  }
});
fs.exists(file, function (exists) {
  if (exists) {
    fs.writeFile(file, content, err => {
      if (err) res.status(500).send({ error: "failed to save" });
      else res.status(200).send({ message: "succeeded in saving" });
    });
  } else {
    res.status(404).send({ error: "file does not exist" });
  }
});
Use async instead of sync. This will work.
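For what it's worth, fs.exists is deprecated; a hedged sketch of the same route using fs.promises and async/await (assuming Node 10+):
const fs = require('fs').promises;

exports.save = async function (request, response) {
  const { file, content } = request.body;
  try {
    await fs.access(file);                     // rejects if the file doesn't exist
    await fs.writeFile(file, content, 'utf8');
    response.status(200).send('saved');
  } catch (err) {
    response.status(500).send('error');
  }
};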