Electron Dialog not saving the file - javascript

Electron version: 1.3.3
Operating system: Ubuntu 14.04
I want to save an XML object into a .xml file with Electron. I tried this:
const {dialog} = require("electron").remote;
dialog.showSaveDialog(myObj)
A new window opens and I fill in the file name, but nothing is saved.

In newer versions of Electron, dialog.showSaveDialog() returns a promise, and it's recommended to take the file path from the resolved result (result.filePath in the code below):
const {dialog} = require('electron').remote;
const fs = require('fs');

dialog.showSaveDialog({}).then(result => {
  const filename = result.filePath;
  if (filename === undefined) {
    alert('The user clicked the button but didn\'t create a file');
    return;
  }
  fs.writeFile(filename, content, (err) => {
    if (err) {
      alert('An error occurred creating the file: ' + err.message);
      return;
    }
    alert('The file was created successfully');
  });
}).catch(err => {
  alert(err);
});

The showSaveDialog() API does not save the file for you. You must take the returned path and use Node's fs module to save the file yourself.
const {dialog} = require('electron').remote;
const fs = require('fs');
dialog.showSaveDialog({}).then((result) => {
  fs.writeFile(result.filePath, MyFileData, (err) => {
    // file saved or err
  });
}).catch((err) => {
  // err
});
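Note that the resolved result also carries a canceled flag, so you can skip the write when the user dismisses the dialog. A minimal sketch of that guard (MyFileData stands in for whatever string or Buffer you want to persist):
dialog.showSaveDialog({}).then((result) => {
  // result.canceled is true when the user closed the dialog without saving
  if (result.canceled || !result.filePath) {
    return;
  }
  fs.writeFile(result.filePath, MyFileData, (err) => {
    if (err) console.error(err);
  });
});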

Related

Node.js upload multiple images - create dir when it doesn't exist

I'm using Node.js and multer to upload multiple images.
The first check is whether the directory exists; if not, it will be created.
Error: when the folder doesn't exist, all images pass the first fs.access condition with the message "Directory does not exist", but the directory is then created, so the second image gets the error "Directory exists".
var storage = multer.diskStorage({
  destination: (req, file, cb) => {
    const userId = encryptFolder(req.params["id"]);
    const dtrToCreate = "C:/Users/User/Pictures/" + userId;
    fs.access(dtrToCreate, (error) => {
      if (error) {
        console.log("Directory does not exist.", userId);
        fs.mkdirSync(dtrToCreate, (error, data) => {
          if (error) {
            throw error;
          }
          cb(null, "C:/Users/User/Pictures/");
        });
      } else {
        console.log("Directory exists.", userId);
        cb(null, "C:/Users/User/Pictures/");
      }
    });
  },
When the directory exists, the images are uploaded successfully.
Working solution:
Since multiple files pass through, the directory check has to hold for every file; fs.mkdirSync with the recursive option handles both the missing and the existing case (a fuller sketch follows below):
fs.mkdirSync(dtrToCreate, { recursive: true })
return cb(null, dtrToCreate)
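For context, a minimal sketch of the full destination callback with that fix in place, assuming the same encryptFolder helper from the question:
var storage = multer.diskStorage({
  destination: (req, file, cb) => {
    const userId = encryptFolder(req.params["id"]);
    const dirToCreate = "C:/Users/User/Pictures/" + userId;
    // mkdirSync with { recursive: true } is idempotent: it creates the
    // directory when missing and is a no-op when it already exists, so
    // concurrent files no longer race each other.
    fs.mkdirSync(dirToCreate, { recursive: true });
    return cb(null, dirToCreate);
  },
});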

EISDIR: illegal operation on a directory, read

When I try to upload an image into a bucket on the server side, I get the error above. Using the debugger, I checked that the file parameter contains the file's path and not the folder's path. Here's the code:
function uploadFile(file, directory) {
  return new Promise((resolve, reject) => {
    try {
      const bucket = storage.bucket(BUCKET_NAME);
      const bucketFile = bucket.file(directory ? `${directory}/${file.originalname}` : file.originalname);
      const blobStream = bucketFile.createWriteStream();
      blobStream.on('error', err => {
        const status = err.status || 500;
        console.log(err, status);
        reject(err);
      });
      blobStream.on('finish', async () => {
        // The public URL can be used to directly access the file via HTTP.
        await bucketFile.makePublic();
        const publicUrl = `https://storage.googleapis.com/${bucket.name}/${bucketFile.name}`;
        resolve(publicUrl);
      });
      blobStream.end(file.buffer);
    } catch (err) {
      reject(err);
    }
  });
}
Can you help me?
The path of the file was right, but the path to the credentials was wrong.
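For reference, a minimal sketch of where those credentials come in, assuming the @google-cloud/storage client; the key file path here is hypothetical:
const { Storage } = require('@google-cloud/storage');

// If keyFilename points at a wrong or missing file, later calls such as
// createWriteStream() can surface confusing errors rather than a clean
// authentication failure.
const storage = new Storage({
  keyFilename: '/path/to/service-account.json', // hypothetical path
});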

Read stream not firing / catching errors

I am trying to create a read stream to feed Cloudinary's upload stream function, and I am also using resumable.js to chunk the initial file. While stitching the chunks works perfectly fine (the whole file gets written), the read stream / Cloudinary upload function seems to not even fire and fails silently.
router.post("/upload", (req, res, next) => {
  console.log("the params are.. ", req.body);
  resumable.post(req, function(
    status,
    filename,
    original_filename,
    identifier
  ) {
    if (status === "done") {
      let timestamp = new Date().getTime().toString();
      // stitch the chunks
      var s = fs.createWriteStream(timestamp + filename);
      resumable.write(identifier, s);
      var upload_stream = cloudinary.uploader.upload_stream(
        { tags: "basic_sample" },
        function(err, image) {
          console.log();
          console.log("** Stream Upload");
          if (err) {
            console.warn(err);
          }
          console.log("* Same image, uploaded via stream");
          console.log("* " + image.public_id);
          console.log("* " + image.url);
          waitForAllUploads(timestamp + filename, err, image);
        }
      );
      fs.createReadStream(timestamp + filename)
        .pipe(upload_stream)
        .on("error", err => {
          console.log(err);
        });
      s.on("finish", function() {
        // Stream upload
        console.log("ive finished...");
        // delete chunks
        setTimeout(() => {
          resumable.clean(identifier);
        }, 1000);
      });
    }
    res.send(status);
  });
});
Here are the resources to what I am using:
https://github.com/cloudinary/cloudinary_npm/blob/master/samples/basic/basic.js
https://github.com/mrawdon/resumable-node
fs.createReadStream(timestamp + filename) accepts a file path, but it looks like you are passing the timestamp as well. Also, waitForAllUploads is not defined. You can try the following code, using just Node and Cloudinary, to test it out:
var upload_stream = cloudinary.uploader.upload_stream({ tags: 'basic_sample' }, function(err, image) {
  console.log("** Stream Upload");
  if (err) { console.warn(err); }
  console.log("* " + image.url);
});
var file_reader = fs.createReadStream('<file path>').pipe(upload_stream);
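A likely root cause in the original snippet is ordering: fs.createReadStream() runs before resumable.write() has finished stitching the chunks, so the upload reads an incomplete file. A minimal sketch of the fix, assuming the same resumable and cloudinary objects, is to start the read only in the write stream's finish handler:
var s = fs.createWriteStream(timestamp + filename);
resumable.write(identifier, s);

// Only start reading once the stitched file is fully flushed to disk.
s.on("finish", function() {
  var upload_stream = cloudinary.uploader.upload_stream(
    { tags: "basic_sample" },
    function(err, image) {
      if (err) return console.warn(err);
      console.log("* Uploaded via stream: " + image.url);
    }
  );
  fs.createReadStream(timestamp + filename)
    .pipe(upload_stream)
    .on("error", function(err) {
      console.log(err);
    });
});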

Download file via FTP, write to /tmp/ and output .txt contents to the console with AWS Lambda

I am using just a single Node package, basic-ftp, to try to download a TXT file and write the contents to the console. Further down the line I will be editing the text, so I will need to use fs. I'm just struggling to work with the output of createWriteStream from within the FTP program.
Can anyone help me write a TXT file to the /tmp/ directory within AWS Lambda, and show the correct syntax to open and edit the file after createWriteStream has been used?
var fs = require('fs');
const ftp = require("basic-ftp")
var path = require('path');

exports.handler = (event, context, callback) => {
  var fullPath = "/home/example/public_html/_uploads/15_1_5c653e6f6780f.txt"; // File Name FULL PATH -------
  const extension = path.extname(fullPath); // Used to calculate filenames below
  const wooFileName = path.basename(fullPath, extension); // Uploaded filename with no path or extension eg. filename
  const myFileNameWithExtension = path.basename(fullPath); // Uploaded filename with the file extension eg. filename.txt
  const FileNameWithExtension = path.basename(fullPath); // Uploaded filename with the file extension eg. filename.txt

  example()

  async function example() {
    const client = new ftp.Client()
    client.ftp.verbose = true
    try {
      await client.access({
        host: "XXXX",
        user: "XXXX",
        password: "XXXX",
        //secure: true
      })
      await client.download(fs.createWriteStream('./tmp/' + myFileNameWithExtension), myFileNameWithExtension)
    }
    catch (err) {
      console.log(err)
    }
    client.close()
  }

  // Read the content from the /tmp directory to check it's empty
  fs.readdir("/tmp/", function (err, data) {
    console.log(data);
    console.log('Contents of AWS Lambda /tmp/ directory');
  });

  /*
  downloadedFile = fs.readFile('./tmp/' + myFileNameWithExtension)
  console.log(downloadedFile)
  console.log("Raw text:\n" + downloadedFile.Body.toString('ascii'));
  */
}
Pretty sure your fs.createWriteStream() has to use an absolute path to /tmp in Lambdas. Your actual working directory is /var/task, not /.
Also, if you're using fs.createWriteStream() you'll need to wait for the finish event before reading from the file. Something like this:
async function example() {
  let finalData = '';
  const client = new ftp.Client()
  client.ftp.verbose = true
  try {
    await client.access({
      host: "XXXX",
      user: "XXXX",
      password: "XXXX",
      //secure: true
    })
    let writeStream = fs.createWriteStream('/tmp/' + myFileNameWithExtension);
    await client.download(writeStream, myFileNameWithExtension)
    finalData = await new Promise((resolve, reject) => {
      writeStream
        .on('finish', () => {
          fs.readFile("/tmp/" + myFileNameWithExtension, function (err, data) {
            if (err) {
              reject(err)
            } else {
              console.log('Contents of AWS Lambda /tmp/ directory', data);
              resolve(data);
            }
          });
        })
        .on('error', (err) => {
          console.log(err);
          reject(err);
        })
    });
  }
  catch (err) {
    console.log(err)
  }
  client.close();
  return finalData;
}
You'll also need to access the file using fs.readFile(). What you were using, fs.readdir(), gives you a list of files in the directory, not a file's contents.
If you want to use readdir() you could do it like this, but as you can see it is redundant in your case. To handle errors I would suggest just handling the error event on the initial createWriteStream() instead of adding this extra overhead (added to the previous example):
writeStream
  .on('finish', () => {
    fs.readdir('/tmp', (err, files) => {
      let saved = files.find(file => file === myFileNameWithExtension);
      fs.readFile("/tmp/" + saved, function (err, data) {
        if (err) throw new Error();
        console.log(data);
        console.log('Contents of AWS Lambda /tmp/ directory');
      });
    })
  })
  .on('error', (err) => {
    console.log(err);
    throw new Error();
  })
NOTE: Please log out the result of saved; I can't remember if the files array holds absolute or relative paths.
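For what it's worth, fs.readdir() yields bare file names with no directory prefix, so the find above compares plain names; a quick check, under the same /tmp assumption:
fs.readdir('/tmp', (err, files) => {
  if (err) throw err;
  // files is e.g. ['15_1_5c653e6f6780f.txt'], not ['/tmp/15_1_5c653e6f6780f.txt']
  console.log(files);
});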

Retrieving data from a ZIP File - NodeJS

I asked myself a question:
I can read files (CSV mainly) on a cloud platform, but when it's a zip I just get a bunch of:
j�\lȜ��&��3+xT��J��=��y��7���vu� {d�T���?��!�
Which is normal, so I wonder if there is a way to put that in a variable and unzip it using a lib or something like that.
Thanks for your time
You should use node-stream-zip (npm install node-stream-zip):
const StreamZip = require('node-stream-zip');
const zip = new StreamZip({
  file: 'archive.zip',
  storeEntries: true
});
and get the info like this:
zip.on('ready', () => {
  console.log('Entries read: ' + zip.entriesCount);
  for (const entry of Object.values(zip.entries())) {
    const desc = entry.isDirectory ? 'directory' : `${entry.size} bytes`;
    console.log(`Entry ${entry.name}: ${desc}`);
  }
  // Do not forget to close the file once you're done
  zip.close()
});
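To actually get an entry's contents into a variable (which is what the question asks), node-stream-zip can also open a read stream per entry; a small sketch, assuming the archive contains an entry named data.csv:
zip.on('ready', () => {
  zip.stream('data.csv', (err, stm) => {
    if (err) throw err;
    const chunks = [];
    stm.on('data', (chunk) => chunks.push(chunk));
    stm.on('end', () => {
      // The unzipped text, ready to assign to a variable
      const contents = Buffer.concat(chunks).toString('utf8');
      console.log(contents);
      zip.close();
    });
  });
});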
Hope it helps :-)
You should use the jszip npm package. It allows you to quickly read zip files.
Example:
var fs = require("fs");
var JSZip = require("jszip");

// read a zip file
fs.readFile("project.zip", function(err, data) {
  if (err) throw err;
  JSZip.loadAsync(data).then(function (zip) {
    const files = Object.keys(zip.files);
    console.log(files);
  });
});
To read the contents of a file in the zip archive you can use the following.
// read a zip file
fs.readFile("project.zip", function(err, data) {
  if (err) throw err;
  JSZip.loadAsync(data).then(function (zip) {
    // Read the contents of the 'Hello.txt' file
    zip.file("Hello.txt").async("string").then(function (data) {
      // data is "Hello World!"
      console.log(data);
    });
  });
});
and to download the zip file from the server (using the request package):
const request = require('request');

request('yourserverurl/helloworld.zip')
  .pipe(fs.createWriteStream('helloworld.zip'))
  .on('close', function () {
    console.log('File written!');
  });
