express-busboy npm creates folder in public directory (Express.js) - javascript

In my controller, when I try to readFile a file sent from the browser via AJAX, a directory is suddenly created in my public folder, something like
'3d6c3049-839b-40ce-9aa3-b76f08bf140b' -> file -> myfile
exports.assetAdd = function(req, res) {
  var d = JSON.parse(req.body.data);
  var f = req.files.file;

  return; // here I can already see the unwanted created directory

  // Create S3 service object
  var s3 = new AWS.S3({
    apiVersion: '2017-03-01'
  });

  // console.log("file", f)
  fs.readFile(f.file, function(err, data) {
    return res.json(data);
  });
};
How can I remove (or prevent) this directory?

This is an issue with the package; an issue has already been opened:
https://github.com/yahoo/express-busboy/issues/16
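Until that is fixed upstream, one workaround is to clean up the per-request temp directory yourself after you have read the file. The sketch below is untested; it assumes (as in the question) that req.files.file.file holds the full path of the stored file, that the layout is <uploadPath>/<uuid>/<fieldname>/<filename>, and that you are on Node 14.14+ where fs.rm supports recursive deletion:
var fs = require('fs');
var path = require('path');

exports.assetAdd = function(req, res) {
  var f = req.files.file;

  fs.readFile(f.file, function(err, data) {
    // Two dirname() calls climb from .../<uuid>/file/myfile up to .../<uuid>
    var uuidDir = path.dirname(path.dirname(f.file));
    fs.rm(uuidDir, { recursive: true, force: true }, function(rmErr) {
      if (rmErr) console.error('temp cleanup failed', rmErr);
    });
    if (err) return res.status(500).end();
    return res.json(data);
  });
};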


Multiple file stream instead of download to disk and then zip?

I have an API method that, when called and passed an array of file keys, downloads them from S3. I'd like to stream them rather than download them to disk, then zip the files and return the archive to the client.
This is what my current code looks like:
reports.get('/xxx/:filenames', async (req, res) => {
  var AWS = require('aws-sdk');
  var s3 = new AWS.S3();
  var filenames = req.params.filenames;
  var str_array = filenames.split(',');
  for (var i = 0; i < str_array.length; i++) {
    var filename = str_array[i].trim();
    var localFileName = './' + filename;
    var params = {
      Bucket: config.reportBucket,
      Key: filename
    };
    s3.getObject(params, (err, data) => {
      if (err) console.error(err);
      var file = require('fs').createWriteStream(localFileName);
      s3.getObject(params).createReadStream().pipe(file);
      console.log(file);
    });
  }
});
How would I stream the files rather than downloading them to disk and how would I zip them to return that to the client?
The main problem is zipping multiple files; more specifically, downloading them from AWS S3 in bulk. I've searched through the AWS SDK and didn't find bulk S3 operations, which brings us to one possible solution:
Load the files one by one and store them in a folder
Zip the folder (with a package like zip-folder)
Send the zipped folder
This is a raw and untested example, but it might give you the idea:
// Always import packages at the beginning of the file.
const AWS = require('aws-sdk');
const fs = require('fs');
const zipFolder = require('zip-folder');
const s3 = new AWS.S3();

reports.get('/xxx/:filenames', async (req, res) => {
  const filesArray = req.params.filenames.split(',');
  for (const fileName of filesArray) {
    const localFileName = './' + fileName.trim();
    const params = {
      Bucket: config.reportBucket,
      Key: fileName.trim()
    };
    // Probably you'll need some Promise logic here, to handle the end of the stream operation.
    const fileStream = fs.createWriteStream(localFileName);
    s3.getObject(params).createReadStream().pipe(fileStream);
  }
  // After that, all required files will be in the target folder.
  // Now you need to compress the folder and send it back to the user.
  // We wrap the callback function in a Promise, to make the code look "sync".
  await new Promise(resolve => zipFolder('/path/to/the/folder', '/path/to/archive.zip', (err) => resolve()));
  // And now you can send the zipped folder to the user (also using streams).
  fs.createReadStream('/path/to/archive.zip').pipe(res);
});
Info about streams link and link
Attention: you could run into problems with async behaviour here, because of the nature of streams, so please first check that all files have actually been stored in the folder before zipping.
Just a mention: I've not tested this code, so if any questions appear, let's debug it together.
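For the "Promise logic" placeholder above, a raw, untested sketch (reusing s3, fs and config from the snippet) is to wrap each pipe in a Promise that resolves on the write stream's 'finish' event, and await them all before zipping:
const downloadToFile = (params, localFileName) =>
  new Promise((resolve, reject) => {
    const fileStream = fs.createWriteStream(localFileName);
    s3.getObject(params).createReadStream()
      .on('error', reject)      // S3 read error
      .pipe(fileStream)
      .on('error', reject)      // local write error
      .on('finish', resolve);   // file fully written to disk
  });

// Inside the route handler, before zipping:
// await Promise.all(filesArray.map(name => {
//   const key = name.trim();
//   return downloadToFile({ Bucket: config.reportBucket, Key: key }, './' + key);
// }));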

How to delete file after upload using node?

I am using multiparty to upload a file to the server, and I have noticed that when using form.parse a file is added to the temp folder of the OS file system.
I need to remove that file after the form closes, but I cannot get the file path information.
Any idea how to solve this problem?
function onUpload(req, res) {
  var form = new multiparty.Form();
  form.parse(req, function(err, fields, files) {
    onSimpleUpload(fields, files[fileInputName][0], res);
  });
  // Close emitted after form is parsed
  form.on('close', function() {
    // cannot get the file here to be deleted
  });
}
You can get the path of the file saved on the local filesystem from files[fileInputName][0].path. To be specific:
var fs = require('fs');
var filePath = files[fileInputName][0].path;
fs.unlinkSync(filePath);
or async:
var fs = require('fs');
var filePath = files[fileInputName][0].path;
fs.unlink(filePath, function(err) {
  if (err) {
    // do something with the error
  } else {
    // delete successful
  }
});
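Putting that together with the handler from the question, a minimal untested sketch could look like this (the completion callback on onSimpleUpload is a hypothetical addition, so the temp file is only removed once the upload handling is done):
var fs = require('fs');
var multiparty = require('multiparty');

function onUpload(req, res) {
  var form = new multiparty.Form();
  form.parse(req, function(err, fields, files) {
    if (err) return res.status(500).end();
    var file = files[fileInputName][0];
    onSimpleUpload(fields, file, res, function() {
      // Remove the temp file once we are done with it
      fs.unlink(file.path, function(unlinkErr) {
        if (unlinkErr) console.error('Could not delete temp file', unlinkErr);
      });
    });
  });
}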

unzip error [Error: invalid signature: 0xff000001]

I'm using the following library to unzip in Node:
https://github.com/EvanOxfeld/node-unzip
The code I use is:
var extractor = unzip.Extract({
  path: 'C://TestFolder//TestZip'
}).on('close', function () {
  console.log("Success to unzip");
}).on('error', function (err) {
  console.log("error to unzip", err);
});
req.pipe(extractor);
The problem is that for some zip files I'm getting errors like these (others work fine):
[Error: invalid signature: 0x8080014]
[Error: invalid signature: 0x83870008]
....
This error doesn't give a lot of info...
Searching the web I found this:
https://github.com/EvanOxfeld/node-unzip/issues/41
and installed and required the unzip2 package instead of unzip. The issue now is that I'm getting the following error:
unzip Error: invalid signature: 0xff000001
I use the same code for unzip and unzip2 (which I provided above). Do I need to use it differently? Any hints on how to solve it?
UPDATE
I send the zip file from Postman like the following:
You can temporarily save the ZIP file on your disk, and then extract it using adm-zip.
Here is a code sample:
Client Side:
<form action="/upload" method="post" enctype="multipart/form-data">
Select image to upload:
<input type="file" name="fileToUpload" id="fileToUpload">
<input type="submit" value="Upload Image" name="submit">
</form>
Server Side
Using multer to save the uploaded file, and adm-zip to extract it.
You need to install both:
npm install --save multer
npm install --save adm-zip
After installing, here is an example of using them together:
var multer = require('multer');   // a module for saving files uploaded from a form
var AdmZip = require('adm-zip');  // a module for extracting zip files
var express = require('express'); // module for receiving HTTP traffic

var app = express();
var upload = multer({ dest: 'uploads/' });

app.post('/upload', upload.single('fileToUpload'), function(req, res) {
  console.log('The file was uploaded to: ' + req.file.path);
  var zip = new AdmZip(req.file.path);
  zip.extractAllTo("/destination_folder/");
  res.end('File has been extracted');
});
Information about the modules I used:
https://github.com/expressjs/multer , https://github.com/cthackers/adm-zip
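If you don't want uploaded archives to pile up in uploads/, you can also delete the temp file once extraction is done. Since adm-zip's extractAllTo() is synchronous, a small untested addition to the handler above could be:
var fs = require('fs'); // not in the original snippet

app.post('/upload', upload.single('fileToUpload'), function(req, res) {
  var zip = new AdmZip(req.file.path);
  zip.extractAllTo("/destination_folder/", /*overwrite*/ true);
  // extractAllTo is synchronous, so it is safe to remove the upload now
  fs.unlink(req.file.path, function(err) {
    if (err) console.error('could not remove temp upload', err);
  });
  res.end('File has been extracted');
});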
node-unzip2 patches this problem. Example:
var fs = require('fs');
var fstream = require('fstream');
var unzip = require('unzip2');

var readStream = fs.createReadStream('path/to/archive.zip');
var writeStream = fstream.Writer('output/path');

readStream
  .pipe(unzip.Parse())
  .pipe(writeStream);
Try your unzip solution, but for receiving the binary data, attach this middleware and then get your file from req.rawBody:
app.use(function(req, res, next) {
  var data = new Buffer('');
  req.on('data', function(chunk) {
    data = Buffer.concat([data, chunk]);
  });
  req.on('end', function() {
    req.rawBody = data;
    next();
  });
});
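With that middleware in place, one way to feed the buffered body to the extractor (instead of piping req directly) is to wrap the Buffer in a stream first. This is an untested sketch that reuses the Extract options from the question:
var stream = require('stream');
var unzip = require('unzip2');

app.post('/upload_zip', function(req, res) {
  var extractor = unzip.Extract({ path: 'C://TestFolder//TestZip' })
    .on('close', function() { res.end('Success to unzip'); })
    .on('error', function(err) { res.status(500).end('error to unzip: ' + err.message); });

  // Turn the raw Buffer collected by the middleware back into a readable stream
  var bufferStream = new stream.PassThrough();
  bufferStream.end(req.rawBody);
  bufferStream.pipe(extractor);
});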
As #Amina said, you can temporarily save the ZIP file on your disk and then extract it with whatever unzip package you like: unzip, adm-zip, unzip2, unzippy, etc.
For some context, my app structure looks like this:
//App path --> C:\xampp\htdocs\service
service\
|
-- tmp\
|
-- app.js
|
-- index.html
You're using unzip2, right? Here's my code.
Server Side:
I'm using unzip2 to extract the zip file; you can test it using Postman too. Don't forget to use enctype="multipart/form-data" when you post it. :D
var express = require("express");
var fs = require("fs");
var unzip = require("unzip2");
var multer = require("multer");

var app = express();
var multer_dest = multer({dest: "./tmp"}).single('zipFile');

app.post("/upload_zip", multer_dest, function(req, res) {
  console.log(req.file);
  fs.createReadStream(req.file.path)
    .pipe(unzip.Extract({path: 'C:\\TestFolder\\TestZip'}))
    .on('close', function() {
      // Delete the temporary upload only after extraction has finished
      fs.unlink(req.file.path, function(e) {
        if (e) throw e;
        console.log('successfully deleted ' + req.file.path);
      });
      res.end(JSON.stringify({
        file: req.file,
        message: "File has been extracted"
      }));
    });
});

var server = app.listen(8081, function() {
  var host = server.address().address;
  var port = server.address().port;
  console.log("Example App Listening at http://%s:%s", host, port);
});

Can't download AWS S3 File in nodejs

I'm trying to use Amazon's S3 service. I managed to upload gzipped files to my bucket, but I can't retrieve them. I tried using the code example that I found here; everything works fine when I'm uploading the files, but I can't download them.
This is my upload code:
var s3 = new AWS.S3();
s3.headBucket({Bucket: bucketName}, function (err) {
  if (err) s3.createBucket({Bucket: bucketName}, cb);
  var body = fs.createReadStream(file).pipe(zlib.createGzip());
  s3.upload({Bucket: bucketName, Key: key, Body: body}).send(cb);
});
And this is my download code:
var s3 = new AWS.S3();
var params = {Bucket: bucketName, Key: key};
var outFile = require('fs').createWriteStream(file);
s3.getObject(params).createReadStream().pipe(zlib.createGunzip()).pipe(outFile);
But I get the error throw new Error('Cannot switch to old mode now.'); on the last line,
and I can't figure out how to fix it. I'm using Node 0.10.25 (and I can't change it).
So I tried using this:
var params = {Bucket: bucketName, Key: key};
s3.getObject(params, function(err, data) {
  var outFile = require('fs').createWriteStream(file);
  var read = AWS.util.buffer.toStream(data.Body);
  read.pipe(zlib.createGzip()).pipe(outFile);
  read.on('end', function() { cb(); });
});
but I often get error 104 (unexpected end of input).
Anyone has some ideas?
"Unexpected end of input" is perhaps due to the pipe getting closed prematurely, or some other error being encountered in the middle of reading a fixed-size block or data structure.
You can look at https://github.com/minio/minio-js as an alternative; it is fully written in Streams2 style.
Here is an example.
$ npm install minio
$ cat >> get-object.js << EOF
var Minio = require('minio')
var fs = require('fs')
// find out your s3 end point here:
// http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
var s3Client = new Minio({
  url: 'https://<your-s3-endpoint>',
  accessKey: 'YOUR-ACCESSKEYID',
  secretKey: 'YOUR-SECRETACCESSKEY'
})
var outFile = fs.createWriteStream('test.txt');
s3Client.getObject('mybucket', 'my-key', function(e, dataStream) {
  if (e) {
    return console.log(e)
  }
  dataStream.pipe(outFile)
})
EOF

Node/Express Generate a one time route / link / download?

How would I go about creating a one-time download link in Node.js or Express?
I'm trying to find the simplest way to accomplish this. My ideas so far are:
Use fs stream to read and then delete the file
or
Somehow generate a link/route that gets removed once the download button is clicked
Are any of these implementations possible?
Is there a simpler way?
Any help or example code would be greatly appreciated!
-Thanks
Check this simple implementation:
You store the information of the download in a file. The filename is the download session id. The file content is the real path of the file to be downloaded.
Use these three functions to manage the lifecycle of the download sessions:
var fs = require('fs');
var crypto = require('crypto');
var path = require('path');

// Path where we store the download sessions
const DL_SESSION_FOLDER = '/var/download_sessions';

/* Creates a download session */
function createDownload(filePath, callback) {
  // Check the existence of DL_SESSION_FOLDER
  if (!fs.existsSync(DL_SESSION_FOLDER)) return callback(new Error('Session directory does not exist'));
  // Check the existence of the file
  if (!fs.existsSync(filePath)) return callback(new Error('File does not exist'));
  // Generate the download sid (session id)
  var downloadSid = crypto.createHash('md5').update(Math.random().toString()).digest('hex');
  // Generate the download session filename
  var dlSessionFileName = path.join(DL_SESSION_FOLDER, downloadSid + '.download');
  // Write the path of the file to the download session file
  fs.writeFile(dlSessionFileName, filePath, function(err) {
    if (err) return callback(err);
    // If succeeded, return the new download sid
    callback(null, downloadSid);
  });
}

/* Gets the download file path related to a download sid */
function getDownloadFilePath(downloadSid, callback) {
  // Get the download session file name
  var dlSessionFileName = path.join(DL_SESSION_FOLDER, downloadSid + '.download');
  // Check if the download session exists
  if (!fs.existsSync(dlSessionFileName)) return callback(new Error('Download does not exist'));
  // Get the file path
  fs.readFile(dlSessionFileName, function(err, data) {
    if (err) return callback(err);
    // Return the file path
    callback(null, data);
  });
}

/* Deletes a download session */
function deleteDownload(downloadSid, callback) {
  // Get the download session file name
  var dlSessionFileName = path.join(DL_SESSION_FOLDER, downloadSid + '.download');
  // Check if the download session exists
  if (!fs.existsSync(dlSessionFileName)) return callback(new Error('Download does not exist'));
  // Delete the download session
  fs.unlink(dlSessionFileName, function(err) {
    if (err) return callback(err);
    // Return success (no error)
    callback();
  });
}
Use createDownload() to create download sessions wherever you need to. It returns the download sid, then you can use it to build your download URL like: http://your.server.com/download?sid=<RETURNED SID>.
Finally you can add a simple handler to your /download route:
app.get('/download', function(req, res, next) {
  // Get the download sid
  var downloadSid = req.query.sid;
  // Get the download file path
  getDownloadFilePath(downloadSid, function(err, path) {
    if (err) return res.end('Error');
    // Read and send the file here...
    // Finally, delete the download session to invalidate the link
    deleteDownload(downloadSid, function(err) {
      // ...
    });
  });
});
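The "Read and send the file here" placeholder could be filled in with res.download(), for example. This is only an illustrative, untested sketch (note that fs.readFile without an encoding returns a Buffer, hence the toString()):
app.get('/download', function(req, res) {
  var downloadSid = req.query.sid;
  getDownloadFilePath(downloadSid, function(err, filePath) {
    if (err) return res.status(404).end('Error');
    // Stream the file to the client; res.download also sets Content-Disposition
    res.download(filePath.toString(), function(sendErr) {
      if (sendErr) return console.error(sendErr);
      // Invalidate the link only after the file has been sent successfully
      deleteDownload(downloadSid, function(delErr) {
        if (delErr) console.error(delErr);
      });
    });
  });
});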
With this method, you don't have to create/move/delete big download files, which could cause slow responses and unnecessary resource consumption.
You can delete routes from the app.routes object (this applies to Express 3; in Express 4 the routes live in app._router.stack). See Remove route mappings in NodeJS Express for more info.
Here is my quick and not very well tested way of doing what you ask:
var express = require('express');
var app = express();

app.get('/download', function(req, res, next) {
  res.download('./path/to/your.file');
  // find this route and delete it.
  for (var i = 0; i < app.routes.get.length; i++) {
    if (app.routes.get[i].path === '/download') {
      app.routes.get.splice(i, 1);
    }
  }
});

app.listen(80);
app.listen(80);
I'd probably map a single route to manage downloads, and then, upon downloading the file, move or delete it. That way I can avoid a lot of caching of routes, or a lot of small temp files, from the other two answers, but YMMV. Something like this:
// say your downloads are in /downloads
app.get('/dl/:filename', function(req, res) {
  var fileStream = fs.createReadStream('/downloads/' + req.params.filename);
  // error handler, i.e. file not there...
  fileStream.on('error', function(err) {
    if (err) {
      res.status(404); // or something
      return res.end();
    }
  });
  // here you now pipe that stream to the response,
  fileStream.on('data', downloadHandler);
  // and here delete the file or move it to another folder or whatever, do cleanup
  fileStream.on('end', deleteFileHandler);
});
Note: this is a possible security vulnerability; it could let an adversary download files outside your downloads location, because the filename param is passed directly to fs.
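One common mitigation (a sketch, not part of the original answer) is to reduce the parameter to its base name before resolving it against the downloads directory:
var path = require('path');

app.get('/dl/:filename', function(req, res) {
  // Strip any directory components, so "../../etc/passwd" becomes "passwd"
  var safeName = path.basename(req.params.filename);
  var filePath = path.join('/downloads', safeName);
  res.download(filePath, function(err) {
    if (err && !res.headersSent) res.status(404).end();
    // delete or move the file here, as in the answer above
  });
});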
