Busboy Save Stream For Use Later - javascript

I'm trying to use busboy to allow clients to upload files to my Express web server.
I have the following middleware function I'm running for Express.
module.exports = (req, res, next) => {
  req.files = {};
  let busboy;
  try {
    busboy = new Busboy({
      headers: req.headers
    });
  } catch (e) {
    return next();
  }
  busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
    req.files[fieldname] = {
      file,
      filename,
      encoding,
      mimetype
    };
    // Need to call `file.resume` to consume the stream somehow (https://stackoverflow.com/a/24588458/894067)
    file.resume();
  });
  busboy.on("finish", next);
  req.pipe(busboy);
};
As you can see, I had to add file.resume(); so that the "finish" event would be triggered and call the next function for the middleware (https://stackoverflow.com/a/24588458/894067).
The problem is that later on, when I want to consume the stream, it says readable: false. So I'm assuming file.resume(); discards the stream and doesn't allow it to be used in the future.
I basically want to get all the uploaded files and the information associated with them, store them on the req.files object, then consume the streams later, or not consume them if I don't want to. That way they remain streams and don't take up much memory until I'm ready to actually do something with them (or choose to discard them).
What can I use in place of file.resume(); to ensure that the "finish" event still gets triggered, while allowing me to use the stream later in the lifecycle of the request (in the actual app.post routes, instead of middleware)?
The client might also upload multiple files, so any solution needs to handle multiple files.

Would it make any sense to pipe the input stream into a PassThrough stream, like this?
const Busboy = require('busboy')
const { PassThrough } = require('stream')

const multipart = (req, res, next) => {
  req.files = new Map()
  req.fields = new Map()
  const busboy = new Busboy({ headers: req.headers })
  busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
    const stream = new PassThrough()
    file.pipe(stream)
    req.files.set(fieldname, { stream, filename, encoding, mimetype })
  })
  busboy.on(
    'field',
    (fieldname, val, fieldnameTruncated, valTruncated, encoding, mimetype) => {
      req.fields.set(fieldname, { val, encoding, mimetype })
    }
  )
  busboy.on('error', (error) => {
    next(error)
  })
  busboy.on('finish', () => {
    next()
  })
  busboy.end(req.rawBody)
}
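If that suits your case, a route could later consume a saved stream like this (a sketch, assuming fs is required and the multipart middleware above is mounted; 'avatar' is a hypothetical field name). Note that the PassThrough will buffer the file contents in memory until something reads it:
const fs = require('fs')

app.post('/avatar', multipart, (req, res, next) => {
  const upload = req.files.get('avatar') // hypothetical field name
  if (!upload) return res.sendStatus(400)
  upload.stream
    .pipe(fs.createWriteStream(`/tmp/${upload.filename}`))
    .on('finish', () => res.sendStatus(201))
    .on('error', next)
})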

If you want to handle multiple files in a single request, the procedure is a bit tricky.
Busboy goes through a single stream and fires events as files arrive, in sequence. You cannot get separate streams for all files at the same time with Busboy. This is not a limitation of the library; this is how HTTP multipart works.
Your best option is to store all files in temporary storage and keep the information for the next middlewares in res.locals:
const Busboy = require('busboy');
const path = require('path');
const fs = require('fs');

module.exports = (req, res, next) => {
  res.locals.files = {};
  // You need to ensure the directory exists
  res.locals.someTemporaryDirectory = '/some/temp/dir/with/randomString/in/it';
  let busboy;
  try {
    busboy = new Busboy({
      headers: req.headers
    });
  } catch (e) {
    return next(e);
  }
  busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
    res.locals.files[fieldname + '_' + filename] = {
      filename,
      encoding,
      mimetype
    };
    // I skipped error handling for the sake of simplicity. Cleanup phase will be required as well
    const tempFilePath = path.join(res.locals.someTemporaryDirectory, fieldname + '_' + filename);
    file.pipe(fs.createWriteStream(tempFilePath));
  });
  busboy.on("finish", next);
  req.pipe(busboy);
};
The next middlewares can then use res.locals.someTemporaryDirectory and res.locals.files to do their work (which will require a clean-up phase).
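For illustration, here is a minimal sketch of what such a follow-up handler could look like (hypothetical code, not part of the answer above; it re-opens each temp file as a fresh stream and removes it once read):
const path = require('path');
const fs = require('fs');

module.exports = (req, res) => {
  const dir = res.locals.someTemporaryDirectory;
  for (const key of Object.keys(res.locals.files)) {
    // Keys were built as fieldname + '_' + filename by the middleware above
    const tempFilePath = path.join(dir, key);
    const stream = fs.createReadStream(tempFilePath);
    stream.on('end', () => fs.unlink(tempFilePath, () => {})); // clean-up phase
    stream.resume(); // placeholder: pipe this somewhere useful instead
  }
  res.sendStatus(200);
};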
This solution may seem sub-optimal, but that is how HTTP works. You may want to issue a separate HTTP request for each file instead, but I would not recommend it, as you would run into a bunch of other issues (such as synchronizing all the requests, plus memory management).
Whatever the solution is, it requires getting your hands dirty.

Related

How do I recover from failure when uploading multiple files via my server to S3?

New to NodeJS and S3, I wrote the following exploratory code to upload files to S3 via my NodeJS server without saving the file to disk or memory:
var express = require('express');
var Busboy = require('busboy');
var S3 = require('../utils/s3Util');
var router = express.Router(); // mounted at /uploads

router.post("/", function (req, res, next) {
  let bb = new Busboy({ headers: req.headers });
  const uploads = [];
  bb.on('file', (fieldname, stream, filename, encoding, mimeType) => {
    console.log(`Uploaded fieldname: ${fieldname}; filename: ${filename}, mimeType: ${mimeType}`);
    uploads.push(S3.svc.upload({ Bucket: 'my-test-bucket', Key: filename, Body: stream }).promise());
  });
  bb.on('finish', () => {
    console.log("# of promises:", uploads.length);
    Promise.all(uploads).then(retVals => {
      for (let i = 0; retVals && i < retVals.length; i++) {
        console.log(`File ${i + 1}::`, retVals[i]);
      }
      res.end();
    }).catch(err => {
      console.log("Error::", err);
      res.status(500).send(`${err.name}: ${err.message}`);
    });
  });
  req.pipe(bb);
});

module.exports = router;
In the general failure case, how do I handle the scenario where the upload of 1 or more of x files being uploaded fails? Some uploads would have succeeded, some would have failed. However, in the catch clause I wouldn't know which ones have failed...
It would be good to be able to make this upload process somewhat transactional (i.e., either all uploads succeed, or none do). When errors happen, ideally I would be able to "rollback" the subset of successful uploads.
You could do it like this:
Push an object into uploads, with the data you need to retry, so:
uploads.push({
  fieldname,
  filename,
  mimeType,
  uploaded: S3.svc.upload({ Bucket: 'my-test-bucket', Key: filename, Body: stream })
    .promise()
    .then(() => true)
    .catch(() => false)
});
...
const failed = await Promise.all(
  uploads.map(async upload => ({ ...upload, uploaded: await upload.uploaded }))
).then(u => u.filter(upload => !upload.uploaded));
const failedFiles = failed.map(upload => upload.filename).join(', ');
console.log(`The following files failed to upload: ${failedFiles}`);
You need to make your event handlers async to use await inside them, so, for example:
bb.on('file', async (fieldname, stream, filename, encoding, mimeType) => {
I finally went with the following code, which is an expansion of @Josh Wulf's answer:
function handleUpload(req, res, bucket, key) {
  let bb = new Busboy({ headers: req.headers });
  const uploads = [];
  bb.on('file', (fieldname, stream, filename, encoding, mimeType) => {
    console.log(`Uploaded fieldname: ${fieldname}; filename: ${filename}, mimeType: ${mimeType}`);
    const params = { Bucket: bucket, Key: key, Body: stream, ContentType: mimeType };
    uploads.push({ filename, result: S3.svc.upload(params).promise().then(data => data).catch(err => err) });
  });
  bb.on('finish', async () => {
    const results = await Promise.all(uploads.map(async (upload) => ({ ...upload, result: await upload.result })));
    // handle success/failure with their respective objects
  });
  req.pipe(bb);
}
The difference here from @Josh Wulf's answer is that in my upload promise I am returning the returned data object (if successful) and the returned error object (in case of failure) as-is. This then enables me to use them later as I need.
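If you also want the "rollback" behaviour from the question, a rough sketch of a finish handler could look like this (my assumption, not part of either answer: each upload used its filename as the S3 Key, and S3.svc is an AWS SDK v2 client):
bb.on('finish', async () => {
  const results = await Promise.all(uploads.map(async (u) => ({ ...u, result: await u.result })));
  const failed = results.filter((u) => u.result instanceof Error);
  if (failed.length > 0) {
    const succeeded = results.filter((u) => !(u.result instanceof Error));
    if (succeeded.length > 0) {
      // Best-effort rollback: delete the objects that did reach S3
      await S3.svc.deleteObjects({
        Bucket: 'my-test-bucket',
        Delete: { Objects: succeeded.map((u) => ({ Key: u.filename })) },
      }).promise();
    }
    return res.status(500).send(`Failed uploads: ${failed.map((u) => u.filename).join(', ')}`);
  }
  res.end();
});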

Multer is not working in firebaseapp hosted in firebase [duplicate]

I am trying to upload a file to Cloud Functions, using Express to handle requests there, but I am not succeeding. I created a version that works locally:
serverside js
const express = require('express');
const cors = require('cors');
const fileUpload = require('express-fileupload');

const app = express();
app.use(fileUpload());
app.use(cors());
app.post('/upload', (req, res) => {
  res.send('files: ' + Object.keys(req.files).join(', '));
});
clientside js
const formData = new FormData();
Array.from(this.$refs.fileSelect.files).forEach((file, index) => {
  formData.append('sample' + index, file, 'sample');
});
axios.post(
  url,
  formData,
  {
    headers: { 'Content-Type': 'multipart/form-data' },
  }
);
This exact same code seems to break when deployed to Cloud Functions, where req.files is undefined. Does anyone have any idea what is happening here?
EDIT
I also had a go at using multer, which worked fine locally, but once uploaded to Cloud Functions, this got me an empty array (same clientside code):
const app = express();
const upload = multer();
app.use(cors());
app.post('/upload', upload.any(), (req, res) => {
  res.send(JSON.stringify(req.files));
});
There was indeed a breaking change in the Cloud Functions setup that triggered this issue. It has to do with the way the middleware works that gets applied to all Express apps (including the default app) used to serve HTTPS functions. Basically, Cloud Functions will parse the body of the request and decide what to do with it, leaving the raw contents of the body in a Buffer in req.rawBody. You can use this to directly parse your multipart content, but you can't do it with middleware (like multer).
Instead, you can use a module called busboy to deal with the raw body content directly. It can accept the rawBody buffer and will call you back with the files it found. Here is some sample code that will iterate all the uploaded content, save them as files, then delete them. You'll obviously want to do something more useful.
const path = require('path');
const os = require('os');
const fs = require('fs');
const Busboy = require('busboy');
const functions = require('firebase-functions');

exports.upload = functions.https.onRequest((req, res) => {
  if (req.method === 'POST') {
    const busboy = new Busboy({ headers: req.headers });
    // This object will accumulate all the uploaded files, keyed by their name
    const uploads = {}
    // This callback will be invoked for each file uploaded
    busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
      console.log(`File [${fieldname}] filename: ${filename}, encoding: ${encoding}, mimetype: ${mimetype}`);
      // Note that os.tmpdir() is an in-memory file system, so should only
      // be used for files small enough to fit in memory.
      const filepath = path.join(os.tmpdir(), fieldname);
      uploads[fieldname] = { file: filepath }
      console.log(`Saving '${fieldname}' to ${filepath}`);
      file.pipe(fs.createWriteStream(filepath));
    });
    // This callback will be invoked after all uploaded files are saved.
    busboy.on('finish', () => {
      for (const name in uploads) {
        const upload = uploads[name];
        const file = upload.file;
        res.write(`${file}\n`);
        fs.unlinkSync(file);
      }
      res.end();
    });
    // The raw bytes of the upload will be in req.rawBody. Send it to busboy, and get
    // a callback when it's finished.
    busboy.end(req.rawBody);
  } else {
    // Client error - only support POST
    res.status(405).end();
  }
});
Bear in mind that files saved to temp space occupy memory, so their sizes should be limited to a total of 10MB. For larger files, you should upload those to Cloud Storage and process them with a storage trigger.
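For reference, the storage-trigger side of that approach might look roughly like this (a sketch assuming the firebase-functions v1 API; processUpload is a hypothetical name):
const functions = require('firebase-functions');

exports.processUpload = functions.storage.object().onFinalize(async (object) => {
  // Fires once the client has uploaded the file directly to Cloud Storage
  console.log(`New file: ${object.name} (${object.contentType}, ${object.size} bytes)`);
  // Download/process it here with the Admin SDK or @google-cloud/storage.
});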
Also bear in mind that the default selection of middleware added by Cloud Functions is not currently added to the local emulator via firebase serve. So this sample will not work (rawBody won't be available) in that case.
The team is working on updating the documentation to make it clearer what happens during HTTPS requests that differs from a standard Express app.
Thanks to the answers above I've built an npm module for this (github).
It works with Google Cloud Functions; just install it (npm install --save express-multipart-file-parser) and use it like this:
const fileMiddleware = require('express-multipart-file-parser')
...
app.use(fileMiddleware)
...
app.post('/file', (req, res) => {
  const {
    fieldname,
    filename,
    encoding,
    mimetype,
    buffer,
  } = req.files[0]
  ...
})
I was able to combine both Brian's and Doug's responses. Here's my middleware, which ends up mimicking multer's req.files, so there are no breaking changes to the rest of your code.
module.exports = (path, app) => {
  app.use(bodyParser.json())
  app.use(bodyParser.urlencoded({ extended: true }))
  app.use((req, res, next) => {
    if (req.rawBody === undefined && req.method === 'POST' && req.headers['content-type'].startsWith('multipart/form-data')) {
      getRawBody(req, {
        length: req.headers['content-length'],
        limit: '10mb',
        encoding: contentType.parse(req).parameters.charset
      }, function (err, string) {
        if (err) return next(err)
        req.rawBody = string
        next()
      })
    } else {
      next()
    }
  })
  app.use((req, res, next) => {
    if (req.method === 'POST' && req.headers['content-type'].startsWith('multipart/form-data')) {
      const busboy = new Busboy({ headers: req.headers })
      req.files = {
        file: []
      }
      busboy.on('field', (fieldname, value) => {
        req.body[fieldname] = value
      })
      busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
        // Reset the buffer per file so multiple uploads don't get concatenated together
        let fileBuffer = Buffer.alloc(0)
        file.on('data', (data) => {
          fileBuffer = Buffer.concat([fileBuffer, data])
        })
        file.on('end', () => {
          const file_object = {
            fieldname,
            originalname: filename,
            encoding,
            mimetype,
            buffer: fileBuffer
          }
          req.files.file.push(file_object)
        })
      })
      busboy.on('finish', () => {
        next()
      })
      // Feed busboy from rawBody when present (Cloud Functions); otherwise pipe the request
      req.rawBody ? busboy.end(req.rawBody) : req.pipe(busboy)
    } else {
      next()
    }
  })
}
I have been suffering from the same problem for a few days. It turns out that the Firebase team's middleware puts the raw body of multipart/form-data into req.body. If you try console.log(req.body.toString()) BEFORE processing your request with multer, you will see your data. Since multer creates a new req.body object which overrides the incoming one, the data is gone and all we get is an empty req.body. Hopefully the Firebase team can correct this soon.
To add to the official Cloud Function team answer, you can emulate this behavior locally by doing the following (add this middleware higher than the busboy code they posted, obviously)
const getRawBody = require('raw-body');
const contentType = require('content-type');

app.use(function (req, res, next) {
  if (req.rawBody === undefined && req.method === 'POST' && req.headers['content-type'] !== undefined && req.headers['content-type'].startsWith('multipart/form-data')) {
    getRawBody(req, {
      length: req.headers['content-length'],
      limit: '10mb',
      encoding: contentType.parse(req).parameters.charset
    }, function (err, string) {
      if (err) return next(err);
      req.rawBody = string;
      next();
    });
  } else {
    next();
  }
});
Cloud Functions pre-processes the request object before passing it on further, so the original multer middleware doesn't work. Furthermore, using busboy is too low level and you need to take care of everything on your own, which isn't ideal. Instead you can use a forked version of the multer middleware for processing multipart/form-data on Cloud Functions.
Here's what you can do.
Install the fork
npm install --save emadalam/multer#master
Use startProcessing configuration for custom handling of req.rawBody added by cloud functions.
const express = require('express')
const multer = require('multer')

const SIZE_LIMIT = 10 * 1024 * 1024 // 10MB

const app = express()
const multipartFormDataParser = multer({
  storage: multer.memoryStorage(),
  // increase size limit if needed
  limits: { fieldSize: SIZE_LIMIT },
  // support firebase cloud functions
  // the multipart form-data request object is pre-processed by the cloud functions
  // currently the `multer` library doesn't natively support this behaviour
  // as such, a custom fork is maintained to enable this by adding `startProcessing`
  // https://github.com/emadalam/multer
  startProcessing(req, busboy) {
    req.rawBody ? busboy.end(req.rawBody) : req.pipe(busboy)
  },
})

app.post('/some_route', multipartFormDataParser.any(), function (req, res, next) {
  // req.files is array of uploaded files
  // req.body will contain the text fields
})
I ran into this issue today; check here for more details on how to handle files on Google Cloud (basically, you don't need multer).
Here is a middleware I use to extract files. This will keep all your files on request.files and other form fields on request.body for all POST with multipart/form-data content type. It will leave everything else the same for your other middlewares to handle.
// multiparts.js
const { createWriteStream } = require('fs')
const { tmpdir } = require('os')
const { join } = require('path')
const BusBoy = require('busboy')

exports.extractFiles = async (req, res, next) => {
  const multipart = req.method === 'POST' && req.headers['content-type'].startsWith('multipart/form-data')
  if (!multipart) return next()

  const busboy = new BusBoy({ headers: req.headers })
  const incomingFields = {}
  const incomingFiles = {}
  const writes = []

  // Process fields
  busboy.on('field', (name, value) => {
    try {
      // This will keep a field created like so form.append('product', JSON.stringify(product)) intact
      incomingFields[name] = JSON.parse(value)
    } catch (e) {
      // Numbers will still be strings here (i.e. 1 will be '1')
      incomingFields[name] = value
    }
  })

  // Process files
  busboy.on('file', (field, file, filename, encoding, contentType) => {
    // Doing this to not have to deal with duplicate file names
    // (i.e. TIMESTAMP-originalName. Hmm, what are the odds that I'll still have dups?)
    const path = join(tmpdir(), `${(new Date()).toISOString()}-${filename}`)
    // NOTE: Multiple files could have the same fieldname (which is why I'm using arrays here)
    incomingFiles[field] = incomingFiles[field] || []
    incomingFiles[field].push({ path, encoding, contentType })

    const writeStream = createWriteStream(path)

    writes.push(new Promise((resolve, reject) => {
      file.on('end', () => { writeStream.end() })
      writeStream.on('finish', resolve)
      writeStream.on('error', reject)
    }))

    file.pipe(writeStream)
  })

  busboy.on('finish', async () => {
    await Promise.all(writes)
    req.files = incomingFiles
    req.body = incomingFields
    next()
  })

  busboy.end(req.rawBody)
}
And now in your function, make sure that this is the first middleware you use.
// index.js
const { onRequest } = require('firebase-functions').https
const bodyParser = require('body-parser')
const express = require('express')
const cors = require('cors')

const app = express()
// First middleware I'm adding
const { extractFiles } = require('./multiparts')
app.use(extractFiles)
app.use(bodyParser.urlencoded({ extended: true }))
app.use(bodyParser.json())
app.use(cors({ origin: true }))
// Call next() so the request doesn't hang after logging
app.use((req, res, next) => { console.log(req.originalUrl); next() })

exports.MyFunction = onRequest(app);
I fixed some bugs in G. Rodriguez's response. I added the 'field' and 'finish' events for Busboy, and call next() in the 'finish' event. This works for me, as follows:
const { inspect } = require('util')

module.exports = (path, app) => {
  app.use(bodyParser.json())
  app.use(bodyParser.urlencoded({ extended: true }))
  app.use((req, res, next) => {
    if (req.rawBody === undefined && req.method === 'POST' && req.headers['content-type'].startsWith('multipart/form-data')) {
      getRawBody(req, {
        length: req.headers['content-length'],
        limit: '10mb',
        encoding: contentType.parse(req).parameters.charset
      }, function (err, string) {
        if (err) return next(err)
        req.rawBody = string
        next()
      })
    } else {
      next()
    }
  })
  app.use((req, res, next) => {
    if (req.method === 'POST' && req.headers['content-type'].startsWith('multipart/form-data')) {
      const busboy = new Busboy({ headers: req.headers })
      req.files = {
        file: []
      }
      busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
        // Reset the buffer per file so multiple uploads don't get concatenated together
        let fileBuffer = Buffer.alloc(0)
        file.on('data', (data) => {
          fileBuffer = Buffer.concat([fileBuffer, data])
        })
        file.on('end', () => {
          const file_object = {
            fieldname,
            originalname: filename,
            encoding,
            mimetype,
            buffer: fileBuffer
          }
          req.files.file.push(file_object)
        })
      })
      busboy.on('field', function (fieldname, val, fieldnameTruncated, valTruncated) {
        console.log('Field [' + fieldname + ']: value: ' + inspect(val));
      });
      busboy.on('finish', function () {
        next()
      });
      // Feed busboy from rawBody when present (Cloud Functions); otherwise pipe the request
      req.rawBody ? busboy.end(req.rawBody) : req.pipe(busboy)
    } else {
      next()
    }
  })
}
Thanks for everyone's help on this thread. I wasted a whole day trying every possible combination and all these different libraries... only to discover this after exhausting all other options.
Combined some of the above solutions to create a TypeScript and middleware capable script here:
https://gist.github.com/jasonbyrne/8dcd15701f686a4703a72f13e3f800c0
If you just want to get a single uploaded file from the request, use busboy to get the file as a readable stream:
const express = require('express')
const Busboy = require('busboy')

express().post('/', (req, res) => {
  const busboy = new Busboy({ headers: req.headers })
  busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
    // Do something with `file`, e.g. pipe it to an output stream.
    // file.pipe(fs.createWriteStream('upload.pdf'))
  })
  // The original input was moved to `req.rawBody`
  busboy.write(req.rawBody)
})
Note that, on top of using Busboy on the server and parsing req.rawBody, you may also need to add the following config to your Axios request:
{ headers: { 'content-type': `multipart/form-data; boundary=${formData._boundary}` }};
If you only specify the content-type and not the boundary, you get a Boundary not found error on the server. If you remove the headers altogether, Busboy won't parse the fields properly.
See: Firebase Cloud Functions and Busboy not parsing fields or files
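Separately, if the upload is made from Node rather than a browser, the form-data package can generate that header (including the boundary) for you. A sketch, assuming a local sample.pdf and a url variable as in the question:
const FormData = require('form-data');
const fs = require('fs');
const axios = require('axios');

const form = new FormData();
form.append('sample0', fs.createReadStream('./sample.pdf'));

axios.post(url, form, {
  // getHeaders() returns the multipart content-type *with* the boundary set
  headers: form.getHeaders(),
});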
I experienced the same issue when I deployed my app using Firebase Functions. I was using multer to upload images to Amazon S3. I resolved the issue by using the npm package above (https://stackoverflow.com/a/48648805/5213790) created by Cristóvão.
const { mimetype, buffer } = req.files[0]

let s3bucket = new aws.S3({
  accessKeyId: functions.config().aws.access_key,
  secretAccessKey: functions.config().aws.secret_key,
});
const config = {
  Bucket: functions.config().aws.bucket_name,
  ContentType: mimetype,
  ACL: 'public-read',
  Key: Date.now().toString(),
  Body: buffer,
}
s3bucket.upload(config, (err, data) => {
  if (err) console.log(err)
  req.file = data;
  next()
})
Note that this is for a single image file upload.
The next middleware will receive the returned object from S3:
{
  ETag: '"cacd6d406f891e216f9946911a69aac5"',
  Location: 'https://react-significant.s3.us-west1.amazonaws.com/posts/1567282665593',
  key: 'posts/1567282665593',
  Key: 'posts/1567282665593',
  Bucket: 'react-significant'
}
In this case, you might need the Location url before you save your data in the db.
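For example (a hypothetical follow-up middleware; Post is a placeholder Mongoose model, not from the answer above):
app.use((req, res, next) => {
  const { Location, Key } = req.file; // set by the upload middleware above
  Post.create({ imageUrl: Location, s3Key: Key })
    .then(() => res.sendStatus(201))
    .catch(next);
});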
I've tried Doug's answer; however, the finish event was never fired, so I tweaked the code a little bit and got this, which works for me:
// It's very crucial that the file name matches the name attribute in your html
app.post('/', (req, res) => {
  const busboy = new Busboy({ headers: req.headers })
  // This object will accumulate all the uploaded files, keyed by their name
  const uploads = {}
  // This callback will be invoked for each file uploaded
  busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
    console.log(`File [${fieldname}] filename: ${filename}, encoding: ${encoding}, mimetype: ${mimetype}`)
    // Note that os.tmpdir() is an in-memory file system, so should only
    // be used for files small enough to fit in memory.
    const filepath = path.join(os.tmpdir(), filename)
    uploads[fieldname] = { file: filepath }
    console.log(`Saving '${fieldname}' to ${filepath}`)
    const stream = fs.createWriteStream(filepath)
    stream.on('open', () => file.pipe(stream))
  })
  // This callback will be invoked after all uploaded files are saved.
  busboy.on('finish', () => {
    console.log('look im firing!')
    // Do whatever you want here
    res.end()
  })
  // The raw bytes of the upload will be in req.rawBody. Send it to busboy, and get
  // a callback when it's finished.
  busboy.end(req.rawBody)
})

Full control over files sent with ajax in nodejs

I use multer to parse multiple files sent as multipart/form-data with axios:
...
const storage = multer.diskStorage({
  destination: './gallery',
  filename(req, file, cb) {
    (1) ....
  },
});
const upload = multer({ storage });

router.post('/products', upload.array('images'), (req, res, next) => {
  Product.create(...)
    .then((product) => {
      (2) ...
    })
    .catch(..)
})
...
At this point everything is fine and my images are saved.
The problem is that I want to make a loop in (1) or (2) and name my files like this:
files.forEach((file, index) => {
  // rename file to => product_id + '_' + index + '.jpeg'
})
For example, if I have 3 files, they will be named:
5a9e881c3ebb4e1bd8911126_1.jpeg
5a9e881c3ebb4e1bd8911126_2.jpeg
5a9e881c3ebb4e1bd8911126_3.jpeg
where 5a9e881c3ebb4e1bd8911126 is the id of the product document saved by Mongoose.
How do I solve this naming issue?
Is multer the best solution, given that I want full control over my files?
Is there a better approach with another Node package?
Is it better to send images as multipart/form-data or as a base64 data URL?
This is easy, as long as you understand how Express works. So before jumping to the solution, it's important to have a clear understanding.
When you have Express code like the below:
router.post('/abc', function(req, res) {res.send('hello world');})
Express passes the request through a chain of middleware functions. Each function gets the req, res, and next parameters. next is a function that a middleware is supposed to call when its processing is complete. If a middleware decides not to call next, the request ends there and no further middlewares are called.
When we used function(req, res) {res.send('hello world');}, we didn't take the next parameter at all, which means we are not interested in any other code doing anything further. Now, getting back to our problem:
router.post('/products', upload.array('images'), (req, res, next) => {...}
You have used upload.array('images') first and then your actual product-creation code. So I will show two approaches to solve this problem.
One more middleware to rename the files
router.post('/products', upload.array('images'), (req, res, next) => {
  Product.create(...)
    .then((product) => {
      req.product = product
      next();
    })
    .catch(..)
}, (req, res, next) => {
  // Get the product id using req.product
  // Move the files to the names you desire
})
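For instance, that second middleware could be fleshed out like this (a sketch, assuming multer's diskStorage populated file.destination and file.path, and .jpeg files as in the question):
const fs = require('fs');
const path = require('path');

function renameUploads(req, res, next) {
  Promise.all(
    req.files.map((file, index) => {
      const newPath = path.join(file.destination, `${req.product.id}_${index + 1}.jpeg`);
      return fs.promises.rename(file.path, newPath);
    })
  )
    .then(() => res.json(req.product))
    .catch(next);
}
// usage: router.post('/products', upload.array('images'), createProductMiddleware, renameUploads)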
Reverse the processing order
In this approach you first create the product and then let the image processing happen. I have created a sample showing the same:
let express = require('express');
let bodyParser = require('body-parser');
const app = express();
let multer = require('multer');

const storage = multer.diskStorage({
  destination: './gallery',
  filename: (req, file, cb) => {
    console.log('Product id - ' + req.product_id);
    cb(null, req.product_id + '.js');
  },
});
const upload = multer({ storage });

app.all('/', (req, res, next) => {
  console.log('Hello you');
  const promise = new Promise((resolve) => {
    // simulate an async product creation
    setTimeout(() => resolve(1234), 100);
  });
  promise.then((product_id) => {
    console.log('create the product and get the new product id');
    // set the product id in the request object, so the multer
    // filename function can access it
    req.product_id = product_id;
    res.send('uploaded files');
    if (next)
      next();
  });
}, upload.array('images'));

module.exports = {
  app
};

app.listen(8020);
And testing it using postman works fine
Edit: 19-Mar-2018
For multiple files, you can easily update your filename function as below:
const storage = multer.diskStorage({
  destination: './gallery',
  filename: (req, file, cb) => {
    req.file_id = req.file_id || 0;
    req.file_id++;
    console.log('Product id - ' + req.product_id);
    cb(null, req.product_id + '_' + req.file_id + '.js');
  },
});
This will make sure that you get all the files for that product. Now, coming to your questions:
How to solve this naming issue?
This answer already does that.
Is multer the best solution, since I want full control over my files?
I can't say; as long as it works and does what you want, it should be good enough.
Is there a better approach with another Node package?
I couldn't find a lot of packages. But you can explore this if you want.
Is it good to send images as multipart/form-data or a base64 data URL?
I would use multipart/form-data, so that no base64 conversion is needed on the client side. But again, this is a matter of opinion as well.
You can't set the name purely in (1) since at that point you do not know the ID of the product yet.
You can't set the name purely in (2) since at that point the files have already been saved (with filename generated by your filename(req, file, cb) function).
So I think the best solution might be to move the files after they are uploaded.
This could be done in (2). When you process the files in the router, req.files will be an array of files that have already been uploaded.
In your promise callback for Product.create, you have access to the product (which you need for the ID) and the list of files (which you need for the number).
For that, you could use fs.rename(oldPath, newPath, callback).
https://nodejs.org/docs/latest/api/fs.html#fs_fs_rename_oldpath_newpath_callback
Something like this should work:
Product.create(...).then((product) => {
  req.files.forEach((file, index) => {
    // file.path is the full path to the file that was uploaded.
    // newPath is where you want to put it.
    // Let's use the same destination and just change the filename
    // (note the separator and extension, which the question's naming scheme needs).
    const newPath = file.destination + '/' + product.id + '_' + (index + 1) + '.jpeg'
    fs.rename(file.path, newPath, (err) => {
      if (err) console.error(err)
    })
  })
})


Create and Send Zip file - Node.js

I'm trying to create and then send a zip file to the client. I know how to create it, but I've got a problem with sending it to the client. I've tried many ways.
I'm sending a POST request from the client and as the response I want to send a file.
This is my server-side example code:
var Zip = require('node-zip');

router.post('/generator', function(req, res, next) {
  var zip = new Zip();
  zip.file('hello.txt', 'Hello, World!');
  var options = { base64: false, compression: 'DEFLATE' };
  fs.writeFile('test1.zip', zip.generate(options), 'binary', function (error) {
    console.log('wrote test1.zip', error);
  });
  res.setHeader('Content-disposition', 'attachment; filename=test1.zip');
  res.download('test1.zip');
});
I also tried something like this:
res.setHeader('Content-disposition', 'attachment; filename=' + filename);
res.setHeader('Content-type', mimetype);
var filestream = fs.createReadStream(file);
filestream.pipe(res);
I tried to use such libraries as:
node-zip
archiver
Can anyone explain to me how to do that?
This module works fine too: https://www.npmjs.com/package/adm-zip
Example without creating a temporary zip file on the server:
var AdmZip = require('adm-zip');

router.get('/zipFilesAndSend', function(req, res) {
  var zip = new AdmZip();
  // add local file
  zip.addLocalFile("./uploads/29/0046.xml");
  // get everything as a buffer
  var zipFileContents = zip.toBuffer();
  const fileName = 'uploads.zip';
  const fileType = 'application/zip';
  res.writeHead(200, {
    'Content-Disposition': `attachment; filename="${fileName}"`,
    'Content-Type': fileType,
  })
  return res.end(zipFileContents);
});
Try this express-easy-zip npm package to generate a zip file from a local folder path and send it as a download to the client.
var zip = require('express-easy-zip');
var app = require('express')();

app.use(zip());

app.get('my-route/zip', async function(req, res) {
  var dirPath = __dirname + "/uploads";
  await res.zip({
    files: [{
      path: dirPath,
      name: 'Package'
    }],
    filename: 'Package.zip'
  });
});
I haven't worked with node-zip or archiver before (I usually just use the built-in zlib module), but one thing I noticed right away is that you should place res.download inside the callback of writeFile. That way it will only send the file once it has been fully written to disk.
fs.writeFile('test1.zip', zip.generate(options), 'binary', function (error) {
  res.download('test1.zip');
});
I hope this solution works for you; if it doesn't, feel free to comment.
Also, I think res.download sets the Content-disposition header for you, so you don't need to set it manually. Not 100% sure on that one though.
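Since the question also mentions archiver: that library can stream the archive straight into the response, so no temporary file is needed at all. A sketch (assuming the same router and archiver installed):
const archiver = require('archiver');

router.post('/generator', function (req, res, next) {
  res.setHeader('Content-Type', 'application/zip');
  res.setHeader('Content-Disposition', 'attachment; filename=test1.zip');

  const archive = archiver('zip', { zlib: { level: 9 } });
  archive.on('error', next);
  archive.pipe(res); // stream the zip directly to the client
  archive.append('Hello, World!', { name: 'hello.txt' });
  archive.finalize();
});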
The solutions above work: they generate the zip and send it to the frontend as data in the response. To make it downloadable, the following code will work. I was using express-zip, which compresses files and sends the data from the backend (Node) to the frontend, but in the frontend I was only getting data in the response. In my case I wanted the user to be able to download the zip sent by the server. To solve this, I followed the approach below. For generating the download window in the browser I used downloadjs (we could follow another approach, but I find this easy).
Front-End
const download = require('downloadjs')

return axios({
  url: process.env.API_HOST + '/getuploadedfiles',
  method: 'get',
  withCredentials: true, // an axios option, not a header
  headers: { 'Content-Type': 'multipart/form-data' },
  responseType: 'arraybuffer' // if we don't set this we can't get the data in the desired format
})
  .then(response => {
    console.log("got all files from the api");
    const blob = new Blob([response.data], { type: 'application/zip' }) // optional
    download(response.data, "attachment.zip", "application/zip") // third party; prompts the download window in the browser
    return response.data;
  })
Back-End
const zip = require('express-zip');

app.use('/getuploadedfiles', function(req, res) {
  res.zip([
    { path: '/path/to/file/file2.PNG', name: 'bond.png' },
    { path: '/path/to/file/file1.PNG', name: 'james.png' }
  ])
})
