Upload Image from Google Cloud Function to Cloud Storage - javascript

I'm attempting to handle file uploads using a Google Cloud Function. This function uses Busboy to parse the multipart form data and then uploads the files to Google Cloud Storage.
I keep receiving the same error when triggering the function: ERROR: { Error: ENOENT: no such file or directory, open '/tmp/xxx.png' }.
The error seems to occur within the finish callback function when storage.bucket.upload(file) attempts to open the file path /tmp/xxx.png.
Note that I can't generate a signed upload URL as suggested in this question, since the application invoking this is an external, non-user application. I also can't upload directly to GCS, since I need to build custom filenames based on some request metadata. Should I just be using Google App Engine instead?
Function code:
const path = require('path');
const os = require('os');
const fs = require('fs');
const Busboy = require('busboy');
const Storage = require('@google-cloud/storage');
const _ = require('lodash');

const projectId = 'xxx';
const bucketName = 'xxx';

const storage = new Storage({
  projectId: projectId,
});

exports.uploadFile = (req, res) => {
  if (req.method === 'POST') {
    const busboy = new Busboy({ headers: req.headers });
    const uploads = [];
    const tmpdir = os.tmpdir();

    busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
      const filepath = path.join(tmpdir, filename);
      var obj = {
        path: filepath,
        name: filename
      };
      uploads.push(obj);
      var writeStream = fs.createWriteStream(obj.path);
      file.pipe(writeStream);
    });

    busboy.on('finish', () => {
      _.forEach(uploads, function (file) {
        storage
          .bucket(bucketName)
          .upload(file.path, { name: file.name })
          .then(() => {
            console.log(`${file.name} uploaded to ${bucketName}.`);
          })
          .catch(err => {
            console.error('ERROR:', err);
          });
        // Note: this runs before the asynchronous upload above has resolved,
        // so the temp file is deleted while upload() is still trying to read it.
        fs.unlinkSync(file.path);
      });
      res.end();
    });

    busboy.end(req.rawBody);
  } else {
    res.status(405).end();
  }
};

I eventually gave up on using Busboy. The latest versions of Google Cloud Functions support both Python and Node 8. In Node 8, I just put everything into async/await functions and it works fine.
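For reference, here is a minimal sketch of that Node 8 approach under stated assumptions (the busboy and @google-cloud/storage packages, a placeholder bucket name); the key difference from the code above is that each temp file is only deleted after its upload promise resolves.

// Sketch only: Node 8 runtime, busboy + @google-cloud/storage assumed; bucket name is a placeholder.
const path = require('path');
const os = require('os');
const fs = require('fs');
const Busboy = require('busboy');
const { Storage } = require('@google-cloud/storage');

const storage = new Storage();
const bucketName = 'my-bucket'; // placeholder

exports.uploadFile = (req, res) => {
  if (req.method !== 'POST') return res.status(405).end();

  const busboy = new Busboy({ headers: req.headers });
  const uploads = [];
  const writes = [];

  busboy.on('file', (fieldname, file, filename) => {
    const filepath = path.join(os.tmpdir(), filename);
    uploads.push({ path: filepath, name: filename });
    const writeStream = fs.createWriteStream(filepath);
    file.pipe(writeStream);
    // Track when each temp file has been fully written.
    writes.push(new Promise((resolve, reject) => {
      writeStream.on('finish', resolve);
      writeStream.on('error', reject);
    }));
  });

  busboy.on('finish', async () => {
    try {
      await Promise.all(writes);
      for (const upload of uploads) {
        // Await the upload, then delete the temp file; deleting it earlier is
        // what caused the ENOENT error in the original code.
        await storage.bucket(bucketName).upload(upload.path, { destination: upload.name });
        fs.unlinkSync(upload.path);
      }
      res.status(200).end();
    } catch (err) {
      console.error(err);
      res.status(500).end();
    }
  });

  busboy.end(req.rawBody);
};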

Related

express.js: pass an uploaded image to s3

I am trying to pass an image uploaded from a React app through Express to a managed S3 bucket. The platform/host I am using creates and manages the S3 bucket and generates upload and access URLs. This all works fine (I have tested a generated upload URL in Postman with an image in a binary body and it worked perfectly).
My problem is passing the image through Express. I am using multer to get the image from the form, but I am assuming multer is turning that image into some kind of file object while S3 is expecting some sort of blob or stream.
In the following code, the image in req.file exists, I get a 200 response from S3 with no errors, and when I visit the asset URL the URL works, but the image itself is missing.
import { Router } from 'express';
import multer from 'multer';
// `fetch` and `getPresignedURLS` come from elsewhere in the project (platform helper + fetch implementation).

const router = Router();
const upload = multer();

router.post('/', upload.single('file'), async (req, res) => {
  console.log(req.file);
  const asset = req.file;
  const assetPath = req.headers['asset-path'];
  let s3URLs = await getPresignedURLS(assetPath);
  const sendAsset = await fetch(
    s3URLs.urls[0].upload_url, // the s3 upload url
    {
      method: 'PUT',
      headers: {
        "Content-Type": asset.mimetype
      },
      body: asset,
      redirect: 'follow'
    }
  );
  console.log("s3 response", sendAsset);
  res.status(200).json({ "url": s3URLs.urls[0].access_url });
});

export default router;
I am not sure what to do to convert what multer gives me into something that AWS S3 will accept. I am also open to getting rid of multer if there is an easier way to upload binary files through Express.
Instead of multer, you can use multiparty to get the file data from the request object, and to upload the file to an S3 bucket you can use aws-sdk.
const AWS = require("aws-sdk");
const multiparty = require("multiparty");
const { readFileSync } = require("fs");
const { extname, join } = require("path");

/**
 * Helper method which takes the request object and returns a promise with the form data.
 */
const getDataFromRequest = (req) =>
  new Promise((resolve, reject) => {
    const form = new multiparty.Form();
    form.parse(req, (err, fields, files) => {
      if (err) reject(err);
      const bucketname = fields.bucketname[0];
      const subfoldername = fields.subfoldername[0];
      const file = files["file"][0]; // get the file from the returned files object
      if (!file) reject("File was not found in form data.");
      else resolve({
        file,
        bucketname,
        subfoldername
      });
    });
  });

/**
 * Helper method which takes the file and bucket details and returns a promise with the AWS S3 object details.
 */
const uploadFileToS3Bucket = (
  file,
  bucketname,
  subfoldername,
  options = {}
) => {
  const s3 = new AWS.S3();
  // turn the file into a buffer for uploading
  const buffer = readFileSync(file.path);
  var originalname = file.originalFilename;
  var attach_split = originalname.split(".");
  var name = attach_split[0];
  // generate a new random file name
  const fileName = name;
  // the extension of your file
  const extension = extname(file.path);
  console.log(`${fileName}${extension}`);
  const params = {
    Bucket: bucketname, // bucket name
    ACL: "private", // permission
    Key: join(`${subfoldername}/`, `${fileName}${extension}`), // file name you want to save as in S3
    Body: buffer, // content of file
  };
  // return a promise
  return new Promise((resolve, reject) => {
    return s3.upload(params, (err, result) => {
      if (err) reject(err);
      else resolve(result); // return the values of the successful AWS S3 request
    });
  });
};

// `router` is the Express router from the question; no multer middleware is needed here.
router.post('/', async (req, res) => {
  try {
    // extract the file from the request object
    const {
      file,
      bucketname,
      subfoldername
    } = await getDataFromRequest(req);
    // upload the file to the specified bucket
    const {
      Location,
      ETag,
      Bucket,
      Key
    } = await uploadFileToS3Bucket(
      file,
      bucketname,
      subfoldername
    );
    let response = {};
    response["Location"] = Location;
    response["ETag"] = ETag;
    response["Bucket"] = Bucket;
    response["Key"] = Key;
    res.status(200).json(response);
  } catch (error) {
    res.status(500).json({ error: error.message });
  }
});
The request body will be form data with the following fields:
bucketname: <bucket name>
subfoldername: <subfolder name>
file: FileData
For anyone that ever stumbles across this question, the solution was to create a custom multer storage engine. Inside the engine you get access to the file with a stream property that S3 accepted (with the correct headers).
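For illustration only, here is a hedged sketch of what such a custom multer storage engine could look like; it is not the poster's actual code. It assumes node-fetch (or a global fetch) and a getUploadUrl helper you would provide, and uses multer's documented _handleFile/_removeFile engine interface, where file.stream is the readable stream to forward.

// Hypothetical sketch: forward the incoming file stream to a presigned S3 upload URL.
const fetch = require('node-fetch'); // or global fetch on newer Node

function S3PresignedStorage(opts) {
  this.getUploadUrl = opts.getUploadUrl; // assumed: async (req, file) => presigned PUT URL
}

S3PresignedStorage.prototype._handleFile = function (req, file, cb) {
  this.getUploadUrl(req, file)
    .then((uploadUrl) =>
      fetch(uploadUrl, {
        method: 'PUT',
        headers: { 'Content-Type': file.mimetype },
        body: file.stream, // the stream property S3 accepts
      })
    )
    .then((resp) => cb(null, { s3Status: resp.status }))
    .catch(cb);
};

S3PresignedStorage.prototype._removeFile = function (req, file, cb) {
  cb(null); // nothing is stored locally, so nothing to clean up
};

// usage: const upload = multer({ storage: new S3PresignedStorage({ getUploadUrl }) });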

How to download after uploading a file using express and multer?

I have uploaded the file to my backend filesystem using multer.
My server is Node and the client is React.
I'm having trouble downloading and displaying the saved file on the React client.
Whenever I do res.download(file) it just throws a connection refused error on the client side.
My code is as follows:
UserToUploadMapping.js
const mongoose = require("mongoose");

const UserToUploadMapping = new mongoose.Schema({
  userId: {
    type: String,
    required: true
  },
  file: {
    type: Object,
    required: true,
  },
  date: {
    type: Date,
    default: Date.now,
  },
});

module.exports = mongoose.model("UserToUploadMapping", UserToUploadMapping);
uploadVideo.js
const router = require("express").Router();
const multer = require('multer');
const UserToUploadMapping = require('../models/UserToUploadMapping');

let nameFile = '';
const storage = multer.diskStorage({
  destination: './Videos',
  filename: (req, file, cb) => {
    console.log(file);
    nameFile = file.originalname + " " + Date.now();
    cb(null, nameFile);
  }
});
const upload = multer({ storage: storage });

router.post('/upload', upload.single('video'), async (req, res, next) => {
  console.log("object");
  const saveMapping = new UserToUploadMapping({
    userId: '123',
    file: req.file,
  });
  await saveMapping.save();
  res.send("Video uploaded");
});

router.get('/download', async (req, res, next) => {
  const x = await UserToUploadMapping.find();
  // res.send(x)
  res.download(x[0].path);
});

module.exports = router;
CLIENT
const fetchVideo = async () => {
  const resp = await axios.get(
    "http://localhost:5000/api/user/video/download"
  );
  console.log(resp);
};

return (
  <>
    <NavContainer />
    <div className={classes.Post}>
      <Input
        type="file"
        onChange={(e) => uploadVideos(e.target.files)}
        accept="video/mp4"
      />
      {/* <Button onClick={(e) => submitHandler(e)}>Upload</Button> */}
      <video></video>
    </div>
  </>
);
Error
There are a few problems within the uploadVideo.js file:
To get the path from the data, you need to use x[0].file.path (based on how you save the file in the database):
const saveMapping = new UserToUploadMapping({
  userId: '123',
  file: req.file,
})
To avoid problems with where uploadVideo.js lives versus where the application is run, you should use an absolute path when saving files to the filesystem.
(Minor issue) Your filename function will produce names like "video.mp4 1622180824748"; something like "video-1622181268053.mp4" is better, since it keeps the correct file extension.
You can refer to this code:
const router = require("express").Router();
const multer = require('multer');
const UserToUploadMapping = require('../models/UserToUploadMapping');
const path = require('path');

const uploadFolder = path.join(__dirname, "Videos"); // use a variable to hold the value of the upload folder

const storage = multer.diskStorage({
  destination: uploadFolder, // use it when uploading
  filename: (req, file, cb) => {
    // nameFile = file.originalname + " " + Date.now() // --> gives "video.mp4 1622180824748"
    let [filename, extension] = file.originalname.split('.');
    let nameFile = filename + "-" + Date.now() + "." + extension; // --> gives "video-1622181268053.mp4"
    cb(null, nameFile);
  }
});
const upload = multer({ storage: storage });

router.post('/upload', upload.single('video'), async (req, res, next) => {
  const saveMapping = new UserToUploadMapping({
    userId: '123',
    file: req.file,
  });
  await saveMapping.save();
  res.send("Video uploaded");
});

router.get('/download', async (req, res, next) => {
  const video = await UserToUploadMapping.find({});
  res.download(video[0].file.path); // video[0].file.path is the absolute path to the file
});

module.exports = router;
Your code indicates you are handling large files (videos). I would strongly recommend looking at separation of concerns; handling this as part of your other business logic is not recommended, based on my experience. It can, for example, complicate firewall rules and DDoS protection when those are needed in the future.
As a minimum, move upload and download into their own server, e.g. 'files.yourappnamehere.com', so that you can handle the specifics separately from your business logic API.
If you run in the public cloud, I would strongly recommend looking at reusing blob upload/download functionality, letting your clients upload directly to blob storage and also handling downloads directly from blob storage, e.g. in Azure, AWS or GCP.
This will save you a lot of the implementation details of handling (very) large files, and also give "free" extensibility options such as events on file upload completion.
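By way of illustration (this is not from the answer above, just a sketch of the direct-to-blob idea on GCP): the server hands out a short-lived signed upload URL and the client PUTs the video straight to the bucket. The bucket name, object name, and content type below are placeholders, and @google-cloud/storage is assumed.

// Hedged sketch: issue a V4 signed upload URL with @google-cloud/storage.
const { Storage } = require('@google-cloud/storage');
const storage = new Storage();

async function getUploadUrl(objectName) {
  const [url] = await storage
    .bucket('my-video-bucket') // placeholder bucket
    .file(objectName)
    .getSignedUrl({
      version: 'v4',
      action: 'write',
      expires: Date.now() + 15 * 60 * 1000, // valid for 15 minutes
      contentType: 'video/mp4',
    });
  return url; // the client then uploads with: PUT <url>, Content-Type: video/mp4
}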
You are running two apps, the frontend and the backend, on different ports (3000, 5000), so browsers block cross-domain requests. In Express you must enable CORS to allow requests from the frontend URL (http://localhost:3000).
For the download route, try using window.location functions instead of using Axios.
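A minimal sketch of enabling that, assuming the cors package:

const cors = require('cors');
// Allow the React dev server origin to call this API.
app.use(cors({ origin: 'http://localhost:3000' }));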
It looks like you might have a typo in your get handler... you're referencing an element called 'path', but that's not declared in your schema
router.get('/download', async (req, res, next) => {
  const x = await UserToUploadMapping.find();
  // res.send(x)
  res.download(x[0].path); // <- path doesn't seem to be in the schema
});
Since you don't have a try/catch in that function, the resulting error could be bringing down your server, making it unavailable.
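A hedged sketch of that route with a try/catch and the schema-correct path (file.path, per the earlier answer):

router.get('/download', async (req, res, next) => {
  try {
    const x = await UserToUploadMapping.find();
    if (!x.length) return res.status(404).send('No uploads found');
    res.download(x[0].file.path); // file.path, not path
  } catch (err) {
    next(err); // let Express's error handler respond instead of crashing the server
  }
});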
You might also want to take a look at this for more detail on How to download files using axios
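For example, a hedged client-side sketch of fetching the video as a blob with axios and handing it to the <video> element (the ref name is an assumption):

const fetchVideo = async () => {
  const resp = await axios.get('http://localhost:5000/api/user/video/download', {
    responseType: 'blob', // ask axios for binary data
  });
  const objectUrl = URL.createObjectURL(resp.data);
  videoRef.current.src = objectUrl; // videoRef is an assumed React ref to the <video> tag
};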

how to retrieve image from firebase storage and display it on a webpage using google-cloud and node js

I store my image on Firebase Storage and now I want to display it on my page.
const firebase = require('./../config/firebase');
const gcloud = require('google-cloud');
const fs = require('fs');

var storage = gcloud.storage({
  projectId: 'test-a1e76',
  keyFilename: 'test-a1e76-firebase-adminsdk-7111d-124guy123eac.json',
});

var bucket = storage.bucket('test-a1e76.appspot.com');
This is how my setup looks.
And this is my GET method:
router.get('/image', function (req, res) {
  var remoteReadStream = bucket.file('download.png').createReadStream();
  var localWriteStream = fs.createWriteStream('/images/watchers/2jIompF9FUZ6A4LnpBcbpHWw8dx2/download.png');
  var ss = remoteReadStream.pipe(localWriteStream);
  res.send(ss);
});
I only tried this because it is what the google-cloud npm docs show.
I put it inside the GET method to see how it works, and after that I got this error:
Error: ENOENT: no such file or directory, open 'C:\images\test\2jIoasd24zd13ase121s2Ww8dx2\download.png'
This is my AJAX GET method:
$.ajax({
  url: '/user/image',
  type: 'GET',
  success: function (data) {
    console.log(data.path);
  }
});
Can anyone here guide me on how I can retrieve images from Firebase Storage and display them on my webpage using this google-cloud npm? I read some threads saying Node.js doesn't support firebase-storage, so they use google-cloud instead.
I got it working this way.
var admin = require("firebase-admin");
...
app.get('/picture', async (req, res) => {
  const fileRef = admin.storage().bucket().file('03aead66e97f0d50ce549b6fffc1b6d7.svg');
  const hash = await fileRef.download();
  res.contentType(fileRef.metadata.contentType);
  res.end(hash[0], 'binary');
});
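An alternative worth noting (not from this answer): return a signed read URL from the admin SDK and let the browser load the image directly. A sketch, assuming firebase-admin is initialized as above; the object name is a placeholder.

app.get('/image-url', async (req, res) => {
  const file = admin.storage().bucket().file('download.png'); // placeholder object name
  // A signed URL lets an <img> tag fetch the image straight from storage.
  const [url] = await file.getSignedUrl({ action: 'read', expires: Date.now() + 60 * 60 * 1000 });
  res.json({ url });
});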
You only need to show the image, right? There's a dirty way to do it.
1- Upload an image to your bucket and click on it (in the Firebase console).
2- The right side of the screen shows info about your file.
3- Look for "Download URL" (or something like that) and click on it.
4- This is an example URL:
https://firebasestorage.googleapis.com/v0/b/coffee-a7e8c.appspot.com/o/coffeeTrue.png?alt=media&token=7f44e575-414d-4d18-8f39-c94a23f6e014
As you can see there is a pattern:
https://firebasestorage.googleapis.com/v0/b/NAME_FILE?alt=media&token=YOUR_TOKEN
Grab your token and you can show any image in your bucket by just passing the name of the file and your token (as in this example).
This works because Firebase provides a REST API for your services.
Remember: authenticate first or set up open rules.
Example: everyone can read, only authenticated users can write.
service firebase.storage {
  match /b/{bucket}/o {
    match /{allPaths=**} {
      allow read;
      allow write: if request.auth != null;
    }
  }
}
In a Firebase Function with Node I use the code below, which works perfectly:
How do I upload a base64 encoded image (string) directly to a Google Cloud Storage bucket using Node.js?
const uuidv4 = require('uuid/v4');
const uuid = uuidv4();
const express = require('express');
const bodyParser = require('body-parser');
const functions = require('firebase-functions');
const os = require('os');
const path = require('path');
const cors = require('cors')({ origin: true });
const Busboy = require('busboy');
const fs = require('fs');
var admin = require("firebase-admin");

var serviceAccount = {
  "type": "service_account",
  "project_id": "xxxxxx",
  "private_key_id": "xxxxxx",
  "private_key": "-----BEGIN PRIVATE KEY-----\jr5x+4AvctKLonBafg\nElTg3Cj7pAEbUfIO9I44zZ8=\n-----END PRIVATE KEY-----\n",
  "client_email": "xxxx@xxxx.iam.gserviceaccount.com",
  "client_id": "xxxxxxxx",
  "auth_uri": "https://accounts.google.com/o/oauth2/auth",
  "token_uri": "https://oauth2.googleapis.com/token",
  "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
  "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/firebase-adminsdk-5rmdm%40xxxxx.iam.gserviceaccount.com"
};

admin.initializeApp({
  credential: admin.credential.cert(serviceAccount),
  storageBucket: "xxxxx-xxxx" // use your storage bucket name
});

const app = express();
app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());

app.post('/uploadFile', (req, response) => {
  response.set('Access-Control-Allow-Origin', '*');
  const busboy = new Busboy({ headers: req.headers });
  let uploadData = null;

  busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
    const filepath = path.join(os.tmpdir(), filename);
    uploadData = { file: filepath, type: mimetype };
    console.log("-------------->>", filepath);
    file.pipe(fs.createWriteStream(filepath));
  });

  busboy.on('finish', () => {
    const bucket = admin.storage().bucket();
    bucket.upload(uploadData.file, {
      uploadType: 'media',
      metadata: {
        metadata: {
          firebaseStorageDownloadTokens: uuid,
          contentType: uploadData.type,
        },
      },
    })
      .then(() => {
        // respond on success (added; the original snippet never sent a success response)
        response.status(200).json({ uploaded: true });
      })
      .catch(err => {
        response.status(500).json({
          error: err,
        });
      });
  });

  busboy.end(req.rawBody);
});

exports.widgets = functions.https.onRequest(app);

How to perform an HTTP file upload using express on Cloud Functions for Firebase (multer, busboy)

I am trying to upload a file to Cloud Functions, using Express to handle requests there, but I am not succeeding. I created a version that works locally:
serverside js
const express = require('express');
const cors = require('cors');
const fileUpload = require('express-fileupload');

const app = express();
app.use(fileUpload());
app.use(cors());

app.post('/upload', (req, res) => {
  res.send('files: ' + Object.keys(req.files).join(', '));
});
clientside js
const formData = new FormData();
Array.from(this.$refs.fileSelect.files).forEach((file, index) => {
  formData.append('sample' + index, file, 'sample');
});

axios.post(
  url,
  formData,
  {
    headers: { 'Content-Type': 'multipart/form-data' },
  }
);
This exact same code seems to break when deployed to Cloud Functions, where req.files is undefined. Does anyone have any idea what is happening here?
EDIT
I also had a go at using multer, which worked fine locally, but once uploaded to Cloud Functions, this got me an empty array (same clientside code):
const express = require('express');
const multer = require('multer');
const cors = require('cors');

const app = express();
const upload = multer();
app.use(cors());

app.post('/upload', upload.any(), (req, res) => {
  res.send(JSON.stringify(req.files));
});
There was indeed a breaking change in the Cloud Functions setup that triggered this issue. It has to do with the way the middleware works that gets applied to all Express apps (including the default app) used to serve HTTPS functions. Basically, Cloud Functions will parse the body of the request and decide what to do with it, leaving the raw contents of the body in a Buffer in req.rawBody. You can use this to directly parse your multipart content, but you can't do it with middleware (like multer).
Instead, you can use a module called busboy to deal with the raw body content directly. It can accept the rawBody buffer and will call you back with the files it found. Here is some sample code that will iterate all the uploaded content, save them as files, then delete them. You'll obviously want to do something more useful.
const path = require('path');
const os = require('os');
const fs = require('fs');
const Busboy = require('busboy');
const functions = require('firebase-functions');

exports.upload = functions.https.onRequest((req, res) => {
  if (req.method === 'POST') {
    const busboy = new Busboy({ headers: req.headers });
    // This object will accumulate all the uploaded files, keyed by their name
    const uploads = {};

    // This callback will be invoked for each file uploaded
    busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
      console.log(`File [${fieldname}] filename: ${filename}, encoding: ${encoding}, mimetype: ${mimetype}`);
      // Note that os.tmpdir() is an in-memory file system, so should only
      // be used for files small enough to fit in memory.
      const filepath = path.join(os.tmpdir(), fieldname);
      uploads[fieldname] = { file: filepath };
      console.log(`Saving '${fieldname}' to ${filepath}`);
      file.pipe(fs.createWriteStream(filepath));
    });

    // This callback will be invoked after all uploaded files are saved.
    busboy.on('finish', () => {
      for (const name in uploads) {
        const upload = uploads[name];
        const file = upload.file;
        res.write(`${file}\n`);
        fs.unlinkSync(file);
      }
      res.end();
    });

    // The raw bytes of the upload will be in req.rawBody. Send it to busboy, and get
    // a callback when it's finished.
    busboy.end(req.rawBody);
  } else {
    // Client error - only support POST
    res.status(405).end();
  }
});
Bear in mind that files saved to temp space occupy memory, so their sizes should be limited to a total of 10MB. For larger files, you should upload those to Cloud Storage and process them with a storage trigger.
Also bear in mind that the default selection of middleware added by Cloud Functions is not currently added to the local emulator via firebase serve. So this sample will not work (rawBody won't be available) in that case.
The team is working on updating the documentation to be more clear about what all happens during HTTPS requests that's different than a standard Express app.
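For the larger-file route mentioned above, here is a minimal storage-trigger sketch (the function name is a placeholder; firebase-functions is assumed):

const functions = require('firebase-functions');

// Runs after a file has been uploaded directly to the default bucket.
exports.processUpload = functions.storage.object().onFinalize(async (object) => {
  console.log(`New file: ${object.name} (${object.contentType}, ${object.size} bytes)`);
  // ...process the uploaded file here...
});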
Thanks to the answers above, I've built an npm module for this (github).
It works with Google Cloud Functions; just install it (npm install --save express-multipart-file-parser) and use it like this:
const fileMiddleware = require('express-multipart-file-parser');
...
app.use(fileMiddleware);
...
app.post('/file', (req, res) => {
  const {
    fieldname,
    filename,
    encoding,
    mimetype,
    buffer,
  } = req.files[0];
  ...
});
I was able to combine both Brian's and Doug's responses. Here's my middleware that ends up mimicking the req.files from multer, so there are no breaking changes to the rest of your code.
const bodyParser = require('body-parser');
const getRawBody = require('raw-body');
const contentType = require('content-type');
const Busboy = require('busboy');

module.exports = (path, app) => {
  app.use(bodyParser.json());
  app.use(bodyParser.urlencoded({ extended: true }));

  app.use((req, res, next) => {
    if (req.rawBody === undefined && req.method === 'POST' && req.headers['content-type'].startsWith('multipart/form-data')) {
      getRawBody(req, {
        length: req.headers['content-length'],
        limit: '10mb',
        encoding: contentType.parse(req).parameters.charset
      }, function (err, string) {
        if (err) return next(err);
        req.rawBody = string;
        next();
      });
    } else {
      next();
    }
  });

  app.use((req, res, next) => {
    if (req.method === 'POST' && req.headers['content-type'].startsWith('multipart/form-data')) {
      const busboy = new Busboy({ headers: req.headers });
      let fileBuffer = new Buffer('');
      req.files = {
        file: []
      };

      busboy.on('field', (fieldname, value) => {
        req.body[fieldname] = value;
      });

      busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
        file.on('data', (data) => {
          fileBuffer = Buffer.concat([fileBuffer, data]);
        });
        file.on('end', () => {
          const file_object = {
            fieldname,
            'originalname': filename,
            encoding,
            mimetype,
            buffer: fileBuffer
          };
          req.files.file.push(file_object);
        });
      });

      busboy.on('finish', () => {
        next();
      });

      busboy.end(req.rawBody);
      req.pipe(busboy);
    } else {
      next();
    }
  });
};
I have been suffering from the same problem for a few days; it turns out that the Firebase team has put the raw body of multipart/form-data into req.body with their middleware. If you try console.log(req.body.toString()) BEFORE processing your request with multer, you will see your data. As multer creates a new req.body object which overrides the incoming one, the data is gone and all we can get is an empty req.body. Hopefully the Firebase team will correct this soon.
To add to the official Cloud Functions team answer, you can emulate this behavior locally by doing the following (add this middleware above the busboy code they posted, obviously):
const getRawBody = require('raw-body');
const contentType = require('content-type');

app.use(function (req, res, next) {
  if (req.rawBody === undefined && req.method === 'POST' && req.headers['content-type'] !== undefined && req.headers['content-type'].startsWith('multipart/form-data')) {
    getRawBody(req, {
      length: req.headers['content-length'],
      limit: '10mb',
      encoding: contentType.parse(req).parameters.charset
    }, function (err, string) {
      if (err) return next(err);
      req.rawBody = string;
      next();
    });
  } else {
    next();
  }
});
Cloud Functions pre-processes the request object before passing it on. As such, the original multer middleware doesn't work. Furthermore, using busboy is too low level and you need to take care of everything on your own, which isn't ideal. Instead, you can use a forked version of the multer middleware for processing multipart/form-data on Cloud Functions.
Here's what you can do.
Install the fork:
npm install --save emadalam/multer#master
Use the startProcessing configuration for custom handling of the req.rawBody added by Cloud Functions:
const express = require('express');
const multer = require('multer');

const SIZE_LIMIT = 10 * 1024 * 1024; // 10MB

const app = express();
const multipartFormDataParser = multer({
  storage: multer.memoryStorage(),
  // increase size limit if needed
  limits: { fieldSize: SIZE_LIMIT },
  // support firebase cloud functions
  // the multipart form-data request object is pre-processed by the cloud functions
  // currently the `multer` library doesn't natively support this behaviour
  // as such, a custom fork is maintained to enable this by adding `startProcessing`
  // https://github.com/emadalam/multer
  startProcessing(req, busboy) {
    req.rawBody ? busboy.end(req.rawBody) : req.pipe(busboy);
  },
});

app.post('/some_route', multipartFormDataParser.any(), function (req, res, next) {
  // req.files is array of uploaded files
  // req.body will contain the text fields
});
I ran into this issue today; check here for more details on how to handle files on Google Cloud (basically, you don't need multer).
Here is a middleware I use to extract files. It will keep all your files on request.files and the other form fields on request.body for every POST request with a multipart/form-data content type. It will leave everything else alone for your other middlewares to handle.
// multiparts.js
const { createWriteStream } = require('fs');
const { tmpdir } = require('os');
const { join } = require('path');
const BusBoy = require('busboy');

exports.extractFiles = async (req, res, next) => {
  const multipart = req.method === 'POST' && req.headers['content-type'].startsWith('multipart/form-data');
  if (!multipart) return next();

  const busboy = new BusBoy({ headers: req.headers });
  const incomingFields = {};
  const incomingFiles = {};
  const writes = [];

  // Process fields
  busboy.on('field', (name, value) => {
    try {
      // This will keep a field created like so form.append('product', JSON.stringify(product)) intact
      incomingFields[name] = JSON.parse(value);
    } catch (e) {
      // Numbers will still be strings here (i.e 1 will be '1')
      incomingFields[name] = value;
    }
  });

  // Process files
  busboy.on('file', (field, file, filename, encoding, contentType) => {
    // Doing this to not have to deal with duplicate file names
    // (i.e. TIMESTAMP-originalName. Hmm what are the odds that I'll still have dups?)
    const path = join(tmpdir(), `${(new Date()).toISOString()}-${filename}`);
    // NOTE: Multiple files could have same fieldname (which is why I'm using arrays here)
    incomingFiles[field] = incomingFiles[field] || [];
    incomingFiles[field].push({ path, encoding, contentType });

    const writeStream = createWriteStream(path);

    writes.push(new Promise((resolve, reject) => {
      file.on('end', () => { writeStream.end(); });
      writeStream.on('finish', resolve);
      writeStream.on('error', reject);
    }));

    file.pipe(writeStream);
  });

  busboy.on('finish', async () => {
    await Promise.all(writes);
    req.files = incomingFiles;
    req.body = incomingFields;
    next();
  });

  busboy.end(req.rawBody);
};
And now in your function, make sure that this is the first middleware you use.
// index.js
const { onRequest } = require('firebase-functions').https;
const bodyParser = require('body-parser');
const express = require('express');
const cors = require('cors');

const app = express();

// First middleware I'm adding
const { extractFiles } = require('./multiparts');
app.use(extractFiles);

app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json());
app.use(cors({ origin: true }));
app.use((req, res, next) => { console.log(req.originalUrl); next(); }); // call next() so requests don't hang

exports.MyFunction = onRequest(app);
I fixed some bugs in G. Rodriguez's response. I added 'field' and 'finish' events for Busboy, and call next() in the 'finish' event. This works for me. As follows:
const bodyParser = require('body-parser');
const getRawBody = require('raw-body');
const contentType = require('content-type');
const Busboy = require('busboy');
const { inspect } = require('util');

module.exports = (path, app) => {
  app.use(bodyParser.json());
  app.use(bodyParser.urlencoded({ extended: true }));

  app.use((req, res, next) => {
    if (req.rawBody === undefined && req.method === 'POST' && req.headers['content-type'].startsWith('multipart/form-data')) {
      getRawBody(req, {
        length: req.headers['content-length'],
        limit: '10mb',
        encoding: contentType.parse(req).parameters.charset
      }, function (err, string) {
        if (err) return next(err);
        req.rawBody = string;
        next();
      });
    } else {
      next();
    }
  });

  app.use((req, res, next) => {
    if (req.method === 'POST' && req.headers['content-type'].startsWith('multipart/form-data')) {
      const busboy = new Busboy({ headers: req.headers });
      let fileBuffer = new Buffer('');
      req.files = {
        file: []
      };

      busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
        file.on('data', (data) => {
          fileBuffer = Buffer.concat([fileBuffer, data]);
        });
        file.on('end', () => {
          const file_object = {
            fieldname,
            'originalname': filename,
            encoding,
            mimetype,
            buffer: fileBuffer
          };
          req.files.file.push(file_object);
        });
      });

      busboy.on('field', function (fieldname, val, fieldnameTruncated, valTruncated) {
        console.log('Field [' + fieldname + ']: value: ' + inspect(val));
      });

      busboy.on('finish', function () {
        next();
      });

      busboy.end(req.rawBody);
      req.pipe(busboy);
    } else {
      next();
    }
  });
};
Thanks for everyone's help on this thread. I wasted a whole day trying every possible combination and all these different libraries... only to discover this after exhausting all other options.
I combined some of the above solutions to create a TypeScript and middleware-capable script here:
https://gist.github.com/jasonbyrne/8dcd15701f686a4703a72f13e3f800c0
If you just want to get a single uploaded file from the request, use busboy to get the file as a readable stream:
const express = require('express');
const Busboy = require('busboy');

express().post('/', (req, res) => {
  const busboy = new Busboy({ headers: req.headers });
  busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
    // Do something with `file`, e.g. pipe it to an output stream.
    // file.pipe(fs.createWriteStream('upload.pdf'))
  });
  // The original input was moved to `req.rawBody`
  busboy.write(req.rawBody);
});
Note that, on top of using Busboy on the server and parsing req.rawBody, you may also need to add the following config to your Axios request:
{ headers: { 'content-type': `multipart/form-data; boundary=${formData._boundary}` }};
If you only specify the content-type and not the boundary, you get a Boundary not found error on the server. If you remove the headers altogether, instead, Busboy won't parse the fields properly.
See: Firebase Cloud Functions and Busboy not parsing fields or files
I experienced the same issue when I deployed my app using Firebase Functions. I was using multer to upload images to Amazon S3. I resolved the issue by using the npm module from the answer above (https://stackoverflow.com/a/48648805/5213790), created by Cristóvão.
const aws = require('aws-sdk');
const functions = require('firebase-functions');

const { mimetype, buffer } = req.files[0];

let s3bucket = new aws.S3({
  accessKeyId: functions.config().aws.access_key,
  secretAccessKey: functions.config().aws.secret_key,
});

const config = {
  Bucket: functions.config().aws.bucket_name,
  ContentType: mimetype,
  ACL: 'public-read',
  Key: Date.now().toString(),
  Body: buffer,
};

s3bucket.upload(config, (err, data) => {
  if (err) console.log(err);
  req.file = data;
  next();
});
Note that this is for a single file image upload.
The next middleware will have the returned object from s3
{
  ETag: '"cacd6d406f891e216f9946911a69aac5"',
  Location: 'https://react-significant.s3.us-west1.amazonaws.com/posts/1567282665593',
  key: 'posts/1567282665593',
  Key: 'posts/1567282665593',
  Bucket: 'react-significant'
}
In this case, you might need the Location URL before you save your data in the db.
I've tried Doug's answer, however the finish event was never fired, so I tweaked the code a little bit and got this, which works for me:
const path = require('path');
const os = require('os');
const fs = require('fs');
const Busboy = require('busboy');
// assumes an existing Express `app`

// It's very crucial that the file name matches the name attribute in your html
app.post('/', (req, res) => {
  const busboy = new Busboy({ headers: req.headers });
  // This object will accumulate all the uploaded files, keyed by their name
  const uploads = {};

  // This callback will be invoked for each file uploaded
  busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
    console.log(`File [${fieldname}] filename: ${filename}, encoding: ${encoding}, mimetype: ${mimetype}`);
    // Note that os.tmpdir() is an in-memory file system, so should only
    // be used for files small enough to fit in memory.
    const filepath = path.join(os.tmpdir(), filename);
    uploads[fieldname] = { file: filepath };
    console.log(`Saving '${fieldname}' to ${filepath}`);
    const stream = fs.createWriteStream(filepath);
    stream.on('open', () => file.pipe(stream));
  });

  // This callback will be invoked after all uploaded files are saved.
  busboy.on('finish', () => {
    console.log('look im firing!');
    // Do whatever you want here
    res.end();
  });

  // The raw bytes of the upload will be in req.rawBody. Send it to busboy, and get
  // a callback when it's finished.
  busboy.end(req.rawBody);
});
