Firebase cloud function for file upload - javascript

I have this cloud function that I wrote to upload file to google cloud storage:
const gcs = require('#google-cloud/storage')({keyFilename:'2fe4e3d2bfdc.json'});
var filePath = file.path + "/" + file.name;
return bucket.upload(filePath, {
destination: file.name
}).catch(reason => {
console.error(reason);
});
I used formidable to parse the uploaded file, and when I log the properties of the uploaded file they look fine; it is uploaded to a temp dir '/tmp/upload_2866bbe4fdcc5beb30c06ae6c3f6b1aa/', but when I try to upload the file to GCS I am getting this error:
{ Error: EACCES: permission denied, stat '/tmp/upload_2866bbe4fdcc5beb30c06ae6c3f6b1aa/thumb_ttttttt.jpg'
at Error (native)
errno: -13,
code: 'EACCES',
syscall: 'stat',
path: '/tmp/upload_2866bbe4fdcc5beb30c06ae6c3f6b1aa/thumb_ttttttt.jpg' }
I am using this html form to upload the file:
<!DOCTYPE html>
<html>
  <body>
    <!-- Posts the chosen file as multipart/form-data to the Cloud Function. -->
    <form action="https://us-central1-appname.cloudfunctions.net/uploadFile"
          method="post"
          enctype="multipart/form-data">
      Select image to upload:
      <input type="file" name="fileToUpload" id="fileToUpload">
      <input type="submit" value="Upload Image" name="submit">
    </form>
  </body>
</html>

I got a solution from the Firebase Support Team
So first thing:
var filePath = file.path + "/" + file.name;
we don't need the file.name, since file.path is already the full path of the file (including the file name).
So changed it to this instead:
var filePath = file.path;
Second, the function terminates before the asynchronous work in 'form.parse(...)' is completed. That means the actual file upload might still be in progress while the function execution has ended.
The fix for that is to wrap the form.parse(...) in a promise:
exports.uploadFile = functions.https.onRequest((req, res) => {
var form = new formidable.IncomingForm();
return new Promise((resolve, reject) => {
form.parse(req, function(err, fields, files) {
var file = files.fileToUpload;
if(!file){
reject("no file to upload, please choose a file.");
return;
}
console.info("about to upload file as a json: " + file.type);
var filePath = file.path;
console.log('File path: ' + filePath);
var bucket = gcs.bucket('bucket-name');
return bucket.upload(filePath, {
destination: file.name
}).then(() => {
resolve(); // Whole thing completed successfully.
}).catch((err) => {
reject('Failed to upload: ' + JSON.stringify(err));
});
});
}).then(() => {
res.status(200).send('Yay!');
return null
}).catch(err => {
console.error('Error while parsing form: ' + err);
res.status(500).send('Error while parsing form: ' + err);
});
});
Lastly, you may want to consider using the Cloud Storage for Firebase in uploading your file instead of Cloud functions. Cloud Storage for Firebase allows you to upload files directly to it, and would work much better:
It has access control
It has resumable uploads/downloads (great for poor connectivity)
It can accept files of any size without timeout-issues
If you want to trigger a Cloud Function on a file upload event, you can
do that and a lot more

I managed this by downloading the file to the tmp instead.
You will need:
const mkdirp = require('mkdirp-promise');
Then, inside onChange. I created tempLocalDir like so:
// Cloud Functions may only write under /tmp (an in-memory filesystem).
const LOCAL_TMP_FOLDER = '/tmp/';
const fileDir = (the name of the file); //whatever method you choose to do this
// Full temp directory for this particular file.
const tempLocalDir = `${LOCAL_TMP_FOLDER}${fileDir}`;
Then I use mkdirp to make the temp directory
// Create the temp directory, then download the source object into it,
// convert it, and upload the converted result back to the bucket.
return mkdirp(tempLocalDir).then(() => {
// Then Download file from bucket.
const bucket = gcs.bucket(object.bucket);
// NOTE(review): tempLocalFile is never defined in this snippet — presumably
// `${tempLocalDir}/<file name>`; confirm before reusing this code.
return bucket.file(filePath).download({
destination: tempLocalFile
}).then(() => {
console.log('The file has been downloaded to', tempLocalFile);
//Here I have some code that converts images then returns the converted image
//Then I use
return bucket.upload((the converted image), {
destination: (a file path in your database)
}).then(() => {
console.log('JPEG image uploaded to Storage at', filePath);
})//You can perform more actions of end the promise here
I think my code achieves that you were trying to accomplish. I hope this helps; I can offer more code if necessary.

Related

How to resolve path issues while moving files in node.js?

I am trying to get a file from an HTML form and store it in another folder. It's basically a cloud function, and I am new to both Node.js and Firebase, so I don't know what I am doing wrong. What I managed to do is:
const fileMiddleware = require('express-multipart-file-parser');
// Receives a multipart POST; the parsed file's bytes live in `buffer`.
app.post("/sendMail", (req, res) => {
const {
fieldname,
filename,
encoding,
mimetype,
buffer,
} = req.files[0];
console.log(req.files[0].originalname);
var fs = require('fs')
// NOTE(review): originalname is only the client-side file name (e.g.
// "sendMail.txt"); fs.rename resolves it against the process working
// directory. The uploaded bytes exist only in `buffer`, not at this path —
// that is why the rename fails with ENOENT.
var oldPath = req.files[0].originalname;
// NOTE(review): a leading '/' makes this an absolute path (the drive root
// on Windows), not a path inside the project folder.
var newPath = '/functions/'+oldPath;
fs.rename(oldPath, newPath, function (err) {
if (err) throw err
console.log('Successfully renamed - AKA moved!')
// NOTE(review): no response is ever sent here, so the client hangs.
});
});
Whenever I try to move file, I got path issues. The error is as follows:
[Error: ENOENT: no such file or directory, rename 'C:\Users\Maisum Abbas\now\functions\sendMail.txt'
> 'C:\functions\sendMail.txt'] {
> errno: -4058,
> code: 'ENOENT',
> syscall: 'rename',
> path: 'C:\\Users\\Maisum Abbas\\now\\functions\\sendMail.txt',
> dest: 'C:\\functions\\sendMail.txt'
> }
Also, this is the path where I want to actually move the file but oldpath is already setup like this.
C:\Users\Maisum Abbas\now\functions\sendMail.txt
Since I needed to attach a file with email, it was causing path issues. I tried it with multer and it works. What I did:
//call libraries here
// Multer disk storage: resumes land in resume/ as "<fieldname>-<timestamp>".
var storage = multer.diskStorage({
destination: function (req, file, callback) {
callback(null, 'resume/');
},
filename: function (req, file, callback) {
callback(null, file.fieldname + '-' + Date.now());
}
});
// Single-file upload taken from the 'filetoupload' form field.
var upload = multer({ storage : storage}).single('filetoupload');
app.post("/careerMail", (req, res) => {
const { name } = req.body;
const { email } = req.body;
const { phone } = req.body;
// NOTE(review): upload() is asynchronous — the statements below run before
// its callback fires, so req.files may not yet be populated when
// mailOptions is built; confirm the ordering before reusing this code.
upload(req,res,function(err) {
if(err) {
return res.end("Error uploading file.");
}
});
const dest = 'mymail';
const mailOptions = {
from: email, // Something like: Jane Doe <janedoe@gmail.com>
to: dest,
subject: 'Candidate Application', // email subject
html: `<div>
<strong>From:</strong> ` +
name +
`<br /><br />
<strong>Email:</strong> ` +
email +
`<br /><br />
<strong>Phone:</strong> ` +
phone +
`<br /><br />
</div>
`,// email content in HTML
// Attach the uploaded file from the in-memory buffer, base64-encoded.
attachments: [
{
filename: req.files[0].originalname,
content: req.files[0].buffer.toString("base64"),
encoding: "base64"
}
]
I suggest rethinking this approach altogether. You won't be able to move files around in a deployed function. The nodejs runtime filesystem doesn't allow any files to be written anywhere in the filesystem, except for os.tmpdir() (which is /tmp on Linux).
If you need to write a file temporarily, you should definitely only use that tmp space. Be aware that files written there occupy memory and should be deleted before the function terminates, or you could leak memory.
You can read files that you deployed with your code, but you should do that through relative paths.
I ran into same problem while moving file. I sort this problem by using a function to get the application root folder and then concatenate rest of the location.
// Place this file in the application root and require it wherever the
// root path is needed. It exports the directory that contains the
// process's entry script, so other modules can build absolute paths
// without hard-coding them.
const path = require('path');

const entryScript = require.main.filename || process.mainModule.filename;
module.exports = path.dirname(entryScript);
// Taking your case's move location as an example:
const rootPath = require('./app-root'); // the root-path module defined above
// BUG FIX: the original line was missing the opening quote on '/functions/',
// which is a syntax error.
const newPath = rootPath + '/functions/' + oldPath;
fs.rename(oldPath, newPath, function (err) {
  if (err) throw err;
  console.log('Successfully renamed - AKA moved!')
});

Express file upload and view

I am using express-fileupload to upload the images. The images are saved in my local directory. I want to insert the name of the file to the mongodb if possible. Finally I want the image to be displayed in my frontend.
// Saves the uploaded file (if any) under ./upload/ and inserts a new User
// document built from the posted form fields.
function insertRecord(req, res) {
  if (req.files) {
    const file = req.files.filename;
    // BUG FIX: `filename` was an implicit global; and "./upload" + filename
    // produced paths like "./uploadphoto.png" — the directory separator
    // was missing.
    const filename = file.name;
    file.mv("./upload/" + filename, function (err) {
      if (err)
        console.log(err);
    });
  }
  const user = new User();
  user.name = req.body.name;
  user.address = req.body.address;
  user.email = req.body.email;
  user.mobile = req.body.mobile;
  user.filename = req.body.filename;
  user.save((err, docs) => {
    if (!err) {
      res.redirect('/user/list');
    } else {
      if (err.name == 'ValidationError') {
        // Re-render the form with the submitted values on validation failure.
        handleValidationError(err, req.body);
        res.render("./users/addOrEdit", {
          viewTitle: "Insert User",
          user: req.body
        });
      } else
        console.log('Error during record insertion : ' + err);
    }
  });
}
I am not sure whether the way to insert the name of the file to the mongodb is correct or not. Anyway, that is optional but I am not understanding how can I display the uploaded images which are present in the local directory.
I tried to save the image as base64 but the record is not saved to the database now.
// Multer disk storage: files land in uploads/ named "<fieldname>-<timestamp>".
var storage = multer.diskStorage({
  destination(req, file, cb) {
    cb(null, 'uploads/');
  },
  filename(req, file, cb) {
    var generatedName = file.fieldname + '-' + Date.now();
    cb(null, generatedName);
  }
});
var upload = multer({ storage: storage });

// An empty _id in the form means this is a brand-new record.
router.post('/', upload.single('myImage'), function (req, res) {
  var isNewRecord = req.body._id == '';
  if (isNewRecord) {
    insertRecord(req, res);
  } else {
    updateRecord(req, res);
  }
});
// Reads the uploaded image from disk, wraps it as binary data with its MIME
// type, attaches it to a new User document, and saves the record.
function insertRecord(req, res) {
  var img = fs.readFileSync(req.file.path);
  var encode_image = img.toString('base64');
  var finalImg = {
    contentType: req.file.mimetype,
    // BUG FIX: `new Buffer()` is deprecated; Buffer.from() is the safe API.
    image: Buffer.from(encode_image, 'base64')
  };
  const user = new User();
  user.name = req.body.name;
  user.address = req.body.address;
  user.email = req.body.email;
  user.mobile = req.body.mobile;
  // BUG FIX: save() does not accept a document argument, so `finalImg` was
  // silently dropped and the image never reached the database. Attach it to
  // the document before saving instead.
  // NOTE(review): assumes the User schema has an `img` field — adjust the
  // property name to match your schema.
  user.img = finalImg;
  user.save((err, docs) => {
    if (!err) {
      res.redirect('/user/list');
    } else {
      if (err.name == 'ValidationError') {
        handleValidationError(err, req.body);
        res.render("./users/addOrEdit", {
          viewTitle: "Insert User",
          user: req.body
        });
      } else
        console.log('Error during record insertion : ' + err);
    }
  });
}
Edit: I think there is a problem in the code: the destination should be `'./upload/' + filename` (with a slash before the filename), not `'./upload' + filename`.
In order to show the images, you have to open a static route in Express. Example: app.use('/images', express.static(PATH)). Then you can, in the frontend, call it as <img src="URL/images/FILENAME" />
From your code, it is not possible to understand what kind of data you are sending to the server. As far as I understand, you're trying mv the string filename. In order to transfer files (such as images), you should have form-data instead of JSON data or you should encode the image file into Base64 to transfer it as text (not the filename, the whole file).
Check Multer out for this kind of job. It is described well in the README.md. Apart from that, until you submit the form, the image won't be available in the front-end. If you want to preview the image before uploading it's a separate process which you can learn more in here.

getting message file not supported After downloading image file in nodejs . I'm uploading file from angular7 using Formdata

I'm sending the image using FormData in Angular to my Node.js API. In Node.js I'm storing that file in myfolder, but when I open my stored image file it shows:
"Abc.jpeg — It appears that we don't support this file format"
On the Node.js side I used multiparty, then formidable, but I get the same error with both.
I compared the size of the file before uploading (the original file) — it was 78kB, but after uploading the file size became 111kB.
Nodejs Code
// Parse the multipart upload, then move formidable's temp file into
// C:/storage/myfolder/ under its original name.
var form = new formidable.IncomingForm();
form.parse(req, function (err, fields, files) {
  // BUG FIX: the original ignored parse errors entirely.
  if (err) throw err;
  console.log(files.fileDetails.path);
  var oldpath = files.fileDetails.path;
  var newpath = 'C:/storage/myfolder/' + files.fileDetails.name;
  fs.rename(oldpath, newpath, function (err) {
    if (err) throw err;
    res.write('File uploaded and moved!');
    res.end();
  });
})
Angular Code
// Builds a FormData payload from the selected files and posts it to the API.
public OnSubmit(formValue: any) {
  let main_form: FormData = new FormData();
  for (let j = 0; j < this.totalfiles.length; j++) {
    console.log("the values is ", <File>this.totalfiles[j]);
    console.log("the name is ", this.totalFileName[j]);
    // BUG FIX: the field name must be the string 'fileDetails'; the bare
    // identifier `fileDetails` is undefined and throws at runtime.
    main_form.append('fileDetails', this.totalfiles[j]);
  }
  console.log(formValue.items)
  this._SocietyService.postFiles(main_form).subscribe(data => {
    console.log("result is ", data)
  })
}
var path = require('path')
var multer = require('multer')

// Disk storage: every upload is written to C:/storage/myfolder/ and named
// "<fieldname>-<timestamp>".
var storage = multer.diskStorage({
  destination: 'C:/storage/myfolder/',
  filename(req, file, cb) {
    var uniqueName = file.fieldname + '-' + Date.now();
    cb(null, uniqueName);
  }
})
var upload = multer({ storage: storage })
You can use the multer its a vary handy middleware to handle form/multipart data.

Save Image URL to the User in SailsJS

I'm creating an api using Sails JS v1.0.0
I have an action to upload an image to the server and it's working great but the problem I'm having is that I want to save the image URL to the user uploaded the image. It's the user profile image.
The code seems to work fine but I get an error in the terminal after uploading the image. I guess it has something with the callbacks.
Here is my controller:
let fs = require('fs');
module.exports = {
// Receives a multipart upload on the 'image' field, stores it under
// assets/images/profile, then copies it into .tmp/public so it is served
// immediately, and responds with the file's path relative to assets/.
upload : async function(req, res) {
req.file('image').upload({ dirname : process.cwd() + '/assets/images/profile' }, function(err, uploadedImage) {
if (err) return res.negotiate(err);
// fd is the absolute on-disk path Sails chose; keep only the file name.
let filename = uploadedImage[0].fd.substring(uploadedImage[0].fd.lastIndexOf('/')+1);
// NOTE(review): the file was uploaded to .../profile but these two paths
// point at .../uploads — the copy below reads a non-existent file. The
// corrected version later in this post fixes both paths.
let uploadLocation = process.cwd() +'/assets/images/uploads/' + filename;
let tempLocation = process.cwd() + '/.tmp/public/images/uploads/' + filename;
fs.createReadStream(uploadLocation).pipe(fs.createWriteStream(tempLocation));
res.json({ files : uploadedImage[0].fd.split('assets/')[1] })
})
}
};
About the read stream to the .tmp folder, I wrote it to make the image available the moment it gets uploaded.
I tried to query for the user right before the
res.json({ files : uploadedImage[0].fd.split('assets/')[1] })
line, but it gives me an error in the terminal.
What's the best way to implement this code?
User.update({ id : req.body.id }).set({ image : uploadedImage[0].fd.split('images/')[1] });
You are uploading images to '/assets/images/profile' and trying to fetch it from '/assets/images/uploads/'. Also wrong path in tempLocation variable too. Change your code to following and it will hopefully start working
// Corrected handler: upload, copy, and User.update all use the same
// 'profile' folder, and the update is awaited before responding.
upload : async function(req, res) {
req.file('image').upload({ dirname : process.cwd() + '/assets/images/profile' },
async function(err, uploadedImage) {
if (err) return res.negotiate(err);
// fd is the absolute stored path; extract just the file name.
let filename = uploadedImage[0].fd.substring(uploadedImage[0].fd.lastIndexOf('/')+1);
// Both paths now match the 'profile' folder used for the upload above.
let uploadLocation = process.cwd() +'/assets/images/profile/' + filename;
let tempLocation = process.cwd() + '/.tmp/public/images/profile/' + filename;
// Copy into .tmp/public so the image is web-accessible right away.
// NOTE(review): the pipe is not awaited, so the response may be sent
// before the copy finishes.
fs.createReadStream(uploadLocation).pipe(fs.createWriteStream(tempLocation));
// Persist the image path (relative to images/) on the uploading user.
await User.update({ id : req.body.id }).set({ image : uploadedImage[0].fd.split('images/')[1] });
res.json({ files : uploadedImage[0].fd.split('assets/')[1] })
})
},

AWS S3 ZIP file upload is corrupting the archived files

I am using the below code snippet to upload an image through a file-upload control, zip it using the JSZip API, and then store it in AWS S3. The file upload succeeds and I can see demo.zip in the AWS console; however, the files in demo.zip are getting corrupted and hence I am unable to unzip it.
I tried my level best to debug,but i did not get any clue on the reason for it.
Any help in correcting my code would be really appreciated.
var multer = require('multer');
// Keep uploads in memory (max 5 files, 12 MB each) so their buffers can be
// zipped without touching disk.
var uploadservice = multer({ storage: multer.memoryStorage(), limits: { fileSize: 1000 * 1000 * 12 } }).array("files", 5);

// Zips all uploaded files with JSZip and stores the archive in S3.
app.post('endpointurl', function (req, res, next) {
  uploadservice(req, res, function (err) {
    if (err) {
      console.log("error - " + err)
      res.status(200).end("File upload is failure");
    } else {
      var files = req.files
      var JSZip = require("jszip");
      var zip = new JSZip();
      // BUG FIX: `i` was an implicit global in the original loop.
      for (var i = 0; i < files.length; i++) {
        zip.file('file' + i, files[i].buffer, { binary: true });
      }
      zip.generateAsync({
        // BUG FIX: "binarystring" yields a JavaScript string; when that
        // string is sent to S3 it gets re-encoded as UTF-8, inflating the
        // payload (78kB -> 111kB here) and corrupting the archive.
        // "nodebuffer" produces raw bytes that S3 stores verbatim.
        type: "nodebuffer",
        compression: "DEFLATE",
        mimeType: "application/zip"
      }).then(function (content) {
        var s3 = new aws.S3();
        var S3_BUCKET = 'mybucket'
        s3.putObject({
          ACL: 'private',
          Bucket: S3_BUCKET,
          Key: "demo.zip",
          // serverSideEncryption: 'AES256',
          Body: content,
          ContentType: "application/zip"
        }, function (error, response) {
          if (error) {
            console.log("error - " + error)
            res.status(200).end("File upload failed");
          } else {
            console.log("success")
            res.status(200).end("File is uploaded successfully");
          }
        });
      })
    }
  });
});
sorry, but I can't comment, so:
What happens if you create the zip file and then open it before moving it to S3? Perhaps it's the use of zip.generateAsync({type: "binarystring",.... that is causing the corruption.
Well, in my case it gets corrupted after copying into S3.
I've mounted the S3 bucket in an EC2 instance and copying the zip into that particular path. Everything that is copied into that directory gets uploaded into S3.
When I try to unzip after copying it throws "Bad Zip, incorrect headers". I couldn't unzip that. The same zip before uploading works exactly as expected.

Categories

Resources