Creating multiple buckets with GridFS - javascript

I am working with the GridFS library in Express and Node, and I'm trying to create multiple buckets. For example, I already have a bucket titled avatars, which stores images.
/* Start of mongo connection for uploading files */
const mongoURI = "mongodb://localhost:27017/PTAdata";
const conn = mongoose.createConnection(mongoURI);

let gfs;
conn.once('open', () => {
  gfs = stream(conn.db, mongoose.mongo);
  gfs.collection('avatars');
})

const storage = new GridFs({
  url: "mongodb://localhost:27017/PTAdata",
  file: (req, file) => {
    return new Promise((resolve, reject) => {
      crypto.randomBytes(16, (err, buf) => {
        if (err) {
          return reject(err);
        }
        file.user = req.body.username
        const name = file.originalname
        const filename = buf.toString('hex') + path.extname(file.originalname);
        const fileInfo = {
          filename: file.user,
          bucketName: 'avatars'
        };
        resolve(fileInfo);
      });
    });
  }
});

const upload = multer({ storage });
I now want to create another bucket called audio which will store mp3 files. I checked the documentation for GridFS at https://docs.mongodb.com/manual/core/gridfs/ and it states that "you can choose a different bucket name, as well as create multiple buckets in a single database." However, it does not explain how. Has anyone done any work with the GridFS library and knows how to create multiple buckets?

You need to store another new GridFs object in a different variable, then pass it to multer as a different storage property. In your case, this should work:
const storage = new GridFs({
  url: "mongodb://localhost:27017/PTAdata",
  file: (req, file) => {
    return new Promise((resolve, reject) => {
      crypto.randomBytes(16, (err, buf) => {
        if (err) {
          return reject(err);
        }
        file.user = req.body.username
        const name = file.originalname
        const filename = buf.toString('hex') + path.extname(file.originalname);
        const fileInfo = {
          filename: file.user,
          bucketName: 'avatars'
        };
        resolve(fileInfo);
      });
    });
  }
});

const anotherStorage = new GridFs({
  url: "mongodb://localhost:27017/PTAdata",
  file: (req, file) => {
    return new Promise((resolve, reject) => {
      crypto.randomBytes(16, (err, buf) => {
        if (err) {
          return reject(err);
        }
        file.user = req.body.username
        const name = file.originalname
        const filename = buf.toString('hex') + path.extname(file.originalname);
        const fileInfo = {
          filename: file.user,
          bucketName: 'mp3files'
        };
        resolve(fileInfo);
      });
    });
  }
});

const upload = multer({ storage });
const uploadSongs = multer({ storage: anotherStorage });
Finally, you should choose between those buckets according to your endpoints, for example:
app.post('/api/uploadAvatar', upload.any(), (req, res) => {
  // ... do stuff
});

app.post('/api/uploadMp3', uploadSongs.any(), (req, res) => {
  // ... do stuff
});
For me, it made sense to change gfs.collection() in each case (inside the file: (req, file) function), but it worked without changing it as well. Be aware that any() is just an option, and it's not the safest one, but it's great for testing your code.
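If you later read the files back with the native MongoDB driver rather than gridfs-stream, each bucket can also be addressed directly through the driver's GridFSBucket class, which takes the bucket name as an option. A rough sketch, assuming the PTAdata database and the bucket names used above:

const { MongoClient, GridFSBucket } = require('mongodb');

async function openBuckets() {
  const client = await MongoClient.connect('mongodb://localhost:27017');
  const db = client.db('PTAdata');
  // each bucket name gets its own <name>.files / <name>.chunks collection pair
  const avatars = new GridFSBucket(db, { bucketName: 'avatars' });
  const audio = new GridFSBucket(db, { bucketName: 'mp3files' });
  return { avatars, audio };
}

// e.g. stream an mp3 back out of its bucket by filename:
// audio.openDownloadStreamByName('some-song.mp3').pipe(res);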

Related

What is the correct way to make multer work with Node and Express here?

I am trying to create a route through which I can upload photos. However, as I made some changes it stopped working, and I am not sure how to fix it.
const multer = require('multer');

// MULTER STORAGE
const multerStorage = multer.diskStorage({
  destination: (req, file, cb) => {
    cb(null, '/upload');
  },
  filename: (req, file, cb) => {
    const ext = file.mimetype.split('/')[1];
    // Saving format: user-UserId-DateStamp.ext
    // e.g. user-608d55c7e512b74ee00791de-1621992912638.jpeg
    cb(null, `user-${req.body.userId}-${Date.now()}.${ext}`);
  },
});

// MULTER FILTER
const multerFilter = (req, file, cb) => {
  // mimetype always starts with image/ then png or jpeg or ...
  if (file.mimetype.startsWith('image')) {
    cb(null, true);
  } else {
    cb(new AppError('You are only allowed to upload image files.', 400), false);
  }
};

const uploadDirectory = multer({
  storage: multerStorage,
  fileFilter: multerFilter,
});

//exports.uploadPhoto = uploadDirectory.single('photo');
//app.use(express.static('./uploads'));

// INCLUDE ERROR CLASS AND ERROR CONTROLLER
const AppError = require('../utils/appError.js');
const errorController = require('./errorController.js');
const { Mongoose } = require('mongoose');
The main problem, I'm guessing, is in this block:
//UPLOAD PHOTO
exports.uploadPhoto = uploadDirectory(async (req, res) => {
  console.log(req.body);
  console.log(req.file);
  try {
    const newPhoto = await photoModel.create(req.file);
    newPhoto.save().then((result) => {
      console.log('Saved');
      res.status(201).json({
        status: 'success',
        // data: JSON.parse(JSON.stringify(newPhoto.file)),
      });
    });
  } catch (err) {
    console.log('Error in upload');
    errorController.sendError(err, req, res);
  }
}).single('photo');
Can anybody let me know how to correctly write exports.uploadPhoto?
Originally, the last function looked like this:
exports.uploadPhoto = async (req, res) => {
  console.log(req.body);
  console.log(req.file);
  try {
    const newPhoto = await photoModel.create(req.file);
    newPhoto.save().then((result) => {
      console.log('Saved');
      res.status(201).json({
        status: 'success',
        // data: JSON.parse(JSON.stringify(newPhoto.file)),
      });
    });
  } catch (err) {
    console.log('Error in upload');
    errorController.sendError(err, req, res);
  }
};
The multer middleware function, in your case uploadDirectory, is usually used before other middleware functions/controllers where you have your business logic (e.g. uploadPhoto).
app.post('/upload', uploadDirectory.single('photo'), uploadPhoto);
Keep your original uploadPhoto function, and with the above code you'll have access to the form data and file through req.body and req.file, respectively.
This Request Parsing in Node.js Guide (it's free) will help you with file uploads in Node.js.
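If you also want multer's own failures (e.g. a file that is too large) and the AppError from the file filter to surface as clean responses rather than crashing the route, one option is an Express error-handling middleware registered after the routes. A rough sketch under those assumptions:

// multer.MulterError covers multer's internal failures (file too large, etc.)
app.use((err, req, res, next) => {
  if (err instanceof multer.MulterError) {
    return res.status(400).json({ status: 'fail', message: err.message });
  }
  next(err); // let AppError and anything else reach the global error handler
});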

How can I upload multiple files using vue.js and multer?

I am able to upload a single file using multer, but when it comes to multiple files it no longer works and no file is caught by multer.
I send the files through formData.append(), but only a single file gets uploaded.
Vue component
const formData = new FormData();
formData.append("productImg", this.imgFile);
this.$store.dispatch(POST_PRODUCT_IMAGE, formData)
  .then((response) => {
    console.log(response.data);
  })
  .catch(error => {
    console.log(error);
  })
Server file
const uploadPath = path.join(__dirname, '/../../public/uploads');

var storage = multer.diskStorage({
  destination: (req, file, callback) => {
    callback(null, uploadPath + "/garbage/productImg");
  },
  filename: (req, file, callback) => {
    var newName = Date.now() + "_" + file.originalname;
    callback(null, newName);
  }
});

const upload = multer({
  storage: storage
});

productRouter.post('/uploadProductImage', upload.any(), async (req, res) => { /* some code */ });
I also tried

productRouter.post('/uploadProductImage', upload.array('productImg[]', 6), async (req, res) => { /* some code */ });
I want to upload multiple files at a time to my specified folder.
Finally I found a solution, which is admittedly quite silly. In the Vue component file I just loop over the files before appending them to formData, like this:
Vue Component
const formData = new FormData();
// formData.append("productImg", this.imgFile); // OLD ONE
for (let index = 0; index < this.imgFile.length; index++) { // NEW ONE
  let file = this.imgFile[index];
  formData.append("productImg[" + index + "]", file);
}
this.$store.dispatch(POST_PRODUCT_IMAGE, formData)
  .then((response) => {
    console.log(response.data);
  })
  .catch(error => {
    console.log(error);
  })
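On the server side, indexed field names like productImg[0], productImg[1], ... are easiest to accept with upload.any(), which collects every incoming file into req.files regardless of field name. A rough sketch of such a route (the handler body is an assumption, not part of the original answer):

productRouter.post('/uploadProductImage', upload.any(), async (req, res) => {
  // each entry carries its field name and the name it was stored under on disk
  const saved = req.files.map(f => ({ field: f.fieldname, storedAs: f.filename }));
  res.json({ count: saved.length, files: saved });
});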

Google Cloud Functions - Upload to Google Cloud Storage via HTTP

I'm attempting to handle file uploads using a Google Cloud Function. This function uses Busboy to parse the multipart form data and then upload to Google Cloud Storage.
I keep receiving an ERROR: { Error: ENOENT: no such file or directory, open '/tmp/xxx.png' error when triggering the function.
The error seems to occur within the finish callback function when storage.bucket.upload(file) attempts to open the file path /tmp/xxx.png.
Example code
const path = require('path');
const os = require('os');
const fs = require('fs');
const Busboy = require('busboy');
const Storage = require('@google-cloud/storage');
const moment = require('moment');
const _ = require('lodash');

const projectId = 'xxx';
const bucketName = 'xxx';

const storage = new Storage({
  projectId: projectId,
});
exports.uploadFile = (req, res) => {
  if (req.method === 'POST') {
    const busboy = new Busboy({
      headers: req.headers
    });
    const uploads = [];
    const tmpdir = os.tmpdir();

    busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
      const filepath = path.join(tmpdir, filename);
      var obj = {
        path: filepath,
        name: filename
      };
      uploads.push(obj);
      var writeStream = fs.createWriteStream(obj.path);
      file.pipe(writeStream);
    });

    busboy.on('finish', () => {
      _.forEach(uploads, function (file) {
        storage
          .bucket(bucketName)
          .upload(file.path, {
            name: moment().format('/YYYY/MM/DD/x') + '-' + file.name
          })
          .then(() => {
            console.log(`${file.name} uploaded to ${bucketName}.`);
          })
          .catch(err => {
            console.error('ERROR:', err);
          });
        fs.unlinkSync(file.path);
      });
      res.end();
    });

    busboy.end(req.rawBody);
  } else {
    res.status(405).end();
  }
}
Solved this with a stream instead of a temporary file. It only handles a single file at the moment, though.
https://gist.github.com/PatrickHeneise/8f2c72c16c4e68e829e58ade64aba553#file-gcp-function-storage-file-stream-js
function asyncBusboy(req, res) {
  return new Promise((resolve, reject) => {
    const storage = new Storage()
    const bucket = storage.bucket(process.env.BUCKET)
    const fields = []
    const busboy = Busboy({
      headers: req.headers,
      limits: {
        fileSize: 10 * 1024 * 1024
      }
    })

    busboy.on('field', (key, value) => {
      fields[key] = value
    })

    busboy.on('file', (name, file, fileInfo) => {
      const { mimeType } = fileInfo
      const destFile = bucket.file(fileInfo.filename)
      const writeStream = destFile.createWriteStream({
        metadata: {
          contentType: mimeType,
          metadata: {
            originalFileName: fileInfo.filename
          }
        }
      })
      file.pipe(writeStream)
    })

    busboy.on('close', function () {
      return resolve({ fields })
    })

    if (req.rawBody) {
      busboy.end(req.rawBody)
    } else {
      req.pipe(busboy)
    }
  })
}
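For completeness, a rough sketch of how a helper like this might be wired into the Cloud Function entry point (this handler is an assumption, not part of the gist):

exports.uploadFile = async (req, res) => {
  if (req.method !== 'POST') {
    return res.status(405).end()
  }
  try {
    // asyncBusboy streams the files to the bucket and resolves with the form fields
    const { fields } = await asyncBusboy(req, res)
    res.status(200).json({ fields })
  } catch (err) {
    console.error('Upload failed:', err)
    res.status(500).end()
  }
}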

Encrypt the uploaded file before saving to disk using Node.js

Currently, I am using the multer library to save files to the file system. This application uses Node and Express.
I could save the file on the server first, encrypt it, and then delete the unencrypted file. However, I do not want to put an unencrypted file on the server at all, so I am looking for a way to encrypt the incoming file from the front end and then save it to disk.
const defaultFolder = 'upload';
const filePath = resolveHome(process.env.FILE_STORAGE_LOCATION || defaultFolder);
const key = 'test';
const cipher = crypto.createCipher('aes-256-cbc', key);
const decipher = crypto.createDecipher('aes-256-cbc', key);

const upload = multer({
  storage: multer.diskStorage({
    destination: filePath,
    filename: (req, file, cb) => {
      const parts = file.originalname.split('.');
      const ext = parts.pop();
      const name = parts.join('.');
      cb(null, name + '-' + Date.now() + '.' + ext);
    },
  }),
}).single('file');
app.post('/upload', (req, res) => {
  upload(req, res, err => {
    if (err) {
      return res.status(400).send({
        error: 'The request was invalid',
        fileName: req.file.originalname,
      });
    }
    return res.status(200).send({
      fileName: req.file.filename,
    });
  });
});
I tried to use the crypto library to encrypt the file, but it's not working. I believe piping req into the cipher is invalid, as I would normally use req.file to get a reference to the uploaded file.
app.post('/upload', (req, res) => {
  upload(req, res, err => {
    output = fs.createWriteStream(filePath + '/' + req.file.originalname);
    req.pipe(cipher).pipe(output).on('finish', () => console.log('Encrypted file written on disk'));
    if (err) {
      return res.status(400).send({
        error: 'The request was invalid',
        fileName: req.file.originalname,
      });
    }
    return res.status(200).send({
      fileName: req.file.originalname,
    });
  });
});
I had also tried to just write the file without using the cipher, and the file came out empty. Adding this information in case it helps.
req.pipe(output).on('finish', () => console.log('Encrypted file written on disk'));
Can you try this:
app.post('/upload', function(req, res) {
  upload(req, res, function(err) {
    // multer has already written the plaintext file; re-read it, encrypt, then remove it
    var fileName = path.join(req.file.destination, req.file.filename);
    var input = fs.createReadStream(fileName);
    var output = fs.createWriteStream(fileName + ".enc");
    input.pipe(cipher).pipe(output);
    output.on('finish', function() {
      fs.unlink(fileName, (err) => {
        if (err) throw err;
        console.log('Encrypted file written to disk!');
        res.end('Encrypted file written to disk!');
      });
    });
  });
});
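Note that crypto.createCipher is deprecated in current Node.js. A rough sketch of the same approach with crypto.createCipheriv, deriving the key with scrypt and prepending a random IV to the output file (the helper name and the salt are assumptions, not from the answer above):

const crypto = require('crypto');
const fs = require('fs');

function encryptFile(srcPath, password, cb) {
  // derive a 32-byte key; in production the salt should be random and stored somewhere
  const key = crypto.scryptSync(password, 'placeholder-salt', 32);
  const iv = crypto.randomBytes(16);
  const cipher = crypto.createCipheriv('aes-256-cbc', key, iv);
  const output = fs.createWriteStream(srcPath + '.enc');
  output.write(iv); // store the IV up front so decryption can read it back
  fs.createReadStream(srcPath)
    .pipe(cipher)
    .pipe(output)
    .on('finish', () => fs.unlink(srcPath, cb)); // drop the plaintext copy
}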

Get notified when asynchronous tasks are finished

I am using this code I found on the internet to upload multiple files to an Amazon S3 server.
const AWS = require("aws-sdk"); // from AWS SDK
const fs = require("fs"); // from node.js
const path = require("path"); // from node.js

// configuration
const config = {
  s3BucketName: 'your.s3.bucket.name',
  folderPath: '../dist' // path relative to the script's location
};

// initialize S3 client
const s3 = new AWS.S3({ signatureVersion: 'v4' });

// resolve full folder path
const distFolderPath = path.join(__dirname, config.folderPath);

// get list of files from 'dist' directory
fs.readdir(distFolderPath, (err, files) => {
  if (!files || files.length === 0) {
    console.log(`provided folder '${distFolderPath}' is empty or does not exist.`);
    console.log('Make sure your project was compiled!');
    return;
  }
  // for each file in the directory
  for (const fileName of files) {
    // get the full path of the file
    const filePath = path.join(distFolderPath, fileName);
    // ignore if directory
    if (fs.lstatSync(filePath).isDirectory()) {
      continue;
    }
    // read file contents
    fs.readFile(filePath, (error, fileContent) => {
      // if unable to read file contents, throw exception
      if (error) { throw error; }
      // upload file to S3
      s3.putObject({
        Bucket: config.s3BucketName,
        Key: fileName,
        Body: fileContent
      }, (res) => {
        console.log(`Successfully uploaded '${fileName}'!`);
      });
    });
  }
});
How can I get notified that all the uploads are done, so I can execute another process? The callback here fires each time a single file has been uploaded successfully.
How about incrementing a counter when a file uploads and then checking if all files have been uploaded:
...
var uploadCount = 0

// Read file contents
fs.readFile(filePath, (error, fileContent) => {
  // If unable to read file contents, throw exception
  if (error) { throw error }

  // Upload file to S3; note the callback signature is (err, data)
  s3.putObject({
    Bucket: config.s3BucketName,
    Key: fileName,
    Body: fileContent
  }, (err, data) => {
    if (err) { throw err }
    console.log(`Successfully uploaded '${fileName}'!`)

    // Increment counter
    uploadCount++

    // Check if all files have uploaded
    // 'files' provided in callback from 'fs.readdir()' further up in your code
    if (uploadCount >= files.length) {
      console.log('All files uploaded')
    }
  })
})
...
You could try using promises and Promise.all:
const AWS = require("aws-sdk"); // from AWS SDK
const fs = require("fs"); // from node.js
const path = require("path"); // from node.js

// configuration
const config = {
  s3BucketName: 'your.s3.bucket.name',
  folderPath: '../dist' // path relative to the script's location
};

// initialize S3 client
const s3 = new AWS.S3({ signatureVersion: 'v4' });

// resolve full folder path
const distFolderPath = path.join(__dirname, config.folderPath);

// get list of files from 'dist' directory
fs.readdir(distFolderPath, (err, pathURLS) => {
  if (!pathURLS || pathURLS.length === 0) {
    console.log(`provided folder '${distFolderPath}' is empty or does not exist.`);
    console.log('Make sure your project was compiled!');
    return;
  }
  let fileUploadPromises = pathURLS.reduce(uploadOnlyFiles, []);
  // fileUploadPromises.length should equal the number of files uploaded
  Promise.all(fileUploadPromises)
    .then(() => {
      console.log('All pass');
    })
    .catch((err) => {
      console.error('upload failed', err);
    });
});

function uploadFileToAWS(filePath) {
  return new Promise(function (resolve, reject) {
    try {
      fs.readFile(filePath, function (err, buffer) {
        if (err) return reject(err);
        // upload file to S3; note the callback signature is (err, data)
        s3.putObject({
          Bucket: config.s3BucketName,
          Key: filePath,
          Body: buffer
        }, (err, data) => {
          if (err) return reject(err);
          resolve(data);
        });
      });
    } catch (err) {
      reject(err);
    }
  });
}

function uploadOnlyFiles(fileUploadPromises, pathURL) {
  const fullPathURL = path.join(distFolderPath, pathURL);
  if (!fs.lstatSync(fullPathURL).isDirectory()) {
    fileUploadPromises.push(uploadFileToAWS(fullPathURL));
  }
  return fileUploadPromises;
}
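Since the v2 AWS SDK also exposes a .promise() method on its request objects, the same idea can be written more compactly. A rough sketch under that assumption, reusing the config and s3 client from above:

const files = fs.readdirSync(distFolderPath);

// build one upload promise per regular file in the folder
const uploads = files
  .map((name) => path.join(distFolderPath, name))
  .filter((fullPath) => !fs.lstatSync(fullPath).isDirectory())
  .map((fullPath) =>
    s3.putObject({
      Bucket: config.s3BucketName,
      Key: path.basename(fullPath),
      Body: fs.readFileSync(fullPath)
    }).promise()
  );

Promise.all(uploads)
  .then(() => console.log('All files uploaded'))
  .catch((err) => console.error('Upload failed:', err));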
