Hi, I'm trying to upload images to Cloudinary using multer in my Node.js app. It works perfectly on localhost, but when I deploy it to Zeit it fails with "ENOENT: no such file or directory, open (folder)".
I tried using @now/static to make the folder available, but that doesn't work either. I'd be happy if you can help me.
const express = require("express")
const multer = require("multer")
const cloudinary = require("cloudinary").v2
const cors = require("cors")
const config = require("../../config")
const response = require("../../network/response")
const Controller = require("./index")
const auth = require("./secure")
const router = express.Router()
cloudinary.config({
  cloud_name: config.cloudinary.name,
  api_key: config.cloudinary.api_key,
  api_secret: config.cloudinary.api_secret,
})
const storage = multer.diskStorage({
  destination: function (req, file, cb) {
    cb(null, "./public/uploads")
  },
  filename: function (req, file, cb) {
    cb(null, file.originalname)
  },
})
const upload = multer({
  storage: storage,
  limits: { fileSize: 1024 * 1024 * 5 },
})
// Set routes
router.use(cors())
router.get("/", auth("list"), list)
router.post("/", auth("add"), upload.single("file"), upsert)
// Router Functions
function list(req, res, next) {
  Controller.list()
    .then((post) => {
      response.success(req, res, post, 200)
    })
    .catch(next)
}
function upsert(req, res, next) {
  const path = req.file.path
  const uniqueFilename = new Date().toISOString()
  cloudinary.uploader.upload(
    path,
    { public_id: `public/${uniqueFilename}` }, // directory and tags are optional
    function (err, image) {
      if (err) return res.send(err)
      console.log("file uploaded to Cloudinary")
      // remove file from server
      const fs = require("fs")
      fs.unlinkSync(path)
    }
  )
  Controller.upsert(req.body, uniqueFilename)
    .then((post) => {
      response.success(req, res, post, 201)
    })
    .catch(next)
}
module.exports = router
When working with Zeit/Vercel, your API code runs in a Lambda.
That Lambda is meant to be disposable when it finishes executing, so the filesystem your API writes to is destroyed as soon as your upload request completes.
(It works locally because no one destroys your computer at the end :D.)
To get this scenario working, you'll need to write the uploaded file somewhere persistent, such as AWS S3.
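If you'd rather keep Cloudinary, a minimal sketch (reusing the cloudinary config from the question) is to hold the file in memory with multer's memoryStorage and stream the buffer straight to Cloudinary, so nothing is ever written to the Lambda's filesystem:
const multer = require("multer")
const cloudinary = require("cloudinary").v2
const upload = multer({ storage: multer.memoryStorage() }) // file stays in req.file.buffer
router.post("/", upload.single("file"), (req, res, next) => {
  const uploadStream = cloudinary.uploader.upload_stream(
    { public_id: `public/${new Date().toISOString()}` },
    (err, image) => {
      if (err) return next(err)
      res.status(201).json({ url: image.secure_url })
    }
  )
  uploadStream.end(req.file.buffer) // send the in-memory buffer to Cloudinary
})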
I'm using Vue with Node.js, Vuetify, and Express. I load the user's file with Vuetify's component:
<v-file-input
  v-model="documentFile.value"
  :error-messages="documentFile.errors"
  accept="application/pdf"
/>
Then I want to pass the file (which is stored in this.documentFile.value) to my backend, so it can upload it to Drive. I pass the data using vue-resource:
var params = {
  "data": this.data.value,
  "score": this.score.value,
  //"document_file": this.documentFile.value,
  "comments": this.comments.value
};
Vue.http.put('http://localhost:8081/api/new-document', {params: params}).then(
  response => {
    console.log("Sent data");
  }, response => {
    console.error(response);
  }
);
In my NodeJS backend I have:
router.put('/new-document', function(request, response) {
  console.log("New Document");
  console.log(request.query);
  // Upload file to drive
  const oauth2Client = new google.auth.OAuth2(
    CLIENT_ID,
    CLIENT_SECRET,
    REDIRECT_URI
  );
  response.status(200).send({});
});
How can I pass the file from the client to the backend?
EDIT: If I uncomment document_file and try to print request.query, it prints:
{
  data: { age: '27', name: 'robert' },
  comments: 'comment',
  score: 89
}
For some reason, it ignores the document_file.
The code in my server.js:
const cors = require("cors");
const express = require("express");
const bodyParser = require("body-parser");
const routes = require('./routes');
const path = __dirname + '/../public/';
console.log("STARTED");
const app = express();
app.use(express.static(path));
var corsOptions = {
  origin: "http://localhost:8080"
};
app.use(cors());
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: true }));
// map requests to routes
app.use('/api', routes);
// set port, listen for requests
const PORT = process.env.PORT || 8081;
app.listen(PORT, () => {
  console.log(`Server is running on port ${PORT}.`);
});
To pass the file from the client to the backend, use the new FormData() object.
In your case you can do something like:
var params = new FormData();
params.append('data', this.data.value);
params.append('score', this.score.value);
params.append('document_file', this.documentFile.value)
params.append('comments', this.comments.value);
Then send the object using either axios or vue-resource, whichever you prefer.
axios.put('url', params)
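For example, with vue-resource the call from the question stays almost the same; the browser sets the multipart/form-data Content-Type (including the boundary) automatically when the body is a FormData:
Vue.http.put('http://localhost:8081/api/new-document', params).then(
  response => {
    console.log("Sent data");
  }, response => {
    console.error(response);
  }
);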
EDIT
You can use multer to upload your files either locally or to the cloud. In your case, you can upload to Google Cloud Storage.
const multer = require('multer')
// You can start off by testing local uploads
const upload = multer({ dest: 'uploads/' })
// then use this to upload to Google Storage
const multerGoogleStorage = require('multer-google-storage')
const uploadHandler = multer({
  storage: multerGoogleStorage.storageEngine({
    autoRetry: true,
    bucket: '<your_storage_bucket_name>',
    projectId: '<your_project_ID>',
    keyFilename: '<your_path_to_key_file>',
    filename: (req, file, cb) => {
      cb(null, `/<some_prefix_of_choice>/${Date.now()}_${file.originalname}`)
    }
  })
})
Upload the file either locally or to Google Cloud Storage:
// local uploads (destination projectRoot/uploads)
router.put('/new-document', upload.single('document_file'), async (request, response) => {});
// or GoogleStorage
router.put('/new-document', uploadHandler.single('document_file'), async (request, response) => {});
Multiple files can also be uploaded:
app.put('/new-document', upload.array('document_files', 12), function (req, res, next) {
  // req.files is array of `document_files` files
  // req.body will contain the text fields, if there were any
})
The document file(s) are then accessible on the Express server using
request.file
from which you can then upload the file onward. The other form fields are also accessible through request.body, e.g.
request.body.data
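A hypothetical handler putting that together (field names taken from the question above):
router.put('/new-document', upload.single('document_file'), (request, response) => {
  console.log(request.file.path);     // where multer stored the uploaded PDF
  console.log(request.body.comments); // the remaining FormData fields arrive in body
  response.status(200).send({});
});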
I am using the Multer Node.js package to upload files to my app server; the code is basically the typical upload-file code:
const express = require('express')
const multer = require('multer')
const upload = multer({ dest: 'uploads/' })
const app = express()
app.post('/profile', upload.single('avatar'), function (req, res, next) {
  // req.file is the `avatar` file
  // req.body will hold the text fields, if there were any
})
app.post('/photos/upload', upload.array('photos', 12), function (req, res, next) {
  // req.files is array of `photos` files
  // req.body will contain the text fields, if there were any
})
But each time a file is being uploaded, the Node server becomes unresponsive, and the frontend doesn't receive any response from other APIs until the file is uploaded.
What's the best way to tackle this?
In your sample code, you just need to send a response to the client, e.g. via res.json() or res.end():
const express = require('express');
const multer = require('multer');
const upload = multer({ dest: 'uploads/' });
const app = express();
app.post('/profile', upload.single('avatar'), function (req, res, next) {
  // req.file is the `avatar` file
  // req.body will hold the text fields, if there were any
  res.status(204).end();
});
app.post('/photos/upload', upload.array('photos', 12), function (req, res, next) {
  // req.files is array of `photos` files
  // req.body will contain the text fields, if there were any
  res.status(204).end();
});
I can give you an example of how I implemented an image upload in my app. It's the code to upload a profile image for a user. I am also using the multer middleware, so it should be similar for you.
The code is as follows:
// multer middleware:
const multer = require('multer');
const MIME_TYPE_MAP = {
  'image/png': 'png',
  'image/jpeg': 'jpg',
  'image/jpg': 'jpg',
};
module.exports = storage = multer.diskStorage({
  destination: (req, file, cb) => {
    const isValid = MIME_TYPE_MAP[file.mimetype];
    let error = new Error('invalid mime type');
    if (isValid) {
      error = null;
    }
    cb(error, 'images');
  },
  filename: (req, file, cb) => {
    const name = file.originalname.toLowerCase().split(' ').join('-');
    const ext = MIME_TYPE_MAP[file.mimetype];
    if (name.includes('.' + ext)) {
      cb(null, name)
    } else {
      cb(null, name + '.' + ext);
    }
  },
});
And here is the code in the service handling the file upload:
// profile service in backend written in express
exports.uploadImage = (req, res, next) => {
  const url = req.protocol + '://' + req.get('host');
  profileRepository
    .findOne({ _id: req.params.id })
    .then((response) => {
      const fetchedUser = response;
      fetchedUser.imagePath = url + '/images/' + req.file.filename;
      profileRepository
        .updateOne({ _id: req.params.id }, fetchedUser)
        .then((response) => {
          return res.status(200).json({
            message: 'profileimage updated',
          });
        })
        .catch((error) => {
          return res.status(500).json({
            message: 'uploading image failed',
          });
        });
    })
    .catch((error) => {
      return res.status(404).json({
        message: 'fetching user failed',
      });
    });
};
Then I use the middleware in my profile routes file like this:
// profile.routes.js
const express = require('express');
const ProfileController = require('./profileController');
const checkAuth = require('../middleware/checkAuth');
const router = express.Router();
const fileStorage = require('../middleware/fileStorage');
const multer = require('multer');
// imageUpload
router.post('/user/image/:id', checkAuth, multer({storage: fileStorage}).single('image'), ProfileController.image);
My controller then calls the service function with the actual business logic like this:
// profile.controller.js
const profileService = require('./profileService');
exports.image = (req, res, next) => {
  return profileService.uploadImage(req, res);
};
And finally my route is used by my app.js file like this:
// app.js
const express = require('express');
const path = require('path'); // needed for path.join below
const profileRoutes = require('./profile/profileRoutes');
const app = express();
// set images path for saving images on server
app.use('/images', express.static(path.join('images')));
app.use('/api', profileRoutes);
module.exports = app;
I hope I was able to point you in the right direction with my example.
I tried all possible scenarios, with multiple variations and approaches suggested by the documentation and by other Stack Overflow questions.
None of them worked -> I keep getting req.file is: undefined
Form:
<form action="/send" enctype="multipart/form-data" method="POST">
  <input type="file" id="file" name="file">
  <button type="submit">Submit</button>
</form>
Express Setup:
const express = require('express');
const ejs = require('ejs');
const homeController = require('./controlers/homeController');
const bodyParser = require('body-parser');
const app = express();
app.use(bodyParser.urlencoded({ extended: true }));
app.set('view engine', 'ejs');
app.use(express.static('public'));
app.listen(3000);
/* routes */
app.use(homeController);
I have the following code:
var multer = require('multer')
var path = require('path') // needed for path.extname below
const storage = multer.diskStorage({
  destination: './public/data/uploads',
  filename: function(req, file, cb) {
    cb(null, file.fieldname + '-' + Date.now() + path.extname(file.originalname));
  }
});
var upload = multer({
  storage: storage
}).single('file');
router.get('/', (req, res) => {
  res.render('home')
})
router.post('/send', (req, res) => {
  upload(req, res, (err) => {
    if (err) console.log(err);
    console.log('req.file is: ' + req.file);
  });
})
I spent two days trying to figure this out but I can't see the light at the end of the tunnel, and I just want to get the file from the client and send it with nodemailer as an attachment later on.
Answer obtained based on eMAD's suggestion on the question How to perform an HTTP file upload using express on Cloud Functions for Firebase (multer, busboy).
Solution below:
The file is saved to the storage location you define in multer.
req.files is an array with the details of the file/files you upload.
PS: thanks @eol for pointing me in this direction
const contactControler = require('../controlers/contactController');
const busboy = require('busboy')
const express = require('express');
const router = express.Router();
var multer = require('multer');
const SIZE_LIMIT = 10 * 1024 * 1024 // 10MB
var stor = multer.diskStorage({
  destination: '../public/cv',
  filename: function(req, file, callback) {
    callback(null, file.originalname);
  }
});
const multipartFormDataParser = multer({
  storage: stor,
  // increase size limit if needed
  limits: { fieldSize: SIZE_LIMIT },
  // support firebase cloud functions
  // the multipart form-data request object is pre-processed by the cloud functions
  // currently the `multer` library doesn't natively support this behaviour
  // as such, a custom fork is maintained to enable this by adding `startProcessing`
  // https://github.com/emadalam/multer
  startProcessing(req, busboy) {
    req.rawBody ? busboy.end(req.rawBody) : req.pipe(busboy)
  },
})
router.post('/send', multipartFormDataParser.any(), contactControler.send);
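Inside contactControler.send the uploaded file can then be read from that array; a sketch (the logging and response here are assumptions, not the original controller):
exports.send = (req, res) => {
  const file = req.files && req.files[0]; // .any() collects every uploaded file here
  console.log(file.originalname, file.path);
  res.status(200).send('received');
};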
Please try adding upload to your route as middleware:
router.post('/send', upload, (req, res) => { ...
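That is, something along these lines (a sketch of the full route; upload is already bound to .single('file') above):
router.post('/send', upload, (req, res) => {
  // multer has parsed the multipart body before this handler runs
  console.log('req.file is: ' + req.file);
  res.render('home');
});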
I'm trying to use S3, specifically multer-s3, for image upload for a traditional web app that currently has multer/filesystem file upload (the GitHub repo with the previous code before this failed S3 upload attempt can be found here). The app is deployed to Heroku, which has ephemeral file storage, so the old setup is a no-go.
I tried to use multer-s3 based on this tutorial https://www.youtube.com/watch?v=ASuU4km3VHE&t=1364s, but about 20 minutes in, when sending the POST request to the new image-upload route, I get a 500 error, whereas in the tutorial an AWS image path is returned in the response.
Here's what I tried so far:
I created a bucket and got my access code and keys. In my S3 bucket settings, under Permissions -> Block public access, I set everything to off. I also added the CORS config code suggested by Heroku here (though I get the same 500 error with or without it).
In the util folder, I added a file named file-upload.js with this code (I'm using a nodemon.json file for the config keys):
const aws = require('aws-sdk');
const multer = require('multer');
const multerS3 = require('multer-s3');
const { uuid } = require('uuidv4');
aws.config.update({
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  region: 'us-west-2',
});
const s3 = new aws.S3();
const upload = multer({
  storage: multerS3({
    s3,
    bucket: 'nodejs-shop',
    acl: 'public-read',
    // Called when saving image to AWS
    metadata(req, file, cb) {
      cb(null, { fieldName: file.fieldname });
    },
    // Called before saving image to AWS
    key(req, file, cb) {
      cb(null, uuid());
    },
  }),
});
module.exports = upload;
In the routes folder, I added a file named file-upload.js, with this code:
const express = require('express');
const router = express.Router();
const upload = require('../util/file-upload');
// Will send image under this key 'image' in request to server
const singleUpload = upload.single('image');
router.post('/image-upload', (req, res, next) => {
  // Callback function called after image is uploaded or will get error from server
  singleUpload(req, res, (err) => {
    return res.json({ imageUrl: req.file.location });
  });
});
module.exports = router;
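Note that the singleUpload callback ignores its err argument; logging it would at least surface why the 500 happens, e.g. (a sketch, not necessarily the final fix):
singleUpload(req, res, (err) => {
  if (err) {
    console.error(err); // the underlying multer/AWS error behind the 500
    return res.status(422).json({ error: err.message });
  }
  return res.json({ imageUrl: req.file.location });
});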
In app.js, I imported the routes file const fileRoutes = require('./routes/file-upload'); and added the middleware after the authRoutes middleware app.use(fileRoutes);. I also commented out all the previously used multer code in app.js.
Current app.js code:
const path = require('path');
const fs = require('fs');
// const https = require('https');
const express = require('express');
const bodyParser = require('body-parser');
const mongoose = require('mongoose');
const session = require('express-session');
const MongoDBStore = require('connect-mongodb-session')(session);
const csrf = require('csurf');
const flash = require('connect-flash');
// const multer = require('multer');
// const { uuid } = require('uuidv4');
const helmet = require('helmet');
const compression = require('compression');
const morgan = require('morgan');
const errorController = require('./controllers/error');
const User = require('./models/user');
const MONGODB_URI =
  // process object is globally available in Node app; part of Node core runtime. The env property contains all environment variables known by process object. Using nodemon.json to store environment variables, but could alternatively use dotenv package for this (see https://www.youtube.com/watch?v=17UVejOw3zA)
  `mongodb+srv://${process.env.MONGO_USER}:${process.env.MONGO_PASSWORD}@cluster0-4yuid.mongodb.net/${process.env.MONGO_DEFAULT_DATABASE}`;
const app = express();
const store = new MongoDBStore({
  uri: MONGODB_URI,
  collection: 'sessions',
});
// Secret used for signing/hashing token is stored in session by default
const csrfProtection = csrf();
// Don't want to start server until file is read in, thus using synchronous version
// const privateKey = fs.readFileSync('server.key');
// const certificate = fs.readFileSync('server.cert');
// Commenting out original file upload method since changed to use AWS S3 for image upload/hosting
// const fileStorage = multer.diskStorage({
//   destination: (req, file, cb) => {
//     // First arg is for error message to throw to inform multer something is wrong with incoming file and it should not store it; with null, telling multer okay to store it
//     cb(null, 'images');
//   },
//   filename: (req, file, cb) => {
//     cb(null, uuid());
//   },
// });
// const fileFilter = (req, file, cb) => {
//   file.mimetype === 'image/png' ||
//   file.mimetype === 'image/jpg' ||
//   file.mimetype === 'image/jpeg'
//     ? cb(null, true)
//     : cb(null, false);
// };
app.set('view engine', 'ejs');
// Setting this explicity even though the views folder in main directory is where the view engine looks for views by default
app.set('views', 'views');
const adminRoutes = require('./routes/admin');
const shopRoutes = require('./routes/shop');
const authRoutes = require('./routes/auth');
const fileRoutes = require('./routes/file-upload');
// Create write stream (for passing to morgan, used to log request data), for logging request data in file instead of console
// flags: 'a': a is for append; new data will be appended to that file (additional log statements are added to end of existing file rather than overwriting it)
const accessLogStream = fs.createWriteStream(
  path.join(__dirname, 'access.log'),
  { flags: 'a' }
);
// Set secure response header(s) with Helmet
// In my app, in developer tools (in the network tab) I can see it added one additional response header for localhost, Strict-Transport-Security. This HTTP header tells browsers to stick with HTTPS and never visit the insecure HTTP version. Once a browser sees this header, it will only visit the site over HTTPS for the next 60 days
app.use(helmet());
// Compress assets. Note: Compression is normally done by hosting providers, but deploying to Heroku which does offer it
app.use(compression());
// Log request data using writable file stream created above. Which data is logged and how to format it is passed into funtion
// Also normally handled by hosting providers
// app.use(morgan('combined', { stream: accessLogStream }));
app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json());
// Commented out since changed to use AWS S3 for image upload/hosting
// app.use(multer({ storage: fileStorage, fileFilter }).single('image'));
app.use(express.static(path.join(__dirname, 'public')));
app.use('/images', express.static(path.join(__dirname, 'images')));
app.use(
  session({
    secret: 'my secret',
    resave: false,
    saveUninitialized: false,
    store,
  })
);
app.use(csrfProtection);
app.use(flash());
app.use((req, res, next) => {
  // Locals field: Express feature for setting local variables that are passed into views. For every request that is executed, these fields are set for view that is rendered
  res.locals.isAuthenticated = req.session.isLoggedIn;
  res.locals.csrfToken = req.csrfToken();
  next();
});
app.use((req, res, next) => {
  // When you throw an error in synchronous places (outside of callbacks and promises), Express will detect this and execute next error handling middleware. But if error is thrown within async code (in then or catch block), Express error handling middleware won't be executed; app will simply crash; have to use next()
  // throw new Error('sync dummy');
  if (!req.session.user) {
    return next();
  }
  User.findById(req.session.user._id)
    .then((user) => {
      if (!user) {
        return next();
      }
      req.user = user;
      next();
    })
    // catch block will be executed in the case of technical issue (e.g., database down, or insufficient permissions to execute findById())
    .catch((err) => {
      // Within async code snippets, need to use next wrapping error, outside you can throw error
      next(new Error(err));
    });
});
app.use('/admin', adminRoutes);
app.use(shopRoutes);
app.use(authRoutes);
app.use(fileRoutes);
app.get('/500', errorController.get500);
app.use(errorController.get404);
// Error-handling middleware. Express executes this middleware when you call next() with an error passed to it
app.use((error, req, res, next) => {
  // res.status(error.httpStatusCode).render(...);
  // res.redirect('/500');
  res.status(500).render('500', {
    pageTitle: 'Server Error',
    path: '/500',
    isAuthenticated: req.session.isLoggedIn,
  });
});
mongoose
  .connect(MONGODB_URI, { useUnifiedTopology: true, useNewUrlParser: true })
  .then((result) => {
    // First arg for createServer() configures server, second is request handler, in this case, Express application
    // Commenting out because just as with request logging and asset compression, it's handled by hosting provider, and browsers don't accept custom/self-signed certificate; will be displayed as insecure with a message that connection is not private
    // https
    //   .createServer({ key: privateKey, cert: certificate }, app)
    //   .listen(process.env.PORT || 3000);
    app.listen(process.env.PORT || 3000);
  })
  .catch((err) => {
    console.log(err);
  });
This is my Postman request, similar to the one in the tutorial video, and it just returns a 500 error.
EDIT: Figured out the issue, solution below.
I'm using multer and multer-google-storage to try to upload image files to my Google Cloud bucket, but for some reason the files aren't being saved into the bucket, and I can't log any errors; I'm not sure what I'm doing wrong here. (I have tried to follow several different tutorials, read documentation, checked other SO questions, etc. Still no solution.)
const express = require('express');
const router = express.Router();
const mongoose = require('mongoose');
const passport = require('passport');
const bodyParser = require('body-parser');
const jwt = require('jsonwebtoken');
const multer = require('multer');
const path = require('path');
const multerGoogleStorage = require('multer-google-storage');
const { Storage } = require('@google-cloud/storage');
const gc = new Storage({
  projectId: '{projectIdRedacted}',
  keyFilename: path.join(__dirname, '../{keyFileNameRedacted.json}')
});
gc.getBuckets().then(x => console.log(x));
// This is showing that I've successfully paired to the Google Cloud bucket.
const bucket = gc.bucket('{redactedBucketNameHere}');
const fileFilter = (req, file, cb) => {
  // Reject a file
  if (file.mimetype === 'image/jpeg' || file.mimetype === 'image/png') {
    cb(null, true);
  } else {
    cb(null, false);
  }
};
var uploadHandler = multer({
  storage: multer.memoryStorage(),
  limits: {
    fileSize: 1024 * 1024 * 1
  },
  fileFilter: fileFilter
});
// Testing GCP Bucket Image Upload
// @route POST image-upload
// @desc Add image
// @access Private
router.post('/image-upload', uploadHandler.single('UploadBox'), passport.authenticate('jwt', {
  session: false
}), (req, res, next) => {
  // This is showing the req.file is being passed through
  console.log(req.file);
  const blob = bucket.file(req.file.originalname);
  const blobStream = blob.createWriteStream({
    metadata: {
      contentType: req.file.mimetype
    },
    resumable: false
  });
  // The err is not getting console logged even though it is not saving to the google cloud bucket properly?
  blobStream.on('error', err => {
    next(err);
    console.log(err);
    return;
  })
  // The publicUrl is not getting console.logged - presumably cause something is breaking before this and it won't save it
  blobStream.on('finish', () => {
    // the public url can be used to directly access the file via HTTP
    const publicUrl = `https://storage.googleapis.com/${bucket.name}/${blob.name}`;
    console.log(publicUrl);
    // Make the image public to the web (since we'll be displaying it in the browser)
    blob.makePublic().then(() => {
      res.status(200).send(`Success!\n Image uploaded to ${publicUrl}`);
    })
  })
});
The documentation for @google-cloud/storage is: https://www.npmjs.com/package/@google-cloud/storage
The documentation for multer-google-storage is: https://www.npmjs.com/package/multer-google-storage
The documentation for Google's guide on using their cloud storage is: https://cloud.google.com/appengine/docs/flexible/nodejs/using-cloud-storage
Any tips and help would be greatly appreciated.
EDIT: I figured out the solution. I had to move the uploadHandler and the fileFilter ABOVE the const { Storage } import, and then inside the route I had to add blobStream.end(); after the blobStream.on('finish') handler. After doing so it resolved the issue. I've edited the working code below.
const express = require('express');
const router = express.Router();
const mongoose = require('mongoose');
const passport = require('passport');
const bodyParser = require('body-parser');
const jwt = require('jsonwebtoken');
const multer = require('multer');
const path = require('path');
const multerGoogleStorage = require('multer-google-storage');
const fileFilter = (req, file, cb) => {
  // Reject a file
  if (file.mimetype === 'image/jpeg' || file.mimetype === 'image/png') {
    cb(null, true);
  } else {
    cb(null, false);
  }
};
var uploadHandler = multer({
  storage: multer.memoryStorage(),
  limits: {
    fileSize: 1024 * 1024 * 1
  },
  fileFilter: fileFilter
});
const { Storage } = require('@google-cloud/storage');
const gc = new Storage({
  projectId: '{projectIdRedacted}',
  keyFilename: path.join(__dirname, '../{keyFileNameRedacted.json}')
});
gc.getBuckets().then(x => console.log(x));
const bucket = gc.bucket('{bucketNameRedacted}');
// Testing GCP Bucket Image Upload
// @route POST image-upload
// @desc Add image
// @access Private
router.post('/image-upload', uploadHandler.single('UploadBox'), passport.authenticate('jwt', {
  session: false
}), (req, res, next) => {
  console.log(req.file);
  const blob = bucket.file(req.file.originalname);
  const blobStream = blob.createWriteStream({
    metadata: {
      contentType: req.file.mimetype
    },
    resumable: false
  });
  blobStream.on('error', err => {
    next(err);
    console.log(err);
    return;
  })
  blobStream.on('finish', () => {
    // the public url can be used to directly access the file via HTTP
    const publicUrl = `https://storage.googleapis.com/${bucket.name}/${blob.name}`;
    console.log(publicUrl);
    // Make the image public to the web (since we'll be displaying it in the browser)
    blob.makePublic().then(() => {
      res.status(200).send(`Success!\n Image uploaded to ${publicUrl}`);
    })
  })
  blobStream.end();
});
You don't really need the multer-google-storage package, by the way.
Kindly comment with the response error message you get when you hit the route meant for this upload.
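For example, with multer.memoryStorage() already in place, the official @google-cloud/storage client can write the buffer directly; a sketch (the bucket name, key file path, and helper name are placeholders):
const { Storage } = require('@google-cloud/storage');
const gc = new Storage({ keyFilename: 'path/to/key.json' });
const bucket = gc.bucket('your-bucket-name');
async function saveToBucket(file) {
  // file is req.file from multer.memoryStorage()
  await bucket.file(file.originalname).save(file.buffer, {
    metadata: { contentType: file.mimetype },
    resumable: false,
  });
  return `https://storage.googleapis.com/${bucket.name}/${file.originalname}`;
}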