Upload images for an event using Cloudinary API - javascript

Below is my API endpoint for creating an event in my application. I'm also using the Cloudinary API to upload my images and store the returned URLs in my database.
app.post('/event', (req, res) => {
  try {
    if (req.body.images.length > 0) {
      // Creating new Event instance
      const event = new Event({
        title: req.body.title,
        images: [],
      });
      // Looping over every image coming in the request object from the frontend
      req.body.images.forEach((img) => {
        const base64Data = img.content.split(',')[1];
        // Writing the image to the uploads folder for the time being
        // (writeFileSync is synchronous and takes no callback; it throws on error)
        fs.writeFileSync(`./uploads/${img.filename}`, base64Data, 'base64');
        /* Now that the image is saved in the uploads folder, Cloudinary picks
        it up from there and stores it in their cloud space. */
        cloudinary.uploader.upload(`./uploads/${img.filename}`, async (result) => {
          // Cloudinary returns the id & URL of the image, which is pushed into the event.images array
          event.images.push({
            id: result.public_id,
            url: result.secure_url
          });
          // Once the image is pushed into the array, remove it from the server's uploads folder using unlinkSync
          fs.unlinkSync(`./uploads/${img.filename}`);
          // When all the images are uploaded, send back the response
          if (req.body.images.length === event.images.length) {
            await event.save();
            res.send({
              event,
              msg: 'Event created successfully'
            });
          }
        });
      });
    }
  } catch (e) {
    res.status(400).send(e);
  }
});
Now my question is: how can I make the above code shorter and more efficient?
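For what it's worth, here is a minimal sketch of one possible consolidation. It assumes the Cloudinary v2 Node SDK (cloudinary.v2.uploader.upload returns a promise when no callback is passed, and it accepts base64 data URIs directly), so the temporary files and the manual length check can go away:
app.post('/event', async (req, res) => {
  try {
    // Upload each data URI straight to Cloudinary, in parallel
    const results = await Promise.all(
      (req.body.images || []).map((img) =>
        cloudinary.v2.uploader.upload(img.content) // img.content is the full data URI, as in the original code
      )
    );
    const event = new Event({
      title: req.body.title,
      images: results.map((r) => ({ id: r.public_id, url: r.secure_url })),
    });
    await event.save();
    res.send({ event, msg: 'Event created successfully' });
  } catch (e) {
    res.status(400).send(e);
  }
});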

Related

How to pipe multiple Streams for uploading files/streams to Cloudinary or other storage provider in Nodejs & graphql-upload?

My apollo-server is using the graphql-upload package, which adds file upload support to GraphQL endpoints. However, the docs only cover uploading single files, and we need multiple file upload support. I receive the streams as an array, but whenever I call createReadStream for each stream and pipe them to the Cloudinary uploader variable, only the last created stream gets uploaded rather than each one.
Code
// graphql resolver
const post = async (_, { post }, { isAuthenticated, user }) => {
  if (!isAuthenticated) throw new AuthenticationError("User unauthorized");
  const files = await Promise.all(post.files);
  let file_urls = [];
  const _uploadableFiles = cloudinary.uploader.upload_stream({ folder: "post_files" },
    (err, result) => {
      console.log("err:", err);
      console.log("result:", result);
      if (err) throw err;
      file_urls.push({
        url: result.secure_url,
        public_id: result.public_id,
        file_type: result.metadata,
      });
      return result;
    }
  );
  files.forEach(async (file) => await file.createReadStream().pipe(_uploadableFiles));
  // ... other db related stuff
}
After that, I get the secure_url of the uploaded files, which is returned by the upload_stream callback, but it only gives me the properties of one stream, the last of them all. Please help me with this. Is there any way to pipe multiple streams?
Instead of making one const upload stream, make a factory function that returns a fresh upload stream on each call for piping.
Use Array.map so that you get an array you can pass to Promise.all.
One by one, each file gets uploaded to its own upload stream, appending the generated file URL info to file_urls (in the success callback); when all are done, Promise.all resolves and the code can resume with the other db-related stuff.
const post = async (_, { post }, { isAuthenticated, user }) => {
  if (!isAuthenticated) throw new AuthenticationError("User unauthorized");
  const files = await Promise.all(post.files);
  let file_urls = [];
  function createUploader() {
    return cloudinary.uploader.upload_stream({ folder: "post_files" },
      (err, result) => {
        console.log("err:", err);
        console.log("result:", result);
        if (err) throw err;
        file_urls.push({
          url: result.secure_url,
          public_id: result.public_id,
          file_type: result.metadata,
        });
        return result;
      }
    );
  }
  await Promise.all(files.map(async (file) => await file.createReadStream().pipe(createUploader()))); // map instead of forEach
  // .... other db related stuff
}

Upload file to google drive after http get request

I have two functions in separate files to split up the workflow.
const download = function(url) {
  const file = fs.createWriteStream("./test.png");
  const request = https.get(url, function(response) {
    response.pipe(file);
  });
}
This function in my fileHelper.js is supposed to take a URL with an image in it and then save it locally to test.png
function uploadFile(filePath) {
  fs.readFile('credentials.json', (err, content) => {
    if (err) return console.log('Error loading client secret file:', err);
    // Authorize a client with credentials, then call the Google Drive API.
    authorize(JSON.parse(content), function (auth) {
      const drive = google.drive({version: 'v3', auth});
      const fileMetadata = {
        'name': 'testphoto.png'
      };
      const media = {
        mimeType: 'image/png',
        body: fs.createReadStream(filePath)
      };
      drive.files.create({
        resource: fileMetadata,
        media: media,
        fields: 'id'
      }, (err, file) => {
        if (err) {
          // Handle error
          console.error(err);
        } else {
          console.log('File Id: ', file.id);
        }
      });
    });
  });
}
This function in my googleDriveHelper.js is supposed to take the file path and upload that stream to my Google Drive. The two functions work on their own, but https.get works asynchronously, so if I call googleDriveHelper.uploadFile(filePath) right after the download, the file hasn't finished downloading and a blank file gets uploaded to my Drive instead.
I want to find a way so that when fileHelper.download(url) is called, it automatically uploads into my Drive.
I also don't know if there is a way to create a read stream directly from the download function to the upload function, so I can avoid having to save the file locally before uploading it.
I believe your goal is as follows.
You want to upload a file retrieved from a URL to Google Drive.
When you download the file from the URL, you want to upload it to Google Drive without creating a file.
You want to achieve this using googleapis with Node.js.
You have already been able to upload a file using the Drive API.
For this, how about the following answer?
Modification points:
In the download function, the retrieved buffer is converted to a stream and the stream data is returned.
In the uploadFile function, the retrieved stream data is used for uploading.
When the file ID is retrieved from the response value of the Drive API, please use file.data.id instead of file.id.
With these modifications, the file downloaded from the URL can be uploaded to Google Drive without creating a local file.
Modified script:
Please modify your script as follows.
download()
const request = require("request"); // this sample uses the "request" package

const download = function (url) {
  return new Promise(function (resolve, reject) {
    request(
      {
        method: "GET",
        url: url,
        encoding: null,
      },
      (err, res, body) => {
        if (err && res.statusCode != 200) {
          reject(err);
          return;
        }
        const stream = require("stream");
        const bs = new stream.PassThrough();
        bs.end(body);
        resolve(bs);
      }
    );
  });
};
uploadFile()
function uploadFile(data) { // <--- Modified
  fs.readFile("drive_credentials.json", (err, content) => {
    if (err) return console.log("Error loading client secret file:", err);
    authorize(JSON.parse(content), function (auth) {
      const drive = google.drive({ version: "v3", auth });
      const fileMetadata = {
        name: "testphoto.png",
      };
      const media = {
        mimeType: "image/png",
        body: data, // <--- Modified
      };
      drive.files.create(
        {
          resource: fileMetadata,
          media: media,
          fields: "id",
        },
        (err, file) => {
          if (err) {
            console.error(err);
          } else {
            console.log("File Id: ", file.data.id); // <--- Modified
          }
        }
      );
    });
  });
}
For testing
For example, the above scripts can be tested with the following script.
async function run() {
  const url = "###";
  const data = await fileHelper.download(url);
  googleDriveHelper.uploadFile(data);
}
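If you would rather avoid the request package (it has since been deprecated), a similar sketch can use Node's built-in https module: drive.files.create accepts any readable stream as media.body, and the https response object is itself a readable stream (downloadAsStream is just a hypothetical name here):
const https = require("https");

function downloadAsStream(url) {
  return new Promise(function (resolve, reject) {
    https
      .get(url, (response) => {
        if (response.statusCode !== 200) {
          reject(new Error("Request failed with status " + response.statusCode));
          return;
        }
        resolve(response); // the IncomingMessage is a readable stream
      })
      .on("error", reject);
  });
}

// Usage:
// const data = await downloadAsStream(url);
// googleDriveHelper.uploadFile(data);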
References:
Class: stream.PassThrough
google-api-nodejs-client

Missing required parameter - public_id, and what to assign to imageId for Cloudinary

I'm building a website where users can upload insights and comments on reading novels, and users are free to attach an image of the novel or not.
If an image is chosen, the post schema has image & imageId attributes for the Cloudinary upload and future manipulation, such as changing (update route) or deleting (destroy route) it from the Cloudinary library.
If no image is chosen, a default image takes its place.
The problem is, I managed to make the default image work, but I don't want multiple copies of the same default image uploaded to the Cloudinary library, so I put default_image.jpg in a local server folder (/public/images, to be exact). This default_image.jpg shouldn't be in the Cloudinary library, which should save a lot of capacity.
However, for these posts without a chosen image, I don't know what I should assign to their imageId property.
I tried undefined and null, and of course they didn't work, because then it wouldn't be possible to find a certain novel.imageId if they're all undefined or null.
// Schema for data
var fictionSchema = new mongoose.Schema({
  ...
  ...
  image: String,
  // for cloudinary API to track which image to delete or update
  imageId: String,
  ...
  ...
});
// CREATE route
router.post("/", middleware.isLoggedIn, upload.single('image'), (req, res) => {
  if (req.file) {
    // req.file.path comes from multer npm
    cloudinary.v2.uploader.upload(req.file.path, function (err, result) {
      if (err) {
        ...
      }
      // add cloudinary url for the image to the novel object under image property
      req.body.novel.image = result.secure_url;
      req.body.novel.imageId = result.public_id;
    });
  } else {
    // setup default image for new novels
    req.body.novel.image = "/images/noimage.jpg";
    // imageId here should be ... empty? Undefined? Null? A fixed ID that may be deleted when accessing the DESTROY route?
    req.body.novel.imageId = undefined;
  }
  ...
  ...
  Novel.create(req.body.novel, function (err, newlyAddedNovel) {
    if (err) {
      req.flash('error', err.message);
      ...
    } else {
      req.flash("success", "Novel added successfully.");
      ...
    }
  });
});
// DESTROY route
router.delete("/:id", middleware.checkNovelOwnership, (req, res) => {
  // find and delete the correct novel along with the image on cloudinary
  Novel.findById(req.params.id, async (err, novel) => {
    if (err) {
      req.flash("error", err.message);
      return res.redirect("back");
    }
    try {
      // delete the image from cloudinary
      await cloudinary.v2.uploader.destroy(novel.imageId);
      // delete the novel found
      novel.remove();
      req.flash("success", "Novel deleted successfully.");
      ...
      ...
    } catch (err) {
      ..
    }
  });
});
Good news, I solved the problem! Hooray! Took me around 2 hours though...
In the end everything worked just as I wanted, such finesse.
The code is below:
• CREATE route
For new posts, I set up every new novel with the default image first; then, if an image (req.file) is given, I change it and upload.
Afterwards, remember to create that novel in the mongo database (Novel.create() in my case).
router.post("/", middleware.isLoggedIn, upload.single('image'), async function (req, res) {
// set every new novel with default image first
req.body.novel.image = "https://res.cloudinary.com/dvfkbz6la/image/upload/v1565434656/noimage_ew1uri.jpg";
req.body.novel.imageId = "noimage_ew1uri";
req.body.novel.user = {
id: req.user._id,
username: req.user.username
};
if (req.file) {
// req.file.path comes from multer npm
await cloudinary.v2.uploader.upload(req.file.path, function (err, result) {
if (err) {
req.flash('error', err.message);
return res.redirect('back');
}
// add cloudinary url for the image to the novel object under image property
// add image's public_id (imageId in novel model) to novel object
req.body.novel.image = result.secure_url;
req.body.novel.imageId = result.public_id;
});
}
Novel.create(req.body.novel, function (err, newlyAddedNovel) {
if (err) {
...
} else {
...
}
});
});
• UPDATE route
In the try block, if (novel.imageId = "DEFAULT_IMAGEID") { } else { } is added.
// UPDATE route
router.put("/:id", middleware.checkNovelOwnership, upload.single('image'), (req, res) => {
  // find the correct novel
  Novel.findById(req.params.id, async (err, novel) => {
    if (err) {
      ...
    } else {
      // if there's a req.file, we know user is trying to upload a new image
      if (req.file) {
        try {
          // if imageId is default, await the result to be uploaded
          if (novel.imageId = "noimage_ew1uri") {
            var result = await cloudinary.v2.uploader.upload(req.file.path);
          } else {
            // if not default, find the old image using imageId and delete
            await cloudinary.v2.uploader.destroy(novel.imageId);
            var result = await cloudinary.v2.uploader.upload(req.file.path);
          }
          novel.imageId = result.public_id;
          novel.image = result.secure_url;
        } catch (err) {
          ...
        }
      }
      novel.title = req.body.title;
      novel.author = req.body.author;
      novel.price = req.body.price;
      novel.description = req.body.description;
      // remember to save the changed novel
      novel.save();
      req.flash("success", "Successfully Updated!");
      res.redirect("/novels/" + novel._id);
    }
  });
});
• DESTROY route
In the try block, if (novel.imageId != "DEFAULT_IMAGEID") { } else { } is added.
// DESTROY route
router.delete("/:id", middleware.checkNovelOwnership, (req, res) => {
  // find and delete the correct novel along with the image on cloudinary
  Novel.findById(req.params.id, async (err, novel) => {
    if (err) {
      req.flash("error", err.message);
      return res.redirect("back");
    }
    try {
      // if novel.imageId isn't default, destroy it from Cloudinary
      if (novel.imageId != "noimage_ew1uri") {
        await cloudinary.v2.uploader.destroy(novel.imageId);
      }
      // delete the novel found
      novel.remove();
      ...
    } catch (err) {
      ...
    }
  });
});
The only problem left is, I don't know why, but when hitting the UPDATE route,
when the imageId isn't the default one and the user changes the image,
the old image is NOT destroyed and stays in the Cloudinary library. Such oddness.
I do have this code set up:
Novel.findById(req.params.id, async (err, novel) => {
  if (err) {
    ...
  } else {
    if (req.file) {
      try {
        // if imageId is default, await the result to be uploaded
        if (novel.imageId = "noimage_ew1uri") {
          ...
        } else {
          // if not default, find the old image using imageId and delete
          await cloudinary.v2.uploader.destroy(novel.imageId);
          var result = await cloudinary.v2.uploader.upload(req.file.path);
        }
        ...
        ...
I don't know why await cloudinary.v2.uploader.destroy(novel.imageId); isn't working as expected. Hmmm...
Images that were already delivered on your site are cached on one or more CDN edges. Therefore, even if you delete the image from your Cloudinary account, cached copies might still be available. By default, all delivered images are cached for 30 days.
When using the destroy API with the invalidate parameter set to true, the image will be deleted and purged from the CDN, allowing the new image to be displayed.
Alternatively, when updating, you can accomplish the same result by setting the overwrite and invalidate parameters to true when uploading the new file (without the need to call the destroy method).
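For reference, a minimal sketch of both options against the code above (invalidate and overwrite are options of Cloudinary's destroy/upload APIs; the public ID is the one from the question):
// Option 1: destroy the old asset and purge cached copies from the CDN
await cloudinary.v2.uploader.destroy(novel.imageId, { invalidate: true });

// Option 2: overwrite the existing asset under the same public_id and invalidate the CDN copy,
// so no destroy call is needed
var result = await cloudinary.v2.uploader.upload(req.file.path, {
  public_id: novel.imageId,
  overwrite: true,
  invalidate: true
});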

Save images on Google Storage on javascript

My goal is to store an image on S3 or Google Storage and save the link to the database. How can I do it? Is there a free solution?
Can someone link me a code sample for doing that?
I've never used Google Storage or S3 before.
I pick an image like this:
handleImage = e => {
  e.preventDefault();
  let reader = new FileReader();
  let file = e.target.files[0];
  reader.onloadend = () => {
    this.setState({
      file: file,
      image: reader.result
    });
  }
  reader.readAsDataURL(file);
  this.setState({ imgchange: true })
}
And then send it to server:
this.props.editUser({
  img: this.state.image,
})
My server is written in Node.js.
I've done this exact thing before, though I'm not claiming I had the most straightforward way.
1) I sent the image from the client side to the server as a base64 string, 2) created a buffer from it, 3) used imageMagick to stream it to my Google Cloud bucket, and lastly 4) stored the bucket link on the object in the database as imgLink (or what have you) so it can be shown in the front-end application.
Some important things to require at the top
var gm = require('gm').subClass({ imageMagick: true });
var gcs = require('@google-cloud/storage')({
  keyFilename: sails.config.gcloud.keyFileName,
  projectId: sails.config.gcloud.projectId
});
var bucket = gcs.bucket(sails.config.gcloud.bucketname);
Step 1 - Sending base64 image to backend service and decoding it
imageControllerFirstHitAtEndpoint: function (req, res) {
  PictureService.uploadPictureCDNReturnLink(req.body.picture, function (err, imageLink) {
    if (err) {
      // Handle error...
    }
    // Success, save that imageLink to whatever db object you want
  });
}
Step 2 and 3 - Create buffer with base64 data and Stream it to Google Cloud Bucket
// Step 2 create buffer with base64 data
uploadPictureCDNReturnLink: function(picDataInBase64, cb) {
  var imageBuffer;
  imageBuffer = PictureService.bufferFromBase64(picDataInBase64);
  var file = bucket.file('cool-file-name');
  // Step 3 stream it to where it needs to go
  gm(imageBuffer)
    .stream()
    .pipe(file.createWriteStream())
    .on('finish', function() {
      file.getMetadata(function(err, metadata) {
        if (err) {
          console.log("Error getting metadata from google cloud", err);
          return cb(err);
        }
        cb(null, metadata.mediaLink);
      });
    }).on('error', function(err) {
      console.log("Got an error uploading to Cloud Storage:", err);
      cb(err);
    });
}
Step 4 - Save that imageLink to wherever you want
Won't spell this out totally for you, not that hard. Something kinda like:
Organization.findOne(req.body.orgID).exec(function (err, organization) {
  if (!organization) {
    return res.json(400, { error: "No organization with id: " + req.param('id') });
  }
  if (err) {
    return res.json(400, err);
  }
  organization.pictureLink = imageLink;
  organization.save(function (err) {
    return res.json(organization);
  });
});
Hope that helps! Should give you an idea of one way to do it.
P.S. A lot of that stuff might be using Sails-like NodeJS conventions, Sails is my backend framework of choice.
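If you're not on Sails, here's a more recent sketch of the same idea with the current @google-cloud/storage package (bucket name, key file path, and function name are placeholders); it decodes the base64 payload to a buffer and saves it straight to the bucket without imageMagick:
const { Storage } = require('@google-cloud/storage');

const storage = new Storage({ keyFilename: 'path/to/key.json', projectId: 'your-project-id' });
const bucket = storage.bucket('your-bucket-name');

async function uploadBase64Image(picDataInBase64, fileName) {
  // Strip a possible "data:image/...;base64," prefix, then decode to a Buffer
  const base64 = picDataInBase64.split(',').pop();
  const buffer = Buffer.from(base64, 'base64');

  const file = bucket.file(fileName);
  await file.save(buffer, { contentType: 'image/png', resumable: false });

  // Save this link in your database (assumes the object is publicly readable)
  return `https://storage.googleapis.com/${bucket.name}/${file.name}`;
}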

Send a file from mobile to Node js server

I'm building an application with react-native. Now I'm trying to send an image from the mobile device to the server (Node.js). For this I'm using react-native-image-picker. The problem is that when I send the image, the server saves a file but it's empty and doesn't contain the photo. I think the problem is probably that the server can't access the path of the image because it's on a different device, but I don't know how to work around that.
React-Native:
openImagePicker() {
  const options = {
    title: 'Select Avatar',
    storageOptions: {
      skipBackup: true,
      path: 'images'
    }
  }
  ImagePicker.showImagePicker(options, (imagen) => {
    if (imagen.didCancel) {
      console.log('User cancelled image picker');
    }
    else if (imagen.error) {
      console.log('ImagePicker Error: ', imagen.error);
    }
    else if (imagen.customButton) {
      console.log('User tapped custom button: ', imagen.customButton);
    }
    else {
      let formdata = new FormData();
      formdata.append("file[name]", imagen.fileName);
      formdata.append("file[path]", imagen.path);
      formdata.append("file[type]", imagen.type);
      fetch('http://X/user/photo/58e137dd5d45090d0b000006', {
        method: 'PUT',
        headers: {
          'Content-Type': 'multipart/form-data'
        },
        body: formdata
      })
      .then(response => {
        console.log("ok");
      })
      .catch(function(err) {
        console.log(err);
      })
    }
  })
}
Node Js:
addPhotoUser = function (req, res) {
  User.findById(req.params.id, function(err, user) {
    fs.readFile(req.body.file.path, function (err, data) {
      var pwd = 'home/ubuntu/.../';
      var newPath = pwd + req.body.file.name;
      fs.writeFile(newPath, data, function (err) {
        imageUrl: URL + req.body.file.name;
        user.save(function(err) {
          if (!err) {
            console.log('Updated');
          } else {
            console.log('ERROR: ' + err);
          }
          res.send(user);
        });
      });
    });
  });
};
Yes, the problem is that the file path is on the local device and not the server. You want to send the actual data returned to you by react-native-image-picker, not the URI. That library encodes the data as base64, so you should send that string to your server, not the URI returned from the library, because it won't be accessible on a remote server.
This means you won't be reading any files on your server; instead you'll just decode a base64 string from the request body and write that to your filesystem.
For the client side:
let formdata = new FormData();
formdata.append("file[name]", imagen.fileName);
formdata.append("file[data]", imagen.data); // this is base64 encoded!
formdata.append("file[type]", imagen.type);
fetch('http://X/user/photo/58e137dd5d45090d0b000006', {
  method: 'PUT',
  headers: {
    'Content-Type': 'multipart/form-data'
  },
  body: formdata
})
On the server side, decode from base64 before writing to the filesystem (atob is a browser API; in Node the idiomatic equivalent is Buffer.from):
let decoded = Buffer.from(req.body.data, 'base64')
// now this is binary and can be written to the filesystem
From there:
fs.writeFile(newPath, decoded, function (err) {
  imageUrl: newPath;
  user.save(function(err) {
    if (!err) {
      console.log('Updated');
    } else {
      console.log('ERROR: ' + err);
    }
    res.send(user);
  });
});
Note: you don't need the filesystem read that's in your code, because you're decoding the image that was sent as a base64 string in your request.
There also seems to be some oddities with how you're using that user object. You seem to be only passing a function that handles errors and not any actual data. I don't know what ORM you're using so it's hard to say how it should work. Maybe something like this?
user.save({imageUrl:uriReturnedByFsWrite}, (err, data)=>{...})
Good luck :)
Make an object, then send that object to the server. The object will consist of name, path, and type, like this:
var imageData = {name: 'image1', path: uri, type: 'image/jpeg'}
The above is one way to send the image data. Another way is to convert it into a BLOB so that the server-side programmer doesn't have to do this task on their end. You can create a BLOB with react-native-fetch-blob.
One more way is to upload the images directly to the Amazon server (S3) and send the link to the backend, as sketched after the code below.
Function that returns base64 string:
var RNFetchBlob = require('react-native-fetch-blob').default;

getImageAttachment: function(uri_attachment, mimetype_attachment) {
  return new Promise((RESOLVE, REJECT) => {
    // Fetch attachment
    RNFetchBlob.fetch('GET', config.apiRoot + '/app/' + uri_attachment)
      .then((response) => {
        let base64Str = response.data;
        var imageBase64 = 'data:' + mimetype_attachment + ';base64,' + base64Str;
        // Return base64 image
        RESOLVE(imageBase64);
      })
      .catch((error) => {
        // error handling
        console.log("Error: ", error);
        REJECT(error);
      });
  });
},
Cheers :)
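As for the S3 suggestion above (uploading directly from the device and only sending the resulting link to the backend), one common pattern is a presigned URL; this is just a sketch assuming the aws-sdk v2 package, with placeholder bucket and region names:
// On the Node server: hand out a short-lived presigned PUT URL
const AWS = require('aws-sdk');
const s3 = new AWS.S3({ region: 'us-east-1' });

app.get('/upload-url', (req, res) => {
  const key = 'avatars/' + Date.now() + '.jpg';
  const url = s3.getSignedUrl('putObject', {
    Bucket: 'your-bucket-name',
    Key: key,
    ContentType: 'image/jpeg',
    Expires: 60 // seconds
  });
  res.json({ url, key });
});

// On the React Native side: PUT the picked file to that URL,
// then send the key (or the resulting object URL) to your own API.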
