Read stream not firing / not catching errors - javascript

I am trying to create a read stream to use Cloudinary's upload stream function, I am also using resumable.js to chunk the initial file, while the create read stream is working perfectly fine (as the whole file gets written perfectly fine.) the read stream / cloudinary upload function seems to not even be firing and failing silently.
// Handle resumable.js chunk uploads: once all chunks have arrived,
// stitch them to a single temp file on disk, then stream that file to
// Cloudinary. The upload must not start until the stitched file is
// fully flushed — piping before the write stream's "finish" event raced
// the writer and silently uploaded a partial (often empty) file.
router.post("/upload", (req, res, next) => {
  console.log("the params are.. ", req.body);
  resumable.post(req, function(
    status,
    filename,
    original_filename,
    identifier
  ) {
    if (status === "done") {
      const timestamp = new Date().getTime().toString();
      const assembledPath = timestamp + filename;

      // Stitch the chunks into one file on disk.
      const s = fs.createWriteStream(assembledPath);
      resumable.write(identifier, s);

      // Only read + upload after every byte has been flushed to disk.
      s.on("finish", function() {
        console.log("ive finished...");

        const upload_stream = cloudinary.uploader.upload_stream(
          { tags: "basic_sample" },
          function(err, image) {
            console.log();
            console.log("** Stream Upload");
            if (err) {
              console.warn(err);
              return; // `image` is undefined on error — don't dereference it
            }
            console.log("* Same image, uploaded via stream");
            console.log("* " + image.public_id);
            console.log("* " + image.url);
            // NOTE(review): waitForAllUploads is not defined in this file —
            // confirm it exists elsewhere.
            waitForAllUploads(assembledPath, err, image);
          }
        );

        fs.createReadStream(assembledPath)
          .on("error", err => {
            // Surface read errors instead of failing silently.
            console.log(err);
          })
          .pipe(upload_stream)
          .on("error", err => {
            console.log(err);
          });

        // delete chunks
        setTimeout(() => {
          resumable.clean(identifier);
        }, 1000);
      });
    }
    res.send(status);
  });
});
Here are the resources to what I am using:
https://github.com/cloudinary/cloudinary_npm/blob/master/samples/basic/basic.js
https://github.com/mrawdon/resumable-node

fs.createReadStream(timestamp + filename) accepts the file path but looks like you are passing the timestamp as well. Also, waitForAllUploads is not defined. You can try the following code just using Node and Cloudinary to test it out.
// Minimal repro: pipe a plain file read stream into a Cloudinary upload
// stream; the callback receives either an error or the stored image.
var upload_stream = cloudinary.uploader.upload_stream(
  { tags: 'basic_sample' },
  function (err, image) {
    console.log("** Stream Upload");
    if (err) {
      console.warn(err);
    }
    console.log("* " + image.url)
  }
);
var file_reader = fs.createReadStream('<file path>').pipe(upload_stream);

Related

Node.js: upload multiple images — create the directory when it doesn't exist

I'm using Node.js and multer to upload multiple images.
The first check is whether the directory exists; if not, it will be created.
Error: when the folder doesn't exist, all images pass the first fs.access condition (logging "Directory doesn't exist"), but the directory is then created, so the second image takes the other branch and logs "Directory exists".
var storage = multer.diskStorage({
destination: (req, file, cb) => {
const userId = encryptFolder(req.params["id"]);
const dtrToCreate = "C:/Users/User/Pictures/" + userId;
fs.access(dtrToCreate, (error) => {
if (error) {
console.log("Directory does not exist.", userId);
fs.mkdirSync(dtrToCreate, (error, data) => {
if (error) {
throw error;
}
cb(null, "C:/Users/User/Pictures/");
});
} else {
console.log("Directory exists.", userId);
cb(null, "C:/Users/User/Pictures/");
}
});
},
When the directory exists, images are uploaded successfully.
Working solution:
Since multiple files are processed, the directory check must tolerate the folder already existing each time a file passes through — a recursive mkdir handles that:
// Idempotent create: succeeds whether or not the directory already exists,
// eliminating the access-then-mkdir race entirely.
fs.mkdirSync(dtrToCreate, { recursive: true })
return cb(null, dtrToCreate)

AWS Lambda returns 'null' after going through a forEach loop of saving multiple files in s3 bucket

I have an AWS Lambda function. It goes through a list of the array which is an array of URLs and saves their screenshot and puts them in s3. How do I return the output of this function which is screenshotLinks array that has all the links to files saved in s3? I used the callback function in the end, but it just returns null! I want callback function to output all the s3 file links saved inside screenshotLinks array.
exports.handler = (event, context, callback) => {
desktopLinks.forEach(function (url, index) {
https.request(url, function(res) {
var data = new Stream();
res.on('data', function(chunk) {
// Agregates chunks
data.push(chunk);
});
res.on('end', function() {
var body = data.read();
// Once you received all chunks, send to S3
var currentLink = links[index];
var linkAddress = encodeURIComponent(currentLink);
var slashPosition = getPosition(currentLink, '/', 3)+1;
var linkName = currentLink.substr(slashPosition, currentLink.length)
var params = {
Bucket: bucket,
Key: completeDate + '/screenshots/' + linkName + '.png',
Body: body
};
s3.putObject(params, function(err, data, callback) {
if (err) {
console.error(err, err.stack);
} else {
bunch = params.Bucket + '/' + params.Key;
screenshotLinks.push(bunch);
}
});
});
}).end();
})
callback(null, screenshotLinks)
};
Your code is event driven / asynchronous which means you are calling the callback before screenshotLinks has been populated.
The node http.ClientRequest.end() method finishes sending a request, but that doesn't mean that the response has been received and handled, as that is done by an asynchronous event handler. However, the callback is executed immediately after the call to request.end(), which is just after the request has been fired off, therefore screenshotLinks is empty.
You need to execute your callback from the callback you pass to s3.putObject. I suggest you pass your callback a response/result object that indicates whether the putObject succeeded and contains the url it relates to and either an error message or a screenshotLink, e.g. something like this:
// Report each upload's outcome through the Lambda callback.
// Two fixes vs. the original: (1) s3Response was declared but never
// initialized, so `s3Response.url = url` threw a TypeError; (2) the
// putObject callback takes only (err, data) — the stray third `callback`
// parameter shadowed the real outer callback with undefined.
s3.putObject(params, function (err, data) {
  const s3Response = {};
  s3Response.url = url;
  if (err) {
    s3Response.success = false;
    s3Response.error = err;
    console.error(err, err.stack);
  } else {
    const bunch = params.Bucket + '/' + params.Key;
    s3Response.success = true;
    s3Response.screenshotLink = bunch;
  }
  callback(null, s3Response);
});
I would like to suggest you use an 8.10 node runtime.
ref: https://aws.amazon.com/blogs/compute/node-js-8-10-runtime-now-available-in-aws-lambda/
Then your entry point should be:
export async function <function_name>(event) {}
Then:
// Promise-based upload template (requires a runtime with async/await,
// e.g. Node 8.10+). The placeholder comments below mark values the
// caller must fill in before this compiles.
let s3 = new AWS.S3({ region: process.env.AWS_REGION, apiVersion: '2006-03-01' });
let params=
{
Bucket: /* a path to bucket (string) */,
Key: name /* string */,
Body: /* supported types (Buffer, Typed Array, Blob, String, ReadableStream) */,
ACL: 'public-read',
ContentType: 'image/png'
};
try
{
// .promise() converts the AWS request object into an awaitable Promise.
let s3Response = await s3.upload(params).promise();
// if succeeded
console.log(`File uploaded to S3 at ${s3Response.Bucket} bucket. File location: ${s3Response.Location}`);
}
catch (ex) // if an error occurred
{
console.error(ex);
}

how to store image from buffer to file system in node.js using sharp.js

i am using sharp.js for image manipulation in node.js
my code
// Convert the image at `path` to JPEG and write the result to `temp`,
// then serve it. Fix: the original passed an undefined variable `buffer`
// to fs.writeFile — the converted bytes arrive in the callback as `data`.
sharp(path)
  .toFormat('jpeg')
  .toBuffer((err, data, info) => {
    // NOTE(review): `err` is unchecked here — confirm upstream handling.
    fs.writeFile(temp, data, { flag: 'w' }, function() {
      response.sendFile(temp);
    });
  });
here temp in fs means "the path" var temp= imageDir + request.params.id; ( http://localhost:2000/images/we.png )
upload image as png format or any other format
convert that image to JPEG using .toFormat('jpeg') and send to
buffer
want to save that image from buffer to
Your code doesn't have 'buffer'. You should be writing data.
// Corrected fragment: write `data` (the converted bytes supplied by
// sharp's toBuffer callback), which the question's code mistakenly
// referenced as an undefined `buffer` variable.
.toBuffer((err, data, info) => {
fs.writeFile(temp, data, { flag: 'w' }, function() {
response.sendFile(temp);
});
});
but you'd be better off using toFile instead of toBuffer:
// Resize to 330px wide (height auto), flatten, and write straight to a
// file on disk — no intermediate buffer — then serve it (500 on failure).
sharp('originalFile.jpg')
  .resize(330, null)
  .flatten()
  .toFile('newFile.jpg', (err) => {
    if (err) {
      response.sendStatus(500);
      return;
    }
    response.sendFile('newFile.jpg');
  });
You can do this
// Generate a 400x200 thumbnail of the first uploaded file and save it
// under uploads/ with a "thumbnail-" prefix.
const source = req.files[0];
sharp(source.path)
  .resize(400, 200)
  .toFile('uploads/' + 'thumbnail-' + source.originalname, (err, info) => {
    if (err) {
      console.error(err);
      return;
    }
    console.log(info);
  });

Save images on Google Storage on javascript

My goal is to store an image on S3 or Google Storage and save the link to the database. How can I do it? Is there a free solution?
Can someone link me a code sample for to do that?
I never used Google Storage or S3 before.
I pick an image like that:
// Read the chosen file as a data URL and stash both the File object and
// its base64 preview in component state; flag that the image changed.
handleImage = e => {
  e.preventDefault();

  const file = e.target.files[0];
  const reader = new FileReader();

  // Runs after the asynchronous read completes.
  reader.onloadend = () => {
    this.setState({
      file,
      image: reader.result
    });
  };

  reader.readAsDataURL(file);
  this.setState({ imgchange: true });
}
And then send it to server:
// Send the base64 data URL held in state (not a device file path).
this.props.editUser({
img: this.state.image,
})
My server is written with node.js
I've done this exact thing before, though I'm not claiming I had the most straightforward way.
1) sent the image from the client side to the server as a base64 image, took that image, 2) created a buffer, and 3) then used ImageMagick to stream it to my Google Cloud bucket. Lastly, 4) stored that link to the Google Cloud bucket on the object in your database as the imgLink (or what have you) so it can show in your front-end application.
Some important things to require at the top
// ImageMagick-backed gm instance for streaming image buffers.
var gm = require('gm').subClass({ imageMagick: true });
// The scoped package name is "@google-cloud/storage" — the original's
// "#google-cloud/storage" (a markdown-rendering artifact) would throw
// MODULE_NOT_FOUND at require time.
var gcs = require('@google-cloud/storage')({
  keyFilename: sails.config.gcloud.keyFileName,
  projectId: sails.config.gcloud.projectId
});
var bucket = gcs.bucket(sails.config.gcloud.bucketname);
Step 1 - Sending base64 image to backend service and decoding it
imageControllerFirstHitAtEndpoint: function (req, res) {
PictureService.uploadPictureCDNReturnLink(req.body.picture, function (err, imageLink) {
if (err) {
// Handle error...
}
// Success, save that imageLink to whatever db object you want
}
}
Step 2 and 3 - Create buffer with base64 data and Stream it to Google Cloud Bucket
// Step 2 create buffer with base64 data
// Step 2 create buffer with base64 data
// Streams the decoded image through gm into a Google Cloud Storage
// bucket and calls back with the object's public mediaLink.
uploadPictureCDNReturnLink: function(picDataInBase64, cb) {
var imageBuffer;
imageBuffer = PictureService.bufferFromBase64(picDataInBase64);
// NOTE(review): a fixed object name overwrites the previous upload —
// confirm whether names should be unique per picture.
var file = bucket.file('cool-file-name');
// Step 3 stream it to where it needs to go
gm(imageBuffer)
.stream()
.pipe(file.createWriteStream())
.on('finish', function() {
// Upload finished — fetch metadata to obtain the downloadable URL.
file.getMetadata(function(err, metadata) {
if (err) {
console.log("Error getting metadata from google cloud", err);
return cb(err);
}
cb(null, metadata.mediaLink);
});
}).on('error', function(err) {
// Write-stream failure: report to the caller rather than swallowing.
console.log("Got an error uploading to Cloud Storage:", err);
cb(err);
});
}
Step 4 - Save that imageLink to wherever you want
Won't spell this out totally for you, not that hard. Something kinda like:
// Look up the organization and persist the uploaded image link on it.
// Fix: check `err` BEFORE `!organization` — on a query error the record
// is undefined and the original order misreported the failure as "no
// organization found". The not-found message now echoes the same id the
// query actually used (req.body.orgID, not req.param('id')).
Organization.findOne(req.body.orgID).exec(function (err, organization) {
  if (err) {
    return res.json(400, err);
  }
  if (!organization) {
    return res.json(400, { error: "No organization with id: " + req.body.orgID });
  }
  organization.pictureLink = imageLink;
  organization.save(function (err) {
    // NOTE(review): save errors are not surfaced here — confirm intent.
    return res.json(organization);
  });
});
Hope that helps! Should give you an idea of one way to do it.
P.S. A lot of that stuff might be using Sails-like NodeJS conventions, Sails is my backend framework of choice.

Send a file from mobile to Node js server

I'm building an application with react-native. Now I'm trying to send an image from the mobile device to the server (Node.js). For this I'm using react-native-image-picker. The problem is that when I send the image, the server saves a file but it's empty — it doesn't contain the photo. I think the problem is probably that the server can't access the path of the image because it is on a different device. But I don't know how to solve it.
React-Native:
openImagePicker(){
const options = {
title: 'Select Avatar',
storageOptions: {
skipBackup: true,
path: 'images'
}
}
ImagePicker.showImagePicker(options, (imagen) =>{
if (imagen.didCancel) {
console.log('User cancelled image picker');
}
else if (imagen.error) {
console.log('ImagePicker Error: ', imagen.error);
}
else if (imagen.customButton) {
console.log('User tapped custom button: ', imagen.customButton);
}
else {
let formdata = new FormData();
formdata.append("file[name]", imagen.fileName);
formdata.append("file[path]", imagen.path);
formdata.append("file[type]", imagen.type);
fetch('http://X/user/photo/58e137dd5d45090d0b000006', {
method: 'PUT',
headers: {
'Content-Type': 'multipart/form-data'
},
body: formdata
})
.then(response => {
console.log("ok");
})
.catch(function(err) {
console.log(err);
})
}})}
Node Js:
// Copy the uploaded file into place and record its URL on the user.
// Fix: `imageUrl: URL + req.body.file.name;` was a JavaScript LABEL
// statement — a no-op — so the URL was never stored; it must be assigned
// onto the user record before saving. readFile/writeFile errors are now
// surfaced instead of silently ignored.
addPhotoUser = function (req, res) {
  User.findById(req.params.id, function(err, user) {
    if (err || !user) {
      console.log('ERROR: ' + err);
      return res.send(err);
    }
    // NOTE(review): req.body.file.path must be readable by THIS server —
    // a client-device path will not work (see the answer below).
    fs.readFile(req.body.file.path, function (err, data) {
      if (err) {
        console.log('ERROR: ' + err);
        return res.send(err);
      }
      var pwd = 'home/ubuntu/.../';
      var newPath = pwd + req.body.file.name;
      fs.writeFile(newPath, data, function (err) {
        if (err) {
          console.log('ERROR: ' + err);
          return res.send(err);
        }
        user.imageUrl = URL + req.body.file.name;
        user.save(function(err) {
          if(!err) {
            console.log('Updated');
          } else {
            console.log('ERROR: ' + err);
          }
          res.send(user);
        });
      });
    });
  });
};
Yes, the problem is that the filepath is on the local device and not the server. You want to send the actual data returned to you by react-native-image-picker not the uri. It looks like that library encodes the data with base64 so you're going to want send that to your server, not the uri returned from the library because it won't be accessible on a remote server.
What this means is that you won't be reading any files on your server but instead just decoding a base64 string in the response body and writing that to your filesystem.
For the client side:
// Client side: ship the base64 payload itself under "file[data]" —
// not a device-local path — so the server can decode and persist it.
let formdata = new FormData();
formdata.append("file[name]", imagen.fileName);
formdata.append("file[data]", imagen.data); // this is base64 encoded!
formdata.append("file[type]", imagen.type);
fetch('http://X/user/photo/58e137dd5d45090d0b000006', {
method: 'PUT',
headers: {
'Content-Type': 'multipart/form-data'
},
body: formdata
})
On the server side atob to decode from base64 before writing to the filesystem:
let decoded = atob(req.body.data)
// now this is binary and can written to the filesystem
From there:
// Persist the decoded bytes, then record the stored path on the user.
// Fix: `imageUrl: newPath;` was a label statement (a no-op) — the path
// must actually be assigned onto the record that gets saved.
fs.writeFile(newPath, decoded, function (err) {
  // NOTE(review): `err` from writeFile is unchecked here — confirm
  // whether a write failure should abort the save.
  user.imageUrl = newPath;
  user.save(function(err) {
    if(!err) {
      console.log('Updated');
    } else {
      console.log('ERROR: ' + err);
    }
    res.send(user);
  });
});
Note, you don't need the filesystem write that's in your code because you're decoding the image that was sent as a b64 string in your request.
There also seems to be some oddities with how you're using that user object. You seem to be only passing a function that handles errors and not any actual data. I don't know what ORM you're using so it's hard to say how it should work. Maybe something like this?
user.save({imageUrl:uriReturnedByFsWrite}, (err, data)=>{...})
Good luck :)
Make an object then send that object to the server. The object will consist of name,path and type, like this:
// Metadata object describing the picked image; `uri` comes from the picker.
var imageData = {name: 'image1', path: uri, type: 'image/jpeg'}
Above is a one way to send the image data. The other way is to convert it into BLOB so that server side programmer doesn't have to do this task on their end. You can make BLOB by use of react-native-fetch-blob.
One more way is to directly upload the images to the amazon server(s3) and send the link to the backend..
Function that returns base64 string:
var RNFetchBlob = require('react-native-fetch-blob').default;
getImageAttachment: function(uri_attachment, mimetype_attachment) {
return new Promise((RESOLVE, REJECT) => {
// Fetch attachment
RNFetchBlob.fetch('GET', config.apiRoot+'/app/'+uri_attachment)
.then((response) => {
let base64Str = response.data;
var imageBase64 = 'data:'+mimetype_attachment+';base64,'+base64Str;
// Return base64 image
RESOLVE(imageBase64)
})
}).catch((error) => {
// error handling
console.log("Error: ", error)
});
},
Cheers :)

Categories

Resources