How to upload a file to Dropbox with dropbox.js? - javascript

ORIGINAL
I'm having problems uploading a file (image) to Dropbox from Node.js using the official dropbox.js.
I want to upload a picture that I have on another server, for example the Dropbox icon (www.dropbox.com/static/images/new_logo.png).
client.writeFile(file, 'www.dropbox.com/static/images/new_logo.png', function(error, stat) {
  if (error) {
    return res.send(error.status); // Something went wrong.
  }
  res.send("File saved as revision " + stat.revisionTag);
});
I know that this only creates a text file with the URL, but how can I upload the picture to Dropbox?
I also tried downloading the file with http.get and then uploading that to Dropbox, but it doesn't work.
Thanks.
UPDATE WITH MORE INFO
First I download the image from a remote url with this code:
var request = http.get(options, function(res){
  var imagedata = ''
  res.setEncoding('binary')
  res.on('data', function(chunk){
    imagedata += chunk
  })
  res.on('end', function(){
    console.log("Image downloaded!");
    fs.writeFile(local, imagedata, 'binary', function(err){
      if (err) throw err
      console.log('File saved.')
    })
  })
})
The file is saved correctly.
Then I tried two things:
Sending the 'imagedata' to Dropbox:
console.log("Image downloaded!");
client.writeFile(file, imagedata, function(error, stat) {
  if (error) {
    return response.send(error.status); // Something went wrong.
  }
  response.send("File saved as revision " + stat.revisionTag);
});
And something is uploaded to Dropbox, but it's nothing useful.
Then I also tried to read the file from disk and send it to Dropbox, but that doesn't work either:
fs.readFile(file, function(err, data) {

Use dropbox-js 0.9.1-beta1 or above to upload binary files from node.js. You need to pass it Buffer or ArrayBuffer instances. Try this code:
var req = http.get(options, function(res) {
  var chunks = [];
  res.on('data', function(chunk) {
    chunks.push(chunk);
  });
  res.on('end', function() {
    console.log("Image downloaded!");
    var imageData = Buffer.concat(chunks);
    client.writeFile(file, imageData, function(error, stat) {
      if (error) {
        return response.send(error.status);
      }
      response.send("File saved as revision " + stat.revisionTag);
    });
  });
});
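The key difference from the earlier attempts is that Buffer.concat keeps the downloaded image as raw bytes. Accumulating the chunks into a 'binary'-encoded string and passing that string to writeFile makes dropbox-js treat the data as text, which is why the earlier upload produced a file that wasn't usable.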
Original answer: the dropbox-js README mentions that binary files don't work in node.js just yet.

I had this issue as well. I copied and modified the old dropbox-node npm package (which is now deprecated) a bit, and added the following function to dropbox.js:
Client.prototype.writeFileNodejs = function(path, data, callback) {
  var self = this;
  fs.readFile(data.path, function(err, data) {
    if (err) return callback(err);
    var uri = "" + self.urls.putFile + "/" + (self.urlEncodePath(path));
    if (typeof data === 'function') callback = data, data = undefined;
    var oauth = {
      consumer_key: self.oauth.key
      , consumer_secret: self.oauth.secret
      , token: self.oauth.token
      , token_secret: self.oauth.tokenSecret
    };
    var requestOptions = { uri: uri, oauth: oauth };
    requestOptions.body = data;
    return request['put'](requestOptions, callback ?
      function(err, res, body) {
        if (err) return callback(err);
        var contentType = res.headers['content-type'];
        // check if the response body is in JSON format
        if (contentType === 'application/json' ||
            contentType === 'text/javascript') {
          body = JSON.parse(body);
          if (body.error) {
            var err = new Error(body.error);
            err.statusCode = res.statusCode;
            return callback(err);
          }
        } else if (errors[res.statusCode]) {
          var err = new Error(errors[res.statusCode]);
          err.statusCode = res.statusCode;
          return callback(err);
        }
        // check for metadata in headers
        if (res.headers['x-dropbox-metadata']) {
          var metadata = JSON.parse(res.headers['x-dropbox-metadata']);
        }
        callback(null, body, metadata);
      } : undefined);
  });
};
You'll also need to require request and fs for this:
var request = require('request'),
fs = require('fs');

Related

Download zip file being sent by server on client side?

I have an API that downloads multiple files from AWS S3, creates a zip which is saved to disk, and sends that zip back to the client. The API works, but I have no idea how to handle the response / download the zip to disk on the client side.
This is my API:
reports.get('/downloadMultipleReports/:fileKeys', async (req, res) => {
  var s3 = new AWS.S3();
  var archiver = require('archiver');
  const { promisify } = require('util');
  var str_array = req.params.fileKeys.split(',');
  console.log('str_array: ', str_array);
  for (var i = 0; i < str_array.length; i++) {
    var filename = str_array[i].trim();
    var localFileName = './temp/' + filename.substring(filename.indexOf("/") + 1);
    console.log('FILE KEY >>>>>> : ', filename);
    const params = { Bucket: config.reportBucket, Key: filename };
    const data = await (s3.getObject(params)).promise();
    const writeFile = promisify(fs.writeFile);
    await writeFile(localFileName, data.Body);
  }
  // create a file to stream archive data to.
  var output = fs.createWriteStream('reportFiles.zip');
  var archive = archiver('zip', {
    zlib: { level: 9 } // Sets the compression level.
  });
  // listen for all archive data to be written
  // 'close' event is fired only when a file descriptor is involved
  output.on('close', function() {
    console.log(archive.pointer() + ' total bytes');
    console.log('archiver has been finalized and the output file descriptor has closed.');
  });
  // This event is fired when the data source is drained no matter what was the data source.
  // It is not part of this library but rather from the NodeJS Stream API.
  // #see: https://nodejs.org/api/stream.html#stream_event_end
  output.on('end', function() {
    console.log('Data has been drained');
  });
  // good practice to catch warnings (ie stat failures and other non-blocking errors)
  archive.on('warning', function(err) {
    if (err.code === 'ENOENT') {
      // log warning
    } else {
      // throw error
      throw err;
    }
  });
  // good practice to catch this error explicitly
  archive.on('error', function(err) {
    throw err;
  });
  // pipe archive data to the file
  archive.pipe(output);
  // append files from a sub-directory, putting its contents at the root of archive
  archive.directory('./temp', false);
  // finalize the archive (ie we are done appending files but streams have to finish yet)
  // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand
  archive.finalize();
  output.on('finish', () => {
    console.log('Ding! - Zip is done!');
    const zipFilePath = "./reportFiles.zip"; // or any file format
    // res.setHeader('Content-Type', 'application/zip');
    fs.exists(zipFilePath, function(exists) {
      if (exists) {
        res.writeHead(200, {
          "Content-Type": "application/octet-stream",
          "Content-Disposition": "attachment; filename=" + "./reportFiles.zip"
        });
        fs.createReadStream(zipFilePath).pipe(res);
      } else {
        res.writeHead(400, { "Content-Type": "text/plain" });
        res.end("ERROR File does not exist");
      }
    });
  });
  return;
});
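One thing to be careful of in the snippet above: the output.on('finish', ...) handler is attached after archive.finalize() has already been called, even though the comment warns that 'close', 'end' or 'finish' may fire right after finalize. Registering the 'finish' (or 'close') handler before calling finalize() avoids relying on timing here.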
And this is how I am calling the API / expecting to download the response:
downloadMultipleReports() {
  var fileKeysString = this.state.awsFileKeys.toString();
  var newFileKeys = fileKeysString.replace(/ /g, '%20').replace(/\//g, '%2F');
  fetch(config.api.urlFor('downloadMultipleReports', { fileKeys: newFileKeys }))
    .then((response) => response.body())
  this.closeModal();
}
How can I handle the response / download the zip to disk?
This is what ended up working for me:
Server side:
const zipFilePath = "./reportFiles.zip";
fs.exists(zipFilePath, function(exists) {
  if (exists) {
    res.writeHead(200, {
      "Content-Type": "application/zip",
      "Content-Disposition": "attachment; filename=" + "./reportFiles.zip"
    });
    fs.createReadStream(zipFilePath).pipe(res);
  } else {
    res.writeHead(400, { "Content-Type": "text/plain" });
    res.end("ERROR File does not exist");
  }
});
Client side:
downloadMultipleReports() {
  var fileKeysString = this.state.awsFileKeys.toString();
  var newFileKeys = fileKeysString.replace(/ /g, '%20').replace(/\//g, '%2F');
  fetch(config.api.urlFor('downloadMultipleReports', { fileKeys: newFileKeys }))
    .then((res) => { return res.blob() })
    .then(blob => {
      download(blob, 'reportFiles.zip', 'application/zip');
      this.setState({ isOpen: false });
    })
}
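Note that download isn't defined in this snippet; presumably it comes from the downloadjs package (an assumption, since the original post doesn't show the import), which takes (data, filename, mimeType) and triggers a browser download. It would need to be installed and imported, for example:
// Assumption: the download(blob, filename, mimeType) helper above is the downloadjs package
// npm install downloadjs
import download from 'downloadjs';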

Using node-thumbnail to generate thumbnail from image

I am trying to generate a thumbnail from an image using node-thumbnail. The thumbnail is being uploaded to my container in Azure Storage, but it looks like the original file, not like a thumbnail. Here's my code: first I upload the original image, then read it back and generate a thumbnail from it, then upload the thumbnail to the container. What am I doing wrong? I couldn't find many resources online on how to do this, please help!
app.post('/upload', function(req, res) {
  if (!req.files)
    return res.status(400).send('No files were uploaded.');
  // The name of the input field (i.e. "sampleFile") is used to retrieve the uploaded file
  let sampleFile = req.files.sampleFile;
  let name = sampleFile.name;
  let data = sampleFile.data;
  //var options = { contentSettings: { contentType: 'image/jpeg' } }
  blobSvc.createBlockBlobFromText('test-container', name, data, function(error, result, response) {
    if (error) {
      return res.status(500).send(error);
    } else {
      console.log('Uploaded to container');
    }
    var info = blobSvc.getBlobToLocalFile("test-container", name, name,
      function(error, blockBlob, response) {
        thumb({
          source: name, // could be a filename: dest/path/image.jpg
          destination: './',
          concurrency: 4,
          width: 100
        }, function(files, err) {
          if (err) throw err;
          console.log("resized");
          //Delete the downloaded BIG one
          //Upload the thumbnail
          blobSvc.createBlockBlobFromLocalFile("test-container", files[0].dstPath, files[0].dstPath,
            function(error, blockBlob, response) {
              if (!error) {
                console.log("thumbnail uploaded: " + name);
              } else {
                console.log(error);
              }
            });
        });
      });
  });
});
This isn't really an Azure Storage issue; it's more of a node-thumbnail issue.
How about using Jimp instead:
var azure = require('azure-storage');
var Jimp = require("jimp");
var path = require('path');
// ...
var info = blobSvc.getBlobToLocalFile("test-container", name, name, function(error, blockBlob, response) {
  if (!error) {
    var dstName = path.parse(name).name + "_thumb" + path.parse(name).ext;
    Jimp.read(name, function(err, image) {
      if (err) throw err;
      image.resize(100, Jimp.AUTO) // resize
        .quality(60) // set JPEG quality
        .write(dstName, function(err, ret) { // save
          //Upload the thumbnail
          blobSvc.createBlockBlobFromLocalFile("test-container", dstName, dstName, function(error, blockBlob, response) {
            if (!error) {
              console.log("thumbnail uploaded: " + dstName);
            } else {
              console.log(error);
            }
          });
        });
    });
  }
});
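As a side note, Jimp.AUTO in the resize call tells Jimp to compute the height from the new width, so the thumbnail keeps the original aspect ratio.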

How to return a large base64 image to the client side with GridFS?

I store images with GridFS on MongoDB and I want to display these images on the client side. The code works for small images, but when I want to display images bigger than 5 MB, I get no response on the client side.
ResultController.js :
images.forEach(function(item) {
  GridFileService.getBuffer(item.id, function(res) {
    var blob = UtilsService.b64toBlob(res, item.mimetype); // Convert base64 to blob
    var blobURL = URL.createObjectURL(blob);
    $scope.resultsModel.originalImage.push(blobURL); // Store for display in image src tag
  });
});
GridFileService.js :
gridFileService.getBuffer = function(id, callback) {
  $http({
    method: 'GET',
    url: '/api/file_buffer/' + id
  }).then(function successCallback(response) {
    callback(response.data);
  }, function errorCallback(response) {
    AlertService.addAlert('danger', response.data);
  });
};
api.js :
app.get('/api/file_buffer/:id', routesFiles.getBufferFile);
routeFiles.js :
function getBufferFile(req, res, next) {
  var idFile = req.params.id;
  // Get buffer from GridFS with the file id
  gfs.readFile({ _id: idFile }, function(err, data) {
    if (err) {
      log.error('Error on get buffer file ', err);
      return res.status(500).send(err.message);
    } else {
      // Convert buffer to base64
      var base64 = new Buffer(data).toString('base64');
      return res.status(200).send(base64); // return the data to the client side
    }
  });
}
How can I get a quick response with the buffer of a big image?
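One way to avoid building a multi-megabyte base64 string in memory is to stream the file straight into the response. The sketch below is only an illustration, not taken from the original post; it assumes access to the native MongoDB driver's connected Db instance (called db here) and that the stored _id is a valid ObjectId:
var mongodb = require('mongodb');

function getBufferFileStream(req, res) {
  // Stream the GridFS file directly instead of buffering and base64-encoding it
  var bucket = new mongodb.GridFSBucket(db); // `db` is an assumed, already-connected Db instance
  bucket.openDownloadStream(new mongodb.ObjectId(req.params.id))
    .on('error', function(err) {
      res.status(500).send(err.message);
    })
    .pipe(res);
}
The client would then read the response as a blob (for example responseType: 'blob' with $http, or response.blob() with fetch) and pass it to URL.createObjectURL directly, instead of converting base64 to a blob first.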

Implementing callbacks in javascript

I have a function in my helper js file to upload files to S3:
module.exports.uploadToAWS = function uploadToAWS(folderName, fileName, fileData) {
  var s3 = new AWS.S3({ params: { Bucket: 'myBucket' } });
  var keyName = folderName + "/" + fileName;
  var buffer = new Buffer(fileData.replace(/^data:image\/\w+;base64,/, ""), 'base64');
  var data = {
    Key: keyName,
    Body: buffer,
    ContentEncoding: 'base64',
    ContentType: 'image/jpeg'
  };
  s3.putObject(data, function(err, data) {
    if (err) {
      console.log(err);
      console.log('Error uploading data: ', data);
    } else {
      console.log('successfully uploaded the image!');
    }
  });
}
My consumer function is like this:
if (images) {
  images.forEach(function(imageRecord) {
    awsHelper.uploadToAWS('assets', assetObject.id, imageRecord);
  });
}
I want to introduce a callback here, so that I can get the success or failure from my helper function back in my caller function. How could this be implemented?
I need my consumer function to look like the following, but what would the helper function look like?
if (images) {
  images.forEach(function(imageRecord) {
    awsHelper.uploadToAWS(
      'assets',
      assetObject.id,
      imageRecord,
      function(success, failure) {
        // handle success or failure
      });
  });
}
Why don't you simply add a callback to your uploadToAWS, like:
module.exports.uploadToAWS = function uploadToAWS(folderName, fileName, fileData, callback){
Then do a simple validation and call it (passing the response) when your upload completes or fails:
s3.putObject(data, function(err, data) {
  if (err) {
    console.log(err);
    console.log('Error uploading data: ', data);
    if (typeof callback === 'function') callback(err, data);
  } else {
    console.log('successfully uploaded the image!');
    if (typeof callback === 'function') callback(err, data);
  }
});
After this, you can use it exactly as you proposed:
if (images) {
  images.forEach(function(imageRecord) {
    awsHelper.uploadToAWS('assets', assetObject.id, imageRecord, function(err, data) {
      // handle success or failure
    });
  });
}
By rewriting it in the Promise style, you can do the following:
module.exports.uploadToAWSAsync =
  function uploadToAWSAsync(folderName, fileName, fileData) {
    return new Promise(function(resolve, reject) {
      var s3 = new AWS.S3({ params: { Bucket: 'myBucket' } });
      var keyName = folderName + "/" + fileName;
      var buffer = new Buffer(fileData.replace(/^data:image\/\w+;base64,/, ""), 'base64');
      var data = {
        Key: keyName,
        Body: buffer,
        ContentEncoding: 'base64',
        ContentType: 'image/jpeg'
      };
      s3.putObject(data, function(err, data) {
        if (err) {
          console.log(err);
          console.log('Error uploading data: ', data);
          reject(err);
        } else {
          console.log('successfully uploaded the image!');
          resolve();
        }
      });
    });
  }
You could rewrite your consumer code as follows:
// fire them all off in one go
var promises = images.map(function(imageRecord) {
  return awsHelper.uploadToAWSAsync('assets', assetObject.id, imageRecord);
});
Promise.all(promises).then(function() {
  // success, everything uploaded
}).catch(function(err) {
  // something went wrong
});
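Keep in mind that Promise.all rejects as soon as any single upload rejects, so the catch handler can fire while other uploads are still in flight; if you want every upload to settle regardless of individual failures, give each promise its own catch (or use Promise.allSettled on newer Node versions).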
You need to pass the callback function as an argument to the helper function. Try this:
Try this:
module.exports.uploadToAWS = function uploadToAWS(folderName, fileName, fileData, callback) {
  var s3 = new AWS.S3({ params: { Bucket: 'myBucket' } });
  var keyName = folderName + "/" + fileName;
  var buffer = new Buffer(fileData.replace(/^data:image\/\w+;base64,/, ""), 'base64');
  var data = {
    Key: keyName,
    Body: buffer,
    ContentEncoding: 'base64',
    ContentType: 'image/jpeg'
  };
  s3.putObject(data, function(err, data) {
    if (err) {
      callback(err);
    } else {
      callback(null, data);
    }
  });
}

Upload images to twitter API from node.js

I'm attempting to post an image to the Twitter API, v1.1.
I've tried just about all the examples out there, and nothing seems to be able to post it, including Posting images to twitter in Node.js using Oauth.
I'm using the oauth library mentioned there, and I also had jsOauth, which I thought I'd give a shot according to https://gist.github.com/lukaszkorecki/1038408.
Nothing has worked, and at this point I'm starting to lose hope on whether I can even do this.
function postStatusWithMedia(status, file) {
  var err = new Object();
  if (fs.existsSync(file) === false) {
    err.message = "File not found :(";
    parseTwitterError(err);
  } else {
    var oauth = OAuth(options = {
      "consumerKey": consumer_key,
      "consumerSecret": consumer_secret,
      "accessTokenKey": access_token,
      "accessTokenSecret": access_token_secret
    });
    callbacks = {
      onSuccess: function() {
        console.log('upload worked!')
      },
      onFailure: function() {
        console.log('upload failed!');
        console.dir(arguments);
      }
    },
    uploadData = {
      'status': status,
      'media': Base64.encode(fs.readFileSync(file))
    };
    oauth.post('https://api.twitter.com/1.1/statuses/update_with_media.json', uploadData, callbacks.onSuccess, callbacks.onFailure);
    return false;
  }
}
If it can't be done, can you please explain why?
Otherwise, anything that could point me in the right direction would be great.
var fs = require('fs');
var request = require('request');
var FormData = require('form-data');
var utf8 = require('utf8');

// Encode in UTF-8
status = utf8.encode(status);

var form = new FormData();
form.append('status', status);
form.append('media[]', fs.createReadStream(file));

// Twitter OAuth
form.getLength(function(err, length) {
  if (err) {
    return requestCallback(err);
  }
  var oauth = {
    consumer_key: consumer_key,
    consumer_secret: consumer_secret,
    token: access_token,
    token_secret: access_token_secret
  };
  var r = request.post({ url: "https://api.twitter.com/1.1/statuses/update_with_media.json", oauth: oauth, host: "api.twitter.com", protocol: "https:" }, requestCallback);
  r._form = form;
  r.setHeader('content-length', length);
});

function requestCallback(err, res, body) {
  if (err) {
    throw err;
  } else {
    console.log("Tweet and Image uploaded successfully!");
  }
}
I ended up using request and node-form-data to manually construct a multipart/form-data request and send it with the status update; utf8 was for encoding the status into UTF-8, since not doing so caused issues with '<3' and other characters.
I have not tested this code myself; it's from my colleague, but I'm sure the code works.
Perhaps this will help.
//twitter_update_with_media.js
(function() {
  var fs, path, request, twitter_update_with_media;
  fs = require('fs');
  path = require('path');
  request = require('request');
  twitter_update_with_media = (function() {
    function twitter_update_with_media(auth_settings) {
      this.auth_settings = auth_settings;
      this.api_url = 'https://api.twitter.com/1.1/statuses/update_with_media.json';
    }
    twitter_update_with_media.prototype.post = function(status, imageUrl, callback) {
      var form, r;
      r = request.post(this.api_url, {
        oauth: this.auth_settings
      }, callback);
      form = r.form();
      form.append('status', status);
      return form.append('media[]', request(imageUrl));
    };
    return twitter_update_with_media;
  })();
  module.exports = twitter_update_with_media;
}).call(this);
Next file:
//upload_to_twitter.js
var tuwm = new twitter_update_with_media({
  consumer_key: TWITTER_OAUTH_KEY,
  consumer_secret: TWITTER_OAUTH_SECRET,
  token: access[0],
  token_secret: access[1]
});
media_picture.picture = imageURL;
if (media_picture.picture) {
  console.log('with media upload');
  request.head(media_picture.picture,
    function(error, response, body) {
      if (!error && response.statusCode == 200) {
        var image_size = response.headers['content-length'];
        if (image_size > 2000000) { // 2mb max upload limit
          console.log('greater than 2mb');
          sendMessageWithoutImage(err, req, res, next, twit, wallpost, access);
        } else {
          console.log('less than 2mb');
          console.log('twitter text', content);
          tuwm.post(content, media_picture.picture, function(err, response) {
            if (err) {
              console.log('error', err);
              return next(err);
            }
            error_parse = JSON.parse(response.body);
            console.log('with media response', response.body);
            if (error_parse.errors) {
              console.log('have errors', error_parse);
              res.json({
                status: 500,
                info: error_parse.errors[0].code + ' ' + error_parse.errors[0].message
              });
            } else {
              res.json({
                status: 200,
                info: "OK",
                id: response.id
              });
            }
          });
        }
      }
    });
}
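For what it's worth, Twitter later deprecated statuses/update_with_media in favor of a two-step flow: upload the image to the media/upload endpoint, then reference the returned media_id in a normal statuses/update call. A rough sketch with the same request library (endpoints per Twitter's v1.1 docs, not taken from the answers above; oauth, file and status are the same variables used in the earlier answer):
// Sketch only: upload the image first, then attach its media_id to the tweet.
request.post({
  url: 'https://upload.twitter.com/1.1/media/upload.json',
  oauth: oauth,
  formData: { media: fs.createReadStream(file) }
}, function(err, response, body) {
  if (err) throw err;
  var mediaId = JSON.parse(body).media_id_string;
  request.post({
    url: 'https://api.twitter.com/1.1/statuses/update.json',
    oauth: oauth,
    form: { status: status, media_ids: mediaId }
  }, function(err, response, body) {
    if (err) throw err;
    console.log('Tweet posted with media');
  });
});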
