Using node-thumbnail to generate a thumbnail from an image - javascript

I am trying to generate a thumbnail from an image using node-thumbnail. The thumbnail is uploaded to my container in Azure Storage, but it looks like the original file, not like a thumbnail. Here's my code: first I upload the original image, then I read it back, generate a thumbnail from it, and upload the thumbnail to the container. What am I doing wrong? I couldn't find many resources online on how to do this, please help!
app.post('/upload', function(req, res) {
    if (!req.files)
        return res.status(400).send('No files were uploaded.');
    // The name of the input field (i.e. "sampleFile") is used to retrieve the uploaded file
    let sampleFile = req.files.sampleFile;
    let name = sampleFile.name;
    let data = sampleFile.data;
    //var options = { contentSettings: { contentType: 'image/jpeg' } }
    blobSvc.createBlockBlobFromText('test-container', name, data, function(error, result, response) {
        if (error) {
            return res.status(500).send(error);
        } else {
            console.log('Uploaded to container');
        }
        var info = blobSvc.getBlobToLocalFile("test-container", name, name,
            function(error, blockBlob, response) {
                thumb({
                    source: name, // could be a filename: dest/path/image.jpg
                    destination: './',
                    concurrency: 4,
                    width: 100
                }, function(files, err) {
                    if (err) throw err;
                    console.log("resized");
                    //Delete the downloaded BIG one
                    //Upload the thumbnail
                    blobSvc.createBlockBlobFromLocalFile("test-container", files[0].dstPath, files[0].dstPath,
                        function(error, blockBlob, response) {
                            if (!error) {
                                console.log("thumbnail uploaded: " + name);
                            } else {
                                console.log(error);
                            }
                        });
                });
            });
    });
});

This isn't really an Azure Storage issue; it's more of a node-thumbnail issue.
How about using Jimp instead:
var azure = require('azure-storage');
var Jimp = require("jimp");
var path = require('path');
// ...
var info = blobSvc.getBlobToLocalFile("test-container", name, name, function(error, blockBlob, response) {
    if (!error) {
        var dstName = path.parse(name).name + "_thumb" + path.parse(name).ext;
        Jimp.read(name, function(err, image) {
            if (err) throw err;
            image.resize(100, Jimp.AUTO) // resize to 100px wide, preserving aspect ratio
                .quality(60)             // set JPEG quality
                .write(dstName, function(err, ret) { // save to disk
                    //Upload the thumbnail
                    blobSvc.createBlockBlobFromLocalFile("test-container", dstName, dstName, function(error, blockBlob, response) {
                        if (!error) {
                            console.log("thumbnail uploaded: " + dstName);
                        } else {
                            console.log(error);
                        }
                    });
                });
        });
    }
});
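As a side note, Jimp.read also returns a promise when called without a callback, so the same flow can be written without the nesting. A sketch, assuming a Jimp version that provides writeAsync and the same blobSvc, name, and dstName as above:
Jimp.read(name)
    .then(function(image) {
        // resize to 100px wide (auto height), set JPEG quality, save to disk
        return image.resize(100, Jimp.AUTO).quality(60).writeAsync(dstName);
    })
    .then(function() {
        // upload the saved thumbnail to the container
        blobSvc.createBlockBlobFromLocalFile("test-container", dstName, dstName,
            function(error, blockBlob, response) {
                if (error) console.log(error);
                else console.log("thumbnail uploaded: " + dstName);
            });
    })
    .catch(function(err) {
        console.log(err);
    });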

Related

download csv to browser downloads node.js

I have this code:
var fs = require("fs");
var fastcsv = require("fast-csv");
var downloadData = "select * from PRODUCTS";
ibmdb.open(req.session.ibmdbconnDash, function (err, conn) {
    if (err) return console.log(err);
    conn.query(downloadData, function (err, rows) {
        if (err) {
            console.log(err);
        }
        const ws = fs.createWriteStream("productsDownloaded.csv");
        const jsonData = JSON.parse(JSON.stringify(rows));
        console.log("jsonData", jsonData);
        fastcsv
            .write(jsonData, { headers: true })
            .on("finish", function () {
                console.log("Write to productsDownloaded.csv successfully!");
            })
            .pipe(ws);
        var value = true;
        res.render("edit-products", {
            page_title: "edit-products",
            data: rows,
            userName: req.session.username,
            FN: req.session.firstname,
            LN: req.session.lastname,
            CO: req.session.company,
            value: value,
        });
        conn.close(function () {
            console.log("closing function p1 of delete product");
        });
    });
});
However, the file doesn't go anywhere useful. When I'm testing locally it ends up in my VS Code directory, but I want the browser to download it. Do I have to send something to the front end? If I press the download button and trigger this function, the file should download to the user's downloads directory. How can I achieve this?
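For what it's worth, fs.createWriteStream only ever writes to the server's own disk; to get the file into the user's downloads the server has to send it in an HTTP response with a Content-Disposition: attachment header. A minimal sketch, assuming an Express app (the route name is hypothetical) and that the CSV has already been written by the code above:
// Hypothetical Express route; res.download() sets
// Content-Disposition: attachment, which makes the browser
// save the file instead of rendering it.
app.get("/download-products", function (req, res) {
    res.download("productsDownloaded.csv", "products.csv", function (err) {
        if (err) console.log("download failed:", err);
    });
});
On the front end, pointing the download button at that route (e.g. window.location = '/download-products') is enough; no AJAX is needed.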

nodejs Promise.all() not working after promise process

I want to download images, so I used Promises.
My code snippet looks like this:
var download = function(uri, filename, callback) {
    request.head(uri, function(err, res, body) {
        /* console.log('content-type:', res.headers['content-type']);
        console.log('content-length:', res.headers['content-length']); */
        request(uri).pipe(fs.createWriteStream(filename + '.' + res.headers['content-type'].split('/')[1])).on('close', callback);
    });
};
var imagePaths = [];
for (const imageURL of imageURLs) {
    imagePaths.push(new Promise((resolve, reject) => {
        let filename = __dirname + '/../../../downloads/' + naming(6);
        download(imageURL, filename, function(data, err) {
            console.log("download image successful");
        });
    }));
}
Promise.all(imagePaths).then(() => {
    console.log("11");
    console.log(imagePaths);
});
My output looks like this:
download image successful
download image successful
download image successful
But this block never runs:
Promise.all(imagePaths).then(() => {
    console.log("11");
    console.log(imagePaths);
});
Why am I not getting imagePaths? How can I solve this?
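The likely culprit: the executor passed to new Promise never calls resolve or reject, so every promise in imagePaths stays pending forever and Promise.all never settles. A minimal sketch of the fix under that assumption, resolving from the download callback (naming is the helper from the original snippet):
var imagePaths = [];
for (const imageURL of imageURLs) {
    imagePaths.push(new Promise((resolve, reject) => {
        let filename = __dirname + '/../../../downloads/' + naming(6);
        download(imageURL, filename, function() {
            console.log("download image successful");
            resolve(filename); // settle the promise so Promise.all can proceed
        });
    }));
}
Promise.all(imagePaths).then((paths) => {
    console.log(paths); // the resolved filenames, in order
});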

How to return a large base64 image to the client side with GridFS?

I store images with GridFS on MongoDB and I want to display these images on the client side. The code works for small images, but when I want to display images bigger than 5 MB, nothing comes back to the client side.
ResultController.js :
images.forEach(function(item) {
    GridFileService.getBuffer(item.id, function(res) {
        var blob = UtilsService.b64toBlob(res, item.mimetype); //Convert base64 to blob
        var blobURL = URL.createObjectURL(blob);
        $scope.resultsModel.originalImage.push(blobURL); //Store for display in image src tag
    });
});
GridFileService.js :
gridFileService.getBuffer = function(id, callback) {
    $http({
        method: 'GET',
        url: '/api/file_buffer/' + id
    }).then(function successCallback(response) {
        callback(response.data);
    }, function errorCallback(response) {
        AlertService.addAlert('danger', response.data);
    });
};
api.js :
app.get('/api/file_buffer/:id', routesFiles.getBufferFile);
routeFiles.js :
function getBufferFile(req, res, next) {
    var idFile = req.params.id;
    //Get buffer from GridFS with the file id
    gfs.readFile({_id: idFile}, function (err, data) {
        if (err) {
            log.error('Error on get buffer file ', err);
            return res.status(500).send(err.message);
        } else {
            //Convert buffer to base64
            var base64 = new Buffer(data).toString('base64');
            return res.status(200).send(base64); // return the data to the client side
        }
    });
}
What can I do to get the buffer of a big image back to the client quickly?
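One likely fix (a sketch, not the original code): base64-encoding a 5 MB image inflates it by roughly a third and buffers the whole thing in memory before anything is sent. Assuming gfs comes from gridfs-stream (which exposes createReadStream), the route can stream the raw bytes instead:
// Hypothetical streaming variant, assuming a gridfs-stream-style API
// (gfs.createReadStream) rather than gfs.readFile.
function getBufferFile(req, res, next) {
    var readStream = gfs.createReadStream({ _id: req.params.id });
    readStream.on('error', function (err) {
        log.error('Error on get buffer file ', err);
        res.status(500).send(err.message);
    });
    res.set('Content-Type', 'application/octet-stream');
    readStream.pipe(res); // send raw bytes; no base64 inflation, no full buffering
}
The client would then read the response as a Blob (e.g. responseType: 'blob' in $http) instead of converting base64 to a Blob by hand.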

Uploading files using Skipper with Sails.js v0.10 - how to retrieve new file name

I am upgrading to Sails.js version 0.10 and now need to use Skipper to manage my file uploads.
When I upload a file I generate a new name for it using a UUID and save it in the public/files/ folder (this will change once I've got this all working, but it's fine for testing right now).
I save the original name, and the uploaded name + path, into a Mongo database.
This was all quite straightforward under Sails v0.9.x, but using Skipper I can't figure out how to read the new file name and path. (Obviously if I could read the name I could construct the path, so it's really only the name I need.)
My controller looks like this:
var uuid = require('node-uuid'),
    path = require('path'),
    blobAdapter = require('skipper-disk');

module.exports = {
    upload: function(req, res) {
        var receiver = blobAdapter().receive({
                dirname: sails.config.appPath + "/public/files/",
                saveAs: function(file) {
                    var filename = file.filename,
                        newName = uuid.v4() + path.extname(filename);
                    return newName;
                }
            }),
            results = [];
        req.file('docs').upload(receiver, function (err, files) {
            if (err) return res.serverError(err);
            async.forEach(files, function(file, next) {
                Document.create({
                    name: file.filename,
                    size: file.size,
                    localName: // ***** how do I get the `saveAs()` value from the uploaded file *****,
                    path: // *** and likewise how do I get the path ******
                }).exec(function(err, savedFile) {
                    if (err) {
                        next(err);
                    } else {
                        results.push({
                            id: savedFile.id,
                            url: '/files/' + savedFile.localName
                        });
                        next();
                    }
                });
            }, function(err) {
                if (err) {
                    sails.log.error('caught error', err);
                    return res.serverError({error: err});
                } else {
                    return res.json({ files: results });
                }
            });
        });
    },
    _config: {}
};
How do I do this?
I've worked this out now and thought I'd share my solution for the benefit of others struggling with similar issues.
The solution was to not use skipper-disk at all but to write my own custom receiver. I've created this as a Sails Service object.
So in file api/services/Uploader.js
// Uploader utilities and helper methods
// designed to be relatively generic.
var fs = require('fs'),
    Writable = require('stream').Writable;

exports.documentReceiverStream = function(options) {
    var defaults = {
        dirname: '/dev/null',
        saveAs: function(file) {
            return file.filename;
        },
        completed: function(file, done) {
            done();
        }
    };
    // I don't have access to jQuery here so this is the simplest way I
    // could think of to merge the options.
    var opts = defaults;
    if (options.dirname) opts.dirname = options.dirname;
    if (options.saveAs) opts.saveAs = options.saveAs;
    if (options.completed) opts.completed = options.completed;

    var documentReceiver = Writable({objectMode: true});

    // This `_write` method is invoked each time a new file is received
    // from the Readable stream (Upstream) which is pumping filestreams
    // into this receiver. (filename === `file.filename`).
    documentReceiver._write = function onFile(file, encoding, done) {
        var newFilename = opts.saveAs(file),
            fileSavePath = opts.dirname + newFilename,
            outputs = fs.createWriteStream(fileSavePath, encoding);
        file.pipe(outputs);

        // Garbage-collect the bytes that were already written for this file.
        // (called when a read or write error occurs)
        function gc(err) {
            sails.log.debug("Garbage collecting file '" + file.filename + "' located at '" + fileSavePath + "'");
            fs.unlink(fileSavePath, function (gcErr) {
                if (gcErr) {
                    return done([err].concat([gcErr]));
                } else {
                    return done(err);
                }
            });
        }

        file.on('error', function (err) {
            sails.log.error('READ error on file ' + file.filename, '::', err);
        });
        outputs.on('error', function failedToWriteFile (err) {
            sails.log.error('failed to write file', file.filename, 'with encoding', encoding, ': done =', done);
            gc(err);
        });
        outputs.on('finish', function successfullyWroteFile () {
            sails.log.debug("file uploaded");
            opts.completed({
                name: file.filename,
                size: file.size,
                localName: newFilename,
                path: fileSavePath
            }, done);
        });
    };
    return documentReceiver;
};
And then my controller just became this (in api/controllers/DocumentController.js):
var uuid = require('node-uuid'),
    path = require('path');

module.exports = {
    upload: function(req, res) {
        var results = [],
            streamOptions = {
                dirname: sails.config.appPath + "/public/files/",
                saveAs: function(file) {
                    var filename = file.filename,
                        newName = uuid.v4() + path.extname(filename);
                    return newName;
                },
                completed: function(fileData, next) {
                    Document.create(fileData).exec(function(err, savedFile) {
                        if (err) {
                            next(err);
                        } else {
                            results.push({
                                id: savedFile.id,
                                url: '/files/' + savedFile.localName
                            });
                            next();
                        }
                    });
                }
            };
        req.file('docs').upload(Uploader.documentReceiverStream(streamOptions),
            function (err, files) {
                if (err) return res.serverError(err);
                res.json({
                    message: files.length + ' file(s) uploaded successfully!',
                    files: results
                });
            }
        );
    },
    _config: {}
};
I'm sure it can be improved further but this works perfectly for me.
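One concrete improvement along those lines, since the service comments on having no jQuery to merge options: Object.assign (available since Node 4) shallow-merges objects, so the three if checks in Uploader.js could become (a sketch, not part of the original answer):
// Shallow-merge caller options over the defaults; the empty target
// also avoids mutating the shared defaults object between calls.
var opts = Object.assign({}, defaults, options);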
The uploaded file object contains all data you need:
req.file('fileTest').upload({
    // You can apply a file upload limit (in bytes)
    maxBytes: maxUpload,
    adapter: require('skipper-disk')
}, function whenDone(err, uploadedFiles) {
    if (err) {
        var error = { "status": 500, "error": err };
        res.status(500);
        return res.json(error);
    } else {
        for (var u in uploadedFiles) {
            // "fd" contains the actual file path (and name) of your file on disk
            fileOnDisk = uploadedFiles[u].fd;
            // I suggest you stringify the object to see what it contains and might be useful to you
            console.log(JSON.stringify(uploadedFiles[u]));
        }
    }
});
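Tying that back to the original question: with skipper-disk, fd is the absolute path the file was saved under, so the saveAs() result can be recovered from it with Node's path module (a sketch under that assumption):
var path = require('path');
// fd is the absolute on-disk path skipper-disk wrote the file to;
// its basename is the filename produced by saveAs().
var localName = path.basename(uploadedFiles[u].fd);
var publicUrl = '/files/' + localName;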

How to upload a file to Dropbox with dropbox.js?

ORIGINAL
I'm having problems uploading a file (an image) to Dropbox from Node.js using the official dropbox.js.
I want to upload a picture that I have on another server, for example the Dropbox icon (www.dropbox.com/static/images/new_logo.png).
client.writeFile(file, 'www.dropbox.com/static/images/new_logo.png', function(error, stat) {
    if (error) {
        return res.send(error.status); // Something went wrong.
    }
    res.send("File saved as revision " + stat.revisionTag);
});
I know that this only creates a text file containing the URL, but how can I upload the actual picture to Dropbox?
I also tried downloading the file using http.get and then uploading it to Dropbox, but that doesn't work.
Thanks.
UPDATE WITH MORE INFO
First I download the image from a remote url with this code:
var request = http.get(options, function(res) {
    var imagedata = ''
    res.setEncoding('binary')
    res.on('data', function(chunk) {
        imagedata += chunk
    })
    res.on('end', function() {
        console.log("Image downloaded!")
        fs.writeFile(local, imagedata, 'binary', function(err) {
            if (err) throw err
            console.log('File saved.')
        })
    })
})
The file is saved correctly.
Then I tried two things:
Sending the 'imagedata' to Dropbox:
console.log("Image downloaded!");
client.writeFile(file, imagedata, function(error, stat) {
if (error) {
return response.send(error.status); // Something went wrong.
}
response.send("File saved as revision " + stat.revisionTag);
});
Something is uploaded to Dropbox, but it's nothing useful.
Then I also tried to read the file from disk and send it to Dropbox, but that doesn't work either:
fs.readFile(file, function(err, data) {
Use dropbox-js 0.9.1-beta1 or above to upload binary files from node.js. You need to pass it Buffer or ArrayBuffer instances. Try this code:
var req = http.get(options, function(res) {
    var chunks = [];
    res.on('data', function(chunk) {
        chunks.push(chunk);
    });
    res.on('end', function() {
        console.log("Image downloaded!");
        var imageData = Buffer.concat(chunks);
        client.writeFile(file, imageData, function(error, stat) {
            if (error) {
                return response.send(error.status);
            }
            response.send("File saved as revision " + stat.revisionTag);
        });
    });
});
Original answer: the dropbox-js README mentions that binary files don't work in node.js just yet.
I had this issue as well. I copied and modified a bit of the old dropbox-node npm package (which is now deprecated), adding the following function to dropbox.js:
Client.prototype.writeFileNodejs = function(path, data, callback) {
    var self = this;
    fs.readFile(data.path, function(err, data) {
        if (err) return callback(err);
        var uri = "" + self.urls.putFile + "/" + (self.urlEncodePath(path));
        if (typeof data === 'function') callback = data, data = undefined;
        var oauth = {
            consumer_key: self.oauth.key,
            consumer_secret: self.oauth.secret,
            token: self.oauth.token,
            token_secret: self.oauth.tokenSecret
        };
        var requestOptions = { uri: uri, oauth: oauth };
        requestOptions.body = data;
        return request['put'](requestOptions, callback ?
            function(err, res, body) {
                if (err) return callback(err);
                var contentType = res.headers['content-type'];
                // check if the response body is in JSON format
                if (contentType === 'application/json' ||
                    contentType === 'text/javascript') {
                    body = JSON.parse(body);
                    if (body.error) {
                        var err = new Error(body.error);
                        err.statusCode = res.statusCode;
                        return callback(err);
                    }
                } else if (errors[res.statusCode]) {
                    var err = new Error(errors[res.statusCode]);
                    err.statusCode = res.statusCode;
                    return callback(err);
                }
                // check for metadata in headers
                if (res.headers['x-dropbox-metadata']) {
                    var metadata = JSON.parse(res.headers['x-dropbox-metadata']);
                }
                callback(null, body, metadata);
            } : undefined);
    });
};
You'll also need to require request and fs for this:
var request = require('request'),
fs = require('fs');
