I want to download images, so I used Promises.
My code snippet looks like this:
var download = function(uri, filename, callback) {
request.head(uri, function(err, res, body) {
/* console.log('content-type:', res.headers['content-type']);
console.log('content-length:', res.headers['content-length']); */
request(uri).pipe(fs.createWriteStream(filename + '.' + res.headers['content-type'].split('/')[1])).on('close', callback);
});
};
var imagePaths = [];
for (const imageURL of imageURLs) {
imagePaths.push(new Promise((resolve, reject) => {
let filename = __dirname + '/../../../downloads/' + naming(6)
download(imageURL, filename, function(data, err) {
console.log("download image successful")
});
}))
}
Promise.all(imagePaths).then(() => {
console.log("11")
console.log(imagePaths)
});
And my output looks like this:
download image successful
download image successful
download image successful
And this block:
Promise.all(imagePaths).then(() => {
console.log("11")
console.log(imagePaths)
});
is not working.
Why am I not getting imagePaths? How can I solve this?
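For reference, here is a minimal sketch (not the original code) of how the promises could be wired up so that Promise.all has something to wait for; it reuses the download helper and naming function from the snippet above and resolves each promise with the generated filename:
var imagePaths = [];
for (const imageURL of imageURLs) {
  imagePaths.push(new Promise((resolve, reject) => {
    let filename = __dirname + '/../../../downloads/' + naming(6);
    download(imageURL, filename, function() {
      console.log("download image successful");
      // resolve with the path so Promise.all can report it
      // (note: download() appends the file extension, so this is the base name only)
      resolve(filename);
    });
  }));
}
Promise.all(imagePaths).then((paths) => {
  // paths holds the resolved filenames, in the same order as imageURLs
  console.log(paths);
});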
I am very new to development-related work. PLEASE HELP
I am trying to upload 10 images taken from the user (handled via MULTER) to the MongoDB database, but before the final upload I want to compress the images using SHARP.
I tried doing this using callbacks. But FAILED.
This is what I want to do:
use MULTER to upload an array of 10 images to the UPLOADS/IMAGES directory.
FOR EACH file in the array use SHARP to compress the file and store the new compressed .jpeg file to UPLOADS/COMPRESSED directory.
THEN
use fsPromises.readFile to read the newly compressed image in the UPLOADS/COMPRESSED directory.
THEN
make an object: const toInsertImgData = { data: result, contentType: "image/jpeg" };
and push it into an array called imgArray. Here result is the binary data read in the previous step.
THEN
use fsPromises.unlink to remove all files in UPLOADS/IMAGES and UPLOADS/COMPRESSED
THEN
Use the imgArray to make a document to save in the posts collection of the Database.
Right now the imgArray is empty every time I want to use it at the end. I know that PROMISES or ASYNC/AWAIT can help, but I am not sure how to implement them.
Please help.
THANK YOU IF YOU HAVE READ THIS FAR
Here is my code:
const promises = [];
app.post("/compose/:id", upload.array("image", 10), (req, res) => {
const id = req.params.id;
const imgArray = [];
const caption = req.body.caption;
const now = new Date();
req.files.forEach((file) => {
const compressedImgPath = __dirname + "/public/uploads/compressed/" + now.getDate() + "-" + (now.getMonth() + 1) + "-" + now.getFullYear() + "-" + now.getTime() + ".jpeg";
sharp(file.path)
.resize(640, 480)
.jpeg({
quality: 80,
chromaSubsampling: "4:4:4",
})
.toFile(compressedImgPath)
.then(() => {
fsPromises.readFile(compressedImgPath)
.then((result) => {
const toInsertImgData = {
data: result,
contentType: "image/jpeg",
};
imgArray.push(toInsertImgData);
})
.then(() => {
promises.push(fsPromises.unlink(compressedImgPath));
promises.push(fsPromises.unlink(file.path));
})
.catch((err) => {
console.log(err);
});
});
});
Promise.all(promises)
.then(() => {
User.findById(id, (err, result) => {
if (!err) {
if (imgArray.length > 0) {
console.log("found user:" + id);
const newPost = new Post({
uId: id,
userName: result.name,
timeStamp: "5th August, 2020 at 2:10PM",
caption: caption,
img: imgArray,
});
newPost.save((err) => {
if (!err) {
console.log("post saved in DB");
res.redirect("/users/" + id.toString());
} else {
console.log(err);
}
});
} else {
console.log("array is empty");
}
}
});
})
.catch((err) => {
console.log(err);
});
});
Inside the forEach you kick off asynchronous calls, which means none of the .then() callbacks are guaranteed to have run before forEach finishes, so the promises array can still be empty (or only partially filled) when Promise.all is called.
One easy fix is to do the fs.promises cleanup inside the .then() chain instead of pushing those calls into promises afterwards, and to make sure the array you hand to Promise.all is fully populated before calling it.
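One way to make that concrete, as a rough sketch only (untested, reusing upload, sharp, fsPromises and imgArray from the question): collect the whole per-file promise chain inside the loop and only then call Promise.all, so nothing runs against a half-filled array.
app.post("/compose/:id", upload.array("image", 10), (req, res) => {
  const id = req.params.id;
  const imgArray = [];
  // one promise per uploaded file: compress, read back, collect, clean up
  const fileJobs = req.files.map((file) => {
    // simplified unique name; the question builds it from the current date
    const compressedImgPath = __dirname + "/public/uploads/compressed/" + Date.now() + ".jpeg";
    return sharp(file.path)
      .resize(640, 480)
      .jpeg({ quality: 80, chromaSubsampling: "4:4:4" })
      .toFile(compressedImgPath)
      .then(() => fsPromises.readFile(compressedImgPath))
      .then((result) => {
        imgArray.push({ data: result, contentType: "image/jpeg" });
      })
      .then(() => Promise.all([
        fsPromises.unlink(compressedImgPath),
        fsPromises.unlink(file.path),
      ]));
  });
  // runs only after every file has been compressed, read and unlinked
  Promise.all(fileJobs)
    .then(() => {
      // imgArray is now fully populated; build and save the Post here as in the question
    })
    .catch((err) => console.log(err));
});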
Below is a very similar use case that might help solve the question above. I have provided the full API route code to make it easier to follow.
var fs = require('fs');
var path = require('path');
const sharp = require('sharp');
const multer = require('multer');
var storage = multer.diskStorage({
destination: (req, file, cb) => {
cb(null, './public/uploads')
},
filename: (req, file, cb) => {
cb(null, file.fieldname + '-' + Date.now())
}
});
var upload = multer({ storage: storage });
app.post('/imageupload', upload.single('image'), async (req, res, next) => {
console.log("Image post request by " + req.user.username);
await sharp('./public/uploads/' + req.file.filename).resize(150, 150)
.rotate()
.png({ quality: 100 }).toFile('./public/uploads/' + req.file.filename + '-thumb');
Member.findOneAndUpdate({ sabhe_id: req.body.sabhe_id },
{
img: {
data: fs.readFileSync(path.join('./public/uploads/' + req.file.filename + '-thumb')),
contentType: 'image/png'
}
}
, function (err) {
if (err) {
console.log(err.message);
}
else {
console.log("Image saved to database " + req.user.username);
fs.unlink('./public/uploads/' + req.file.filename, (err) => {
if (err) {
console.error(err.message);
return
}
});
fs.unlink('./public/uploads/' + req.file.filename + '-thumb', (err) => {
if (err) {
console.error(err.message)
return
}
});
if (req.user.isAdmin) {
res.redirect("/admin")
} else {
res.redirect("/profile");
}
}
}
)
});
I am trying to upload an image from a URL to my Google Cloud Storage (Firebase). The following function should return the file, and a subsequent function will retrieve the actual Signed/Download URL for the new file. After all this I update a document in my Firestore Database with the new URL. That part works: the functions wait for the (unfortunately incomplete) image to upload and my document gets updated with the newly created file URL. But the actual file/image is incomplete. :-(
async function saveToStorage(fileUrl) {
var storage = admin.storage();
var urlLib = require("url");
var pathLib = require("path");
//Get File Name from provided URL
var parsed = urlLib.parse(fileUrl);
var fileName = pathLib.basename(parsed.pathname);
//Create Storage Reference with new File Name
var bucket = storage.bucket('gs://myprojectname.appspot.com');
//Path Folder
var folderPath = 'data/photos/';
//Path Folder + File
var internalFilePath = folderPath + fileName ;
//Bucket File Ref
var file = bucket.file(internalFilePath);
const request = require('request');
const writeStream = file.createWriteStream({
metadata: {
contentType: 'image/jpg'
}
});
return new Promise((resolve, reject) => {
request.get(fileUrl)
.pipe(writeStream)
.on("error", (err) => {
console.error(`Error occurred`);
reject();
})
.on('finish', () => {
console.info(`Photo saved`);
resolve(file);
});
});
}
The image that is saved/uploaded/streamed to my Cloud Storage ends up incomplete (screenshot of the partially rendered image omitted here).
I have tried using node-fetch and request and rewrote my function in several ways, but it always turns out like this. I'm sure it has something to do with how I use my Promise, because if I omit the Promise the file actually completes, but then the main code keeps executing instead of waiting for this Promise.
This has the same result (incomplete file):
return await fetch(fileUrl).then(res => {
const contentType = res.headers.get('content-type');
const writeStream = file.createWriteStream({
metadata: {
contentType
}
});
let p = new Promise((resolve, reject) => {
res.body.pipe(writeStream);
writeStream.on('finish', function() {
console.log("Stream finished")
resolve(file);
});
writeStream.on('error', function() {
reject(new Error("Whoops!"));
});
});
return p.then(
function(file) {
console.log('Photo saved');
return file},
function(error) {
console.error(error);
return;
});
});
And outright returning the stream writes a complete file, but my main code does not wait for the file (and I need to handle the file).
return res.body.pipe(writeStream)
.on('finish', () => {
return file;
console.log('Photo')
})
.on('error', err => {
return console.error(err);
});
Thanks for any help on this!
So this is the code that finally worked for me.
return new Promise((resolve, reject) => {
const req = request(fileUrl);
// pause the response stream until the pipe below is set up, so no data is dropped
req.pause();
req.on('response', res => {
const writeStream = file.createWriteStream({
metadata: {
contentType: res.headers['content-type']
}
});
req.pipe(writeStream)
.on('finish', () => {
console.log('Photo saved');
resolve(file);
})
.on('error', err => {
writeStream.end();
console.error(err);
reject();
});
// everything is wired up; let the data flow
req.resume();
});
req.on('error', err => console.error(err));
});
I am trying to generate a thumbnail from an image using node-thumbnail. The thumbnail is uploaded to my container in Azure Storage, but it looks like the original file, not like a thumbnail. Here's my code: first I upload the original image, then I read it back and generate a thumbnail from it, then upload the thumbnail to the container. What am I doing wrong? I couldn't find many resources online on how to do this, please help!
app.post('/upload', function(req, res) {
if (!req.files)
return res.status(400).send('No files were uploaded.');
// The name of the input field (i.e. "sampleFile") is used to retrieve the uploaded file
let sampleFile = req.files.sampleFile;
let name = sampleFile.name;
let data = sampleFile.data;
//var options = { contentSettings: { contentType: 'image/jpeg' } }
blobSvc.createBlockBlobFromText('test-container', name, data, function(error, result, response){
if (error){
return res.status(500).send(error);
} else {
console.log('Uploaded to container');
}
var info = blobSvc.getBlobToLocalFile ("test-container", name, name,
function (error, blockBlob, response) {
thumb({
source: name, // could be a filename: dest/path/image.jpg
destination: './',
concurrency: 4,
width: 100
}, function(files, err){
if (err) throw err;
console.log("resized");
//Delete the downloaded BIG one
//Upload the thumbnail
blobSvc.createBlockBlobFromLocalFile("test-container", files[0].dstPath, files[0].dstPath,
function (error, blockBlob, response) {
if (!error) {
console.log("thumbnail uploaded: " + name);
} else{
console.log(error);
}
});
});
});
});
});
This isn't really an Azure Storage issue, it's more of a node-thumbnail issue.
How about using Jimp:
var azure = require('azure-storage');
var Jimp = require("jimp");
var path = require('path');
// ...
var info = blobSvc.getBlobToLocalFile("test-container", name, name, function(error, blockBlob, response) {
if (!error) {
var dstName = path.parse(name).name + "_thumb" + path.parse(name).ext;
Jimp.read(name, function(err, image) {
if (err) throw err;
image.resize(100, Jimp.AUTO) // resize
.quality(60) // set JPEG quality
.write(dstName, function(err, ret) { // save
//Upload the thumbnail
blobSvc.createBlockBlobFromLocalFile("test-container", dstName, dstName, function(error, blockBlob, response) {
if (!error) {
console.log("thumbnail uploaded: " + dstName);
} else {
console.log(error);
}
});
});
});
}
});
I have a program where the user first creates a file; once the file is created, I append data to it that is continuously coming from the client. The code below works as expected. I am new to Node.js, so I just want an expert opinion: when multiple users are creating and recording files on their machines at the same time, will this work asynchronously, or do I need to make some changes to the code?
io.js
socket.on('createlogfile', function() {
logsRecording.userLogs(function(filename) {
socket.emit('filename', filename);
});
});
socket.on('startrecording', function(obj) {
logsRecording.recordLogs(obj);
});
server.js
userLogs: function (callback) {
var filename = uuid.v4() + '.log';
var file = filePath + '/' + filename;
fs.openSync(file, 'a',function () {
console.log('file created');
});
console.log('userLogs');
callback(filename);
},
recordLogs: function (obj) {
var dir = './app/records/templogs'
var fileAppend = dir + '/'+ obj.file;
console.log('data from recording', obj.data);
fs.readdir(dir, function(err, items) {
items.forEach(function(file){
if(obj.file === file){
fs.appendFile(fileAppend, obj.data+ "\r\n", null, 'utf8', function (err) {
if (err) throw err;
});
console.log('filename in records',obj.file);
}
});
});
}
You are using fs.openSync, which is synchronous and as such can block the event loop.
You should be using fs.open and callback inside it:
userLogs: function (callback) {
var filename = uuid.v4() + '.log';
var file = filePath + '/' + filename;
fs.open(file, 'a', function (err) {
console.log('file created');
console.log('userLogs');
callback(err, filename);
});
},
And you can flatten recordLogs using async.
Also, it is bad practice to throw an error inside an asynchronous callback; you should pass the error to the callback instead.
As a last tip, Array.forEach is synchronous and can hold up the process; you should be using async.each.
// note: this uses the async library (e.g. var async = require('async');)
recordLogs: function (obj, callback) {
var dir = './app/records/templogs'
var fileAppend = dir + '/'+ obj.file;
console.log('data from recording', obj.data);
async.waterfall([
(callback) => {
fs.readdir(dir, (err, items) => {
callback(err, items);
});
},
(items, callback) => {
async.each(items, (file, callback) => {
if(obj.file === file) {
fs.appendFile(fileAppend, obj.data+ "\r\n", null, 'utf8', function (err) {
callback(err);
});
console.log('filename in records',obj.file);
} else {
callback();
}
}, (err) => {
callback(err);
});
}
], (err, file) => {
if(callback) {
callback(err);
}
});
}
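Since recordLogs now takes an optional callback, the socket handler in io.js could (as a small sketch) pass one to surface append errors:
socket.on('startrecording', function(obj) {
  logsRecording.recordLogs(obj, function(err) {
    if (err) {
      console.error('failed to append log data', err);
    }
  });
});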
ORIGINAL
I'm having problems uploading a file (image) to Dropbox from Node.js using the official dropbox.js.
I want to upload a picture that I have on another server, for example the Dropbox icon (www.dropbox.com/static/images/new_logo.png).
client.writeFile(file, 'www.dropbox.com/static/images/new_logo.png', function(error, stat) {
if (error) {
return res.send(error.status); // Something went wrong.
}
res.send("File saved as revision " + stat.revisionTag);
});
I know that this only creates a text file containing the URL, but how can I upload the actual picture to Dropbox?
I also tried downloading the file using http.get and then uploading it to Dropbox, but that doesn't work either.
Thanks.
UPDATE WITH MORE INFO
First I download the image from a remote url with this code:
var request = http.get(options, function(res){
var imagedata = ''
res.setEncoding('binary')
res.on('data', function(chunk){
imagedata += chunk
})
res.on('end', function(){
console.log("Image downloaded!");
fs.writeFile(local, imagedata, 'binary', function(err){
if (err) throw err
console.log('File saved.')
})
})
})
The file is saved correctly.
Then I tried two things:
Sending the 'imagedata' to Dropbox:
console.log("Image downloaded!");
client.writeFile(file, imagedata, function(error, stat) {
if (error) {
return response.send(error.status); // Something went wrong.
}
response.send("File saved as revision " + stat.revisionTag);
});
And something is uploaded to Dropbox but it's nothing useful.
Then I also tried to read the file from disk and send it to Dropbox, but that doesn't work either:
fs.readFile(file, function(err, data) {
Use dropbox-js 0.9.1-beta1 or above to upload binary files from node.js. You need to pass it Buffer or ArrayBuffer instances. Try this code:
var req = http.get(options, function(res) {
var chunks = [];
res.on('data', function(chunk) {
chunks.push(chunk);
});
res.on('end', function() {
console.log("Image downloaded!");
var imageData = Buffer.concat(chunks);
client.writeFile(file, imageData, function(error, stat) {
if (error) {
return response.send(error.status);
}
response.send("File saved as revision " + stat.revisionTag);
});
});
});
Original answer: the dropbox-js README mentions that binary files don't work in node.js just yet.
I had this issue as well. I copied and modified a bit of the old dropbox-node npm package (which is now deprecated), and added the following function to dropbox.js:
Client.prototype.writeFileNodejs = function(path, data, callback) {
var self = this;
fs.readFile(data.path, function(err, data) {
if (err) return callback(err);
var uri = "" + self.urls.putFile + "/" + (self.urlEncodePath(path));
if (typeof data === 'function') callback = data, data = undefined;
var oauth = {
consumer_key: self.oauth.key
, consumer_secret: self.oauth.secret
, token: self.oauth.token
, token_secret: self.oauth.tokenSecret
};
var requestOptions = { uri: uri, oauth: oauth };
requestOptions.body = data;
return request['put'](requestOptions, callback ?
function(err, res, body) {
if (err) return callback(err);
var contentType = res.headers['content-type'];
// check if the response body is in JSON format
if (contentType === 'application/json' ||
contentType === 'text/javascript') {
body = JSON.parse(body);
if (body.error) {
var err = new Error(body.error);
err.statusCode = res.statusCode;
return callback(err);
}
} else if (errors[res.statusCode]) {
var err = new Error(errors[res.statusCode]);
err.statusCode = res.statusCode;
return callback(err);
}
// check for metadata in headers
if (res.headers['x-dropbox-metadata']) {
var metadata = JSON.parse(res.headers['x-dropbox-metadata']);
}
callback(null, body, metadata);
} : undefined);
});
};
You will also need to require request and fs for this:
var request = require('request'),
fs = require('fs');
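A hypothetical usage sketch (the paths are made up; client is assumed to be an already-authenticated instance of the patched Client, and the data argument must have a .path property, since the function reads data.path from disk):
client.writeFileNodejs('photos/new_logo.png', { path: './tmp/new_logo.png' }, function(err, body, metadata) {
  if (err) return console.error(err);
  console.log('uploaded', body, metadata);
});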