nodeJS await to move files then compress images - javascript

I wrote this code that checks the sizes of the image files in a folder; if a file is bigger than 30000 bytes, it is moved to a temporary folder called 'before-compress'. The compressImages() function iterates over the 'before-compress' folder and returns the compressed images to the original folder. My question is: how can I await the process of moving the oversized files and then call the compressImages() function? As you can see in the code, I currently handle this with a setTimeout once the forEach reaches the last item. Thanks in advance.
const fs = require('fs');
const path = require('path');
const imagemin = require("imagemin");
const imageminMozjpeg = require("imagemin-mozjpeg");
const imageminPngquant = require("imagemin-pngquant");
const imageminGifsicle = require('imagemin-gifsicle');

const directoryPath = path.join(__dirname, 'uploads');

fs.readdir(`${directoryPath}/products`, function (err, files) {
    if (err) {
        return console.log('Unable to scan directory: ' + err);
    }
    files.forEach(function (file) {
        console.log(`File: ${file} - Size: ${getFilesizeInBytes(file)} bytes`);
        if (getFilesizeInBytes(file) > 30000) {
            moveFile(file);
        }
        if (files.indexOf(file) == files.length - 1) {
            //console.log('last index');
            setTimeout(() => compressImages(), 4000);
        }
    });
});
function getFilesizeInBytes(fileName) {
    var stats = fs.statSync(`${directoryPath}/products/${fileName}`);
    var fileSizeInBytes = stats.size;
    return fileSizeInBytes;
}

function moveFile(file) {
    var oldPath = `${directoryPath}/products/${file}`;
    var newPath = `${directoryPath}/before-compress/${file}`;
    fs.rename(oldPath, newPath, function (err) {
        if (err) throw err;
        console.log(`File ${file} moved!`);
    });
}

function compressImages() {
    fs.readdir(`${directoryPath}/before-compress`, function (err, files) {
        if (err) {
            return console.log('Unable to scan directory: ' + err);
        }
        files.forEach(function (file) {
            console.log(`File to compress: ${file}`);
            let fileExt = file.split('.')[1];
            let compressPlugin = fileExt == 'jpg' || fileExt == 'jpeg' ? imageminMozjpeg({ quality: 40 }) :
                fileExt == 'png' ? imageminPngquant({ quality: [0.5, 0.6] }) :
                fileExt == 'gif' ? imageminGifsicle() : 0;
            (async () => {
                const files = await imagemin([`./uploads/before-compress/${file}`], {
                    destination: './uploads/products',
                    plugins: [compressPlugin]
                });
                fs.unlink(`${directoryPath}/before-compress/${file}`, err => err ? console.log(err) : 0);
            })();
        });
    });
}

This kind of code would become much more readable if you converted all the functions from using callbacks to using async/await.
If you want to keep using callbacks, however, there are two options:
Make moveFile() use fs.renameSync() instead of fs.rename(). Normally I would advise against that, but since you are already using fs.statSync(), and I assume you run this as a script with nothing else in parallel, that may be an acceptable solution.
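For illustration, the synchronous variant might look like this (a sketch based on the original moveFile()):
function moveFile(file) {
    var oldPath = `${directoryPath}/products/${file}`;
    var newPath = `${directoryPath}/before-compress/${file}`;
    fs.renameSync(oldPath, newPath); // blocks until the move completes
    console.log(`File ${file} moved!`);
}
With every move synchronous, the setTimeout can simply be replaced by a direct call to compressImages() after the forEach.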
Or make moveFile() accept a callback:
function moveFile(file, callback) {
    // [...]
    fs.rename(oldPath, newPath, callback);
}
Now you can use this callback to detect when the file has been moved, for example like this:
// [...]
var done = 0;
var error = false;
files.forEach(function (file) {
    if (error) return;
    if (getFilesizeInBytes(file) > 30000) {
        moveFile(file, function (err) {
            if (err) { console.log(err); error = true; return; }
            done++;
            // the rename callbacks fire asynchronously, so the completion
            // check has to live here, not in the loop body
            if (done == files.length) compressImages();
        });
    } else {
        done++;
        if (done == files.length) compressImages();
    }
});
});
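For comparison, here is a minimal sketch of the promise-based version suggested at the top, using fs.promises (available since Node 10); directoryPath, getFilesizeInBytes() and compressImages() are the ones from the question:
const fsp = require('fs').promises;

async function moveAndCompress() {
    const files = await fsp.readdir(`${directoryPath}/products`);
    const oversized = files.filter(file => getFilesizeInBytes(file) > 30000);
    // wait for every rename to finish before compressing
    await Promise.all(oversized.map(file => fsp.rename(
        `${directoryPath}/products/${file}`,
        `${directoryPath}/before-compress/${file}`
    )));
    compressImages();
}

moveAndCompress().catch(console.error);
With this shape there is no setTimeout guesswork: compressImages() runs exactly when the last rename has resolved.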

Related

How to apply promises in a node.js function

I'm trying to write a program that can unzip a zip file, read the images in the file and apply grayscale to them.
Right now I have these two functions:
var fs = require('fs');
var PNG = require('pngjs').PNG;
const unzipper = require('unzipper');

const dir = __dirname + "/";

const myFile = (fileName) => {
    let createdFile = dir + fileName;
    fs.createReadStream(createdFile)
        .pipe(unzipper.Extract({ path: 'myfile' }));
    console.log('file unzipped');
}

myFile("myfile.zip");
myFile("myfile.zip")
function applyFilter(Name) {
    fs.readdir(Name, 'utf-8', (err, data) => {
        if (err) {
            console.log(err);
        } else {
            data.forEach(function (file) {
                if (file.includes('png')) {
                    let greyPNG = (__dirname + '/' + 'myfile' + '/' + file);
                    console.log(greyPNG);
                    fs.createReadStream(greyPNG)
                        .pipe(new PNG({
                            colorType: 0,
                        }))
                        .on('parsed', function () {
                            this.pack().pipe(fs.createWriteStream(__dirname + "/" + "myfile" + "/" + file));
                        });
                }
            });
        }
    });
}

applyFilter('myfile');
These two functions work fine individually; however, they will not run together. If I comment out applyFilter, the zip file is unzipped, and if the files are already in the directory, applyFilter applies grayscale to those pictures. I know this is because both functions run at the same time, which causes the problem. So how do I implement promises to solve this issue? I know that I could use the "Sync" versions of the functions; I just want to know how to do it with promises.
There are examples in the official documentation of util.promisify:
https://nodejs.org/dist/latest-v12.x/docs/api/util.html#util_util_promisify_original
which gives you a "promised" version of the same function (as long as the original function has a standard signature, or a custom promisified definition).
const util = require('util');
const fs = require('fs');
const stat = util.promisify(fs.stat);

async function callStat() {
    const stats = await stat('.');
    console.log(`This directory is owned by ${stats.uid}`);
}
You can also implement your own: just return a promise (this example reads an HTTP request):
function read_request(request) {
    request.setEncoding("utf-8");
    return new Promise((resolve, reject) => {
        var cache = "";
        request.on("data", (chunk) => {
            cache += chunk;
        }).on("end", () => {
            resolve(cache);
        }).on("error", reject);
    });
}
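A hypothetical server handler could then consume it with await:
const http = require('http');

http.createServer(async (request, response) => {
    const body = await read_request(request); // resolves once the request has ended
    response.end(`received ${body.length} characters`);
}).listen(3000);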
You can use the stream's finish event to determine when the unzip is complete. We can then use promises and async/await to ensure we don't try to apply the filter before the files are ready.
const fs = require('fs');
const PNG = require('pngjs').PNG;
const unzipper = require('unzipper');

const dir = __dirname + "/";

function unzipFile(fileName, outputPath) {
    return new Promise((resolve, reject) => {
        let createdFile = dir + fileName;
        let stream = fs.createReadStream(createdFile)
            .pipe(unzipper.Extract({ path: outputPath }));
        stream.on('error', reject); // surface extraction failures instead of hanging
        stream.on('finish', () => {
            console.log('file unzipped');
            resolve(outputPath);
        });
    });
}
function applyFilter(Name) {
    // list the extracted folder, not the script directory
    fs.readdir(dir + Name, 'utf-8', (err, data) => {
        if (err) {
            console.log(err);
        } else {
            data.filter(file => file.includes("png")).forEach(file => {
                let greyPNG = (__dirname + '/' + Name + '/' + file);
                console.log(greyPNG);
                fs.createReadStream(greyPNG)
                    .pipe(new PNG({
                        colorType: 0,
                    }))
                    .on('parsed', function () {
                        this.pack().pipe(fs.createWriteStream(greyPNG));
                    });
            });
        }
    });
}
async function unzipAndApplyFilter(zipFile, outputPath) {
    await unzipFile(zipFile, outputPath); // wait until unzip is complete
    applyFilter(outputPath);
}

unzipAndApplyFilter('myfile.zip', 'myfile');
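If the grayscale step itself ever needs to be awaited as well, the same Promise-wrapping pattern as in unzipFile() applies. A sketch for a single file (filterOne is a hypothetical helper name):
function filterOne(greyPNG) {
    return new Promise((resolve, reject) => {
        fs.createReadStream(greyPNG)
            .pipe(new PNG({ colorType: 0 }))
            .on('error', reject)
            .on('parsed', function () {
                this.pack()
                    .pipe(fs.createWriteStream(greyPNG))
                    .on('finish', resolve); // resolve once the write has completed
            });
    });
}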

How to hash a download stream in Node js

I want to know how to hash the download stream of a file using Node.js, because I want to hash the file before I store it into MongoDB in order to avoid duplicates. I am using mongo gridfs-stream, by the way: https://github.com/aheckmann/gridfs-stream
Downloading the file:
var download = function (url, dest, callback) {
    request.get(url)
        .on('error', function (err) { console.log(err) })
        .pipe(fs.createWriteStream(dest))
        .on('close', callback);
};

final_list.forEach(function (str) {
    var filename = str.split('/').pop();
    console.log('Downloading ' + filename);
    download(str, filename, function () { console.log('Finished Downloading ' + filename) });
});
function getHash(dest, filename) {
    let crypto = require('crypto');
    let hash = crypto.createHash('sha256').setEncoding('hex');
    let fileHash = "";
    let filePath = `${dest}/${filename}`;
    fs.createReadStream(filePath)
        .pipe(hash)
        .on('finish', function () {
            fileHash = hash.read();
            console.log(`Filehash calculated for ${filename} is ${fileHash}.`);
            // insert into mongo db here
        });
}
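One way to combine the two is to hash the stream while it downloads, so the file never has to be read twice. A minimal sketch (downloadAndHash is a hypothetical helper built from the same request, fs and crypto calls used above):
const crypto = require('crypto');
const fs = require('fs');
const request = require('request');

function downloadAndHash(url, dest, callback) {
    const hash = crypto.createHash('sha256');
    request.get(url)
        .on('error', callback)
        .on('data', chunk => hash.update(chunk)) // feed each chunk to the hash as it arrives
        .pipe(fs.createWriteStream(dest))
        .on('close', () => callback(null, hash.digest('hex')));
}

// usage: look the hash up in GridFS before saving to skip duplicates
downloadAndHash('https://example.com/file.bin', 'file.bin', (err, sha256) => {
    if (err) return console.log(err);
    console.log(`sha256: ${sha256}`);
});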

What's the best way to copy files from 2 different folders to a new one using JavaScript?

I'm setting up a program that will check two different folders and copy all the files from them into a third one. The problem for me here is how to copy them without writing their names by hand.
var fs = require("fs");
fs.renameSync("/home/oem/git/test/folder1/test1.js", "/home/oem/git/test/folder1/test1CHANGED.js")
console.log("file renamed");
fs.rename("/home/oem/git/test/folder1/test2", "/home/oem/git/test/folder2", function(err){
if(err)
{
console.log(err);
}
else
{
console.log("file moved successfully");
}
});
With the code above I can move files whose names I write manually; I want to implement it so that it automatically scans the folder and moves the files to the other one!
Here is a function that could help you do this:
const fs = require('fs');
const path = require('path');

function copyFiles() {
    const firstFolder = 'firstFolder';
    const secondFolder = 'secondFolder';
    const destinationFolder = 'destinationFolder';
    const firstDir = path.join(__dirname, firstFolder);
    const secondDir = path.join(__dirname, secondFolder);
    const destDir = path.join(__dirname, destinationFolder);

    fs.readdir(firstDir, (err, files) => {
        if (err) {
            throw err;
        }
        for (let i = 0; i < files.length; i += 1) {
            fs.copyFile(firstDir + '/' + files[i], destDir + '/' + files[i], function (err) {
                if (err) throw err;
            });
        }
    });

    fs.readdir(secondDir, (err, files) => {
        if (err) {
            throw err;
        }
        for (let i = 0; i < files.length; i += 1) {
            fs.copyFile(secondDir + '/' + files[i], destDir + '/' + files[i], function (err) {
                if (err) throw err;
            });
        }
    });
}

copyFiles();
You should take a look at the Node docs, where this is covered in more detail.
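A promise-based variant of the same idea makes it possible to know when all the copies are done; a minimal sketch using fs.promises (Node 10+, copyAll is a hypothetical name):
const fsp = require('fs').promises;
const path = require('path');

async function copyAll(srcDirs, destDir) {
    for (const srcDir of srcDirs) {
        const files = await fsp.readdir(srcDir);
        // copy every file of this folder into the destination, in parallel
        await Promise.all(files.map(f =>
            fsp.copyFile(path.join(srcDir, f), path.join(destDir, f))));
    }
}

copyAll(['firstFolder', 'secondFolder'], 'destinationFolder')
    .then(() => console.log('all files copied'));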
If I may assume that there are two folders, folder1 and folder2, where folder1 contains the file word.txt and folder2 is empty, then in the script file (assuming it lives next to the two folders) you can write:
const fs = require('fs');

// destination.txt will be created or overwritten by default.
// you can do the renaming here
fs.copyFile('./folder1/word.txt', './folder2/destination.txt', (err) => {
    if (err) throw err;
    console.log('word.txt was copied to destination.txt');
});
The fastest way to copy files:
const fs = require('fs');

function copies(files, destination) {
    for (let i = 0; i < files.length; i++) {
        // the destination file will be created or overwritten by default;
        // the regex strips the source directory, keeping only the file name
        fs.copyFile(files[i], destination + '/' + files[i].replace(/^.*[\\\/]/, ''), (err) => {
            if (err) throw err;
            console.log(files[i] + ' was copied to ' + destination);
        });
    }
}

var files = ['/path/to/source/files.txt', '/sources/files/files2.txt'];
var destination = '/file/would/copy/to';
copies(files, destination);

How to copy multiple files using fs.copyFile node?

I am using fs.copyFile to copy files from one location to another, and I am doing this twice in order to copy two files. It's redundant, and I would like to make my code better, perhaps by copying both files to the destination with a single call. How can I achieve this?
fs.copyFile('src/blah.txt', 'build/blah.txt', (err) => {
    if (err) throw err;
});

fs.copyFile('src/unk.txt', 'build/unk.txt', (err) => {
    if (err) throw err;
});
You can simply create a function of your own that takes the src and dest path and an array of filenames as arguments:
const util = require('util');
const fs = require('fs');
const path = require('path');

const copyFilePromise = util.promisify(fs.copyFile);

function copyFiles(srcDir, destDir, files) {
    return Promise.all(files.map(f => {
        return copyFilePromise(path.join(srcDir, f), path.join(destDir, f));
    }));
}

// usage
copyFiles('src', 'build', ['unk.txt', 'blah.txt']).then(() => {
    console.log("done");
}).catch(err => {
    console.log(err);
});
Probably the best option is to use fs-extra:
const fse = require('fs-extra');

const srcDir = `path/to/file`;
const destDir = `path/to/destination/directory`;

// To copy a folder (fs-extra's copy handles directories recursively;
// the synchronous copySync does not take a callback)
fse.copy(srcDir, destDir, function (err) {
    if (err) {
        console.error(err);
    } else {
        console.log("success!");
    }
});
const fs = require('fs');
const path = require('path');

const files = ['/files/a.js', '/files/b.js', '/files/c.txt'];

files.forEach(file => {
    fs.copyFile(path.join(__dirname + file), path.join(__dirname + '/files/backup/' + path.basename(file)), err => {
        if (!err) {
            console.log(file + " has been copied!");
        }
    });
});
Use the following code if you do not want existing files to be overwritten.
const fs = require('fs');
const path = require('path');

const files = ['/files/a.js', '/files/b.js', '/files/c.txt'];

files.forEach(file => {
    let basename = path.basename(file);
    let oldFile = path.join(__dirname + file);
    let newFile = path.join(__dirname + '/files/backup/' + basename);
    if (!fs.existsSync(newFile)) {
        fs.copyFile(oldFile, newFile, err => {
            if (!err) {
                console.log(basename + " has been copied!");
            }
        });
    } else {
        console.log(basename + " already existed!");
    }
});
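As an alternative to the existsSync check (which leaves a small race window between the check and the copy), fs.copyFile can itself refuse to overwrite via the COPYFILE_EXCL flag; a sketch of the same loop:
const fs = require('fs');
const path = require('path');

const files = ['/files/a.js', '/files/b.js', '/files/c.txt'];

files.forEach(file => {
    let basename = path.basename(file);
    let newFile = path.join(__dirname + '/files/backup/' + basename);
    // COPYFILE_EXCL makes the copy fail with EEXIST instead of overwriting
    fs.copyFile(path.join(__dirname + file), newFile, fs.constants.COPYFILE_EXCL, err => {
        if (!err) console.log(basename + " has been copied!");
        else if (err.code === 'EEXIST') console.log(basename + " already existed!");
        else console.log(err);
    });
});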
This is how I implemented the function above, using the walk npm package to get all the files:
https://www.npmjs.com/package/walk
This gets all files within subfolders as well.
It worked for copying 16,000 images from my GoPro into one single folder on my desktop.
const util = require('util');
const fs = require('fs');
const path = require('path');
const walk = require('walk');

const copyFilePromise = util.promisify(fs.copyFile);

let files = [];
let source_folder = '/Volumes/Untitled/DCIM';
let destination_folder = '/Users/dave/Desktop/pics';

let walker = walk.walk(source_folder, {
    followLinks: false
});

walker.on('file', function (root, stat, next) {
    let file_path = root + '/' + stat.name;
    files.push({
        src: file_path,
        des: destination_folder + '/' + stat.name
    });
    next();
});

walker.on('end', function () {
    copyFiles(files).then(() => {
        console.log("done");
    }).catch(err => {
        console.log(err);
    });
});

function copyFiles(files) {
    return Promise.all(files.map(f => {
        return copyFilePromise(f.src, f.des);
    }));
}

How do I create two directories at the same time using Node.js

I am using this npm package to create a directory:
var mkdirp = require('mkdirp');
var dest = '...'; // denotes the path

mkdirp(dest, function (err) {
    if (err) cb(err, dest);
    else cb(null, dest);
});
My doubt is: how do I create two directories at different paths? I have tried the following code:
var dest = 'first path';
var dest2 = 'second path';

mkdirp(dest, dest2, function (err) {
    if (err) cb(err, dest);
    else cb(null, dest);
});
But it's not working. How do I do that? Also, I need to rename those folders, which live at different paths, at the same time.
Updated:
var mkdirp = require('mkdirp');
var destArray = [ './root/dest1', './dest2' ];

destArray.map(path => {
    mkdirp(path, function (err) {
        if (err) console.error(err);
        else {
            res.json("ok");
        }
    });
});
I have used this solution, but because of the loop I am getting "Can't set headers after they are sent" (res.json() runs once per path).
You can try this:
var mkdirp = require('mkdirp');
var destArray = [ './root/dest1', './dest2' ];

destArray.forEach(path => {
    mkdirp(path, function (err) {
        if (err) console.error(err);
        else console.log('Directory created: ' + path);
    });
});
You can do this using the following code:
const fs = require('fs');
const { promisify } = require('util');

const mkdir = promisify(fs.mkdir);
const destArray = ['dest1', 'dest2'];

Promise.all(destArray.map(destPath => mkdir(destPath)))
    .then(res => {
        console.log('Directories created');
    })
    .catch(err => {
        console.error(err);
    });
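Note that since Node 10.12 the built-in fs.mkdir also accepts { recursive: true }, which creates intermediate directories and does not fail when a directory already exists, covering the common mkdirp use case; a minimal sketch:
const fsp = require('fs').promises;

async function makeDirs(paths) {
    // recursive: true behaves like mkdir -p (Node 10.12+)
    await Promise.all(paths.map(p => fsp.mkdir(p, { recursive: true })));
}

makeDirs(['./root/dest1', './dest2'])
    .then(() => console.log('Directories created'))
    .catch(console.error);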
