How to copy multiple files using fs.copyFile node? - javascript

I am using fs.copyFile to copy files from one location to another. I am doing this twice in order to copy two files. It's redundant and I would like to make my code better by maybe copying both files to the destination with a single call? How can I achieve this?
// Copy each (src, dest) pair, failing fast on any error.
[
  ['src/blah.txt', 'build/blah.txt'],
  ['src/unk.txt', 'build/unk.txt'],
].forEach(([src, dest]) => {
  fs.copyFile(src, dest, (err) => {
    if (err) throw err;
  });
});

You can simply create a function of your own that takes the src and dest path and an array of filenames as arguments:
const util = require('util');
const fs = require('fs');
const path = require('path');
// Promise-returning version of fs.copyFile for use with Promise.all.
const copyFilePromise = util.promisify(fs.copyFile);

/**
 * Copy a set of files from one directory to another, all in parallel.
 * @param {string} srcDir - directory the files currently live in
 * @param {string} destDir - directory to copy them into (must already exist)
 * @param {string[]} files - file names present in srcDir
 * @returns {Promise} resolves when every copy has finished, rejects on the first failure
 */
function copyFiles(srcDir, destDir, files) {
  const copies = files.map((name) =>
    copyFilePromise(path.join(srcDir, name), path.join(destDir, name))
  );
  return Promise.all(copies);
}
// usage
// usage: copy both files from src/ into build/ and report the outcome
copyFiles('src', 'build', ['unk.txt', 'blah.txt'])
  .then(() => console.log("done"))
  .catch(err => console.log(err));

Probably the best option is to use fs-extra:
const fse = require('fs-extra');
const srcDir = `path/to/file`;
const destDir = `path/to/destination/directory`;
// To copy a folder. Note: copySync is SYNCHRONOUS and takes no callback —
// the callback passed in the original was silently ignored, so errors were
// thrown instead of being handled. Use try/catch with the sync API.
try {
  fse.copySync(srcDir, destDir);
  console.log("success!");
} catch (err) {
  console.error(err);
}

const fs = require('fs');
const path = require('path');
// Files to back up, given relative to this script's directory.
const files = ['/files/a.js', '/files/b.js', '/files/c.txt'];
files.forEach(file => {
  // Pass path segments separately: path.join(__dirname + file) concatenated
  // first, which defeats the point of path.join (no separator normalization).
  const src = path.join(__dirname, file);
  const dest = path.join(__dirname, 'files', 'backup', path.basename(file));
  fs.copyFile(src, dest, err => {
    if (err) {
      // Surface failures — the original silently dropped every error.
      console.error(file + " could not be copied:", err);
    } else {
      console.log(file + " has been copied!");
    }
  });
});
Use the following code if you do not want existing files in the destination to be overwritten.
const fs = require('fs');
const path = require('path');
// Files to back up, given relative to this script's directory.
const files = ['/files/a.js','/files/b.js','/files/c.txt'];
files.forEach(file => {
  const basename = path.basename(file);
  const oldFile = path.join(__dirname, file);
  const newFile = path.join(__dirname, 'files', 'backup', basename);
  // COPYFILE_EXCL makes the copy fail atomically when the destination
  // already exists. This closes the check-then-copy race the original had
  // with existsSync + copyFile (the file could appear between the two calls).
  fs.copyFile(oldFile, newFile, fs.constants.COPYFILE_EXCL, err => {
    if (!err) {
      console.log(basename + " has been copied!");
    } else if (err.code === 'EEXIST') {
      console.log(basename + " already existed!");
    } else {
      // Other failures (missing source, permissions, ...) are reported too.
      console.error(err);
    }
  });
});

This is how I implemented the function above with the walk npm package to get all the files.
https://www.npmjs.com/package/walk
This gets all files within sub folders as well.
Worked copying 16,000 images from my Go Pro into one single folder on my desktop.
const util = require('util');
const fs = require('fs');
const path = require('path');
const copyFilePromise = util.promisify(fs.copyFile);
const walk = require('walk');
// Collect every file under source_folder (recursively) via the third-party
// `walk` package, then copy them all into destination_folder as one flat batch.
let files = [];
let source_folder = '/Volumes/Untitled/DCIM';
let destination_folder = '/Users/dave/Desktop/pics';
let walker = walk.walk(source_folder, {
followLinks: false
});
// 'file' fires once per file found; record its source path and a flat
// destination path (sub-folder structure is intentionally discarded, so
// duplicate file names would overwrite each other — TODO confirm acceptable).
walker.on('file', function(root, stat, next) {
let file_path = root + '/' + stat.name;
files.push({
src: file_path,
des: destination_folder + '/' + stat.name
});
next();
});
// 'end' fires only after the whole tree has been visited; start copying then.
walker.on('end', function() {
copyFiles(files).then(() => {
console.log("done");
}).catch(err => {
console.log(err);
});
});
// Start every copy at once; resolves when all have finished,
// rejects on the first failure.
function copyFiles(files) {
  const pending = files.map(({ src, des }) => copyFilePromise(src, des));
  return Promise.all(pending);
}

Related

dynamically import class from file and use a function in it

i have this file ./src/commands/test.js:
// ./src/commands/test.js — each command module default-exports a class
// whose run() method performs the command's work.
export default class {
run() {
console.log('test!')
}
}
i am trying to make a command that will get all commands in that folder and call their run function:
#! /usr/bin/env node
const fs = require('fs')
// Folders to scan for command modules.
const folders = [
'./src/commands',
//'./my_modules/diontron/unity/src/commands',
]
folders.forEach(folder => {
fs.readdir(folder, (err, files) => {
// NOTE(review): err is not checked — a readdir failure leaves `files`
// undefined and crashes on files.forEach below.
files.forEach(file => {
if (file != 'index.js') {
// NOTE(review): require() resolves './src/...' against this file's own
// directory, not process.cwd(), and the target file is an ES module —
// both contribute to the "Cannot find module" error described below.
let commandClass = require(folder + '/' + file).default
let command = new commandClass()
command.run()
}
})
})
})
this gives me the error:
Cannot find module './src/commands/test.js'
how do i dynamically go through all files in a folder, require/import them, and use the class function?
You can do it with dynamic import() rather than require. Note that you'll need a package.json in src or src/commands with "type": "module" in it, since test.js uses ESM but Node.js uses CommonJS by default.
This works for me in that setup:
#! /usr/bin/env node
const fs = require("fs");
// Folders to scan for command modules.
const folders = [
  "./src/commands",
  //"./my_modules/diontron/unity/src/commands",
];
folders.forEach(folder => {
  fs.readdir(folder, (err, files) => {
    // Guard added: on a readdir failure `files` is undefined, which
    // previously crashed on files.forEach below.
    if (err) {
      console.log(`Failed to read folder ${folder}`);
      return;
    }
    files.forEach(file => {
      if (file != "index.js") {
        const modFile = folder + "/" + file;
        // Dynamic import() works with ES modules where require() cannot.
        import(modFile)
          .then(({ default: commandClass }) => {
            const command = new commandClass();
            command.run();
          })
          .catch(error => {
            console.log(`Failed to load module ${modFile}`);
          });
      }
    });
  });
});
Although that runs the code in parallel. If you want to run it in series (each waiting for the previous one to complete), you might do this (note the change from "fs" to "fs/promises" at the top):
#! /usr/bin/env node
const fs = require("fs/promises");
// Folders to scan for command modules.
const folders = [
  "./src/commands",
  //"./my_modules/diontron/unity/src/commands",
];
(async () => {
  // One pass per folder, in series. The original had the line
  // `for (const folder of folders) {` duplicated (a paste error), which
  // shadowed `folder` and ran every command folders.length times.
  for (const folder of folders) {
    const files = await fs.readdir(folder);
    for (const file of files) {
      // Each import/run completes before the next file is loaded.
      const { default: commandClass } = await import(folder + "/" + file);
      const command = new commandClass();
      command.run();
    }
  }
})()
  .catch(error => {
    console.error(error);
  });
That relies on the script being run from the parent folder of the folders you're searching. If you want to use the current path instead, you should use process.cwd() to get it, then use that to resolve the folders:
#! /usr/bin/env node
const fs = require("fs");
const path = require("path"); // ***
// Folders to scan for command modules, relative to the current directory.
const folders = [
  "./src/commands",
  //"./my_modules/diontron/unity/src/commands",
];
const currentDir = process.cwd(); // ***
folders.forEach(folder => {
  fs.readdir(folder, (err, files) => {
    // Guard added: on a readdir failure `files` is undefined, which
    // previously crashed on files.forEach below.
    if (err) {
      console.log(`Failed to read folder ${folder}`);
      return;
    }
    const fullFolder = path.join(currentDir, folder); // ***
    files.forEach(file => {
      if (file != "index.js") {
        // path.join instead of string concatenation keeps separators
        // normalized across platforms.
        const modFile = path.join(fullFolder, file); // ***
        import(modFile)
          .then(({ default: commandClass }) => {
            const command = new commandClass();
            command.run();
          })
          .catch(error => {
            console.log(`Failed to load module ${modFile}`);
          });
      }
    });
  });
});

Node.js batch create files to every folders in current directory with loop

I have about 20 sub-directories inside the current directory. I would like to create a 'README.md' in every folder inside the current directory. Here is what I've tried so far:
const fs = require('fs').promises;
const path = require('path');
// Attempt: list the current directory and create a README.md per entry.
async function readDirectory(filePath) {
try {
const data = await fs.readdir(filePath);
data.forEach(async file => {
// BUG (the asker's problem): this writes the single path 'README.md' in
// the CWD over and over, using each entry NAME as the file CONTENT,
// instead of creating <folder>/README.md inside every sub-directory.
fs.writeFile('README.md', file);
});
} catch (err) {
console.log(err.message);
}
}
readDirectory(path.resolve(__dirname));
but the result is not what I'm looking for. Any suggestion?
The first writeFile parameter should be the full path of the file you want to create/write:
fs.writeFile( file, data, options, callback )
So I changed your code a little bit
const fs = require('fs');
const path = require('path');
/**
 * Create an empty README.md inside every sub-directory of filePath.
 * @param {string} filePath - absolute path of the directory to scan
 */
async function readDirectory(filePath) {
  // readdirSync is synchronous — the original's `await` on it was a no-op.
  const data = fs.readdirSync(filePath);
  // Join each entry with filePath before lstat: the original called
  // fs.lstatSync(item) with the bare name, which resolves against
  // process.cwd() and breaks when the script runs from another directory.
  const folders = data.filter((item) =>
    fs.lstatSync(path.join(filePath, item)).isDirectory()
  );
  // forEach, not map: the calls are side effects and the result was unused.
  folders.forEach((folder) => {
    const subFilePath = path.join(filePath, folder, 'README.md');
    fs.writeFile(subFilePath, '', (err) => {
      if (err) console.log(err);
      else {
        console.log(`File ${subFilePath} created`);
      }
    });
  });
}
readDirectory(path.resolve(__dirname));

How to apply promises in a node.js function

I'm trying to write a program that can unzip a zip file, read the images in the file and apply grayscale to them.
right now i have these two functions :
// NOTE(review): PNG is assigned twice below; the second require is redundant.
var fs = require('fs'),
PNG = require('pngjs').PNG
const unzipper = require('unzipper')
PNG = require('pngjs').PNG
// Base directory for resolving the zip file (this script's own directory).
const dir = __dirname + "/";
// Unzip fileName into ./myfile.
const myFile = (fileName) => {
let createdFile = dir + fileName
fs.createReadStream(createdFile)
.pipe(unzipper.Extract({ path: 'myfile' }));
// NOTE(review): this logs immediately — the pipe is asynchronous and the
// extraction has NOT finished yet (the race the asker describes below).
console.log('file unzipped')
}
myFile("myfile.zip")
// Re-encode every PNG inside ./myfile as grayscale (colorType 0), in place.
function applyFilter(Name) {
fs.readdir(Name, 'utf-8', (err, data) => {
if (err) {
console.log(err)
} else {
data.forEach(function (file) {
if (file.includes('png')) {
// Absolute path of the PNG to convert; note the directory component is
// hard-coded to 'myfile' rather than using the Name parameter.
let greyPNG = (__dirname + '/' + 'myfile' + '/' + file)
console.log (greyPNG)
fs.createReadStream(greyPNG)
.pipe(new PNG({
colorType: 0,
}))
.on('parsed', function () {
// Once fully parsed, re-encode and overwrite the original file.
this.pack().pipe(fs.createWriteStream(__dirname + "/" + "myfile" + "/" + file));
});
}
})
}
})
}
// NOTE(review): runs immediately — ./myfile may not exist yet because the
// unzip above is still in progress (the asker's stated problem).
applyFilter ('myfile')
these two function works fine individually, however, it will not run together, if I comment out "applyFilter". A zip file will be unzipped. if there is a file in the directory, "applyFilter" will apply grayscale on those pictures. I know that this is because both functions runs at the same time which causes the problem. So how do I implement promises to solve this issue. I know that I can use "Sync" version of the functions. I just want to know how to do it in promises.
There are examples in the official documentation of `util.promisify`:
https://nodejs.org/dist/latest-v12.x/docs/api/util.html#util_util_promisify_original
It will give you a "promised" version of the same function (as long as the original function has a standard signature / a custom promisified definition).
const util = require('util');
const fs = require('fs');
// Promise-returning wrapper around the callback-style fs.stat.
const stat = util.promisify(fs.stat);

// Print the uid that owns the current working directory.
async function callStat() {
  const { uid } = await stat('.');
  console.log(`This directory is owned by ${uid}`);
}
You can also implement your own, just return a promise (this example is reading a http request):
/**
 * Read an entire incoming request body as a UTF-8 string.
 * @param {object} request - a readable stream such as http.IncomingMessage
 * @returns {Promise<string>} resolves with the concatenated body on 'end',
 *   rejects if the stream emits 'error'.
 */
function read_request(request) {
  request.setEncoding("utf-8");
  return new Promise((resolve, reject) => {
    var cache = "";
    request.on("data", (chunk) => {
      // Fix: append the incoming chunk. The original did `cache += cache`,
      // which never reads `chunk` and always resolved to an empty string.
      cache += chunk;
    }).on("end", () => {
      resolve(cache);
    }).on("error", reject);
  });
}
You can use the stream finish event to determine when the file unzip is complete. We can then use promises and async / await to ensure we don't try to apply the filter before the files are ready.
const fs = require('fs');
const PNG = require('pngjs').PNG;
const unzipper = require('unzipper');
const dir = __dirname + "/";
/**
 * Extract fileName (resolved against this script's directory) into outputPath.
 * @param {string} fileName - zip file name relative to `dir`
 * @param {string} outputPath - directory to extract into
 * @returns {Promise<string>} resolves with outputPath once extraction finishes,
 *   rejects if reading or extracting fails.
 */
function unzipFile(fileName, outputPath) {
  return new Promise((resolve, reject) => {
    let createdFile = dir + fileName
    let readStream = fs.createReadStream(createdFile);
    // pipe() does not forward source errors to the destination stream, so a
    // missing/unreadable zip must be caught on the read stream itself.
    readStream.on('error', reject);
    let stream = readStream.pipe(unzipper.Extract({ path: outputPath }));
    stream.on('finish', () => {
      console.log('file unzipped');
      resolve(outputPath);
    });
    // Fix: the original had no rejection path at all — on any failure the
    // promise stayed pending forever and the caller's await never returned.
    stream.on('error', reject);
  });
}
// Re-encode every PNG as grayscale (colorType 0), overwriting each in place.
function applyFilter(Name) {
// NOTE(review): this lists `dir` (the script's own directory) but builds
// each PNG path under `Name` — confirm the listing shouldn't use Name too.
fs.readdir(dir, 'utf-8', (err, data) => {
if (err) {
console.log(err)
} else {
data.filter(file => file.includes("png")).forEach(file => {
let greyPNG = (__dirname + '/' + Name + '/' + file)
console.log (greyPNG)
fs.createReadStream(greyPNG)
.pipe(new PNG({
colorType: 0,
}))
.on('parsed', function () {
// Once fully parsed, re-encode and overwrite the source file.
this.pack().pipe(fs.createWriteStream(greyPNG));
});
})
}
})
}
// Orchestrate the two steps: the filter only starts after extraction is done.
async function unzipAndApplyFilter(zipFile, outputPath) {
  await unzipFile(zipFile, outputPath); // Wait until unzip is complete.
  applyFilter(outputPath);
}
// Fix: the original left this promise floating — an unzip failure became an
// unhandled rejection. Attach a handler at the call site.
unzipAndApplyFilter('myfile.zip', 'myfile')
  .catch(err => console.error(err));

How do i get list of all the files including their path using java script

I want to get the list of all the files with their path. My code below does not meet my requirement. It just gives me the file name of the directory.
For Example : below is my input
\dir\temp1.txt
\dir\dir1\temp2.txt
I want to create a file may be temp.json and dump the file names with their path in it.
const fs = require('fs');
const path = require('path');
// Directory to scan: 'Dir' next to this script.
const directoryPath = path.join(__dirname, 'Dir');
console.log(directoryPath);
// readdir is NOT recursive and returns bare entry names — which is why this
// prints only top-level names with no paths (the asker's complaint).
fs.readdir(directoryPath, function(err, files){
if (err) {
return console.log("Unable to scan directory" + err);
}
files.forEach(function (file){
console.log(file);
})
});
is there anyone who can guide me ?
How about this implementation:
/**
 * Recursively walk `dir`, building a nested listing.
 * Plain files become { file: fullPath }; directories additionally carry a
 * `files` array holding their own recursive listing.
 * @param {string} dir - directory to walk
 * @param {Array} filelist - accumulator, used internally on recursion
 * @returns {Array<{file: string, files?: Array}>}
 */
const walkSync = (dir, filelist = []) => {
  const files = fs.readdirSync(dir);
  for (const file of files) {
    const dirFile = path.join(dir, file);
    const dirent = fs.statSync(dirFile);
    if (dirent.isDirectory()) {
      console.log('directory', path.join(dir, file));
      const odir = {
        file: dirFile,
        files: []
      };
      // Fix: recurse into this directory's own accumulator. The original
      // passed `dir.files` — a property of a string, always undefined — and
      // only worked by falling through to the default parameter.
      odir.files = walkSync(dirFile, odir.files);
      filelist.push(odir);
    } else {
      filelist.push({
        file: dirFile
      });
    }
  }
  return filelist;
};
Look into this gist

How do i create two directory at same time using nodejs

I am using this npm to create directory
var mkdirp = require('mkdirp');
// NOTE(review): placeholder, not valid JavaScript — substitute a real path
// string, e.g. var dest = './some/path';
var dest = denotes the path
// mkdirp creates dest (and any missing parents), then reports the result
// through `cb` (assumed to be defined elsewhere by the asker).
mkdirp(dest, function (err) {
if (err) cb(err, dest);
else cb(null, dest);
});
My doubt is how do I create two directory in different path?
I have tried following code:
var dest = first path;
var dest2 = second path;
// NOTE(review): mkdirp accepts a single path; the second argument here is
// interpreted as an options object, not another directory — dest2 is never
// created, which is why this attempt "doesn't work".
mkdirp(dest,dest2, function (err) {
if (err) cb(err, dest);
else cb(null, dest);
});
But it's not working — how do I do that? Also, I need to rename the folders, which are in different paths, at the same time.
Updated
var mkdirp = require('mkdirp');
var destArray = [ './root/dest1', './dest2' ]
destArray.map( path => {
mkdirp(path, function (err) {
if (err) console.error(err)
else {
// NOTE(review): this runs once per directory, so the second callback
// attempts to send a second HTTP response — the source of the
// "can't set headers after they are sent" error described below.
res.json("ok")
}
});
})
I have used this solution, but because of the loop I am getting a "can't set headers after they are sent" error.
you can try this
var mkdirp = require('mkdirp');
var destArray = [ './root/dest1', './dest2' ]
// Create each destination independently, logging per-directory outcome.
// (local renamed from `path` to avoid shadowing the usual path module)
destArray.forEach(dir => {
  mkdirp(dir, function (err) {
    if (err) console.error(err)
    else console.log('Directory created: ' + dir)
  });
})
You can do this using the following code
const fs = require('fs');
const {promisify} = require('util');
// Promise-returning mkdir built from the callback API.
const mkdir = promisify(fs.mkdir);
const destArray = [ 'dest1', 'dest2'];
// Start every mkdir in parallel and report one overall outcome.
const creations = destArray.map(destPath => mkdir(destPath));
Promise.all(creations)
  .then(res => {
    console.log('Directories created');
  })
  .catch(err => {
    console.error(err);
  });

Categories

Resources