How do I create two directories at the same time using Node.js - JavaScript

I am using this npm package to create a directory:
var mkdirp = require('mkdirp');
var dest = '...'; // the destination path
mkdirp(dest, function (err) {
    if (err) cb(err, dest);
    else cb(null, dest);
});
My question is: how do I create two directories in different paths?
I have tried the following code:
var dest = 'first path';
var dest2 = 'second path';
mkdirp(dest, dest2, function (err) {
    if (err) cb(err, dest);
    else cb(null, dest);
});
But it's not working. How do I do that? Also, I need to rename those folders, which are in different paths, at the same time.
Updated
var mkdirp = require('mkdirp');
var destArray = ['./root/dest1', './dest2'];
destArray.map(path => {
    mkdirp(path, function (err) {
        if (err) console.error(err);
        else {
            res.json("ok");
        }
    });
});
I have used this solution, but because of the loop I am getting a "Can't set headers after they are sent" error.

You can try this:
var mkdirp = require('mkdirp');
var destArray = ['./root/dest1', './dest2'];
destArray.forEach(path => {
    mkdirp(path, function (err) {
        if (err) console.error(err);
        else console.log('Directory created: ' + path);
    });
});
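Regarding the "can't set headers" error in the question: it happens because res.json() is called once per directory inside the loop. One way to respond exactly once is to wrap each mkdirp call in a promise and wait for all of them. A minimal sketch, assuming the callback-style mkdirp API used above and an Express-style res object:
var mkdirp = require('mkdirp');
var destArray = ['./root/dest1', './dest2'];

// Wrap each mkdirp call in a promise so we can wait for all of them.
var creations = destArray.map(function (path) {
    return new Promise(function (resolve, reject) {
        mkdirp(path, function (err) {
            if (err) reject(err);
            else resolve(path);
        });
    });
});

Promise.all(creations)
    .then(function () { res.json("ok"); }) // respond exactly once
    .catch(function (err) { console.error(err); });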

You can do this using the following code:
const fs = require('fs');
const { promisify } = require('util');
const mkdir = promisify(fs.mkdir);
const destArray = ['dest1', 'dest2'];
Promise.all(destArray.map(destPath => mkdir(destPath)))
    .then(res => {
        console.log('Directories created');
    })
    .catch(err => {
        console.error(err);
    });
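As a side note, on Node 10.12+ the built-in fs.promises.mkdir accepts a recursive option that creates intermediate directories the way mkdirp does, so no extra dependency is needed. A minimal sketch:
const fsp = require('fs').promises;

const destArray = ['./root/dest1', './dest2'];

// { recursive: true } creates missing parent directories and does not
// fail if a directory already exists.
Promise.all(destArray.map(dest => fsp.mkdir(dest, { recursive: true })))
    .then(() => console.log('Directories created'))
    .catch(err => console.error(err));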

Related

Create new arrays from a list based on a fragment of the filename

Let's say I have the following filelist:
foo-bar_lorem.ext
foo-bar_ipsum.ext
foo-bar_dolor.ext
foo-baz_lorem.ext
foo-baz_ipsum.ext
foo-baz_dolor.ext
foo-amet.ext
foo-sic.ext
What I'd like to do is to create new arrays based on what's between the dash and the underscore, producing a single-element array when the file has no underscore in it.
The resulting arrays should be as follows:
array1 = ['foo-bar_lorem.ext', 'foo-bar_ipsum.ext', 'foo-bar_dolor.ext'];
array2 = ['foo-baz_lorem.ext', 'foo-baz_ipsum.ext', 'foo-baz_dolor.ext'];
array3 = ['foo-amet.ext'];
array4 = ['foo-sic.ext'];
This is how I'm currently retrieving the filelist using fs:
const path = require('path');
const fs = require('fs');
const myFolder = path.join(__dirname);
fs.readdir(myFolder, function (err, files) {
    if (err) {
        return console.log('Unable to scan directory: ' + err);
    }
    // listing all files using forEach
    files.forEach(function (file) {
        console.log(file);
    });
});
This is the logic that needs to be applied to the file list returned by fs.readdir:
const files = ['foo-bar_lorem.ext', 'foo-bar_ipsum.ext', 'foo-bar_dolor.ext',
    'foo-baz_lorem.ext', 'foo-baz_ipsum.ext', 'foo-baz_dolor.ext', 'foo-amet.ext', 'foo-sic.ext'];
const res = files.reduce((acc, file) => {
    const dir = file.split("-")[1].split("_")[0];
    acc[dir] = [...(acc[dir] ? acc[dir] : []), file];
    return acc;
}, {});
console.log(Object.values(res));
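With the sample list above, the accumulator keys are bar, baz, amet.ext and sic.ext, so the final console.log prints the four groups the question asks for:
[
    ['foo-bar_lorem.ext', 'foo-bar_ipsum.ext', 'foo-bar_dolor.ext'],
    ['foo-baz_lorem.ext', 'foo-baz_ipsum.ext', 'foo-baz_dolor.ext'],
    ['foo-amet.ext'],
    ['foo-sic.ext']
]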
After embedding it inside the callback:
function getFiles(myFolder) {
    return new Promise((resolve, reject) => {
        try {
            fs.readdir(myFolder, function (err, files) {
                if (err) {
                    // reject instead of only logging, so the promise always settles
                    return reject(new Error('Unable to scan directory: ' + err));
                }
                const res = files.reduce((acc, file) => {
                    const dir = file.split("-")[1].split("_")[0];
                    acc[dir] = [...(acc[dir] ? acc[dir] : []), file];
                    return acc;
                }, {});
                resolve(Object.values(res));
            });
        } catch (err) {
            reject(err);
        }
    });
}
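A brief usage sketch, assuming the script runs next to the files from the question:
getFiles(path.join(__dirname))
    .then(groups => console.log(groups)) // the grouped arrays
    .catch(err => console.error(err));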

Node.js: await moving files, then compress images

I wrote this code that checks image file sizes in a folder; if a file is bigger than 30000 bytes, it is moved to a temporary folder called 'before-compress'. The compressImages() function iterates over the 'before-compress' folder and returns the compressed images to the original folder. My question is: how can I await the process of moving the oversized files and then call the compressImages() function? As you can see in the code, I handle this with a setTimeout once the forEach reaches the last item. Thanks in advance.
const fs = require('fs');
const path = require('path');
const imagemin = require("imagemin");
const imageminMozjpeg = require("imagemin-mozjpeg");
const imageminPngquant = require("imagemin-pngquant");
const imageminGifsicle = require('imagemin-gifsicle');
const directoryPath = path.join(__dirname, 'uploads');
fs.readdir(`${directoryPath}/products`, function (err, files) {
    if (err) {
        return console.log('Unable to scan directory: ' + err);
    }
    files.forEach(function (file) {
        console.log(`File: ${file} - Size: ${getFilesizeInBytes(file)} bytes`);
        if (getFilesizeInBytes(file) > 30000) {
            moveFile(file);
        }
        if (files.indexOf(file) == files.length - 1) {
            //console.log('last index');
            setTimeout(() => compressImages(), 4000);
        }
    });
});
function getFilesizeInBytes(fileName) {
    var stats = fs.statSync(`${directoryPath}/products/${fileName}`);
    var fileSizeInBytes = stats.size;
    return fileSizeInBytes;
}
function moveFile(file) {
    var oldPath = `${directoryPath}/products/${file}`;
    var newPath = `${directoryPath}/before-compress/${file}`;
    fs.rename(oldPath, newPath, function (err) {
        if (err) throw err;
        console.log(`File ${file} moved!`);
    });
}
function compressImages() {
    fs.readdir(`${directoryPath}/before-compress`, function (err, files) {
        if (err) {
            return console.log('Unable to scan directory: ' + err);
        }
        files.forEach(function (file) {
            console.log(`File to compress: ${file}`);
            let fileExt = file.split('.')[1];
            let compressPlugin = fileExt == 'jpg' || fileExt == 'jpeg' ? imageminMozjpeg({ quality: 40 }) :
                fileExt == 'png' ? imageminPngquant({ quality: [0.5, 0.6] }) :
                    fileExt == 'gif' ? imageminGifsicle() : 0;
            (async () => {
                const files = await imagemin([`./uploads/before-compress/${file}`], {
                    destination: './uploads/products',
                    plugins: [compressPlugin]
                });
                fs.unlink(`${directoryPath}/before-compress/${file}`, err => err ? console.log(err) : 0);
            })();
        });
    });
}
This kind of code would become much more readable if you converted all the functions from using callbacks to using async/await.
If you want to keep using callbacks, however, there are two options:
Make moveFile() use fs.renameSync() instead of fs.rename(). Normally I would advise against that, but since you are already using fs.statSync() and I suppose you run this as a script with nothing in parallel, maybe that would be an acceptable solution.
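A minimal sketch of that synchronous variant, reusing the paths from moveFile above:
function moveFile(file) {
    var oldPath = `${directoryPath}/products/${file}`;
    var newPath = `${directoryPath}/before-compress/${file}`;
    fs.renameSync(oldPath, newPath); // blocks until the move has finished
    console.log(`File ${file} moved!`);
}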
Or make moveFile() accept a callback:
function moveFile(file, callback) {
    // [...]
    fs.rename(oldPath, newPath, callback);
}
Now you can use this callback to detect when the file has been moved, for example like this:
// [...]
var done = 0;
var error = false;
function checkDone() {
    // call compressImages() exactly once, after every file has been handled
    if (!error && done == files.length) {
        compressImages();
    }
}
files.forEach(function (file) {
    if (error) return;
    if (getFilesizeInBytes(file) > 30000) {
        moveFile(file, function (err) {
            if (err) { console.log(err); error = true; return; }
            done++;
            checkDone();
        });
    } else {
        done++;
        checkDone();
    }
});
// [...]
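For comparison, a minimal async/await sketch of the whole move-then-compress flow, assuming the same directory layout and reusing compressImages() unchanged:
const fsp = require('fs').promises;

async function moveLargeFilesThenCompress() {
    const files = await fsp.readdir(`${directoryPath}/products`);
    // Move all oversized files in parallel and wait for every rename to finish.
    await Promise.all(files.map(async (file) => {
        const stats = await fsp.stat(`${directoryPath}/products/${file}`);
        if (stats.size > 30000) {
            await fsp.rename(
                `${directoryPath}/products/${file}`,
                `${directoryPath}/before-compress/${file}`
            );
        }
    }));
    compressImages(); // runs only after all moves have completed
}

moveLargeFilesThenCompress().catch(err => console.error(err));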

How can I generate a `V1Job` object for the Kubernetes nodejs API client from a yaml file?

I've done this previously in Python using:
with open(path.join(path.dirname(__file__), "job.yaml")) as f:
    body = yaml.safe_load(f)
try:
    api_response = api_instance.create_namespaced_job(namespace, body)
Looking at the source of the Node.js API client:
public createNamespacedJob (namespace: string, body: V1Job, includeUninitialized?: boolean, pretty?: string, dryRun?: string, options: any = {}) : Promise<{ response: http.IncomingMessage; body: V1Job; }> {
How can I generate the V1Job?
I've tried the below but get back a very verbose error message / response:
const k8s = require('@kubernetes/client-node');
const yaml = require('js-yaml');
const fs = require('fs');
const kc = new k8s.KubeConfig();
kc.loadFromDefault();
const k8sApi = kc.makeApiClient(k8s.BatchV1Api);
var namespace = {
    metadata: {
        name: 'test123',
    },
};
try {
    var job = yaml.safeLoad(fs.readFileSync('job.yaml', 'utf8'));
    k8sApi.createNamespacedJob(namespace, job).then(
        (response) => {
            console.log('Created namespace');
            console.log("Success!");
        },
        (err) => {
            console.log(err);
            console.log(job);
            console.log("Err");
        },
    );
} catch (e) {
    console.log(e);
}
V1Job seems to be an ordinary object, so the below worked.
The namespace had to be a string rather than an object...
const k8s = require('@kubernetes/client-node');
const yaml = require('js-yaml');
const fs = require('fs');
const kc = new k8s.KubeConfig();
kc.loadFromDefault();
const k8sApi = kc.makeApiClient(k8s.BatchV1Api);
try {
    var job = yaml.safeLoad(fs.readFileSync('job.yaml', 'utf8'));
    k8sApi.createNamespacedJob("default", job).then(
        (response) => {
            console.log("Success");
        },
        (err) => {
            console.log(e);
            process.exit(1);
        },
    );
} catch (e) {
    console.log(e);
    process.exit(1);
}
This is the same as chris-stryczynski's example with two slight modifications. Also please note that running chris-stryczynski's example with Node.js 8 results in (at least on my side), upon execution of k8sApi.createNamespacedJob:
TypeError [ERR_INVALID_ARG_TYPE]: The "original" argument must be of type function at promisify
This error does not occur with Node.js 12.
Here is the modified version:
const k8s = require('@kubernetes/client-node');
const yaml = require('js-yaml');
const fs = require('fs');
const kc = new k8s.KubeConfig();
kc.loadFromDefault(); // You might consider using kc.loadFromFile(...) here
const k8sApi = kc.makeApiClient(k8s.BatchV1Api);
try {
    var job = yaml.load(fs.readFileSync('job.yaml', 'utf8')); // Change#1 safeLoad->load
    k8sApi.createNamespacedJob("default", job).then(
        (response) => {
            console.log("Success");
        },
        (err) => {
            console.log(err); // Change#2 e->err
            process.exit(1);
        },
    );
} catch (e) {
    console.log(e);
    process.exit(1);
}
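As a small variant, the client package itself ships a YAML helper, so js-yaml is not strictly required. A sketch assuming a client version that exports loadYaml:
const k8s = require('@kubernetes/client-node');
const fs = require('fs');

const kc = new k8s.KubeConfig();
kc.loadFromDefault();
const k8sApi = kc.makeApiClient(k8s.BatchV1Api);

// loadYaml parses the manifest into a plain object compatible with V1Job
const job = k8s.loadYaml(fs.readFileSync('job.yaml', 'utf8'));
k8sApi.createNamespacedJob('default', job)
    .then(() => console.log('Success'))
    .catch((err) => { console.log(err); process.exit(1); });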

What's the best way to copy files from 2 different folders to a new one using JavaScript?

I'm setting up a program that will check two different folders and copy all the files from them into a third one. The problem for me here is how to copy them without knowing their names.
var fs = require("fs");
fs.renameSync("/home/oem/git/test/folder1/test1.js", "/home/oem/git/test/folder1/test1CHANGED.js");
console.log("file renamed");
fs.rename("/home/oem/git/test/folder1/test2", "/home/oem/git/test/folder2", function (err) {
    if (err) {
        console.log(err);
    } else {
        console.log("file moved successfully");
    }
});
With the code above I can move files whose names I write manually; I want to implement it so that it automatically scans the folder and moves the files to the other one.
Here is a function that could help you do this:
const fs = require('fs');
const path = require('path');
function copyFiles() {
    const firstFolder = 'firstFolder';
    const secondFolder = 'secondFolder';
    const destinationFolder = 'destinationFolder';
    const firstDir = path.join(__dirname, firstFolder);
    const secondDir = path.join(__dirname, secondFolder);
    const destDir = path.join(__dirname, destinationFolder);
    fs.readdir(firstDir, (err, files) => {
        if (err) {
            throw err;
        }
        for (let i = 0; i < files.length; i += 1) {
            fs.copyFile(firstDir + '/' + files[i], destDir + '/' + files[i], function (err) {
                if (err)
                    throw err;
            });
        }
    });
    fs.readdir(secondDir, (err, files) => {
        if (err) {
            throw err;
        }
        for (let i = 0; i < files.length; i += 1) {
            fs.copyFile(secondDir + '/' + files[i], destDir + '/' + files[i], function (err) {
                if (err)
                    throw err;
            });
        }
    });
}
copyFiles();
copyFiles();
You should take a look at the Node docs, where this is covered in more detail.
Assuming there are two folders, folder1 and folder2, where folder1 contains a file word.txt and folder2 is empty, and that the script lives next to the two folders, you can write:
const fs = require('fs');
// destination.txt will be created or overwritten by default.
// you can do the renaming here
fs.copyFile('./folder1/word.txt', './folder2/destination.txt', (err) => {
    if (err) throw err;
    console.log('word.txt was copied to destination.txt');
});
The fastest way to copy files:
const fs = require('fs');
function copies(fs, files, destination) {
    // let keeps each callback bound to the right index
    for (let i = 0; i < files.length; i++) {
        // destination file will be created or overwritten by default
        fs.copyFile(files[i], destination + '/' + files[i].replace(/^.*[\\\/]/, ''), (err) => {
            if (err) throw err;
            console.log(files[i] + ' was copied to ' + destination);
        });
    }
}
var files = ['/path/to/source/files.txt', '/sources/files/files2.txt'];
var destination = '/file/would/copy/to';
copies(fs, files, destination);

How to copy multiple files using fs.copyFile node?

I am using fs.copyFile to copy files from one location to another. I am doing this twice in order to copy two files. It's redundant and I would like to make my code better by copying both files to the destination with a single call. How can I achieve this?
fs.copyFile('src/blah.txt', 'build/blah.txt', (err) => {
    if (err) throw err;
});
fs.copyFile('src/unk.txt', 'build/unk.txt', (err) => {
    if (err) throw err;
});
You can simply create a function of your own that takes the src and dest path and an array of filenames as arguments:
const util = require('util');
const fs = require('fs');
const path = require('path');
const copyFilePromise = util.promisify(fs.copyFile);
function copyFiles(srcDir, destDir, files) {
    return Promise.all(files.map(f => {
        return copyFilePromise(path.join(srcDir, f), path.join(destDir, f));
    }));
}
// usage
copyFiles('src', 'build', ['unk.txt', 'blah.txt']).then(() => {
    console.log("done");
}).catch(err => {
    console.log(err);
});
Probably the best option is to use fs-extra:
const fse = require('fs-extra');
const srcDir = `path/to/file`;
const destDir = `path/to/destination/directory`;
// To copy a folder; copySync is synchronous and throws on error,
// so it takes no callback
try {
    fse.copySync(srcDir, destDir);
    console.log("success!");
} catch (err) {
    console.error(err);
}
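Note that fs-extra's asynchronous copy returns a promise when called without a callback, so a non-blocking variant is, as a sketch:
const fse = require('fs-extra');

// copy() returns a promise when no callback is given
fse.copy('path/to/file', 'path/to/destination/directory')
    .then(() => console.log('success!'))
    .catch(err => console.error(err));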
const fs = require('fs');
const path = require('path');
const files = ['/files/a.js', '/files/b.js', '/files/c.txt'];
files.forEach(file => {
    fs.copyFile(path.join(__dirname, file), path.join(__dirname, '/files/backup/', path.basename(file)), err => {
        if (!err) {
            console.log(file + " has been copied!");
        }
    });
});
Use the following code if you do not want existing files to be overwritten.
const fs = require('fs');
const path = require('path');
const files = ['/files/a.js', '/files/b.js', '/files/c.txt'];
files.forEach(file => {
    let basename = path.basename(file);
    let oldFile = path.join(__dirname, file);
    let newFile = path.join(__dirname, '/files/backup/', basename);
    if (!fs.existsSync(newFile)) {
        fs.copyFile(oldFile, newFile, err => {
            if (!err) {
                console.log(basename + " has been copied!");
            }
        });
    } else {
        console.log(basename + " already existed!");
    }
});
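As an alternative to the existsSync check, fs.copyFile accepts a mode argument; fs.constants.COPYFILE_EXCL makes the copy fail if the destination already exists, which avoids the race between the check and the copy. A sketch for one file:
const fs = require('fs');
const path = require('path');

const src = path.join(__dirname, '/files/a.js');
const dest = path.join(__dirname, '/files/backup/a.js');

// COPYFILE_EXCL: the operation fails if dest already exists
fs.copyFile(src, dest, fs.constants.COPYFILE_EXCL, err => {
    if (err) console.log(err.code === 'EEXIST' ? 'a.js already existed!' : err);
    else console.log('a.js has been copied!');
});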
This is how I implemented the function above with the walk npm package (https://www.npmjs.com/package/walk) to get all the files.
This gets all files within subfolders as well.
It worked for copying 16,000 images from my GoPro into one single folder on my desktop.
const util = require('util');
const fs = require('fs');
const path = require('path');
const copyFilePromise = util.promisify(fs.copyFile);
const walk = require('walk');

let files = [];
let source_folder = '/Volumes/Untitled/DCIM';
let destination_folder = '/Users/dave/Desktop/pics';

let walker = walk.walk(source_folder, {
    followLinks: false
});
walker.on('file', function (root, stat, next) {
    let file_path = root + '/' + stat.name;
    files.push({
        src: file_path,
        des: destination_folder + '/' + stat.name
    });
    next();
});
walker.on('end', function () {
    copyFiles(files).then(() => {
        console.log("done");
    }).catch(err => {
        console.log(err);
    });
});
function copyFiles(files) {
    return Promise.all(files.map(f => {
        return copyFilePromise(f.src, f.des);
    }));
}
