Unzip and process files in Node.js - javascript

I can't figure out what I'm doing wrong here. I'm trying to unzip the files, then go through them, creating a new HTML file in each folder and also renaming the image files.
I tested it and the script appears to wait for the files to unzip before going through them, but I always get the following error:
(node:3656) UnhandledPromiseRejectionWarning: Error: ENOENT: no such file or directory, scandir 'D:\Sites\rename-files\files\cloud'
[0] at Object.readdirSync (fs.js:955:3)
[0] at processDirectories (D:\Sites\rename-files\src\controllers\goDirs.js:8:12)
[0] at goDirs (D:\Sites\rename-files\src\controllers\goDirs.js:31:5)
[0] at createBanners (D:\Sites\rename-files\src\controllers\RenameController.js:18:33)
[0] at processTicksAndRejections (internal/process/task_queues.js:97:5)
Here are my files:
RenameController.js
const unzipFiles = require('./unzipFiles')
const goDirs = require('./goDirs')
const RenameController = {
  async root(req, res) {
    res.send('hello root!');
  },

  async createBanners(req, res) {
    const { name, link, template } = req.body
    const { filename } = req.file
    const projectName = name.replace(' ', '-')

    try {
      const unzipPath = await unzipFiles(projectName, filename)
      const files = await goDirs(
        unzipPath,
        projectName,
        template,
        link
      )
      return res.json(JSON.stringify(files))
    } catch (err) {
      return res.json(err)
    }
  }
}
module.exports = RenameController
unzipFiles.js
const fs = require('fs')
const path = require('path')
const unzipper = require('unzipper')
const unzipFiles = (projectName, filename) => {
  const zipFile = path.join(__dirname, `../../files/${filename}`)
  const unzipPath = path.join(__dirname, `../../files/${projectName}`)

  return new Promise((resolve, reject) => {
    fs.createReadStream(zipFile)
      .pipe(unzipper.Extract({ path: unzipPath }))
      .on('close', resolve(unzipPath))
  })
}
module.exports = unzipFiles
goDirs.js
const fs = require('fs')
const path = require('path')
const createHtml = require('./createHtml')
let bannerFiles = []
const goDirs = (directory, projectName, template, link) => {
  const processDirectories = async (directory, projectName, template, link) => {
    fs.readdirSync(directory).forEach(function (file) {
      const absolute = path.join(directory, file)
      let filename = ''

      if (fs.lstatSync(absolute).isDirectory()) {
        createHtml(file, projectName, template, link)
        return processDirectories(absolute, projectName, template, link)
      } else {
        if (file.indexOf('background') >= 0) filename = 'background.jpg'
        else if (file.indexOf('copy') >= 0) filename = 'copy.png'
        else if (file.indexOf('cta') >= 0) filename = 'cta.png'
        else if (file.indexOf('logo') >= 0) filename = 'logo.png'

        fs.rename(
          absolute,
          absolute.replace(file, filename),
          () => {}
        )

        bannerFiles.push(absolute)
      }
    })
  }

  processDirectories(directory, projectName, template, link)

  return new Promise((resolve, reject) => {
    bannerFiles.length != 0 ? resolve(bannerFiles) : reject()
  })
}
module.exports = goDirs
Thanks!!

Apparently, you need to handle a possible rejection of the processDirectories Promise in goDirs.js.
processDirectories(directory, projectName, template, link)
Try adding a .catch(...) to this ☝️ call to your async method, like this:
processDirectories(directory, projectName, template, link).catch(/* your error handling code */)
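For example, a minimal sketch (the console.error handler is a placeholder, not code from the question):

processDirectories(directory, projectName, template, link)
  .catch(err => {
    // Placeholder handling: surface the ENOENT instead of letting it
    // become an UnhandledPromiseRejectionWarning.
    console.error('processDirectories failed:', err)
  })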

Are you sure the path D:\Sites\rename-files\files\cloud exists before you try to read that dir? If not, you should create it first, either manually or with fs.mkdir.
P.S. If you are using an up-to-date version of Node, you can use the 'fs/promises' module instead of promisifying fs methods or using the Sync ones. https://nodejs.org/api/fs.html
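A minimal sketch combining both suggestions (the helper name readDirSafe is just for illustration):

const fs = require('fs/promises')

async function readDirSafe(dir) {
  // Create the directory if it doesn't exist yet; a no-op otherwise.
  await fs.mkdir(dir, { recursive: true })
  return fs.readdir(dir)
}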

Related

dynamically import class from file and use a function in it

I have this file, ./src/commands/test.js:
export default class {
  run() {
    console.log('test!')
  }
}
I am trying to make a command that will get all commands in that folder and call their run function:
#! /usr/bin/env node
const fs = require('fs')

const folders = [
  './src/commands',
  //'./my_modules/diontron/unity/src/commands',
]

folders.forEach(folder => {
  fs.readdir(folder, (err, files) => {
    files.forEach(file => {
      if (file != 'index.js') {
        let commandClass = require(folder + '/' + file).default
        let command = new commandClass()
        command.run()
      }
    })
  })
})
This gives me the error:
Cannot find module './src/commands/test.js'
How do I dynamically go through all files in a folder, require/import them, and use the class function?
You can do it with dynamic import() rather than require. Note that you'll need a package.json in src or src/commands containing "type": "module", since test.js uses ESM but Node.js assumes CommonJS by default.
This works for me in that setup:
#! /usr/bin/env node
const fs = require("fs");
const folders = [
"./src/commands",
//"./my_modules/diontron/unity/src/commands",
];
folders.forEach(folder => {
fs.readdir(folder, (err, files) => {
files.forEach(file => {
if (file != "index.js") {
const modFile = folder + "/" + file;
import(modFile)
.then(({ default: commandClass }) => {
const command = new commandClass();
command.run();
})
.catch(error => {
console.log(`Failed to load module ${modFile}`);
});
}
});
});
});
That runs the code in parallel, though. If you want to run it in series (each waiting for the previous one to complete), you might do this (note the change from "fs" to "fs/promises" at the top):
#! /usr/bin/env node
const fs = require("fs/promises");
const folders = [
"./src/commands",
//"./my_modules/diontron/unity/src/commands",
];
(async () => {
for (const folder of folders) {
for (const folder of folders) {
const files = await fs.readdir(folder);
for (const file of files) {
const { default: commandClass } = await import(folder + "/" + file);
const command = new commandClass();
command.run();
}
}
}
})()
.catch(error => {
console.error(error);
});
That relies on the script being run from the parent folder of the folders you're searching. If you want to use the current path instead, you should use process.cwd() to get it, then use that to resolve the folders:
#! /usr/bin/env node
const fs = require("fs");
const path = require("path"); // ***
const folders = [
"./src/commands",
//"./my_modules/diontron/unity/src/commands",
];
const currentDir = process.cwd(); // ***
folders.forEach(folder => {
fs.readdir(folder, (err, files) => {
const fullFolder = path.join(currentDir, folder); // ***
files.forEach(file => {
if (file != "index.js") {
const modFile = fullFolder + "/" + file; // ***
import(modFile)
.then(({ default: commandClass }) => {
const command = new commandClass();
command.run();
})
.catch(error => {
console.log(`Failed to load module ${modFile}`);
});
}
});
});
});
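One hedged aside that isn't part of the answer above: on Windows, import() of an absolute path such as D:\... must be given as a file:// URL. Node's built-in url.pathToFileURL does that conversion; a sketch, reusing the modFile built in the snippet above:

const { pathToFileURL } = require("url");

// Convert an absolute filesystem path into a file:// URL that dynamic
// import() accepts on every platform, including Windows drive paths.
import(pathToFileURL(modFile).href)
  .then(({ default: commandClass }) => {
    new commandClass().run();
  });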

Node.js get all files in directory with 0 bytes

I have a directory with hundreds of thousands of files. Some of them are 0 bytes because of connection errors, and I would like to download them again. How do I get their filenames based on their size?
If using fs/promises is an option, the code can be quite concise. This version has a small additional feature: it excludes directories by passing the withFileTypes option to readdir, then checking isFile() on the resulting Dirent objects:
const fs = require('fs/promises');
const path = require('path');

(async () => {
  const fileEntries = await fs.readdir(__dirname, {
    encoding: 'utf8',
    withFileTypes: true
  });
  const zeroSizeFiles = await Promise.all(
    fileEntries
      .filter(dirent => dirent.isFile())
      // Resolve against __dirname: fs.stat(name) alone would resolve the
      // name relative to the current working directory instead.
      .map(async ({ name }) => (await fs.stat(path.join(__dirname, name))).size ? null : name)
  );
  console.log(zeroSizeFiles.filter(Boolean));
})()
If this check is not required (no subdirectories), it's even more concise:
const fs = require('fs/promises');
const path = require('path');

(async () => {
  const filenames = await fs.readdir(__dirname);
  const zeroSizeFiles = await Promise.all(
    filenames.map(async name => (await fs.stat(path.join(__dirname, name))).size ? null : name)
  );
  console.log(zeroSizeFiles.filter(Boolean));
})()
Taken from here: Get all files with infos(name, type, size) within directory node-fs
const path = require('path');
const fs = require('fs');

const getFileInfoFromFolder = (route) => {
  const files = fs.readdirSync(route, 'utf8');
  const response = [];
  for (let file of files) {
    // Stat the full path; statSync(file) alone would resolve relative to
    // the current working directory, not to route.
    const fileSizeInBytes = fs.statSync(path.join(route, file)).size;
    if (fileSizeInBytes == 0) {
      response.push(file);
    }
  }
  return response;
}

const zero_size_files = getFileInfoFromFolder("...")
What if we use the command line to list all empty files in a folder?
find ./ -name '*.txt' -size 0
I believe this is one of the fastest ways, if shelling out is an option (note that find assumes a Unix-like environment) :D
const { exec } = require('child_process');

exec("find ./ -name '*.txt' -size 0", (err, stdout, stderr) => {
  if (err) {
    // node couldn't execute the command
    return;
  }
  // the *entire* stdout and stderr (buffered)
  console.log(`stdout: ${stdout}`);
  console.log(`stderr: ${stderr}`);
});

How to check for files with a given extension which are present in an array list using Node.js

I want to check a folder for filenames that are present in an array, select only the ones present in the array list (using an if condition),
and return the values that are present inside fileArray:
let extensionArray = [".html", ".htm", ".aspx"];
let fileArray = [
  "index.html",
  "index.htm",
  "index.aspx",
  "Index.html",
  "Index.htm",
  "Index.aspx",
  "default.html",
  "default.htm",
  "default.aspx",
  "Default.html",
  "Default.htm",
  "Default.aspx",
];

// Pseudocode of the condition I'm after; this doesn't work as written:
if (!extensionArray.includes(true)) {
  if (!fileArray.includes(true)) {
    // return the output
  }
}
I have checked one of the posts in which a file can be found across all folders and subfolders,
but I don't know where to apply my condition to check the extension and file name and then return it.
The code is as follows:
const fs = require('fs').promises; // promise-based API, so readdir/stat can be awaited
const path = require('path');

async function getFile(dir) {
  let files = await fs.readdir(dir);
  files = await Promise.all(
    files.map(async (file) => {
      const filePath = path.join(dir, file);
      const stats = await fs.stat(filePath);
      if (stats.isDirectory()) return getFile(filePath);
      else if (stats.isFile()) return filePath;
    })
  );
  return files.reduce((all, folderContents) => all.concat(folderContents), []);
}
You don't have to add the filenames in both capitalized and lowercase forms to fileArray. You can convert the filenames to lowercase when filtering them. And you can add the filenames to a Set. Also, you don't need extensionArray since you're going to check for the filenames directly. Once you have the list of file paths in the directory by calling the getFilePaths function, you can filter them by checking if the lowercased filename (obtained by splitting the file path by / and getting the last element in the array) is present in the set.
const fs = require('fs').promises
const path = require('path')

const filenames = new Set([
  'index.html',
  'index.htm',
  'index.aspx',
  'default.html',
  'default.htm',
  'default.aspx',
])

const getFilePaths = async (dir) => {
  let files = await fs.readdir(dir)
  files = await Promise.all(
    files.map(async (file) => {
      const filePath = path.join(dir, file)
      const stats = await fs.stat(filePath)
      if (stats.isDirectory()) {
        return getFilePaths(filePath)
      }
      return filePath
    })
  )
  return files.flat()
}

const filterFiles = async (dir) => {
  const paths = await getFilePaths(dir)
  const filteredFiles = paths.filter((filePath) => {
    const parts = filePath.split('/')
    const filename = parts[parts.length - 1]
    return filenames.has(filename.toLowerCase())
  })
  console.log(filteredFiles)
}

filterFiles('.')
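A hedged aside, an assumption about the environment rather than part of the answer: splitting on '/' only works for POSIX-style paths. path.basename is the portable way to get the filename on any platform:

const path = require('path')

// Equivalent to splitting on the separator and taking the last part,
// but handles both '/' and '\' separators.
const filename = path.basename(filePath)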

Node.js batch create files in every folder in the current directory with a loop

I have some 20 subdirectories inside the current directory. I would like to create a 'README.md' in every folder inside the current directory. Here is what I've tried so far:
const fs = require('fs').promises;
const path = require('path');

async function readDirectory(filePath) {
  try {
    const data = await fs.readdir(filePath);
    data.forEach(async file => {
      fs.writeFile('README.md', file);
    });
  } catch (err) {
    console.log(err.message);
  }
}

readDirectory(path.resolve(__dirname));
but the result is not what I'm looking for. Any suggestions?
The first writeFile parameter should be the full path of the file you want to create/write:
fs.writeFile( file, data, options, callback )
So I changed your code a little bit:
const fs = require('fs');
const path = require('path');

function readDirectory(filePath) {
  const data = fs.readdirSync(filePath);
  // Stat the full path; lstatSync(item) alone would resolve relative to
  // the current working directory rather than filePath.
  const folders = data.filter((item) =>
    fs.lstatSync(path.join(filePath, item)).isDirectory()
  );
  folders.map((folder) => {
    const subFilePath = path.join(filePath, folder, 'README.md');
    fs.writeFile(subFilePath, '', (err) => {
      if (err) console.log(err);
      else {
        console.log(`File ${subFilePath} created`);
      }
    });
  });
}

readDirectory(path.resolve(__dirname));
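Since the question started from fs.promises, here is a hedged sketch of the same idea with the promise-based API, so errors can be awaited (assumes Node 14+ for the fs/promises specifier; createReadmes is just an illustrative name):

const fs = require('fs/promises');
const path = require('path');

async function createReadmes(dir) {
  // withFileTypes gives Dirent objects, so no extra stat calls are needed.
  const entries = await fs.readdir(dir, { withFileTypes: true });
  await Promise.all(
    entries
      .filter((entry) => entry.isDirectory())
      .map((entry) => fs.writeFile(path.join(dir, entry.name, 'README.md'), ''))
  );
}

createReadmes(__dirname).catch(console.error);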

How to copy multiple files using fs.copyFile in Node?

I am using fs.copyFile to copy files from one location to another. I am doing this twice in order to copy two files. It's redundant and I would like to make my code better by maybe copying both files to the destination with a single call? How can I achieve this?
fs.copyFile('src/blah.txt', 'build/blah.txt', (err) => {
  if (err) throw err;
});

fs.copyFile('src/unk.txt', 'build/unk.txt', (err) => {
  if (err) throw err;
});
You can simply create a function of your own that takes the src and dest path and an array of filenames as arguments:
const util = require('util');
const fs = require('fs');
const path = require('path');

const copyFilePromise = util.promisify(fs.copyFile);

function copyFiles(srcDir, destDir, files) {
  return Promise.all(files.map(f => {
    return copyFilePromise(path.join(srcDir, f), path.join(destDir, f));
  }));
}

// usage
copyFiles('src', 'build', ['unk.txt', 'blah.txt']).then(() => {
  console.log("done");
}).catch(err => {
  console.log(err);
});
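On newer Node versions, a sketch of the same helper without util.promisify, using the built-in promise API (the fs/promises specifier needs Node 14+):

const fs = require('fs/promises');
const path = require('path');

function copyFiles(srcDir, destDir, files) {
  // fs.promises.copyFile already returns a Promise, so no promisify is needed.
  return Promise.all(
    files.map(f => fs.copyFile(path.join(srcDir, f), path.join(destDir, f)))
  );
}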
Probably the best option is to use fs-extra (note that copySync throws on error rather than taking a callback):
const fse = require('fs-extra');

const srcDir = `path/to/file`;
const destDir = `path/to/destination/directory`;

// To copy a folder
try {
  fse.copySync(srcDir, destDir);
  console.log("success!");
} catch (err) {
  console.error(err);
}
const fs = require('fs');
const path = require('path');

const files = ['/files/a.js', '/files/b.js', '/files/c.txt'];

files.forEach(file => {
  fs.copyFile(path.join(__dirname, file), path.join(__dirname, '/files/backup/', path.basename(file)), err => {
    if (!err) {
      console.log(file + " has been copied!");
    }
  })
});
Use the following code if you don't want existing files to be forcibly replaced.
const fs = require('fs');
const path = require('path');

const files = ['/files/a.js', '/files/b.js', '/files/c.txt'];

files.forEach(file => {
  let basename = path.basename(file);
  let oldFile = path.join(__dirname, file);
  let newFile = path.join(__dirname, '/files/backup/', basename);

  if (!fs.existsSync(newFile)) {
    fs.copyFile(oldFile, newFile, err => {
      if (!err) {
        console.log(basename + " has been copied!");
      }
    });
  } else {
    console.log(basename + " already existed!");
  }
});
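A hedged aside: fs.copyFile itself can refuse to overwrite via the COPYFILE_EXCL mode flag, which also avoids the small race between the existsSync check and the copy. A minimal sketch (copyIfAbsent is just an illustrative name):

const fs = require('fs');
const path = require('path');

function copyIfAbsent(oldFile, newFile) {
  // COPYFILE_EXCL makes copyFile fail with EEXIST if newFile already
  // exists, so no separate existsSync check is needed.
  fs.copyFile(oldFile, newFile, fs.constants.COPYFILE_EXCL, err => {
    if (err && err.code === 'EEXIST') {
      console.log(path.basename(newFile) + " already existed!");
    } else if (!err) {
      console.log(path.basename(newFile) + " has been copied!");
    }
  });
}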
This is how I implemented the function above with the walk npm package to get all the files.
https://www.npmjs.com/package/walk
This gets all files within subfolders as well.
It worked for copying 16,000 images from my GoPro into one single folder on my desktop.
const util = require('util');
const fs = require('fs');
const path = require('path');
const walk = require('walk');

const copyFilePromise = util.promisify(fs.copyFile);

let files = [];
let source_folder = '/Volumes/Untitled/DCIM';
let destination_folder = '/Users/dave/Desktop/pics';

let walker = walk.walk(source_folder, {
  followLinks: false
});

walker.on('file', function (root, stat, next) {
  let file_path = root + '/' + stat.name;
  files.push({
    src: file_path,
    des: destination_folder + '/' + stat.name
  });
  next();
});

walker.on('end', function () {
  copyFiles(files).then(() => {
    console.log("done");
  }).catch(err => {
    console.log(err);
  });
});

function copyFiles(files) {
  return Promise.all(files.map(f => {
    return copyFilePromise(f.src, f.des);
  }));
}
