Find the newest folder in a directory with NodeJS

I need to find a way to get the newest folder created in a directory. Usually there are 3-5 different folders inside the parent folder, and I want to find the newest one either by folder name (newest version numbering) or by date and time created. Generally the folder name would look like this:
version-0101xxxxxxxxxxxx
(x representing the newest version)

You can get directories with 'fs' using 'readdirSync', then look inside each directory recursively.
This code snippet is adapted from a previously answered question:
const fs = require('fs');
const path = require('path');

function flatten(lists) {
  return lists.reduce((a, b) => a.concat(b), []);
}

function getDirectories(srcpath) {
  return fs.readdirSync(srcpath)
    .map(file => path.join(srcpath, file))
    .filter(fullPath => fs.statSync(fullPath).isDirectory()); // avoid shadowing the path module
}

function getDirectoriesRecursive(srcpath) {
  return [srcpath, ...flatten(getDirectories(srcpath).map(getDirectoriesRecursive))];
}

let dirs = getDirectoriesRecursive(__dirname);
console.info(dirs);
This will get you an array with all directories, then you just need to loop your array with the condition you need.
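For example, here is a minimal sketch that picks the most recently modified directory from that array (using mtimeMs here; swap in whatever comparison you need):
const newest = dirs
  .map(dir => ({ dir, mtime: fs.statSync(dir).mtimeMs })) // pair each directory with its mtime
  .reduce((a, b) => (a.mtime > b.mtime ? a : b)); // keep the most recently modified
console.info(newest.dir);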

You can use Node.js's built-in fs module for this. First of all, you need to list all the files and folders in the directory that you intend to search.
let dirs = await fs.readdir(dirName, {
  withFileTypes: true,
});
You can use fs.promises.readdir for this. Make sure to add the option "withFileTypes" because it gives additional information that you can use to distinguish folders from files.
You can then filter all files like this:
dirs = dirs.filter((file) => file.isDirectory()).map((dirent) => dirent.name);
This gives you a result like ["firstFolder", "secondFolder", etc]
And then you can loop through the above array and find the newest folder by using the fs.promises.stat method. (Note that ctimeMs is the last status-change time; if you specifically need creation time, compare birthtimeMs instead.)
let newestFolder = await fs.stat(path.join(dirName, dirs[0]));
let newestFolderName = dirs[0];
for (let directory of dirs) {
  const stats = await fs.stat(path.join(dirName, directory));
  if (stats.ctimeMs > newestFolder.ctimeMs) {
    newestFolder = stats;
    newestFolderName = directory;
  }
}
And then you can create the absolute path of that directory using the path module:
const absolutePath = path.resolve(path.join(dirName, newestFolderName));
Here is how the function looks using promises:
const fs = require("fs/promises");
const path = require("path");

async function getNewestDirectory(dirName) {
  let dirs = await fs.readdir(dirName, {
    withFileTypes: true,
  });
  dirs = dirs.filter((file) => file.isDirectory()).map((dirent) => dirent.name);

  let newestFolder = await fs.stat(path.join(dirName, dirs[0]));
  let newestFolderName = dirs[0];
  for (let directory of dirs) {
    const stats = await fs.stat(path.join(dirName, directory));
    if (stats.ctimeMs > newestFolder.ctimeMs) {
      newestFolder = stats;
      newestFolderName = directory;
    }
  }
  console.log("newestFolder", newestFolder);
  console.log("newestFolderName", newestFolderName);
  return path.resolve(path.join(dirName, newestFolderName));
}
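Usage (the directory name here is just an example):
getNewestDirectory("./projects").then((newestPath) => console.log(newestPath));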
And using the synchronous approach (not recommended, since it blocks the event loop):
const fs = require("fs");
const path = require("path");

let dirs = fs.readdirSync(".", {
  withFileTypes: true,
});
dirs = dirs.filter((file) => file.isDirectory()).map((dirent) => dirent.name);

let newestFolder = fs.statSync(path.join(".", dirs[0]));
let newestFolderName = dirs[0];
for (let directory of dirs) {
  const stats = fs.statSync(path.join(".", directory));
  if (stats.ctimeMs > newestFolder.ctimeMs) {
    newestFolder = stats;
    newestFolderName = directory;
  }
}
console.log("newestFolder", newestFolder);
console.log("newestFolderName", newestFolderName);
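Since the question also mentions picking the newest folder by version number in its name, here is a minimal sketch, assuming names like version-0101xxxxxxxxxxxx where the zero-padded numeric part compares correctly as a string:
const newestByName = dirs
  .filter((name) => name.startsWith("version-")) // keep only version folders
  .sort() // lexicographic sort; works for zero-padded version strings
  .pop(); // greatest string, i.e. the newest version
console.log("newestByName", newestByName);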

Related

How to verify if a file is downloaded with a particular extension instead of whole name?

I created a test that:
- erases the existing download directory (cypress/download)
- downloads a file (.pdf/.csv/.txt/and so on)
- makes an assertion: you provide an expected filename, for instance 'correctFile.pdf', and Cypress matches that expected name against the filename downloaded into cypress/download.
In this example the downloaded file is cypress/download/TEST.pdf.
TEST:
it.only("should download file", () => {
  cy.task('deleteDirectory', downloadsFolder); // delete the download directory
  cy.get("downloadButton").click(); // download the 'TEST.pdf' file
  cy.task('isExistDownloadedFile', 'TEST.pdf').should('equal', true); // assertion
  cy.task('deleteDirectory', downloadsFolder); // delete the download directory again
})
plugins/index.js
--snip--
const path = require('path');
const fs = require('fs');

const downloadDirectory = path.join(__dirname, '..', 'downloads');

const findDownloadedFile = (filename) => {
  const fileName = `${downloadDirectory}/${filename}`;
  return fs.existsSync(fileName);
};

const hasFile = (filename, ms) => {
  const delay = 10;
  return new Promise((resolve, reject) => {
    if (ms < 0) {
      return reject(
        new Error(`Could not find any file ${downloadDirectory}/${filename}`)
      );
    }
    const found = findDownloadedFile(filename);
    if (found) {
      return resolve(true);
    }
    // poll again after a short delay until the timeout budget runs out
    setTimeout(() => {
      hasFile(filename, ms - delay).then(resolve, reject);
    }, delay);
  });
};
--snip--
// find the downloaded file and match its name against the expected filename
on('task', {
  isExistDownloadedFile(filename, ms = 4000) {
    return hasFile(filename, ms);
  },
});
return config;
My question:
Now I can only assert the entire file name, e.g.
TEST.pdf (downloaded) equals TEST.pdf (expected) RESULT: PASSED
but how can I make my program also accept a partial match, for instance just the extension:
TEST.pdf (downloaded) contains .pdf (expected) RESULT: PASSED
Install glob and use it in place of fs.existsSync()
plugins/index.js
...
const glob = require("glob");

const findDownloadedFile = (filename) => {
  // glob the pattern inside the downloads directory; supports wildcards like '*.pdf'
  const matches = glob.sync(`${downloadDirectory}/${filename}`);
  return (matches.length > 0);
}
test
// partial name
cy.task('isExistDownloadedFile', '*.pdf').should('equal', true)
// still works with full name
cy.task('isExistDownloadedFile', 'TEST.pdf').should('equal', true)
You could implement something like this answer to filter the entire list of files in the directory.
const findDownloadedFile = (filename) => {
  const files = fs.readdirSync(downloadDirectory).filter((x) => x.includes(filename));
  return files.length > 0; // return a boolean so the .should('equal', true) assertion passes
};

Node JS can't find module

I have a files.js file and am requiring it at the start of my code.
When I use:
const files = require('./lib/files');
I get this error:
Error: Cannot find module './lib/files'
However if I test it with another one of the files in the same folder like:
const files = require('./lib/repo');
It runs.
files.js:
const fs = require('fs');
const path = require('path');

module.exports = {
  getCurrentDirectoryBase: () => {
    return path.basename(process.cwd());
  },
  directoryExists: (filePath) => {
    return fs.existsSync(filePath);
  }
};
I would show the output of the tree command, however I have too many node modules installed to show it clearly, so:
const { getCurrentDirectoryBase, directoryExists } = require('./lib/files')
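One way to debug this kind of error is require.resolve, which runs the same module lookup without executing the file, so it throws the identical 'Cannot find module' error if the path (or the file's name/casing on disk) doesn't match:
// prints the absolute path Node resolves, or throws 'Cannot find module'
console.log(require.resolve('./lib/files'));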

Fail to navigate to a proper folder in nodejs

I'm trying to create a function that pulls a remote repository, navigates into the cloned repo, and installs its dependencies, but somehow it fails to install the dependencies inside the cloned repo and installs them outside:
const spinner = clui.Spinner;
const git = require("simple-git/promise");
const path = require("path");
const { install } = require("pkg-install");

async function pullRepo() {
  const pulling = new spinner("Initializing project...");
  const installing = new spinner("Installing dependencies...");
  const rep = await inquirer.DirectoryName();
  const package = path.join(rep.project, "package.json");
  pulling.start();
  await git()
    .silent(true)
    .clone("git@github.com:blacklane/create-blacklane-app.git", rep.project)
    .then(async () => {
      pulling.stop();
      console.log(`working directory:`, process.cwd());
      // check file exists asynchronously
      fs.access(package, fs.constants.F_OK, err => {
        console.log(`${package} ${err ? "does not exist" : "exists"}`);
      });
      installing.start();
      const obj = JSON.parse(fs.readFileSync(package, "utf8"));
      const dependencies = { ...obj.dependencies, ...obj.devDependencies };
      process.chdir(rep.project); // navigate to the directory to install dependencies
      console.log(`new working directory from git:`, process.cwd());
      const { stdout } = await install(dependencies, {
        dev: true,
        prefer: "npm"
      });
      console.log(stdout);
      installing.stop();
    })
    .catch(error => console.error("failed: ", error));
  // progress.finish();
}
The reason this can happen is that install is somehow not able to get to the cloned repo, maybe because the underlying shell running the Node process is still the same.
Following is the working code:
const clui = require('clui');
const spinner = clui.Spinner;
const git = require("simple-git/promise");
const path = require("path");
const fs = require('fs');
const fsPromises = fs.promises;
const { spawn } = require('child_process');

async function pullRepo(repo, dirName) {
  const pulling = new spinner("Initializing project...");
  const installing = new spinner("Installing dependencies...");

  pulling.start();
  await git().silent(true).clone(repo, dirName);
  pulling.stop();

  const package = path.join(dirName, "package.json");
  // check the file exists asynchronously
  await fsPromises.access(package, fs.constants.F_OK);

  installing.start();
  // run npm install inside the cloned directory via the cwd option
  const npmInstall = spawn('npm', ['i'], { cwd: dirName });
  npmInstall.stdout.on('data', (data) => {
    console.log(`stdout: ${data}`);
  });
  npmInstall.stderr.on('data', (data) => {
    console.error(`stderr: ${data}`);
    installing.stop();
  });
  npmInstall.on('close', (data) => {
    installing.stop();
  });
}

const repoToPull = "https://github.com/facebook/create-react-app.git"; // can be any repo
const dirToPullTo = path.join(__dirname, 'gitpull'); // directory you want to pull it to

pullRepo(repoToPull, dirToPullTo).then(res => console.log(res));
This code needs better error handling. Also, you don't need to add .then when you are awaiting a promise.
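One caveat worth noting: since spawn is event-based, pullRepo as written resolves before npm install actually finishes. A minimal sketch of wrapping the child process in a promise so the caller can await completion and handle failures (the helper name is my own):
const { spawn } = require('child_process');

function runNpmInstall(dirName) {
  return new Promise((resolve, reject) => {
    const npmInstall = spawn('npm', ['i'], { cwd: dirName, stdio: 'inherit' });
    npmInstall.on('error', reject); // e.g. the npm binary was not found
    npmInstall.on('close', (code) => {
      // npm exits with a non-zero code on failure
      code === 0 ? resolve() : reject(new Error(`npm install exited with code ${code}`));
    });
  });
}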

Download videos in series one after the other in Node JS using async?

I want to download videos one after the other in a series.
That is the first one should be completely downloaded before the second one starts & the second one should be completely downloaded before the third one starts & so on.
I have the following directory structure -
video-downloader
├── index.js
├── videos.js
├── package.json
package.json
{
"name": "video-downloader",
"version": "1.0.0",
"main": "index.js",
"license": "MIT",
"dependencies": {
"download": "^7.1.0"
},
"scripts": {
"start": "node index"
}
}
videos.js
const videos = [
  {
    url: 'https://video.com/lesson1.mp4',
    name: 'Lesson 1',
  },
  {
    url: 'https://video.com/lesson2.mp4',
    name: 'Lesson 2',
  },
  // ...
  {
    url: 'https://video.com/lesson100.mp4',
    name: 'Lesson 100',
  }
]
module.exports = videos
index.js
const fs = require('fs')
const download = require('download')
const videos = require('./videos')

const OUTPUT_DIR = 'Downloads'

fs.mkdir(OUTPUT_DIR, () => {
  main()
})

const main = () => {
  videos.map((video, i) => {
    console.log(`Downloaded file ${i + 1} of ${videos.length} (${video.name})`)
    download(video.url).pipe(
      fs.createWriteStream(`${OUTPUT_DIR}/${video.name}.mp4`),
    )
  })
}
This downloads the videos chunk by chunk in parallel. All the videos are downloaded at once, but none of them completes before the others start.
How do I download them serially?
I know I should use something like http://caolan.github.io/async/ but it needs a function signature, and I have the videos as an array, so I'm not sure how to go about it.
You can use the await keyword in a standard for loop, and things will process in order, waiting on each download before proceeding.
const fs = require('fs')
const download = require('download')
const videos = require('./videos')
const util = require('util')

const mkdirAsync = util.promisify(fs.mkdir)
const OUTPUT_DIR = 'Downloads'

const main = async () => {
  // recursive: true also prevents an EEXIST error if the directory already exists
  await mkdirAsync(OUTPUT_DIR, { recursive: true })
  for (let i = 0; i < videos.length; i++) {
    const video = videos[i]
    const data = await download(video.url) // resolves with the file contents as a Buffer
    fs.writeFileSync(`${OUTPUT_DIR}/${video.name}.mp4`, data)
    console.log(`Downloaded file ${i + 1} of ${videos.length} (${video.name})`)
  }
}

main()
You can use .reduce with promises to resolve sequentially, as follows:
const fs = require('fs')
const sh = require('shelljs')
const download = require('download')
const videos = require('./videos')

const OUTPUT_DIR = 'Downloads'
sh.mkdir('-p', OUTPUT_DIR)

const allDownloads = videos.reduce((acc, item) => {
  return acc.then(() => {
    return new Promise((resolve) => {
      // Here you are using download() as a Duplex Stream, not a promise;
      // a write stream emits 'finish' (not 'end'), so you must wait for
      // that event before proceeding further
      let stream = download(item.url)
        .pipe(fs.createWriteStream(`${OUTPUT_DIR}/${item.name}.mp4`));
      stream.on('finish', () => {
        console.log(`stream done ${item.name}`);
        resolve(item);
      })
    })
  });
}, Promise.resolve());

// 'allDownloads' is now a promise
allDownloads.then(() => {
  // reduce chains the promises, so reaching the last one
  // means all the ones before it have resolved
  console.log('all files were downloaded');
})
Try async/await for this: first download, then write synchronously. Note that Array.prototype.forEach does not wait for an async callback, so a plain for...of loop is used here to keep the downloads sequential.
const fs = require('fs');
const sh = require('shelljs');
const download = require('download');
const videos = require('./videos');

const OUTPUT_DIR = 'Downloads';
sh.mkdir('-p', OUTPUT_DIR);

(async () => {
  let i = 0;
  for (const video of videos) {
    console.log(`Downloading ${video.name}. File ${++i}/${videos.length}`);
    const data = await download(video.url); // wait for the full download
    fs.writeFileSync(`${OUTPUT_DIR}/${video.name}.mp4`, data); // then write synchronously
  }
})();

Does having the same `require` in multiple files increase runtime

So I'm planning to separate my functions into separate files and then import them into a single index.js, which then becomes the main exporter. I'm wondering if having something like var bcrypt = require('bcrypt') in several of my files would be slower than having it in just one file.
Here's how I'm planning to group and export in index.js
const fs = require('fs');
const path = require('path')

const modules = {}
const files = fs.readdirSync(__dirname)

files.forEach(file => {
  if (file === 'index.js') return
  let temp = require(path.join(__dirname, file))
  for (let key in temp) {
    modules[key] = temp[key]
  }
});

module.exports = modules
As an example of what I mean:
file1.js
var bcrypt = require("bcrypt");
module.exports.file1test = "hi"
file2.js
var bcrypt = require("bcrypt");
module.exports.file2test = "bye"
No, it does not. Whenever a module is required for the first time, the module's code runs, assigns something to its exports, and those exports are returned. Further requires of that module simply reference those exports again. The logic is similar to this:
const importModule = (() => {
  const exports = {};
  return (name) => {
    if (!exports[name]) exports[name] = runModule(name);
    return exports[name];
  };
})();
So, multiple imports of the same module is no more expensive than referencing an object multiple times.
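A quick way to see this in action (a minimal sketch; counter.js is a hypothetical module):
// counter.js
console.log('module code runs');
module.exports = { count: 0 };

// main.js
const a = require('./counter'); // logs "module code runs" once
const b = require('./counter'); // cache hit: nothing is logged
console.log(a === b); // true -- both names reference the same exports object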

Categories

Resources