Node.js glob doesn't show files/folders in a disk drive's root on Windows - javascript

I want to get a folder hierarchy tree from the root of a disk drive down to the last subfolder.
Disk Drive
-First Folder
---Subfolder One
---Subfolder Two
-Second Folder
---Subfolder One
-Nth Folder
---Subfolder One
---Subfolder N
I will input which folder I want to inspect (this part works with glob):
let mg = new Glob('C:/Program Files/*', { mark: true }, function (err, matches) {
  if (err) {
    console.log(err);
  } else {
    console.log('folders', matches);
  }
});
Glob will not show which folders are in the root of a disk drive:
let mg = new Glob('C:/*', { mark: true }, function (err, matches) {
  if (err) {
    console.log(err);
  } else {
    console.log('folders', matches);
  }
});
Res:
files []
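As an aside (not part of the original post), the top-level folders of a drive can also be listed without glob by using Node's built-in fs module — a minimal sketch:

const fs = require('fs');

// List the entries at the drive root and keep only the directories.
const entries = fs.readdirSync('C:/', { withFileTypes: true });
const folders = entries.filter((entry) => entry.isDirectory()).map((entry) => entry.name);
console.log('folders', folders);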

I have found a solution. I used child_process to run cmd commands and list the subfolders of the local drives.
I think child_process has a promise-based API; when I find a cleaner version of the code, I will update this answer (a possible sketch follows the code below).
const child = require('child_process');
let pathLetter = '';
let allFoldersList = [];
let hiddenFolderList = [];

function asyncGetAllFolders() {
  return new Promise((resolve, reject) => {
    child.exec(
      `cd /d ${pathLetter}:/ && dir ${pathLetter}: /b /ad`,
      (error, stdout) => {
        if (error) {
          console.log(error);
          reject(error);
        } else {
          allFoldersList = stdout.split('\r\n').map((value) => value.trim());
          resolve('done!');
        }
      }
    );
  });
}
function asyncGetHiddenFolders() {
  return new Promise((resolve, reject) => {
    child.exec(
      `cd /d ${pathLetter}:/ && dir ${pathLetter}: /b /ah`,
      (error, stdout) => {
        if (error) {
          reject(error);
        } else {
          hiddenFolderList = stdout
            .split('\r\n')
            .map((value) => value.trim());
          resolve('done!');
        }
      }
    );
  });
}
async function extractUniqueFolders() {
  await asyncGetAllFolders();
  await asyncGetHiddenFolders();
  hiddenFolderList.forEach((item) => {
    if (allFoldersList.indexOf(item) !== -1) {
      allFoldersList.splice(allFoldersList.indexOf(item), 1);
    }
  });
}
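A possible cleaner version of the helpers above (a sketch only, not tested against the original setup), using util.promisify around child_process.exec and the same dir command:

const util = require('util');
const { exec } = require('child_process');
const execAsync = util.promisify(exec);

// Hypothetical helper: run `dir` in bare format with an attribute filter
// ('/ad' = all directories, '/adh' = hidden directories only).
// Note that dir exits with an error when nothing matches the filter.
async function listFolders(driveLetter, attributeSwitch) {
  const { stdout } = await execAsync(`dir ${driveLetter}:\\ /b ${attributeSwitch}`);
  return stdout.split('\r\n').map((value) => value.trim()).filter(Boolean);
}

async function getVisibleFolders(driveLetter) {
  const allFolders = await listFolders(driveLetter, '/ad');
  const hiddenFolders = await listFolders(driveLetter, '/adh');
  return allFolders.filter((name) => !hiddenFolders.includes(name));
}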

Related

VS Code debugger not working as expected while debugging Nodejs app

I am debugging a Node app that has a method deleteProduct. Inside this method are two static method calls, Cart.deleteProductFromCart(productID, product.price) and Product.deleteProductWithId(productID), which read files from the filesystem and write back to them.
Using the VS Code built-in debugger, when I set breakpoints to check whether the callbacks get called properly, only the callback fired from the first method gets registered, whereas the callback from the second method is never called. I also experimented by changing the order of the two methods; the result is that only the method called first has its callback registered, while the callback of the second method is never called. This happens only during a debug session. The program runs as expected with yarn start, though.
So it's really hard to debug the whole process.
I am not sure whether this is an issue with the VS Code debugger or something I am doing wrong that prevents the callback from being registered during the debugging session.
The URL to the repository is attached here.
const deleteProduct = async (req, res, next) => {
  const productID = req.params.productID;
  const product = await Product.fetchProductWithId(productID);
  Cart.deleteProductFromCart(productID, product.price);
  Product.deleteProductWithId(productID);
  res.redirect(301, '/products');
};
Cart Model
const fs = require('fs');
const path = require('path');
const rootDir = require('../utils/path');
const filePath = path.join(rootDir, 'data', 'cart.json');

class Cart {
  static addProduct(productID, productPrice) {
    fs.readFile(filePath, (err, data) => {
      let cart = { items: [], totalPrice: 0 };
      if (!err && data?.length > 0) {
        cart = JSON.parse(data);
      }
      const existingProduct = cart.items.find((item) => item.id === productID);
      if (existingProduct) {
        existingProduct.quantity++;
      } else {
        const product = { id: productID, quantity: 1 };
        cart.items.push(product);
      }
      cart.totalPrice = cart.totalPrice + +productPrice;
      console.log(existingProduct === cart.items[0]);
      fs.writeFileSync(filePath, JSON.stringify(cart));
    });
  }

  // Delete logic
  static deleteProductFromCart(productID, productPrice) {
    fs.readFile(filePath, (err, data) => {
      // Attached breakpoints within this callback
      if (err) {
        return;
      }
      const cart = JSON.parse(data);
      const product = cart.items.find((item) => item.id === productID);
      if (product) {
        cart.totalPrice = cart.totalPrice - product.quantity * +productPrice;
        cart.items = cart.items.filter((item) => item.id !== productID);
      }
      fs.writeFileSync(filePath, JSON.stringify(cart));
    });
  }
}

module.exports = { Cart };
Product Model
const fs = require('fs');
const path = require('path');
const { v4: uuidv4 } = require('uuid');
const rootDir = require('../utils/path');
const filePath = path.join(rootDir, 'data', 'products.json');

class Product {
  constructor(id, title, price, imageURL, description) {
    this.id = id;
    this.title = title;
    this.price = price;
    this.imageURL = imageURL;
    this.description = description;
  }

  save() {
    fs.readFile(filePath, (err, data) => {
      let products = [];
      if (!err && data.length > 0) {
        products = JSON.parse(data);
      }
      if (this.id) {
        const productIndex = products.findIndex((item) => item.id === this.id);
        products[productIndex] = this;
      } else {
        this.id = uuidv4();
        products.push(this);
      }
      fs.writeFileSync(filePath, JSON.stringify(products));
    });
  }

  static deleteProductWithId(id) {
    fs.readFile(filePath, (err, data) => {
      // Attached breakpoint within this callback
      let products = [];
      if (!err && data.length > 0) {
        products = JSON.parse(data);
      }
      const productIndex = products.findIndex((item) => item.id === id);
      products.splice(productIndex, 1);
      fs.writeFileSync(filePath, JSON.stringify(products));
    });
  }

  static async fetchProductWithId(id) {
    return new Promise((resolve, reject) => {
      try {
        fs.readFile(filePath, (err, data) => {
          if (data) {
            const products = JSON.parse(data);
            const product = products.find((item) => item.id === id);
            return resolve(product);
          }
          resolve({
            id: null,
            title: 'null',
            price: 0,
            description: null,
            imageURL: null,
          });
        });
      } catch (err) {
        reject(err);
      }
    });
  }

  static async fetchAll() {
    return new Promise((resolve, reject) => {
      try {
        fs.readFile(filePath, (err, data) => {
          if (data.length > 0) {
            return resolve(JSON.parse(data));
          }
          resolve([]);
        });
      } catch (err) {
        reject(err);
      }
    });
  }
}

module.exports = { Product };
launch.json file
{
  // Use IntelliSense to learn about possible attributes.
  // Hover to view descriptions of existing attributes.
  // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
  "version": "0.2.0",
  "configurations": [
    {
      "type": "pwa-node",
      "request": "launch",
      "name": "Launch Program",
      "skipFiles": ["<node_internals>/**"],
      "program": "${workspaceFolder}/app.js",
      // Auto Restart Debugger on file Change
      // nodemon should be installed globally
      // Also integrated console should be used so that they both listen to same port or something like that
      "restart": true,
      "runtimeExecutable": "nodemon",
      "console": "integratedTerminal"
    }
  ]
}
To Reproduce
Steps to reproduce the behavior:
Attach breakpoints inside the readFile callback, as shown in the snippets, in both static methods
Run the debugger
Observe the call stack, or use Step Over / Step Into to follow the program flow
Observe that the callback from the second method that reads a file from the filesystem is never reached for the rest of the program's execution
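As an aside (not part of the original question), one way to make this flow easier to follow in the debugger is to wrap the two file operations in promises and await them before redirecting. This is a sketch only, reusing filePath from the Cart model above; Product.deleteProductWithId would be converted the same way:

const { promises: fsp } = require('fs');

class Cart {
  // Promise-based variant of deleteProductFromCart (hypothetical).
  static async deleteProductFromCart(productID, productPrice) {
    const data = await fsp.readFile(filePath).catch(() => null);
    if (!data) return;
    const cart = JSON.parse(data);
    const product = cart.items.find((item) => item.id === productID);
    if (product) {
      cart.totalPrice -= product.quantity * +productPrice;
      cart.items = cart.items.filter((item) => item.id !== productID);
    }
    await fsp.writeFile(filePath, JSON.stringify(cart));
  }
}

// The route handler can then await both operations before responding:
const deleteProduct = async (req, res, next) => {
  const productID = req.params.productID;
  const product = await Product.fetchProductWithId(productID);
  await Cart.deleteProductFromCart(productID, product.price);
  await Product.deleteProductWithId(productID);
  res.redirect(301, '/products');
};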

How to read multiple JSON files using fs and bulk requests

I'm using the Elasticsearch search engine with my React app. I was reading one file on the backend, as you can see in the code, and it works perfectly, but now I want to read three different JSON files into three different indexes using the "fs" package and bulk requests. Can you please help me?
The code:
// Start reading the json file
fs.readFile("DocRes.json", { encoding: "utf-8" }, function (err, data) {
  if (err) {
    throw err;
  }
  // Build up a giant bulk request for elasticsearch.
  bulk_request = data.split("\n").reduce(function (bulk_request, line) {
    var obj, ncar;
    try {
      obj = JSON.parse(line);
    } catch (e) {
      console.log("Done reading 1");
      return bulk_request;
    }
    // Rework the data slightly
    ncar = {
      id: obj.id,
      name: obj.name,
      summary: obj.summary,
      image: obj.image,
      approvetool: obj.approvetool,
      num: obj.num,
      date: obj.date,
    };
    bulk_request.push({
      index: { _index: "ncar_index", _type: "ncar", _id: ncar.id },
    });
    bulk_request.push(ncar);
    return bulk_request;
  }, []);
  // A little voodoo to simulate synchronous insert
  var busy = false;
  var callback = function (err, resp) {
    if (err) {
      console.log(err);
    }
    busy = false;
  };
  // Recursively whittle away at bulk_request, 1000 at a time.
  var perhaps_insert = function () {
    if (!busy) {
      busy = true;
      client.bulk(
        {
          body: bulk_request.slice(0, 1000),
        },
        callback
      );
      bulk_request = bulk_request.slice(1000);
      console.log(bulk_request.length);
    }
    if (bulk_request.length > 0) {
      setTimeout(perhaps_insert, 100);
    } else {
      console.log("Inserted all records.");
    }
  };
  perhaps_insert();
});
You can create a promise for each file read and feed the results to the Elasticsearch bulk request.
const path = require('path');
const fsPromises = require('fs').promises,
  files = ['filename1', 'filename2', 'filename3'],
  results = [];

const fetchFile = async (filename) => {
  const filePath = path.join(__dirname, filename); // make sure the path is correct
  return fsPromises.readFile(filePath);
};

files.forEach((fileName) => results.push(fetchFile(fileName)));

Promise.all(results)
  .then((data) => console.log(data))
  .catch((e) => console.log(e));
Once you get the data from all the promises, pass it to Elasticsearch.
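A sketch of how that could look for three files going to three indexes — the file and index names below are hypothetical placeholders, fetchFile is the helper above, and client is assumed to be the already configured Elasticsearch client from the question:

// Hypothetical mapping of newline-delimited JSON files to target indexes.
const sources = [
  { file: 'DocRes.json', index: 'ncar_index' },
  { file: 'file2.json', index: 'index2' },
  { file: 'file3.json', index: 'index3' },
];

Promise.all(sources.map((s) => fetchFile(s.file).then((data) => ({ ...s, data }))))
  .then((loaded) => {
    // Build one bulk body across all files, line by line.
    const body = [];
    loaded.forEach(({ index, data }) => {
      data.toString().split('\n').forEach((line) => {
        if (!line.trim()) return;
        const obj = JSON.parse(line);
        body.push({ index: { _index: index, _id: obj.id } });
        body.push(obj);
      });
    });
    return client.bulk({ body });
  })
  .then(() => console.log('Inserted all records.'))
  .catch((e) => console.log(e));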

What am I missing here to get data out of this spawned Node.js child process?

I'm trying to use a spawned command-line lzip process to expand an lzipped data stream, as I haven't found any good native JavaScript tools to do the job.
I can get this to work using files and file descriptors, but it seems stupid to have to write out, and read back in, a bunch of temporary scratch files. I want to do all of the work I can in memory.
So here's the code I'm trying to use:
import { requestBinary } from 'by-request';
import { spawn } from 'child_process';
import { min } from '#tubular/math';

export async function tarLzToZip(url: string): Promise<void> {
  const lzData = await requestBinary(url, { headers: { 'User-Agent': 'curl/7.64.1' } });
  const lzipProc = spawn('lzip', ['-d'], { stdio: ['pipe', 'pipe', process.stderr] });
  let tarContent = Buffer.alloc(0);

  lzipProc.stdout.on('data', data => {
    tarContent = Buffer.concat([tarContent, data], tarContent.length + data.length);
  });

  for (let offset = 0; offset < lzData.length; offset += 4096) {
    await new Promise<void>((resolve, reject) => {
      lzipProc.stdin.write(lzData.slice(offset, min(offset + 4096, lzData.length)), err => {
        if (err)
          reject(err);
        else
          resolve();
      });
    });
  }

  await new Promise<void>((resolve, reject) => {
    lzipProc.stdin.end((err: any) => {
      if (err)
        reject(err);
      else
        resolve();
    });
  });

  console.log('data length:', tarContent.length);
}
When I step through with a debugger, everything seems to be going well with sending data into lzipProc.stdin. (I've tried both sending chunks like this and sending all of the data in one go.) The lzipProc.stdout 'data' handler, however, never gets called. When I get to the end, tarContent is empty.
What's missing here? Do I need a different stdio config? Are there different stream objects I should be using? Do I need more goats to sacrifice under the light of a full moon?
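As an aside (not part of the original question), one thing the code above never does is wait for the child process to finish before reading tarContent. A minimal sketch of waiting for the decompressor to exit, assuming the same lzipProc setup and placed after the stdin.end call:

// Wait for lzip to flush its output and exit before using tarContent.
await new Promise((resolve, reject) => {
  lzipProc.on('error', reject);
  lzipProc.on('close', (code) => {
    if (code === 0) resolve();
    else reject(new Error(`lzip exited with code ${code}`));
  });
});
console.log('data length:', tarContent.length);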
UPDATE
My solution based on Matt's excellent answer posted below, with all of the particulars for my use case:
import archiver from 'archiver';
import fs, { ReadStream } from 'fs';
import fsp from 'fs/promises';
import needle from 'needle';
import path from 'path';
import { spawn } from 'child_process';
import tar from 'tar-stream';

const baseUrl = 'https://data.iana.org/time-zones/releases/';

export async function codeAndDataToZip(version: string): Promise<ReadStream> {
  return compressedTarToZip(`${baseUrl}tzdb-${version}.tar.lz`);
}

export async function codeToZip(version: string): Promise<ReadStream> {
  return compressedTarToZip(`${baseUrl}tzcode${version}.tar.gz`);
}

export async function dataToZip(version: string): Promise<ReadStream> {
  return compressedTarToZip(`${baseUrl}tzdata${version}.tar.gz`);
}

async function compressedTarToZip(url: string): Promise<ReadStream> {
  const fileName = /([-a-z0-9]+)\.tar\.[lg]z$/i.exec(url)[1] + '.zip';
  const filePath = path.join(process.env.TZE_ZIP_DIR || path.join(__dirname, 'tz-zip-cache'), fileName);

  if (await fsp.stat(filePath).catch(() => false))
    return fs.createReadStream(filePath);

  const [command, args] = url.endsWith('.lz') ? ['lzip', ['-d']] : ['gzip', ['-dc']];
  const originalArchive = needle.get(url, { headers: { 'User-Agent': 'curl/7.64.1' } });
  const tarExtract = tar.extract({ allowUnknownFormat: true });
  const zipPack = archiver('zip');
  const writeFile = fs.createWriteStream(filePath);
  const commandProc = spawn(command, args);

  commandProc.stderr.on('data', msg => { throw new Error(`${command} error: ${msg}`); });
  commandProc.stderr.on('error', err => { throw err; });

  originalArchive.pipe(commandProc.stdin);
  commandProc.stdout.pipe(tarExtract);

  tarExtract.on('entry', (header, stream, next) => {
    zipPack.append(stream, { name: header.name, date: header.mtime });
    stream.on('end', next);
  });

  tarExtract.on('finish', () => zipPack.finalize());
  zipPack.pipe(writeFile);

  return new Promise<ReadStream>((resolve, reject) => {
    const rejectWithError = (err: any): void =>
      reject(err instanceof Error ? err : new Error(err.message || err.toString()));

    writeFile.on('error', rejectWithError);
    writeFile.on('finish', () => resolve(fs.createReadStream(filePath)));
    tarExtract.on('error', err => {
      // tar-stream has a problem with the format of a few of the tar files
      // dealt with here, which nevertheless are valid archives.
      if (/unexpected end of data|invalid tar header/i.test(err.message))
        console.error('Archive %s: %s', url, err.message);
      else
        reject(err);
    });
    zipPack.on('error', rejectWithError);
    zipPack.on('warning', rejectWithError);
    commandProc.on('error', rejectWithError);
    commandProc.on('exit', err => err && reject(new Error(`${command} error: ${err}`)));
    originalArchive.on('error', rejectWithError);
  });
}
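Hypothetical usage of the functions above (inside an async function; the version string and output file name are placeholders only):

// Example only: stream the generated zip to a file on disk.
const zipStream = await codeAndDataToZip('2021a');
zipStream.pipe(fs.createWriteStream('tzdb-2021a.zip'));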
I would leave the streaming to Node or to packages, unless you have specific processing that needs to be done. Just wrap the whole stream setup in a promise.
If you also stream the request/response, it can be piped into the decompressor. Then stdout from the decompressor can be piped to the archive stream handlers.
import fs from 'fs'
import { spawn } from 'child_process'
import needle from 'needle'
import tar from 'tar-stream'
import archiver from 'archiver'

export function tarLzToZip(url) {
  return new Promise((resolve, reject) => {
    // Setup streams
    const res = needle.get(url)
    const lzipProc = spawn('lzip', ['-dc'], { stdio: ['pipe', 'pipe', process.stderr] })
    const tarExtract = tar.extract()
    const zipPack = archiver('zip')
    const writeFile = fs.createWriteStream('tardir.zip')

    // Pipelines and processing
    res.pipe(lzipProc.stdin)
    lzipProc.stdout.pipe(tarExtract)

    // tar -> zip (simple file name)
    tarExtract.on('entry', function (header, stream, next) {
      console.log('entry', header)
      zipPack.append(stream, { name: header.name })
      stream.on('end', () => next())
    })
    tarExtract.on('finish', function () {
      zipPack.finalize()
    })
    zipPack.pipe(writeFile)

    // Handle the things
    writeFile.on('error', reject)
    writeFile.on('close', () => console.log('write close'))
    writeFile.on('finish', () => resolve(true))
    tarExtract.on('error', reject)
    zipPack.on('error', reject)
    zipPack.on('warning', reject)
    lzipProc.on('error', reject)
    lzipProc.on('exit', code => { if (code !== 0) reject(new Error(`lzip ${code}`)) })
    res.on('error', reject)
    res.on('done', () => console.log('request done', res.request.statusCode))
  })
}
You might want to be a bit more verbose about logging errors and stderr, as the single promise reject can easily hide what actually happened across the multiple streams.
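For example, a sketch (not from the original answer) of tagging each stream's errors before rejecting, so the failing stage shows up in the logs — this goes inside the same new Promise((resolve, reject) => { ... }) block as above:

// Hypothetical helper: log which stream failed, then reject once.
const rejectWith = (label) => (err) => {
  console.error(`[${label}]`, err)
  reject(err)
}

writeFile.on('error', rejectWith('zip file write'))
tarExtract.on('error', rejectWith('tar extract'))
zipPack.on('error', rejectWith('zip pack'))
lzipProc.on('error', rejectWith('lzip process'))
res.on('error', rejectWith('http request'))

// If lzip's stderr is spawned as 'pipe' instead of being shared with
// process.stderr, its messages can be logged as well:
// lzipProc.stderr.on('data', chunk => console.error('[lzip stderr]', chunk.toString()))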

nodeJS await to move files then compress images

I wrote this code to check image file sizes in a folder; if a file is bigger than 30000 bytes, it is moved to a temporary folder called 'before-compress'. The compressImages() function iterates over the 'before-compress' folder and returns the compressed images to the original folder. My question is: how can I await the process of moving the oversized files and then call the compressImages() function? As you can see in the code, I currently handle this with a setTimeout once the forEach reaches the last item. Thanks in advance.
const fs = require('fs');
const path = require('path');
const imagemin = require("imagemin");
const imageminMozjpeg = require("imagemin-mozjpeg");
const imageminPngquant = require("imagemin-pngquant");
const imageminGifsicle = require('imagemin-gifsicle');

const directoryPath = path.join(__dirname, 'uploads');

fs.readdir(`${directoryPath}/products`, function (err, files) {
  if (err) {
    return console.log('Unable to scan directory: ' + err);
  }
  files.forEach(function (file) {
    console.log(`File: ${file} - Size: ${getFilesizeInBytes(file)} bytes`);
    if (getFilesizeInBytes(file) > 30000) {
      moveFile(file)
    }
    if (files.indexOf(file) == files.length - 1) {
      //console.log('last index');
      setTimeout(() => compressImages(), 4000);
    }
  });
});

function getFilesizeInBytes(fileName) {
  var stats = fs.statSync(`${directoryPath}/products/${fileName}`);
  var fileSizeInBytes = stats.size;
  return fileSizeInBytes;
}

function moveFile(file) {
  var oldPath = `${directoryPath}/products/${file}`;
  var newPath = `${directoryPath}/before-compress/${file}`;
  fs.rename(oldPath, newPath, function (err) {
    if (err) throw err;
    console.log(`File ${file} moved!`);
  })
}

function compressImages() {
  fs.readdir(`${directoryPath}/before-compress`, function (err, files) {
    if (err) {
      return console.log('Unable to scan directory: ' + err);
    }
    files.forEach(function (file) {
      console.log(`File to compress: ${file}`);
      let fileExt = file.split('.')[1];
      let compressPlugin = fileExt == 'jpg' || fileExt == 'jpeg' ? imageminMozjpeg({ quality: 40 }) :
        fileExt == 'png' ? imageminPngquant({ quality: [0.5, 0.6] }) :
          fileExt == 'gif' ? imageminGifsicle() : 0;
      (async () => {
        const files = await imagemin([`./uploads/before-compress/${file}`], {
          destination: './uploads/products',
          plugins: [compressPlugin]
        });
        fs.unlink(`${directoryPath}/before-compress/${file}`, err => err ? console.log(err) : 0);
      })();
    });
  });
}
This kind of code would become much more readable if you converted all the functions from using callbacks to using async/await (a sketch of that approach is at the end of this answer).
If you want to keep using callbacks, however, there are two options:
Make moveFile() use fs.renameSync() instead of fs.rename(). Normally I would advise against that, but since you are already using fs.statSync(), and I assume you run this as a script with nothing else in parallel, maybe that would be an acceptable solution.
Or make moveFile() accept a callback:
function moveFile(file, callback) {
  // [...]
  fs.rename(oldPath, newPath, callback)
}
Now you can use this callback to detect when the file has been moved, for example like this:
// [...]
var done = 0;
var error = false;

function checkDone() {
  if (!error && done == files.length) {
    compressImages();
  }
}

files.forEach(function (file) {
  if (error) return;
  if (getFilesizeInBytes(file) > 30000) {
    moveFile(file, function (err) {
      if (err) { console.log(err); error = true; }
      done++;
      checkDone();
    });
  } else {
    done++;
    checkDone();
  }
});
});
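For completeness, a sketch of the async/await version mentioned at the top of this answer, reusing directoryPath and compressImages() from the question:

const fsp = require('fs').promises;

async function moveLargeFilesThenCompress() {
  const files = await fsp.readdir(`${directoryPath}/products`);
  await Promise.all(files.map(async (file) => {
    const { size } = await fsp.stat(`${directoryPath}/products/${file}`);
    if (size > 30000) {
      await fsp.rename(
        `${directoryPath}/products/${file}`,
        `${directoryPath}/before-compress/${file}`
      );
      console.log(`File ${file} moved!`);
    }
  }));
  // Every rename has completed at this point, so it is safe to compress.
  compressImages();
}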

Using browserify on file with require('mongodb') causes TypeError: dns.resolveSrv is not a function

I am working on a simple Chrome extension that will allow me to write, get, and delete URLs to and from a DB.
I decided to use MongoDB and wrote the functions in a JS file called popup.js, and they work OK from the IDE.
Because it will run in the browser, I used Browserify for the require('mongodb').
I used browserify popup.js -o bundle.js, and when running the bundle.js file I get TypeError: dns.resolveSrv is not a function (when running from both the IDE and the browser).
It happens when it tries connecting to the client.
What can I do to avoid this issue?
Thanks in advance.
popup.js:
const { MongoClient } = require('mongodb');

async function checkURL(client, URL) {
  const result = await client.db("url-uploader").collection("urls").findOne({ URL: URL })
  return !(result === null)
}

async function uploadURL() {
  const uri = {myURI};
  const client = new MongoClient(uri, { useUnifiedTopology: true });
  try {
    await client.connect();
    let newURL = 'www.b.com' //tab.url
    if (!await checkURL(client, newURL)) {
      const result = await client.db("url-uploader").collection("urls").insertOne({ URL: newURL })
    }
  } catch (e) {
    console.error(e);
  } finally {
    await client.close();
  }
}

async function removeURL() {
  const uri = {myURI};
  const client = new MongoClient(uri, { useUnifiedTopology: true });
  try {
    await client.connect();
    let oldURL = "www.blabla.com" //tab.url
    const result = await client.db("url-uploader").collection("urls").deleteOne({ URL: oldURL })
  } catch (e) {
    console.error(e);
  } finally {
    await client.close();
  }
}
And this is the part in bundle.js that throws the error:
  _poll() {
    const generation = this.generation;
    dns.resolveSrv(this.srvAddress, (err, srvRecords) => {
      if (generation !== this.generation) {
        return;
      }
      if (err) {
        this.failure('DNS error', err);
        return;
      }
      const finalAddresses = [];
      srvRecords.forEach(record => {
        if (matchesParentDomain(record.name, this.srvHost)) {
          finalAddresses.push(record);
        } else {
          this.parentDomainMismatch(record);
        }
      });
      if (!finalAddresses.length) {
        this.failure('No valid addresses found at host');
        return;
      }
      this.success(finalAddresses);
    });
  }
}
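For context (this is not part of the original question): dns.resolveSrv comes from Node's dns module, which Browserify cannot provide in a browser, so the MongoDB driver cannot run inside a Chrome extension page. A common workaround is to keep the driver on a small server and have the extension call it over HTTP. A hypothetical sketch, assuming an Express backend and a MONGO_URI environment variable:

// server.js (hypothetical) - runs under Node, where the mongodb driver works.
const express = require('express');
const { MongoClient } = require('mongodb');

const app = express();
app.use(express.json());

app.post('/urls', async (req, res) => {
  const client = new MongoClient(process.env.MONGO_URI, { useUnifiedTopology: true });
  try {
    await client.connect();
    await client.db('url-uploader').collection('urls').insertOne({ URL: req.body.url });
    res.sendStatus(201);
  } catch (e) {
    console.error(e);
    res.sendStatus(500);
  } finally {
    await client.close();
  }
});

app.listen(3000);

// popup.js would then call the endpoint with fetch instead of bundling mongodb:
// fetch('http://localhost:3000/urls', {
//   method: 'POST',
//   headers: { 'Content-Type': 'application/json' },
//   body: JSON.stringify({ url: newURL }),
// });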
