I am creating a program in which users can preserve search terms from session to session. If a search term is preserved by the user, then the data that corresponds to that search term is also preserved. At the beginning of each session, the script discards any old data that should be preserved if it no longer corresponds to an active or connected drive.
However, I am new to working with JSON and such objects in general, so I am wondering if there is a better way to accomplish this than the way below? Specifically, is there an approach that is more efficient or even prettier than the first for of loop and the heavily nested if,for,for,if block of code?
/**
 * Rebuilds data.json at session start so it only contains entries for the
 * drives currently connected, carrying over data for preserved tags only.
 * Returns true on success, false if the write failed with EEXIST.
 */
async function guaranteeData(drives) {
  const config = await readJson('./config.json')
  const data = await readJson('./data.json')
  // Seed an entry for every connected drive; drives that disappeared since
  // the last session are implicitly discarded because they are never seeded.
  const json = {}
  for (const [drive] of drives) {
    json[drive] = {}
    // Carry over only the data for tags the user chose to preserve.
    for (const fileType of config.fileTypes) {
      // BUG FIX: a newly connected drive has no entry in data.json yet, so
      // guard before calling hasOwnProperty on a possibly-undefined value.
      if (data[drive] && Object.prototype.hasOwnProperty.call(data[drive], fileType)) {
        json[drive][fileType] = data[drive][fileType]
      }
    }
  }
  // BUG FIX: the original awaited the write but discarded the resolved
  // boolean; use plain try/catch so the result is actually returned.
  try {
    await fsp.writeFile('./data.json', JSON.stringify(json, null, 2), {
      flag: 'w',
      encoding: 'utf8'
    })
    return true
  } catch (error) {
    if (error.code === 'EEXIST') return false
    throw error
  }
}
I would change two things in this
Remove the if (config.fileTypes.length > 0) check, as it is already handled by the for loop that follows: when the array is empty, the loop body simply never executes.
Remove the separate for loop that initializes json[drive] to an empty object; instead, perform that assignment at the top of the outer loop that contains the nested for loop. This eliminates the extra pass over the drives.
It will look something like this:
/**
 * Rebuilds data.json so it only contains entries for currently connected
 * drives, preserving data only for the tags listed in config.fileTypes.
 * Resolves true on success, false if the write failed with EEXIST.
 */
async function guaranteeData(drives) {
  const config = await readJson("./config.json");
  const data = await readJson("./data.json");
  const json = {};
  // loop thru all current system drives
  for (const [drive] of drives) {
    json[drive] = {};
    // loop thru all preserved tags
    for (const fileType of config.fileTypes) {
      // if current drive has current tag data (guard against drives that
      // have no entry in data.json yet)
      if (data[drive] && data[drive].hasOwnProperty(fileType)) {
        // preserve this data: data[drive][fileType]
        json[drive][fileType] = data[drive][fileType];
      }
    }
  }
  ////////////////////////////////////////////
  // BUG FIX: `json` was declared with const, so reassigning it with the
  // stringified result threw a TypeError; serialize into a new binding.
  const serialized = JSON.stringify(json, null, 2);
  return fsp
    .writeFile("./data.json", serialized, {
      flag: "w",
      encoding: "utf8"
    })
    .then(() => true)
    .catch(error => {
      if (error.code === "EEXIST") {
        return false;
      }
      throw error;
    });
}
Related
I created a function to fetch mp3 files inside the user's storage drives; it takes an array of directories to be searched. After receiving the full list of music files in an array, I used that as an argument to another function that fetches their metadata (using music-metadata). So far my code works perfectly: when there are no files it returns an empty array, otherwise an array of objects containing their metadata. Here is my code:
const find = require('find') ;
const mm = require('music-metadata') ;
// Root folders that will be searched recursively for .mp3 files.
const directories = ["C:\\Users\\{UserNamehere}\\Music\\"] // you can add more directories
/**
 * Parses all files in parallel and resolves to an array of their common
 * metadata tags. Rejects wholesale if any single file fails to parse.
 */
async function parseMetadata(files) {
  // BUG FIX: each callback keeps its own locals; the original shared one
  // outer `metadata` binding across concurrent callbacks and leaked
  // `data`/`m` as implicit globals.
  const parses = files.map(async (file) => {
    const metadata = await mm.parseFile(file, { duration: true });
    return metadata.common;
  });
  return Promise.all(parses);
}
// Collects every .mp3 under the given directories and hands the flat list
// to parseMetadata; resolves to [] immediately when nothing is found.
function fetchNewSongs(dirs) {
  const musicfiles = [];
  for (const path of dirs) {
    musicfiles.push(...find.fileSync(/\.mp3/i, path));
  }
  return musicfiles.length === 0
    ? Promise.resolve([])
    : parseMetadata(musicfiles);
}
// Kick off the scan of all configured directories and print the metadata.
fetchNewSongs(directories).then( value => {
console.log(value)
})
The problem arises when any music file is corrupted or its metadata cannot be fetched by music-metadata, causing the metadata-parsing flow to stop. I renamed a .txt file to .mp3 to reproduce the situation of a corrupted file. What I want is: whenever an error occurs while parsing the metadata of a particular music file, just return an empty array and continue searching through the other files. After the process is complete, remove the elements of the array that are empty objects.
I think you are missing a try/catch in your map function:
Mocked version:
// Mocked version of music-metadata: every parse rejects, which exercises
// the error path deterministically.
const mm = {
  parseFile(file) {
    return Promise.reject("Bad format");
  },
};

/**
 * Parses each file independently; a corrupt file yields [] in its slot
 * instead of rejecting the whole batch.
 */
async function parseMetadata(files) {
  // BUG FIX: locals are scoped per callback; the original reused a single
  // `metadata` binding for both the promise array and each parsed result,
  // and leaked `m` as an implicit global.
  const parses = files.map(async (file) => {
    try {
      const metadata = await mm.parseFile(file, { duration: true });
      return metadata.common;
    } catch (error) {
      return [];
    }
  });
  return Promise.all(parses);
}
// Thin wrapper: hands the directory list straight to parseMetadata.
async function fetchNewSongs(dirs = ["foo", "bar", "baz"]) {
  const metadata = await parseMetadata(dirs);
  return metadata;
}

// Demo invocation; with the all-failing mock above this logs [ [], [], [] ].
fetchNewSongs().then(console.log, console.error);
// output : [ [], [], [] ]
As an addition you might go for a for loop and avoid having to filter your array afterward
// Mock of music-metadata whose parseFile always rejects, so every file
// exercises the error branch.
const mm = {
  parseFile(file) {
    return Promise.reject("Bad format");
  },
};

// Sequentially parses each file, keeping only the successful results, so
// there is nothing to filter out afterward.
async function parseMetadata(files) {
  const results = [];
  for (const file of files) {
    try {
      results.push(await mm.parseFile(file, { duration: true }));
    } catch (error) {
      // A corrupt file is logged and skipped.
      console.warn(error);
    }
  }
  return results;
}
// Thin wrapper: forwards the directory list to parseMetadata.
async function fetchNewSongs(dirs = ["foo", "bar", "baz"]) {
return parseMetadata(dirs);
}
// Demo run: each mocked parse fails (warned) and is skipped, so the
// resolved value is an empty array.
fetchNewSongs().then(console.log, console.error);
// outputs:
// Bad format
// Bad format
// Bad format
// []
This is my first question here. I tried to save a document in my collection, but it doesn't work. The response of the function is exactly what I want, but the document is not saved in my database. In another controller (createRoom) foundUser.save() works, but in this controller it doesn't. Thanks in advance!
I am using mongodb/mongooose and express.
// Express handler: removes the room identified by req.body.roomId from the
// matching namespace of the user identified by req.body.userData.userId,
// then responds with the updated namespaces.
const removeRoom = async (req,res,next) => {
const {roomId, userData} = req.body;
const { userId, token } = userData;
let foundUser;
let updatedRooms;
let indexOfNamespaces;
try {
foundUser = await User.findById(userId)
// Find which namespace contains the room and build that namespace's room
// list without it. NOTE(review): updatedRooms is overwritten on every
// iteration, so this relies on the matching namespace's filter result
// being the one kept — verify when several namespaces exist.
foundUser.namespaces.forEach((ns,i1)=>{
updatedRooms = ns.rooms.filter((room,i2) => {
if(room.id === roomId){
indexOfNamespaces = i1;
}
return room.id !== roomId
})
})
// Replace the rooms array on the namespace that contained the room.
// NOTE(review): if no room matched, indexOfNamespaces is undefined and
// this line throws — confirm callers guarantee the room exists.
foundUser.namespaces[indexOfNamespaces].rooms = updatedRooms;
console.log(foundUser);
await foundUser.save();
} catch (err) {
console.log(err);
const error = new HttpError('Sth went wrong [removeRoom]', 500);
return next(error);
}
res.status(201).json({updatedNamespaces: foundUser.namespaces});
}
Mongoose does some optimizations where it will only actually save a field if it "changes". In this case you are modifying an array, but the array is still the "same" array, as in it still === (equals) the previous array. You need to use a new array to replace namespaces.
For example:
// Replace the namespaces array (and the one modified namespace object)
// with brand-new instances so Mongoose's change detection sees a new
// reference and persists the update on save().
foundUser.namespaces = [
...foundUser.namespaces.slice(0, indexOfNamespaces),
{ ...foundUser.namespaces[indexOfNamespaces], rooms: updatedRooms },
...foundUser.namespaces.slice(indexOfNamespaces + 1)
]
Now, when you save Mongoose will see a "new" array that !== (does not equal) the previous array because it is a new instance and it will save it.
I am creating a program that...
1. Detects all of the drives on any given system.
2. Scans those drives for files of specific file types. For example, it may search all of the drives for any jpeg, png, and svg files.
3. The results are then stored in a JSON file in the following desired format.
{
"C:": {
"jpeg": [
...
{
"path": "C:\\Users\\John\\Pictures\\example.jpeg",
"name": "example",
"type": "jpeg",
"size": 86016
},
...
],
"png": [],
"svg": []
},
...
}
The code...
/**
 * Recursively scans `path`, collecting a descriptor for every file whose
 * extension (without the dot) is listed in `exts`.
 * Directories resolve to a flat array of { path, name, type, size }
 * objects; a single matching file resolves to one such object, which the
 * parent directory's concat() flattens into its result.
 */
async function scan(path, exts) {
  try {
    const stats = await fsp.stat(path)
    if (stats.isDirectory()) {
      // Scan all children in parallel, then flatten their results one level.
      const childPaths = await fsp.readdir(path)
      const promises = childPaths.map(
        childPath => scan(join(path, childPath), exts)
      )
      const results = await Promise.all(promises)
      return [].concat(...results)
    } else if (stats.isFile()) {
      const fileExt = extname(path).replace('.', '')
      if (exts.includes(fileExt)) {
        return {
          "path": path,
          // basename(path, extname(path)) strips the extension and its dot
          // in one step (the original removed the dot with a slice hack).
          "name": basename(path, extname(path)),
          "type": fileExt,
          "size": stats.size
        }
      }
    }
    // Neither a matching file nor a directory: contribute nothing.
    return []
  }
  catch (error) {
    // Best-effort: unreadable paths (permissions, races) are skipped.
    return []
  }
}
// Scan every configured drive in parallel; results is one array per drive.
const results = await Promise.all(
config.drives.map(drive => scan(drive, exts))
)
console.log(results) // [ Array(140), Array(0), ... ]
// And I would like to do something like the following...
// NOTE(review): `path` is not defined in this loop — this is sketch code
// illustrating the intent, not runnable as-is; verify before use.
for (const drive of results) {
const
root = parse(path).root,
fileExt = extname(path).replace('.', '')
data[root][fileExt] = []
}
// NOTE(review): this serializes `config`, but the loop above populates
// `data` — presumably `data` was intended here; verify.
await fsp.writeFile('./data.json', JSON.stringify(config, null, 2))
The global results is of course divided into individual arrays that correspond to each drive. But currently it combines all of the objects into one giant array despite their corresponding file types. There is also currently no way for me to know which array belongs to each drive, especially if the drive's array does not contain any items that I can parse to retrieve the root directory.
I can obviously map or loop thru the global results again, and then sort everything out, as illustrated below, but it would be a lot cleaner to have scan() handle everything from the get go.
// Initiate scan sequence.
/**
 * Scans the listed drives for files with the given extensions and writes
 * the grouped results ({ [drive]: { [type]: [file, ...] } }) to data.json.
 */
async function initiateScan(exts) {
  // Load config and previous data in parallel.
  const [config, data] = await Promise.all([
    readJson('./config.json'),
    readJson('./data.json')
  ])
  const results = await Promise.all(
    // config.drives.map(drive => scan(drive, exts))
    ['K:', 'D:'].map(drive => scan(drive, exts))
  )
  for (const drive of results) {
    let root = false
    for (const [i, file] of drive.entries()) {
      // Derive the drive root (e.g. "K:") from the first file's path.
      if (!root) root = parse(file.path).root.slice(0, -1)
      // Start a fresh array on the drive's first file or for an unseen type.
      if (!data[root][file.type] || !i) data[root][file.type] = []
      data[root][file.type].push(file)
    }
  }
  // BUG FIX: previously serialized `config`; the structure populated above
  // is `data`, which is what belongs in data.json.
  await fsp.writeFile('./data.json', JSON.stringify(data, null, 2))
}
Due to my lack of experience with asynchronicity and objects in general, I am not quite sure how to best handle the data in map( ... )/scan. I am really not even sure how to best structure the output of scan() so that the structure of the global results is easily manipulable.
Any help would be greatly appreciated.
Mutating an outer object as asynchronously-derived results arrive is not particularly clean, however it can be done fairly simply and safely as follows:
(async function(exts, results) { // async IIFE wrapper
  // Recursively walks `path`, pushing qualifying file descriptors into
  // `bucket` — the per-drive results object seeded below.
  async function scan(path, bucket) {
    try {
      const stats = await fsp.stat(path);
      if (stats.isDirectory()) {
        const childPaths = await fsp.readdir(path);
        const promises = childPaths.map(childPath => scan(join(path, childPath), bucket));
        return Promise.all(promises);
      } else if (stats.isFile()) {
        const fileExt = extname(path).replace('.', '');
        // BUG FIX: the original tested results[path], but `path` here is a
        // file path — never a drive-root key — so nothing was ever pushed.
        // Testing the drive's own bucket restores the intended filtering.
        if (bucket[fileExt]) {
          bucket[fileExt].push({
            'path': path,
            'name': basename(path, fileExt).slice(0, -1),
            'type': fileExt,
            'size': stats.size
          });
        }
      }
    }
    catch (error) {
      console.log(error);
      // swallow error by not rethrowing
    }
  }
  await Promise.all(config.drives.map(path => {
    // Synchronously seed the results object with the required data structure
    results[path] = {};
    // BUG FIX: declare the loop variable (was an implicit global).
    for (const fileExt of exts) {
      results[path][fileExt] = []; // array will be populated with data, or remain empty if no qualifying data is found.
    }
    // Asynchronously populate the results[path] object, and return Promise to the .map() callback
    return scan(path, results[path]);
  }));
  console.log(results);
  // Here: whatever else you want to do with the results.
})(exts, {}); // pass `exts` and an empty results object to the IIFE function.
The results object is synchronously seeded with empty data structures, which are then populated asynchronously.
Everything is wrapped in an async Immediately Invoked Function Expression (IIFE), thus:
avoiding the global namespace (if not already avoided)
ensuring availabillty of await (if not already available)
making a safe closure for the results object.
This still needs some work, and it is iterating through the generated files collection a second time.
// This should get you an object with one property per drive
const results = Object.fromEntries(
(await Promise.all(
// Pair each drive with its scan result so fromEntries can key by drive.
config.drives.map(async drive => [drive, await scan(drive, exts)])
)
)
.map(
([drive, files]) => [
drive,
// we reduce each drive's file array to an object with
// one property per file extension
files.reduce(
(acc, file) => {
// NOTE(review): assumes every file.type appears in exts (the seed
// below only creates arrays for listed extensions) — verify.
acc[file.type].push(file)
return acc
},
// Seed { ext: [] } for every requested extension.
Object.fromEntries(exts.map(ext => [ext, []]))
)
]
)
)
nodejs supports Object.fromEntries from version 12.0.0, so if you can guarantee your application will always be run in that version or a later one, Object.fromEntries should be fine here.
You can use the glob npm library to get all of the filenames and then just transform that array to your object like this:
import {basename, extname} from 'path';
import {stat} from 'fs/promises'; // Or whichever library you use to promisify fs
import * as glob from "glob";
/**
 * Globs the whole filesystem for image files and resolves to the array of
 * matching absolute file names. Rejects with the glob error on failure.
 */
function searchForFiles() {
  return new Promise((resolve, reject) => glob(
    "/**/*.{jpeg,jpg,png,svg}", // The files to search for and where
    { silent: true, strict: false}, // No error when eg. something cannot be accessed
    // BUG FIX: propagate the failure reason instead of rejecting with
    // undefined, so callers can see why the search failed.
    (err, files) => err ? reject(err) : resolve(files)
  ));
}
/**
 * Builds { [drive]: { [type]: [{ path, name, type, size }, ...] } } from
 * the file names returned by searchForFiles().
 */
async function getFileObject() {
  const fileNames = await searchForFiles(); // An array containing all file names (eg. ['D:\\my\path\to\file.jpeg', 'C:\\otherfile.svg'])
  // Describe each file. BUG FIX: the original referenced undefined
  // `path`/`fileExt` variables and read `.size` off an un-awaited stat()
  // promise; use the mapped `filename` and await the stat result.
  const fileObjects = await Promise.all(fileNames.map(async filename => {
    const stats = await stat(filename);
    return {
      path: filename,
      name: basename(filename, extname(filename)),
      type: extname(filename).replace('.', ''),
      size: stats.size,
      drive: `${filename.split(':\\')[0]}:`
    };
  }));
  // Create your actual object
  return fileObjects.reduce((result, {path, name, type, size, drive}) => {
    if (!result[drive]) { // create eg. { C: {} } if it does not already exist
      // BUG FIX: `result.drive = {}` created a literal "drive" key instead
      // of one keyed by the drive letter.
      result[drive] = {};
    }
    if (!result[drive][type]) { // create eg. {C: { jpeg: [] }} if it does not already exist
      result[drive][type] = [];
    }
    // Push the object to the correct array
    result[drive][type].push({path, name, type, size});
    return result;
  }, {});
}
The function must traverse the file system recursively, looking for files that match your criteria. The recursion can be simplified by the fact that the result doesn't need to retain any hierarchy, so we can just carry a flat array (files) as a parameter.
// File extensions to match, e.g. ['jpeg', 'png', 'svg'] (placeholder).
let exts = [...]
/**
 * Recursively walks `path`, appending a { path, name, type, size }
 * descriptor to the shared flat `files` array for every file whose
 * extension is listed in the outer `exts` array.
 */
async function scan(path, files) {
  const stats = await fsp.stat(path)
  if (stats.isDirectory()) {
    // BUG FIX: childPaths was assigned without a declaration, leaking an
    // implicit global (and breaking under strict mode / ES modules).
    const childPaths = await fsp.readdir(path)
    const promises = childPaths.map(childPath => {
      // Recurse, sharing the same accumulator so no hierarchy is kept.
      return scan(join(path, childPath), files)
    })
    return Promise.all(promises)
  } else if (stats.isFile()) {
    const fileExt = extname(path).replace('.', '')
    if (exts.includes(fileExt)) {
      files.push({
        path: path,
        // Strip the extension and its dot in one step.
        name: basename(path, extname(path)),
        type: fileExt,
        size: stats.size
      })
    }
  }
}
// Accumulator shared across the whole recursive scan.
let files = []
await scan('/', files)
console.log(files)
Init code:
// Promise for the open IndexedDB connection, assigned by idb.open below.
// BUG FIX: was misspelled `dbPormise`, while the later code assigns to
// `dbPromise` — in strict mode that assignment would throw.
let dbPromise = null;
const OBJECT_STORE_NAME = 'pages';
const DB_NAME = 'tracking-log';
To initiate an ObjectStore:
// Open (or upgrade) the database and create the object store.
dbPromise = idb.open(DB_NAME, 3, upgradeDB => {
  upgradeDB.createObjectStore(OBJECT_STORE_NAME, {
    autoIncrement: true,
    // BUG FIX: the option name is `keyPath` (capital P); the lowercase
    // `keypath` was silently ignored, leaving the store without an
    // in-line key.
    keyPath: 'id'
  });
});
This is how I generate a blank record in the IndexedDB:
// Create a fresh, empty bucket record keyed by newBucketID.
const tx = db.transaction(OBJECT_STORE_NAME, 'readwrite');
tx.objectStore(OBJECT_STORE_NAME).put(
{ id: newBucketID, data: [] });
Now, at a later point, I have some elements that I want to append to the data array for a particular id.
This is how I tried doing it:
// Overwrite the bucket's data with `item`.
// NOTE(review): localStorage values are strings, so this id may not match
// the key type used when the record was created — verify, as a mismatched
// key would explain the put() behaving unexpectedly.
const tx = db.transaction(OBJECT_STORE_NAME, 'readwrite');
tx.objectStore(OBJECT_STORE_NAME).put(
{ id: localStorage.getItem("currentBucket"), data: item }
);
Schema
{
data: Array
}
Every item has a unique key generated and provided by me.
However, this doesn't work and returns an error: "Key already exists in the object store."
So, how can I append a value to a field inside a IDB objectt?
Not sure about the error, but regardless of that, the basic way of adding an item would be something like this:
// Promise facade over the two-request IndexedDB update performed by
// addItemExecutor; resolves when the transaction completes.
function addItem(db, bucketId, item) {
  return new Promise((resolve, reject) =>
    addItemExecutor(db, bucketId, item, resolve, reject)
  );
}
// Promise executor: reads the bucket for `bucketId`, appends `item` to its
// data array (via findRequestOnsuccess), and settles through the
// transaction's completion/error events.
function addItemExecutor(db, bucketId, item, resolve, reject) {
// Start a single writable transaction that we will use for two requests. One to
// find the corresponding bucket, and one to update it.
const tx = db.transaction(OBJECT_STORE_NAME, 'readwrite');
// If all requests completed without error, we are done
tx.oncomplete = resolve;
// If any request fails, the operation fails
tx.onerror = event => reject(event.target.error);
const store = tx.objectStore(OBJECT_STORE_NAME);
// Go find the corresponding bucket object to update
const findRequest = store.get(bucketId);
// bind's first argument only sets `this`; bucketId/item/reject are the
// leading arguments of findRequestOnsuccess, followed by the event.
findRequest.onsuccess = findRequestOnsuccess.bind(findRequest, bucketId, item, reject);
}
// React to the resolution of the get request
// React to the resolution of the get request
// Appends `item` to the fetched bucket's data array and writes the bucket
// back within the same transaction; rejects if no bucket was found.
function findRequestOnsuccess(bucketId, item, reject, event) {
const bucket = event.target.result;
// If no bucket exists for that id then fail
if(!bucket) {
const error = new Error('No bucket found for id ' + bucketId);
reject(error);
return;
}
// Lazily init the data array property
if(!bucket.data) {
bucket.data = [];
}
// Add our item to the data array
bucket.data.push(item);
// Save the bucket object back into the bucket object store, completely replacing
// the bucket that was there before.
const bucketStore = event.target.source;
bucketStore.put(bucket);
}
// Example caller: adds one item to the current bucket and logs failures.
async function someCallingCodeExampleAvoidingTopLevelAwait() {
  const bucketId = localStorage.currentBucket;
  // BUG FIX: `{foo:bar}` referenced an undefined `bar` (ReferenceError);
  // use a literal value for the example.
  const item = { foo: 'bar' };
  const db = evilUnreliableGlobalDbVariableFromSomewhereMagicalForeverOpenAssumeInitialized;
  try {
    await addItem(db, bucketId, item);
  } catch(error) {
    console.debug(error);
  }
  // Leave the database connection open for page lifetime
}
Without a reduced example it's difficult to figure out what's going on. The best way to get help is to create a reduced example of the problem, as in, the smallest amount of code needed to recreate the issue you're seeing, then put it on something like jsbin.com or glitch.com so folks only have to click a link to see the error you're seeing.
I wasn't able to recreate the error you're seeing. You have keypath when it should be keyPath, but I don't think that creates the error you're seeing.
Anyway, here's how to modify a record in IDB:
// End-to-end demo: create a record, append to its data array in a second
// transaction, then read it back. Kept as separate blocks because each
// IndexedDB transaction auto-commits once its requests settle.
async function main() {
// Set up the database.
const OBJECT_STORE_NAME = 'pages';
const DB_NAME = 'tracking-log';
const db = await idb.open(DB_NAME, 1, upgradeDB => {
upgradeDB.createObjectStore(OBJECT_STORE_NAME, {
autoIncrement: true,
keyPath: 'id'
});
});
// The OP didn't make it clear what this value was, so I'll guess.
const newBucketID = 1;
{
// Create the record.
const tx = db.transaction(OBJECT_STORE_NAME, 'readwrite');
tx.objectStore(OBJECT_STORE_NAME).put({ id: newBucketID, data: ['first value'] });
}
{
const tx = db.transaction(OBJECT_STORE_NAME, 'readwrite');
// Get the record.
const record = await tx.objectStore(OBJECT_STORE_NAME).get(newBucketID);
// Modify it.
record.data.push('second value');
// Put the modified record back.
tx.objectStore(OBJECT_STORE_NAME).put(record);
}
{
// Read the value to confirm everything worked.
const tx = db.transaction(OBJECT_STORE_NAME);
const value = await tx.objectStore(OBJECT_STORE_NAME).get(newBucketID);
console.log(value);
}
}
main();
And here's that example running: https://jsbin.com/dineguq/edit?js,console
I am just trying to build a crawler with chrome-remote-interface, but I don't know how to get multiple DOM elements, such as specific target ids and classes.
for Ex:
price = document.getelementbyid('price')
name= document.getelementbyid('name')
Code
const CDP = require('chrome-remote-interface');
// Connect to the browser's DevTools endpoint and drive the page.
CDP((client) => {
// Extract used DevTools domains.
const {Page, Runtime} = client;
// Enable events on domains we are interested in.
Promise.all([
Page.enable()
]).then(() => {
return Page.navigate({url: 'http://example.com'})
});
// Evaluate outerHTML after page has loaded.
Page.loadEventFired(() => {
// Runtime.evaluate runs the expression in the page and returns its value.
Runtime.evaluate({expression: 'document.body.outerHTML'}).then((result) => {
//How to get Multiple Dom elements
console.log(result.result.value);
client.close();
});
});
}).on('error', (err) => {
console.error('Cannot connect to browser:', err);
});
Update
const CDP = require('chrome-remote-interface');
CDP((client) => {
// Extract used DevTools domains.
const {DOM,Page, Runtime} = client;
// Enable events on domains we are interested in.
Promise.all([
Page.enable()
]).then(() => {
return Page.navigate({url: 'https://someDomain.com'});
})
Page.loadEventFired(() => {
// NOTE(review): the extra ')' after getElementsByClassName('rows') makes
// this expression invalid JavaScript — that is the SyntaxError reported
// below. Even once fixed, an HTMLCollection of DOM nodes cannot be
// returned by value to Node; map it to serializable properties instead.
const expression = `({
test: document.getElementsByClassName('rows')),
})`
Runtime.evaluate({expression,returnByValue: true}).then((result) => {
console.log(result.result) // Error
client.close()
})
})
}).on('error', (err) => {
console.error('Cannot connect to browser:', err);
});
Error
{ type: 'object',
subtype: 'error',
className: 'SyntaxError',
description: 'SyntaxError: Unexpected token )',
objectId: '{"injectedScriptId":14,"id":1}' }
Actually, I want to iterate over the list of elements, but I don't know where it goes wrong.
You cannot move DOM object from the browser context to the Node.js context, all you can do is pass a property or whatever can be considered a JSON object. Here I'm assuming you're interested in the computed HTML.
A possible solution is:
const CDP = require('chrome-remote-interface');
CDP((client) => {
// Extract used DevTools domains.
const {Page, Runtime} = client;
// Enable events on domains we are interested in.
Promise.all([
Page.enable()
]).then(() => {
return Page.navigate({url: 'http://example.com'});
});
// Evaluate outerHTML after page has loaded.
Page.loadEventFired(() => {
// Parenthesized object literal: evaluates to one plain object whose
// properties are JSON-serializable strings.
const expression = `({
name: document.getElementById('name').outerHTML,
price: document.getElementById('price').outerHTML
})`;
Runtime.evaluate({
expression,
// Serialize the evaluated object across the protocol instead of
// returning a remote object reference.
returnByValue: true
}).then(({result}) => {
const {name, price} = result.value;
console.log(`name: ${name}`);
console.log(`price: ${price}`);
client.close();
});
});
}).on('error', (err) => {
console.error('Cannot connect to browser:', err);
});
The key point is returning a JSON object using returnByValue: true.
Update: You have an error in your expression, a trailing ) in ...('rows')),. But even if you fix it you'd still end up in a wrong situation because you're attempting to pass an array of DOM objects (see the first paragraph of this answer). Again, if you want just the outer HTML you can do something like:
// Evaluate outerHTML after page has loaded.
// The script's completion value (its last statement, the mapped array) is
// what Runtime.evaluate returns.
Page.loadEventFired(() => {
// NOTE(review): the backticks around outerHTML in the comment inside this
// template literal would terminate the template early — likely a
// markdown-extraction artifact; remove them before running.
const expression = `
// fetch an array-like of DOM elements
var elements = document.getElementsByTagName('p');
// create and return an array containing
// just a property (in this case `outerHTML`)
Array.prototype.map.call(elements, x => x.outerHTML);
`;
Runtime.evaluate({
expression,
returnByValue: true
}).then(({result}) => {
// this is the returned array
const elements = result.value;
elements.forEach((html) => {
console.log(`- ${html}`);
});
client.close();
});
});