Adding Data to Event Emitter - javascript

I am using "proxy-lists": "^1.16.0" package to obtain proxies.
I would like to save all incoming Array-Objects into my own array to later save it to the db.
When running the below example my array is empty and no file is written:
const ProxyLists = require('proxy-lists');
const fs = require('fs');

global.__basedir = __dirname;

const options = {
  countries: null
};

// `gettingProxies` is an event emitter object.
const gettingProxies = ProxyLists.getProxies(options);
const data = [];

gettingProxies.on('data', function (proxies) {
  console.log(proxies);
  data.push(proxies);
});

gettingProxies.on('error', function (error) {
  console.error(error);
});

gettingProxies.once('end', function () {
  fs.writeFile(__basedir + "data/file.txt", data, function (err) {
    if (err) {
      return console.log(err);
    }
    console.log("The file was saved!");
  });
});
Any suggestions as to what I am doing wrong?
I appreciate your replies!

Looks good to me, but when I tested locally I saw that there was an issue concatenating your paths. Try __basedir + "/data/file.txt" (or use path.join).
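For reference, here is a minimal sketch of the end handler using path.join and an explicitly serialized payload; it assumes the data/ subdirectory from the question already exists next to the script:

const path = require('path');

gettingProxies.once('end', function () {
  // Build the target path safely instead of concatenating strings by hand.
  const target = path.join(__basedir, 'data', 'file.txt');
  // `data` is an array of arrays, so serialize it explicitly before writing.
  fs.writeFile(target, JSON.stringify(data, null, 2), function (err) {
    if (err) {
      return console.error(err);
    }
    console.log('The file was saved!');
  });
});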

Related

having problems with `fs.writeFile` it doesn't create files

I'm trying to run a script that creates a model file in JSON using fs.writeFile. When I run the script with node file.js, it is supposed to create a new file face-expression-model.json in the /models directory, but it doesn't create anything and doesn't show any errors.
I tried another library, fs-extra, which didn't work either, and I tried to make the script create the model directory with fs.WriteDir, also without success. I added process.cwd() to bypass any permission issues when creating the file, but that didn't work. I also added a try/catch block to catch all errors, but it doesn't show any errors; for a moment it looked like the file had been created, but unfortunately it hadn't.
Here is the code I'm using.
const axios = require("axios");
const faceapi = require("face-api.js");
const { FaceExpressions } = faceapi.nets;
const fs = require("fs");

async function trainModel(imageUrls) {
  try {
    await FaceExpressions.loadFromUri(process.cwd() + "/models");
    const imageTensors = [];
    for (let i = 0; i < imageUrls.length; i++) {
      const response = await axios.get(imageUrls[i], {
        responseType: "arraybuffer"
      });
      const image = new faceapi.Image();
      image.constructor.loadFromBytes(new Uint8Array(response.data));
      const imageTensor = faceapi.resizeImageToBase64Tensor(image);
      imageTensors.push(imageTensor);
    }
    const model = await faceapi.trainFaceExpressions(imageTensors);
    fs.writeFileSync("./models/face-expression-model.json", JSON.stringify(model), (err) => {
      if (err) throw err;
      console.log("The file has been saved!");
    });
  } catch (error) {
    console.error(error);
  }
}

const imageUrls = [
  // array of image urls here
];

trainModel(imageUrls);
I don't know exactly why, but I had the same problem a while ago. Try using the fs.writeFile method instead; it worked for me.
fs.writeFile("models/face-expression-model.json", JSON.stringify(model), {}, (err) => {
if (err) throw err;
console.log("The file has been saved!");
});
Good luck with that!
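One more thing worth checking: if the models directory itself does not exist, the write fails with ENOENT. A minimal sketch that creates the directory first, assuming model is the trained model from the question's trainModel function:

const fs = require("fs");
const path = require("path");

// Create the target directory if it is missing, then write the model file.
const outDir = path.join(process.cwd(), "models");
fs.mkdirSync(outDir, { recursive: true });

fs.writeFile(path.join(outDir, "face-expression-model.json"), JSON.stringify(model), (err) => {
  if (err) throw err;
  console.log("The file has been saved!");
});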

Downloading and sending pdf document in Node through API

I am new to Node. When a client hits a POST request on the back end, I want to download a PDF document from another URL, change the name of the file, and send the file back to the original client, where the PDF will be downloaded.
NOTE: the file should not be saved on the server.
First, there is a controller file which contains the following code:
try {
  const get_request: any = req.body;
  const result = await printLabels(get_request, res);
  res.contentType("application/pdf");
  res.status(200).send(result);
} catch (error) {
  const ret_data: errorResponse = await respondError(
    error, "Something Went Wrong.",
  );
  res.status(200).json(ret_data);
}
After this, the printLabels function is defined as:
export const printLabels = async (request: any, response: any) => {
  try {
    const item_id = request.item_id;
    let doc = await fs.createReadStream(`some url with ${item_id}`);
    doc.pipe(fs.createWriteStream("Invoice_" + item_id + "_Labels.pdf"));
    return doc;
  } catch (error) {
    throw error;
  }
};
Using the above code, I am getting an error saying no such file was found. Also, I don't have access to the front end, so is it possible to test the API for the PDF with Postman the way I am doing, or is my approach incorrect?
The following solution works for Express, but I'm not sure whether you're using an Express-like framework. If not, please specify which framework you're using.
First, you need to use sendFile instead of send:
try {
  const get_request: any = req.body;
  const result = await printLabels(get_request, res);
  res.contentType("application/pdf");
  res.status(200).sendFile(result);
} catch (error) {
  const ret_data: errorResponse = await respondError(
    error, "Something Went Wrong.",
  );
  res.status(200).json(ret_data);
}
Second, you are returning a read stream instead of a path to the file. Notice that you need to use an absolute path for sendFile.
const printLabels = async () => {
  try {
    let doc = await fs.createReadStream(path.join(__dirname, 'test.pdf'));
    doc.pipe(fs.createWriteStream("Invoice_test_Labels.pdf"));
    return path.join(__dirname, 'Invoice_test_Labels.pdf');
  } catch (error) {
    throw error;
  }
};
As for Postman, you can preview the PDF in the response pane or use "Save Response" to write it to a file.
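Since the question asks that nothing be saved on the server, an alternative is to fetch the remote PDF as a stream and pipe it straight into the response. This is only a sketch, assuming Express and axios; the route and source URL are placeholders:

import axios from "axios";

app.post("/labels", async (req, res) => {
  try {
    const item_id = req.body.item_id;
    // Fetch the remote PDF as a stream; the URL here is a placeholder.
    const upstream = await axios.get(`https://example.com/labels/${item_id}.pdf`, {
      responseType: "stream"
    });
    // Suggest a new file name to the client and pipe the bytes through untouched.
    res.setHeader("Content-Type", "application/pdf");
    res.setHeader("Content-Disposition", `attachment; filename="Invoice_${item_id}_Labels.pdf"`);
    upstream.data.pipe(res);
  } catch (error) {
    res.status(500).json({ message: "Something Went Wrong." });
  }
});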

How can I continuously store data in a JSON file for discord.js?

I want to take the message information from the user and save it in a JSON file, with new data constantly being added. With the following code, however, the data is constantly replaced.
I don't want to replace the data; I want to add to it.
This is my code:
const fs = require("fs");
const { Client, Intents } = require("discord.js");
const client = new Client({
intents: [Intents.FLAGS.GUILDS, Intents.FLAGS.GUILD_MESSAGES],
});
const now = new Date();
const obj = {
table: [],
};
let confirm = false;
const { badWords } = require("../badWordList.json");
client.on("message", async (message) => {
if (message.author.bot) return;
for (let i = 0; i < badWords.length; i++) {
if (message.content.toLowerCase().includes(badWords[i].toLowerCase()))
confirm = true;
}
if (confirm) {
obj.table.push({
name: message.author.username,
date: `${now.getFullYear()}/${now.getMonth()}/${now.getDate()}`,
message: message.channel.messages.channel.lastMessage.cleanContent,
});
fs.writeFile("myjsonfile.json", JSON.stringify(obj), function (err) {
if (err) throw err;
console.log("complete");
});
}
});
When using fs.writeFile(), it replaces the content of the file, as written in the docs.
At first glance, you might want to use fs.write(); see the docs for usage.
But the Node.js docs say:
It is unsafe to use fs.write() multiple times on the same file without waiting for the callback. For this scenario, fs.createWriteStream() is recommended.
Since you are in asynchronous mode, you should probably define a write stream to the file and then write to it, which gives you something like this:
// -snip-
const whateverJsonFileStream = fs.createWriteStream("myjsonfile.json");

client.on("message", async (message) => {
  // -snip-
  if (confirm) {
    obj.table.push({
      name: message.author.username,
      date: `${now.getFullYear()}/${now.getMonth()}/${now.getDate()}`,
      message: message.channel.messages.channel.lastMessage.cleanContent,
    });
    whateverJsonFileStream.write(JSON.stringify(obj), function (err) {
      if (err) throw err;
      console.log("complete");
    });
  }
});
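Note that repeatedly appending JSON.stringify(obj) to one stream does not keep the file as a single valid JSON document. If valid JSON is a requirement, a read-modify-write cycle is another option; a minimal sketch, assuming the same myjsonfile.json file and entry shape as above:

const fsp = require("fs").promises;

// Read the existing table (or start fresh), append the new entry, and write it back.
async function appendEntry(entry) {
  let table = [];
  try {
    table = JSON.parse(await fsp.readFile("myjsonfile.json", "utf8")).table;
  } catch (err) {
    // First run: the file does not exist yet or is empty.
  }
  table.push(entry);
  await fsp.writeFile("myjsonfile.json", JSON.stringify({ table }, null, 2));
}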

Retrieving documents for MongoDB cluster

I am trying to retrieve all the documents from a MongoDB cluster. I have followed code I've seen online; however, I am facing a small problem.
const MongoClient = require('mongodb');

const uri = "mongodb+srv://<user>:<password>#cluster0-10soy.mongodb.net/test?retryWrites=true&w=majority";

var questionsArray = [];

MongoClient.connect(uri, function (err, client) {
  const database = client.db("WhatSportWereYouMadeFor");
  database.collection("Questions").find({}, (error, cursor) => {
    cursor.each(function (error, item) {
      if (item == null) {
        console.log(error);
      }
      questionsArray.push(item);
    });
  });
});

module.exports = { questionsArray };
I connect fine to the database; however, I've set a breakpoint at the stop variable, and it gets hit before any of the documents retrieved from the database are pushed to the questions array.
I've also tried wrapping the code inside an async function and awaiting it before the stop variable, but that breakpoint still gets hit first, and only afterwards do the documents get pushed to the array.
What I would do is wrap the whole thing in a promise, and then export that.
const MyExport = () => {
  return new Promise((resolve, reject) => {
    var questionsArray = [];
    MongoClient.connect(uri, function (err, client) {
      const database = client.db("WhatSportWereYouMadeFor");
      database.collection("Questions").find({}, (error, cursor) => {
        cursor.each(function (error, item) {
          // item == null signals that the cursor is exhausted,
          // so only resolve once every document has been pushed.
          if (item == null) {
            return resolve(questionsArray);
          }
          questionsArray.push(item);
        });
      });
    });
  });
};

module.exports.questionsArray = MyExport;
But then, when you import it, you need to call and await it (inside an async function):
const questionsArrayFunc = require("path/to/this/file").questionsArray;
const questionsArray = await questionsArrayFunc();
I hope this is what you are looking for. There might be some other way, but I think this works.
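With a recent MongoDB driver, the same thing can also be written with async/await and toArray(); a minimal sketch, assuming the same uri, database, and collection names as above:

const { MongoClient } = require('mongodb');

async function loadQuestions() {
  const client = await MongoClient.connect(uri);
  try {
    // toArray() resolves only after every matching document has been fetched.
    return await client.db("WhatSportWereYouMadeFor")
      .collection("Questions")
      .find({})
      .toArray();
  } finally {
    await client.close();
  }
}

module.exports = { loadQuestions };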

Looping through files in a folder Node.JS

I am trying to loop through and pick up the files in a directory, but I am having some trouble implementing it. How do I pull in multiple files and then move them to another folder?
var fs = require('fs');
var dirname = 'C:/FolderwithFiles';

console.log("Going to get file info!");
fs.stat(dirname, function (err, stats) {
  if (err) {
    return console.error(err);
  }
  console.log(stats);
  console.log("Got file info successfully!");
  // Check file type
  console.log("isFile ? " + stats.isFile());
  console.log("isDirectory ? " + stats.isDirectory());
});
Older answer with callbacks
You want to use the fs.readdir function to get the directory contents and the fs.rename function to actually do the renaming. Both of these functions have synchronous versions if you need to wait for them to finish before running the code that follows (a synchronous sketch appears after the sample output below).
I wrote a quick script that does what you described.
var fs = require('fs');
var path = require('path');
// In newer Node.js versions where process is already global this isn't necessary.
var process = require("process");

var moveFrom = "/home/mike/dev/node/sonar/moveme";
var moveTo = "/home/mike/dev/node/sonar/tome";

// Loop through all the files in the temp directory
fs.readdir(moveFrom, function (err, files) {
  if (err) {
    console.error("Could not list the directory.", err);
    process.exit(1);
  }

  files.forEach(function (file, index) {
    // Make one pass and make the file complete
    var fromPath = path.join(moveFrom, file);
    var toPath = path.join(moveTo, file);

    fs.stat(fromPath, function (error, stat) {
      if (error) {
        console.error("Error stating file.", error);
        return;
      }

      if (stat.isFile())
        console.log("'%s' is a file.", fromPath);
      else if (stat.isDirectory())
        console.log("'%s' is a directory.", fromPath);

      fs.rename(fromPath, toPath, function (error) {
        if (error) {
          console.error("File moving error.", error);
        } else {
          console.log("Moved file '%s' to '%s'.", fromPath, toPath);
        }
      });
    });
  });
});
Tested on my local machine.
node testme.js
'/home/mike/dev/node/sonar/moveme/hello' is a file.
'/home/mike/dev/node/sonar/moveme/test' is a directory.
'/home/mike/dev/node/sonar/moveme/test2' is a directory.
'/home/mike/dev/node/sonar/moveme/test23' is a directory.
'/home/mike/dev/node/sonar/moveme/test234' is a directory.
Moved file '/home/mike/dev/node/sonar/moveme/hello' to '/home/mike/dev/node/sonar/tome/hello'.
Moved file '/home/mike/dev/node/sonar/moveme/test' to '/home/mike/dev/node/sonar/tome/test'.
Moved file '/home/mike/dev/node/sonar/moveme/test2' to '/home/mike/dev/node/sonar/tome/test2'.
Moved file '/home/mike/dev/node/sonar/moveme/test23' to '/home/mike/dev/node/sonar/tome/test23'.
Moved file '/home/mike/dev/node/sonar/moveme/test234' to '/home/mike/dev/node/sonar/tome/test234'.
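For completeness, the synchronous versions mentioned above look like this; a minimal sketch, assuming the same moveFrom and moveTo directories:

var fs = require('fs');
var path = require('path');

// Synchronous version: each call blocks until it completes, so no callbacks are needed.
var files = fs.readdirSync(moveFrom);
files.forEach(function (file) {
  var fromPath = path.join(moveFrom, file);
  var toPath = path.join(moveTo, file);
  fs.renameSync(fromPath, toPath);
  console.log("Moved '%s' to '%s'.", fromPath, toPath);
});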
Update: fs.promises functions with async/await
Inspired by ma11hew28's answer (shown here), here is the same thing as above but with the async functions in fs.promises. As noted by ma11hew28, this may have memory limitations versus fs.promises.opendir added in v12.12.0.
Quick code below.
//jshint esversion:8
//jshint node:true
const fs = require( 'fs' );
const path = require( 'path' );

const moveFrom = "/tmp/movefrom";
const moveTo = "/tmp/moveto";

// Make an async function that gets executed immediately
(async () => {
  // Our starting point
  try {
    // Get the files as an array
    const files = await fs.promises.readdir( moveFrom );

    // Loop them all with the new for...of
    for( const file of files ) {
      // Get the full paths
      const fromPath = path.join( moveFrom, file );
      const toPath = path.join( moveTo, file );

      // Stat the file to see if we have a file or dir
      const stat = await fs.promises.stat( fromPath );

      if( stat.isFile() )
        console.log( "'%s' is a file.", fromPath );
      else if( stat.isDirectory() )
        console.log( "'%s' is a directory.", fromPath );

      // Now move async
      await fs.promises.rename( fromPath, toPath );

      // Log because we're crazy
      console.log( "Moved '%s'->'%s'", fromPath, toPath );
    } // End for...of
  }
  catch( e ) {
    // Catch anything bad that happens
    console.error( "We've thrown! Whoops!", e );
  }
})(); // Wrap in parenthesis and call now
fs.readdir(path[, options], callback) (which Mikey A. Leonetti used in his answer) and its variants (fsPromises.readdir(path[, options]) and fs.readdirSync(path[, options])) each reads all of a directory's entries into memory at once. That's good for most cases, but if the directory has very many entries and/or you want to lower your application's memory footprint, you could instead iterate over the directory's entries one at a time.
Asynchronously
Directories are async iterable, so you could do something like this:
const fs = require('fs')

async function ls(path) {
  const dir = await fs.promises.opendir(path)
  for await (const dirent of dir) {
    console.log(dirent.name)
  }
}

ls('.').catch(console.error)
Or, you could use dir.read() and/or dir.read(callback) directly.
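For example, a minimal sketch that drains a directory with dir.read() directly, one entry at a time:

const fs = require('fs')

async function ls(path) {
  const dir = await fs.promises.opendir(path)
  // dir.read() resolves with the next fs.Dirent, or null once the directory is exhausted.
  let dirent
  while ((dirent = await dir.read()) !== null) {
    console.log(dirent.name)
  }
  await dir.close()
}

ls('.').catch(console.error)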
Synchronously
Directories aren't sync iterable, but you could use dir.readSync() directly. For example:
const fs = require('fs')

const dir = fs.opendirSync('.')
let dirent
while ((dirent = dir.readSync()) !== null) {
  console.log(dirent.name)
}
dir.closeSync()
Or, you could make directories sync iterable. For example:
const fs = require('fs')

function makeDirectoriesSyncIterable() {
  const p = fs.Dir.prototype
  if (p.hasOwnProperty(Symbol.iterator)) { return }
  const entriesSync = function* () {
    try {
      let dirent
      while ((dirent = this.readSync()) !== null) { yield dirent }
    } finally { this.closeSync() }
  }
  if (!p.hasOwnProperty('entriesSync')) { p.entriesSync = entriesSync }
  Object.defineProperty(p, Symbol.iterator, {
    configurable: true,
    enumerable: false,
    value: entriesSync,
    writable: true
  })
}
makeDirectoriesSyncIterable()
And then, you could do something like this:
const dir = fs.opendirSync('.')
for (const dirent of dir) {
  console.log(dirent.name)
}
Note: "In busy processes, use the asynchronous versions of these calls. The synchronous versions will block the entire process until they complete, halting all connections."
References:
Node.js Documentation: File System: Class fs.Dir
Node.js source code: fs.Dir
GitHub: nodejs/node: Issues: streaming / iterative fs.readdir #583
Read all folders in a directory
const fs = require("fs");

const readAllFolder = (dirMain) => {
  const readDirMain = fs.readdirSync(dirMain);
  console.log(dirMain);
  console.log(readDirMain);
  readDirMain.forEach((dirNext) => {
    console.log(dirNext, fs.lstatSync(dirMain + "/" + dirNext).isDirectory());
    if (fs.lstatSync(dirMain + "/" + dirNext).isDirectory()) {
      readAllFolder(dirMain + "/" + dirNext);
    }
  });
};
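A quick usage example, assuming the helper above:

readAllFolder(process.cwd());

This logs every subfolder under the current working directory, recursing into each one it finds.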
The answers provided are for a single folder. Here is an asynchronous implementation for multiple folders where all the folders are processed simultaneously, but the smaller folders or files get completed first.
Please comment if you have any feedback.
Asynchronously Multiple Folders
const fs = require('fs')
const util = require('util')
const path = require('path')

// Multiple folders list
const in_dir_list = [
  'Folder 1 Large',
  'Folder 2 Small', // small folder and files will complete first
  'Folder 3 Extra Large'
]

// BEST PRACTICES: (1) The faster folder-list for loop has to be outside the async_capture_callback functions for async to make sense
// (2) Slower read/write or I/O processes are best contained in async_capture_callback functions, because these processes are slower than for loop events and faster completed items get called back first
for (i = 0; i < in_dir_list.length; i++) {
  var in_dir = in_dir_list[i]
  // A function is created (see below) so each folder is processed asynchronously by the readFile_async that follows
  readdir_async_capture(in_dir, function(files_path) {
    console.log("Processing folders asynchronously ...")
    for (j = 0; j < files_path.length; j++) {
      file_path = files_path[j]
      file = file_path.substr(file_path.lastIndexOf("/") + 1, file_path.length)
      // A function is created (see below) so all files are read simultaneously, but the smallest file will be completed first and called back first
      readFile_async_capture(file_path, file, function(file_string) {
        try {
          console.log(file_path)
          console.log(file_string)
        } catch (error) {
          console.log(error)
          console.log("System exiting first to catch error; if not, async will continue...")
          process.exit()
        }
      })
    }
  })
}

// fs.readdir async_capture function to deal with the asynchronous code above
function readdir_async_capture(in_dir, callback) {
  fs.readdir(in_dir, function(error, files) {
    if (error) { return console.log(error) }
    files_path = files.map(function(x) { return path.join(in_dir, x) })
    callback(files_path)
  })
}

// fs.readFile async_capture function to deal with the asynchronous code above
function readFile_async_capture(file_path, file, callback) {
  fs.readFile(file_path, function(error, data) {
    if (error) { return console.log(error) }
    file_string = data.toString()
    callback(file_string)
  })
}
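The same idea can be written more compactly with fs.promises and Promise.all; a minimal sketch, assuming the same in_dir_list folder names as above:

const fsp = require('fs').promises
const path = require('path')

// Read every folder concurrently; within each folder, read every file concurrently.
async function processFolders(dirList) {
  await Promise.all(dirList.map(async (dir) => {
    const files = await fsp.readdir(dir)
    await Promise.all(files.map(async (file) => {
      const filePath = path.join(dir, file)
      const contents = await fsp.readFile(filePath, 'utf8')
      console.log(filePath)
      console.log(contents)
    }))
  }))
}

processFolders(in_dir_list).catch(console.error)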
