I'm trying to loop over multiple files with fs.createReadStream, and I cannot figure out how to read the second file.
const fs = require('fs')
const csv = require('csv-parse')

const parser = csv({
  ...
})

const files = process.argv.slice(2)

async function analyzeFiles () {
  for (const file of files) {
    const string = file
    console.log(`Analyzing ${file}.`)
    await runFile(file, string)
    console.log(`Analyzed ${file}.`)
  }
}

async function runFile (filepath, string) {
  return new Promise(function (resolve, reject) {
    const shimmedData = {}
    let fileName = ''
    fs.createReadStream(filepath)
      .pipe(parser)
      .on('data', (row) => {
        // ...
        fileName = 'something dynamic from row'
        shimmedData[index] = row // Or something similar, not sure this matters
      })
      .on('error', (e) => {
        console.log('BONK', e)
      })
      .on('end', () => {
        fs.writeFile(`${fileName}.json`, JSON.stringify(shimmedData), (err) => {
          if (err) {
            console.log(err)
            reject(err)
          } else {
            console.log('File written successfully.')
            resolve()
          }
        })
      })
  })
}

analyzeFiles()
Then I run node script.js file1.txt file2.txt file3.txt.
When I run this, only the first file is ever saved. Looking into it with console logs, it appears that for the second file, fs.createReadStream is never called.
➜ shimeBirdData git:(main) ✗ node stackoverflowExample.js sampleData/sample.txt sampleData/sample2.txt
Analyzing sampleData/sample.txt.
File written successfully.
Analyzed sampleData/sample.txt.
Analyzing sampleData/sample2.txt.
And then nothing more happens; only the first file's JSON is saved.
What am I missing?
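For what it's worth, a likely culprit is that parser is created once at module level: a csv-parse parser is itself a stream, and once it emits 'end' after the first file it cannot be piped into again. A minimal sketch of creating a fresh parser per file, assuming the csv-parse v5 API (parse as a named export) with illustrative options:

const fs = require('fs')
const { parse } = require('csv-parse') // v5 API; in v4 this was require('csv-parse')

async function runFile (filepath) {
  return new Promise(function (resolve, reject) {
    // A fresh parser per file: a parser stream ends with the first file
    // and cannot be reused for the next one.
    const parser = parse({ columns: true }) // options are illustrative
    const rows = []
    fs.createReadStream(filepath)
      .pipe(parser)
      .on('data', (row) => rows.push(row))
      .on('error', reject)
      .on('end', () => resolve(rows))
  })
}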
Related
I am new and am trying to concat a folder of audio files and then stream the created file with ffmpeg in Node.js.
I thought I could call the function that creates the file with await, and then once it's done the code would continue, letting me use the created file. However, that's not what's happening. I am getting a "file undefined" error.
Main function
// CONCATS THE FILES
await concatAudio(supportedFileTypes.supportedAudioTypes, `${path}${config[typeKey].audio_directory}`);

// CALLS THE FILE CREATED FROM concatAudio
const randomSong = await getRandomFileWithExtensionFromPath(
  supportedFileTypes.supportedAudioTypes,
  `${path}${config[typeKey].audio_final}`
);
concatAudio function
var audioconcat = require('audioconcat');
const getRandomFileWithExtensionFromPath = require('./randomFile');
const find = require('find');

// Async function to concat the audio files found under a path
module.exports = async (extensions, path) => {
  // Find all of our files with the extensions
  let allFiles = [];
  extensions.forEach(extension => {
    allFiles = [...allFiles, ...find.fileSync(extension, path)];
  });

  // Note: audioconcat returns an event emitter, not a Promise,
  // so this await does not actually wait for ffmpeg to finish
  await audioconcat(allFiles)
    .concat('./live-stream-radio/final/all.mp3')
    .on('start', function(command) {
      console.log('ffmpeg process started:', command);
    })
    .on('error', function(err, stdout, stderr) {
      console.error('Error:', err);
      console.error('ffmpeg stderr:', stderr);
    })
    .on('end', function(output) {
      console.error('Audio created in:', output);
    });

  // Return a random file
  // return '/Users/Semmes/Downloads/live-stream-radio-ffmpeg-builds/live-stream-radio/final/all.mp3';
};
I solved it by wrapping the call in a Promise. I've adapted my solution to your script (I haven't tested this exact code, but it's almost the same). I hope this helps someone:
var audioconcat = require('audioconcat');
const getRandomFileWithExtensionFromPath = require('./randomFile');
const find = require('find');

// Async function to concat the audio files found under a path
module.exports = async (extensions, path) => {
  // Find all of our files with the extensions
  let allFiles = [];
  extensions.forEach(extension => {
    allFiles = [...allFiles, ...find.fileSync(extension, path)];
  });

  const concatPromise = new Promise((resolve, reject) => {
    audioconcat(allFiles)
      .concat('./live-stream-radio/final/all.mp3')
      .on('start', function(command) {
        console.log('ffmpeg process started:', command);
      })
      .on('error', function(err, stdout, stderr) {
        console.error('Error:', err);
        console.error('ffmpeg stderr:', stderr);
        reject(err);
      })
      .on('end', function(output) {
        console.log('Audio created in:', output);
        resolve(output);
      });
  });

  // Return the output path; this resolves once ffmpeg finishes
  return concatPromise;
  // return '/Users/Semmes/Downloads/live-stream-radio-ffmpeg-builds/live-stream-radio/final/all.mp3';
};
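A sketch of how the promisified module might then be awaited from the main function (the require path and the arguments are illustrative, not from the original post):

const concatAudio = require('./concatAudio'); // hypothetical path to the module above

(async () => {
  const output = await concatAudio(['*.mp3'], './live-stream-radio/');
  console.log('Concatenated file ready at:', output);
})();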
So, I am new to async/await in Node.js and I could use some help figuring out this code.
I'm trying to get a file from an FTP server via the 'ftp' package, write the data into a local 'data.txt', and open that file later in the code. My problem is that I don't understand how to make sure the file is completely written to 'data.txt' before trying to open it with fs.readFileSync().
const fs = require('fs');
const ftpclient = require('ftp');
const ConfigFTP = require('./credentials.json');
const FtpClient = new ftpclient();

FtpClient.on('ready', async function() {
  await new Promise(resolve =>
    FtpClient.get('the ftp file directory', (err, stream) => {
      if (err) throw err;
      stream.once('close', () => { FtpClient.end(); });
      // Stream written to data.txt
      const Streampipe = stream.pipe(fs.createWriteStream('data.txt')).on('finish', resolve);
    })
  );
});

FtpClient.connect(ConfigFTP);

// Runs immediately, before the download above has finished
var Data = fs.readFileSync('data.txt', 'utf8');
I'm not sure exactly what you want to accomplish, but you can do something like one of these:
1)
const ConfigFTP = require('./credentials.json')
const FtpClient = new ftpclient()

let writeStream = fs.createWriteStream('data.txt')

FtpClient.on('ready', async function () {
  FtpClient.get('the ftp file directory', (err, stream) => {
    if (err) throw err
    stream.once('close', () => { FtpClient.end() })
    // Stream written to data.txt
    const Streampipe = stream.pipe(writeStream)
  })
})

FtpClient.connect(ConfigFTP)

writeStream.on('finish', () => {
  var Data = fs.readFileSync('data.txt', 'utf8')
})
2)
const ConfigFTP = require('./credentials.json')
const FtpClient = new ftpclient()

FtpClient.on('ready', async function() {
  await new Promise(resolve =>
    FtpClient.get('the ftp file directory', (err, stream) => {
      if (err) throw err
      stream.once('close', () => { FtpClient.end() })
      // Stream written to data.txt
      const Streampipe = stream.pipe(fs.createWriteStream('data.txt')).on('finish', resolve)
    })
  )
  var Data = fs.readFileSync('data.txt', 'utf8')
})

FtpClient.connect(ConfigFTP)
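On Node 16+, the same guarantee can also come from the built-in stream/promises module, whose pipeline() resolves once the destination stream has finished; a sketch along the same lines, reusing FtpClient and ConfigFTP from above:

const fs = require('fs')
const { pipeline } = require('stream/promises') // built in since Node 16

FtpClient.on('ready', () => {
  FtpClient.get('the ftp file directory', async (err, stream) => {
    if (err) throw err
    // Resolves only after data.txt has been fully written
    await pipeline(stream, fs.createWriteStream('data.txt'))
    FtpClient.end()
    var Data = fs.readFileSync('data.txt', 'utf8')
  })
})

FtpClient.connect(ConfigFTP)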
I've written a script that recursively and asynchronously modifies js files in a directory. It's made up of a search(dirname) function which searches a directory for js files, and a modify(filename) function which does the modifying.
const fs = require('fs');
const path = require('path');
const args = process.argv.slice(2); // e.g. the directory to search

let totalFilesSearched = 0;

const search = (dir) => {
  fs.readdir(dir, (err, list) => {
    if (err) return;
    list.forEach((filename) => {
      const filepath = path.join(dir, filename);
      if (filename.endsWith('.js')) {
        modify(filepath);
      } else if (fs.lstatSync(filepath).isDirectory()) {
        search(filepath);
      }
    });
  });
};

const modify = (filename) => {
  fs.readFile(filename, 'utf8', (err, data) => {
    if (err) console.log(err);
    // ... my modification code ...
    totalFilesSearched++;
  });
};

search(args[0]);
console.log(`Total files searched: ${totalFilesSearched}`);
I want to print out totalFilesSearched at the end of my script, but because my code is asynchronous, it just prints "Total files searched: 0" right away.
Does anyone know how I'd wait until the script is about to end to print this out? I'm having trouble because both my search() and modify() functions are asynchronous.
Use Promises instead, and then call console.log when everything is resolved. Use promisify to turn the callbacks into promises:
const fs = require('fs');
const path = require('path');
const { promisify } = require('util');

const readFile = promisify(fs.readFile);
const readDir = promisify(fs.readdir);

const search = (dir) => (
  readDir(dir).then((list) => (
    Promise.all(list.map((filename) => {
      const filepath = path.join(dir, filename);
      if (filename.endsWith('.js')) {
        return modify(filepath); // recursively return the promise
      } else if (fs.lstatSync(filepath).isDirectory()) {
        return search(filepath); // recursively return the promise
      }
    }))
  ))
  .catch(err => void 0) // ignore unreadable directories
);

const modify = (filename) => (
  readFile(filename, 'utf8')
    .then((data) => {
      // other code
      totalFilesSearched++;
    }).catch(err => console.log(err))
);

search(args[0])
  .then(() => {
    console.log(`Total files searched: ${totalFilesSearched}`);
  });
Self answer:
Just use process.on('exit', callback_function_to_execute_at_end)
It's built into Node; your callback will get executed right before the process exits.
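A minimal sketch of that approach, using the counter from the question:

process.on('exit', () => {
  // Runs synchronously just before exit, after all pending I/O callbacks have finished
  console.log(`Total files searched: ${totalFilesSearched}`);
});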
I am using Node.js file system to build an array of file paths. I would like to know when all files have been read, so I could work further with my array.
Sequence of events:
Go into a folder
Get a path of each file
Put each path into an array
Let me know once you're done
Code:
'use strict';

const fs = require('fs');

function readDirectory(path) {
  return new Promise((resolve, reject) => {
    const files = [];
    fs.readdir(path, (err, contents) => {
      if (err) {
        reject(err);
      }
      contents.forEach((file) => {
        const pathname = `${ path }/${ file }`;
        getFilesFromPath(pathname).then(() => {
          console.log('pathname', pathname);
          files.push(pathname);
        });
        resolve(files);
      });
    });
  });
}

function getFilesFromPath(path) {
  return new Promise((resolve, reject) => {
    const stat = fs.statSync(path);
    if (stat.isFile()) {
      fs.readFile(path, 'utf8', (err, data) => {
        if (err) {
          reject(err);
        } else {
          resolve(data);
        }
      });
    } else if (stat.isDirectory()) {
      readDirectory(path);
    }
  });
}

getFilesFromPath('./dist');
It would be great to glue this together with something like:

Promise.all(files).then(() => {
  // do stuff
});
Your suggestion pretty much works - did you try it? Here's a typical way of doing it:
getFilesFromPath(path).then(files => {
  const filePromises = files.map(readFile);
  return Promise.all(filePromises);
}).then(fileContentsArray => {
  // do stuff - the array will contain the contents of each file
});
You'll have to write the readFile() function yourself, but it looks like you've got that covered.
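For reference, a minimal readFile() of the kind that answer assumes, wrapping the callback-style fs.readFile in a Promise:

const fs = require('fs');

const readFile = (path) =>
  new Promise((resolve, reject) => {
    fs.readFile(path, 'utf8', (err, data) => {
      if (err) reject(err);
      else resolve(data);
    });
  });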
I'm building a server in Node that will search a folder to see if an XML file exists (glob), and if it does, read the file in (fs) as a JSON object (xml2js) and eventually store it in a database somewhere. I want to get the results OUT of the parser and into another variable so I can do other things with the data. From what I can tell, something is running asynchronously, but I can't figure out how to wait until it's finished before moving on.
I'm separating my function out into a controller, away from app.js:
app.controller.js
const fs = require('fs-extra');
const glob = require('glob');
const xml2js = require('xml2js');

exports.requests = {};

exports.checkFileDrop = async () => {
  console.log('Checking for xml in filedrop...');
  // this is the only place await works...
  await glob('./filedrop/ALLREQUESTS-*.xml', (err, files) => {
    var parser = new xml2js.Parser();
    // this is looking for a specific file now, which I'll address later once I can figure out this issue
    fs.readFile('./filedrop/ALLREQUESTS-20170707.xml', 'utf16le', function (err, data) {
      if (err) {
        console.log('ERROR: ', err);
      } else {
        parser.parseString(data, (err, result) => {
          if (err) {
            console.log('ERROR: ', err);
          } else {
            console.log('data found');
            exports.requests = JSON.stringify(result.Records.Record);
            // data is outputted here correctly
            console.log(exports.requests);
            // this doesn't even seem to want to save to exports.requests anyways...
          }
        });
      }
    });
  });
}
app.js
const appController = require('./controllers/app.controller');

// check if there is a file in filedrop
appController.checkFileDrop();

// prints out an empty object
console.log(appController.requests);

// can't do anything if it doesn't exist yet
appController.saveToDB(appController.requests);
await will wait for a Promise value to resolve; otherwise it'll just wrap the value it is given in a promise and resolve that promise right away. In your example,
await glob('./filedrop/ALLREQUESTS-*.xml', (err, files) => {
the call to glob does not return a Promise, so the await is essentially useless. You need to create the promise yourself:
exports.checkFileDrop = async () => {
  console.log('Checking for xml in filedrop...');
  const files = await new Promise((resolve, reject) => glob('./filedrop/ALLREQUESTS-*.xml', (err, files) => {
    if (err) reject(err);
    else resolve(files);
  }));
  const parser = new xml2js.Parser();
  const data = await new Promise((resolve, reject) => fs.readFile('./filedrop/ALLREQUESTS-20170707.xml', 'utf16le', function (err, data) {
    if (err) reject(err);
    else resolve(data);
  }));
  const result = await new Promise((resolve, reject) => parser.parseString(data, (err, result) => {
    if (err) reject(err);
    else resolve(result);
  }));
  console.log('data found');
  const requests = JSON.stringify(result.Records.Record);
  console.log(requests);
}
Note that now this function will reject the promise it returns instead of force-logging the error.
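The call site in app.js can then wait on that promise before touching the data; a sketch, assuming checkFileDrop is tweaked to return the requests string at the end:

appController.checkFileDrop()
  .then((requests) => {
    // safe to use now; saveToDB is the question's own function
    appController.saveToDB(requests);
  })
  .catch((err) => console.log('ERROR: ', err));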
You can also condense this down with a helper. Node 8 for instance includes util.promisify to make code like this easier to write, e.g.
const util = require('util');

exports.checkFileDrop = async () => {
  console.log('Checking for xml in filedrop...');
  const files = await util.promisify(glob)('./filedrop/ALLREQUESTS-*.xml');
  const parser = new xml2js.Parser();
  const data = await util.promisify(fs.readFile)('./filedrop/ALLREQUESTS-20170707.xml', 'utf16le');
  const result = await util.promisify(parser.parseString.bind(parser))(data);
  console.log('data found');
  const requests = JSON.stringify(result.Records.Record);
  console.log(requests);
}
You can use async/await:
import fs from 'fs';
import { promisify } from 'util';
import xml2js from 'xml2js';

const xmlToJson = async filePath => {
  const parser = new xml2js.Parser();
  try {
    const data = await fs.promises.readFile(filePath, 'utf8');
    const result = await promisify(parser.parseString.bind(parser))(data);
    const requests = JSON.stringify(result.merchandiser.product);
    return requests;
  } catch (err) {
    console.log(err);
  }
};
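A hypothetical call, reusing the file path from earlier in the thread (note the result shape, result.merchandiser.product, is specific to that answerer's XML):

xmlToJson('./filedrop/ALLREQUESTS-20170707.xml')
  .then(requests => console.log(requests));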