nodejs - streaming csv to string variable - javascript

I have code that accepts a list of nested objects, each of which should be converted to a log row.
The code loops over each object, then over each of its properties (there are hundreds of properties), and collects all the information for a row, as a map from property name to value, in a variable called returnVar.
We use the 'fast-csv' library with a write stream named csvStream, piped into a stream created with fs.createWriteStream.
Finally, we loop over each object and write it with csvStream.write(), which puts the property names in the first line of the file and the logs (in the same order) in the following lines.
I need to change the code so that instead of piping to a file stream, it writes the output to a string variable.
This is the code:
let Promise = require('bluebird');
let csv = require('fast-csv');
let fs = Promise.promisifyAll(require('fs'));
...
return new Promise(function (resolve, reject) {
  var csvStream = csv.createWriteStream({ headers: propNames })
    .transform(function (item) { // every item is a nested object that contains data for a log line
      var returnVar = {}; // every returnVar represents a map of property name to value that will be transformed into a log line
      for (var prop in item) {
        if (item.hasOwnProperty(prop)) {
          if (propNames.indexOf(prop) >= 0) {
            if (typeof item[prop] === 'object') {
              returnVar[prop] = JSON.stringify(item[prop]);
            }
            else {
              returnVar[prop] = item[prop];
            }
          }
          // the object might be a complex item that contains some properties that we want to export...
          else if (typeof item[prop] === 'object') {
            var nestedItem = item[prop];
            for (var nestedProp in nestedItem) {
              if (propNames.indexOf(prop + "_" + nestedProp) >= 0) {
                returnVar[prop + "_" + nestedProp] = nestedItem[nestedProp];
              }
            }
          }
        }
      }
      return returnVar; // return log line
    });
  // create file path
  var fileId = "Report_" + cryptoService.generateRandomPassword(16) + ".csv";
  var filePath = tempPath + fileId;
  getOrCreateTempDirectory().then(function () {
    var writableStream = fs.createWriteStream(filePath);
    writableStream.on("finish", function () {
      resolve({
        fileId: fileId
      });
    });
    csvStream.pipe(writableStream);
    _.each(results.records, function (result) {
      // write line to file
      csvStream.write(result._source);
    });
    csvStream.end();
  });
});

https://c2fo.io/fast-csv/docs/formatting/methods#writetobuffer
https://c2fo.io/fast-csv/docs/formatting/methods#writetostring
Change
csvStream.write(result._source);
to the module-level writeToString, which (per the docs linked above) takes an array of rows and returns a promise:
csv.writeToString([result._source]).then(data => console.log(data));
Promise.all(_.map(results.records, result => csv.writeToString([result._source])))
  .then(rows => console.log(rows))
// rows should be an array of strings representing all the results
You can also use async/await
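Alternatively, if you want to keep the existing transform and headers setup, csvStream is itself a readable stream, so you can collect its output into a string instead of piping it to a file. A minimal sketch under that assumption:
return new Promise(function (resolve, reject) {
  var csvString = '';
  csvStream
    .on('data', function (chunk) {
      // each chunk is a formatted piece of CSV output; the header line comes first
      csvString += chunk.toString();
    })
    .on('error', reject)
    .on('end', function () {
      resolve(csvString); // the whole CSV as one string
    });
  _.each(results.records, function (result) {
    csvStream.write(result._source);
  });
  csvStream.end();
});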

Related

How to return getTexts for array of elements with isExisting

I am aiming to return a set of texts from a single locator that matches three elements; they are stored in an array of elements named selector. Before returning the texts, I want them to pass an isExisting() check, but it throws "TypeError: selector.isExisting is not a function". If I remove the isExisting part of the code, it works fine and returns the three text values. My code is:
async function getTreeTexts(browser, sidebar, parentClass, treeClass, spanClass = 'caption') {
  //await browser.ideOpenSidebar(sidebar)
  var cl = c => c ? '.' + c : '';
  var selector = await browser.$$(`${cl(parentClass)} .ace_tree${cl(treeClass)} .tree-row span${cl(spanClass)}`);
  await browser.ideOpenSidebar(sidebar)
  await selector.isExisting().then(async function (exists) {
    if (exists) {
      // Depending on the number of matches, items can be a string or an array of strings. Coerce to array.
      let configs = [];
      for (let element of selector) {
        try {
          let text = await element.getText();
          configs.push(text);
        } catch (err) {
          configs.push('[deleted]');
        }
      }
      return configs;
    } else {
      return [];
    }
  });
}
Can someone please assist me here if I am doing something wrong? (My guess is that isExisting() is not handling all three elements at the same time.)
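A minimal sketch of one way around this, assuming a WebdriverIO-style API where $$ resolves to an array of elements (the array itself has no isExisting method, though each individual element does). Checking the array's length replaces the isExisting() call, and the texts are returned from the function itself rather than from inside a .then():
async function getTreeTexts(browser, sidebar, parentClass, treeClass, spanClass = 'caption') {
  const cl = c => (c ? '.' + c : '');
  const elements = await browser.$$(`${cl(parentClass)} .ace_tree${cl(treeClass)} .tree-row span${cl(spanClass)}`);
  await browser.ideOpenSidebar(sidebar);
  if (elements.length === 0) {
    return []; // nothing matched the locator
  }
  const texts = [];
  for (const element of elements) {
    try {
      texts.push(await element.getText());
    } catch (err) {
      texts.push('[deleted]');
    }
  }
  return texts;
}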

Wait for all Firebase data query requests before executing code

I am trying to fetch data from different collections in my Cloud Firestore database before I process them and apply them to a batch. I created two async functions: one to capture the data and another to execute certain code only after all data is collected, because I didn't want the code executing and creating errors before the data is fetched. But when I try to access matchesObject after the data-collection function finishes, it keeps saying "cannot access a property matchStatus of undefined". I thought I took care of that with async and await? Could anyone shed some light as to why it is undefined?
axios.request(options).then(function(response) {
  console.log('Total matches count :' + response.data.matches.length);
  const data = response.data;
  var matchesSnapshot;
  var marketsSnapshot;
  var tradesSnapshot;
  var betsSnapshot;
  matchesObject = {};
  marketsObject = {};
  tradesObject = {};
  betsObject = {};
  start();

  async function checkDatabase() {
    matchesSnapshot = await db.collection('matches').get();
    matchesSnapshot.forEach(doc => {
      matchesObject[doc.id] = doc.data();
      console.log('matches object: ' + doc.id.toString())
    });
    marketsSnapshot = await db.collection('markets').get();
    marketsSnapshot.forEach(doc2 => {
      marketsObject[doc2.id] = doc2.data();
      console.log('markets object: ' + doc2.id.toString())
    });
    tradesSnapshot = await db.collection('trades').get();
    tradesSnapshot.forEach(doc3 => {
      tradesObject[doc3.id] = doc3.data();
      console.log('trades object: ' + doc3.id.toString())
    });
    betsSnapshot = await db.collection('bets').get();
    betsSnapshot.forEach(doc4 => {
      betsObject[doc4.id] = doc4.data();
      console.log('bets object: ' + doc4.id.toString())
    });
  }

  async function start() {
    await checkDatabase();
    // this is the part which is undefined, it keeps saying it cant access property matchStatus of undefined
    console.log('here is matches object ' + matchesObject['302283']['matchStatus']);
    if (Object.keys(matchesObject).length != 0) {
      for (let bets of Object.keys(betsObject)) {
        if (matchesObject[betsObject[bets]['tradeMatchId']]['matchStatus'] == 'IN_PLAY' && betsObject[bets]['matched'] == false) {
          var sfRef = db.collection('users').doc(betsObject[bets]['user']);
          batch11.set(sfRef, {
            accountBalance: admin.firestore.FieldValue + parseFloat(betsObject[bets]['stake']),
          }, {
            merge: true
          });
          var sfRef = db.collection('bets').doc(bets);
          batch12.set(sfRef, {
            tradeCancelled: true,
          }, {
            merge: true
          });
        }
      }
    }
  }
});
There are too many smaller issues in the current code to try to debug them one-by-one, so this refactor introduces various tests against your data. It currently won't make any changes to your database and is meant to be a replacement for your start() function.
One of the main differences against your current code is that it doesn't unnecessarily download 4 collections worth of documents (two of them aren't even used in the code you've included).
Steps
First, it will get all the bet documents that have matched == false. From these documents, it will check them for data errors and report any to the console. For each valid bet document, the ID of its linked match document will be grabbed so we can then fetch only the match documents we actually need. Then we queue up the changes to the user's balance and the bet's document. Finally, we report the number of queued changes and commit them (once you uncomment the line).
Code
Note: fetchDocumentsById() is defined in this gist. It's a helper function that allows someCollectionRef.where(FieldPath.documentId(), 'in', arrayOfIds) to take more than 10 IDs at once.
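The gist itself isn't reproduced here, but a minimal sketch of what such a helper could look like (an assumption, not the gist's actual code), given Firestore's 10-value limit on 'in' queries:
// hypothetical helper: chunk the ID list into groups of 10, since Firestore
// 'in' queries accept at most 10 values, then run one query per chunk
async function fetchDocumentsById(collectionRef, idList, forEachDoc) {
  const chunks = [];
  for (let i = 0; i < idList.length; i += 10) {
    chunks.push(idList.slice(i, i + 10));
  }
  await Promise.all(chunks.map(async (chunk) => {
    const snapshot = await collectionRef
      .where(admin.firestore.FieldPath.documentId(), 'in', chunk)
      .get();
    snapshot.forEach(forEachDoc);
  }));
}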
async function applyBalanceChanges() {
  const betsCollectionRef = db.collection('bets');
  const matchesCollectionRef = db.collection('matches');
  const usersCollectionRef = db.collection('users');

  const betDataMap = {}; // Record<string, BetData>
  await betsCollectionRef
    .where('matched', '==', false)
    .get()
    .then((betsSnapshot) => {
      betsSnapshot.forEach(betDoc => {
        betDataMap[betDoc.id] = betDoc.data();
      });
    });

  const matchDataMap = {}; // Record<string, MatchData | undefined>

  // betIdList contains all IDs that will be processed
  const betIdList = Object.keys(betDataMap).filter(betId => {
    const betData = betDataMap[betId];
    if (!betData) {
      console.log(`WARN: Skipped Bet #${betId} because it was falsy (actual value: ${betData})`);
      return false;
    }
    const matchId = betData.tradeMatchId;
    if (!matchId) {
      console.log(`WARN: Skipped Bet #${betId} because it had a falsy match ID (actual value: ${matchId})`);
      return false;
    }
    if (!betData.user) {
      console.log(`WARN: Skipped Bet #${betId} because it had a falsy user ID (actual value: ${betData.user})`);
      return false;
    }
    const stakeAsNumber = Number(betData.stake); // not using parseFloat as it's too lax
    if (isNaN(stakeAsNumber)) {
      console.log(`WARN: Skipped Bet #${betId} because it had an invalid stake value (original NaN value: ${betData.stake})`);
      return false;
    }
    matchDataMap[matchId] = undefined; // using undefined because it's the result of `doc.data()` when the document doesn't exist
    return true;
  });

  await fetchDocumentsById(
    matchesCollectionRef,
    Object.keys(matchDataMap),
    (matchDoc) => matchDataMap[matchDoc.id] = matchDoc.data()
  );

  const batch = db.batch();
  let queuedUpdates = 0;
  betIdList.forEach(betId => {
    const betData = betDataMap[betId];
    const matchData = matchDataMap[betData.tradeMatchId];
    if (matchData === undefined) {
      console.log(`WARN: Skipped /bets/${betId}, because its linked match doesn't exist!`);
      return; // `continue` is not valid inside a forEach callback
    }
    if (matchData.matchStatus !== 'IN_PLAY') {
      console.log(`INFO: Skipped /bets/${betId}, because its linked match status is not "IN_PLAY" (actual value: ${matchData.matchStatus})`);
      return;
    }
    const betRef = betsCollectionRef.doc(betId);
    const betUserRef = usersCollectionRef.doc(betData.user);
    batch.update(betUserRef, { accountBalance: admin.firestore.FieldValue.increment(Number(betData.stake)) });
    batch.update(betRef, { tradeCancelled: true });
    queuedUpdates += 2; // for logging
  });

  console.log(`INFO: Batch currently has ${queuedUpdates} queued`);

  // only uncomment when you are ready to make changes
  // batch.commit();
}
Usage:
axios.request(options)
  .then(function(response) {
    const data = response.data;
    console.log('INFO: Total matches count from API: ' + data.matches.length);
    return applyBalanceChanges();
  });

Return value from async / await try..catch

I have a test folder with files
file
file (1)
file (2)
If the file exists, I add a suffix to the new filename to prevent overwriting the file. For example:
if file exists new name should be file (1)
if file (1) exists new name should be file (2)
if file (2) exists new name should be file (3)
and so on.
The following function works fine, except that the value is not returned, so I cannot assign it later.
async function dest_exists_new_name(file) {
  const access = fs.promises.access
  try {
    await access(file, fs.F_OK)
    // file exists - generate new name
    const info = path.parse(file)
    const dir = info.dir
    let name = info.name
    const ext = info.ext
    // generate suffix
    let suffix = ' (1)'
    const suffix_regex = / \([0-9]+\)$/
    if (suffix_regex.test(name)) { // if suffix exists -> increment it
      const num = name.split(' ').reverse()[0].replace(/[()]/g,'')
      const next_num = parseInt(num) + 1
      suffix = ' (' + next_num + ')'
      name = name.replace(suffix_regex, '') // remove old suffix
    }
    // generate suffix end
    const new_name = path.join(dir, name + suffix + ext)
    // recurse until dest not exists
    await dest_exists_new_name(new_name)
  } catch {
    // file not exist - return its name
    // console.log works OK
    console.log('new name ->', file)
    // return doesn't work - returns undefined if the previous name exists, but works ok if the name doesn't exists
    return file
  }
}
await dest_exists_new_name('/path/file')
new name -> /path/file (3) // console.log - works OK
undefined // returns undefined, if file previously exists
The question is how can I correctly return the new file name value?
If there are any culprits in such a solution, like
accidental file rewriting
infinite recursion
other issues
I will be grateful for hints on how to improve the function.
Your function will return file, but since it is an async function, you need to await its return value, and you cannot do so outside of an async scope. Thus, if you just console.log() its "instantaneous" value, it will indeed show a pending promise, as the return value has not been resolved yet. You may retrieve the correct return value by wrapping the call in an async scope, like this:
let a = async () => {
  console.log(await dest_exists_new_name('/path/file'))
}
a();
This will output:
new name -> /path/file
/path/file //here's your file
Now, by adding return await dest_exists_new_name(new_name) you should be able to achieve what you want and both console.log() and return the new, non-existent, file name. Here's a complete, reproducible example:
const fs = require('fs');
const path = require('path');

async function dest_exists_new_name(file) {
  const access = fs.promises.access
  try {
    await access(file, fs.F_OK)
    const info = path.parse(file)
    const dir = info.dir
    let name = info.name
    const ext = info.ext
    let suffix = ' (1)'
    const suffix_regex = / \([0-9]+\)$/
    if (suffix_regex.test(name)) {
      const num = name.split(' ').reverse()[0].replace(/[()]/g, '')
      const next_num = parseInt(num) + 1
      suffix = ' (' + next_num + ')'
      name = name.replace(suffix_regex, '')
    }
    const new_name = path.join(dir, name + suffix + ext)
    return await dest_exists_new_name(new_name)
  } catch {
    console.log('new name ->', file)
    return file
  }
}

// Here, make sure that the path to "file" is correct
let a = async () => console.log(await dest_exists_new_name(path.join(__dirname, './file')));
a();
Output, having up to file (2) in the same folder:
new name -> /path/to/file (3)
/path/to/file (3)
Check your try/catch and how you are receiving your variable.
async function dest_exists_new_name(file) {
  try {
    // ...
    return await dest_exists_new_name(new_name); // return the recursion result instead of dropping it
  } catch (err) {
    return 'file not found';
  }
}
// usage
let res = await dest_exists_new_name(fileArg);
First of all, you should return the awaited result of the recursive call, since it's an async function:
// recurse until dest does not exist
return await dest_exists_new_name(new_name)
About recursion: IMHO, it's always better to use a loop (if it doesn't make the code too complicated).
Mixing async/await and promises is not very good; ideally you should stick to one style.
I prefer to use destructuring, lambda functions, and other modern features.
So, my variant with async/await and without recursion:
const fs = require('fs');
const path = require('path');

// better to create these outside of the function, since they don't depend on context
const suffix_regex = / \([0-9]+\)$/
const defaultSuffix = ' (1)'

const access = (...params) => new Promise((resolve) => fs.access(...params, (err) => resolve(!err)))

const generate_new_suffix = ({ dir, name, ext }) => {
  if (suffix_regex.test(name)) { // if suffix exists -> increment it
    const num = name.split(' ').reverse()[0].replace(/[()]/g, '')
    const suffix = ` (${+num + 1})` // keep the leading space so suffix_regex still matches next time
    const cleanName = name.replace(suffix_regex, '') // remove old suffix
    return path.join(dir, cleanName + suffix + ext)
  }
  return path.join(dir, name + defaultSuffix + ext)
}

const dest_exists_new_name = async (file) => {
  let newFileName = file
  while (await access(newFileName, fs.F_OK)) {
    console.log(newFileName)
    const info = path.parse(newFileName)
    newFileName = generate_new_suffix(info)
  }
  console.log('new name ->', newFileName)
  return newFileName
};

(async () => {
  console.log(await dest_exists_new_name(path.join(__dirname, 'file')))
})();
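One culprit worth noting for the "accidental file rewriting" concern from the question: between the access() check and the moment you actually write the file, another process could create a file with the same name. A hedged sketch of letting the OS do that check atomically instead (the 'wx' flag makes open fail if the path already exists; createExclusive is an illustrative name):
const fs = require('fs');

// returns true if we created the file, false if it already existed
async function createExclusive(filePath) {
  try {
    const handle = await fs.promises.open(filePath, 'wx');
    await handle.close();
    return true;
  } catch (err) {
    if (err.code === 'EEXIST') return false;
    throw err;
  }
}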

how to iterate json response to map json object

How can I iterate through the JSON response to find a value? I have this function getObjects to iterate through the response JSON obj; key is the swaggerDefSplit (api/user).
getObjects(obj, key) {
  var objects = [];
  for (var i in obj) {
    if (obj.hasOwnProperty(i)) {
      var val = obj[i];
      objects.push(val);
    }
  }
  return objects;
}
getSwagger() {
  this._searchService.getSwagger()
    .subscribe(data => {
      // data = json response
      console.log(data.paths);
      let swaggerFullUrl = (<any>window).parent.swaggerURL;
      let hashSplit = swaggerFullUrl.split('#');
      let swaggerDefSplit = hashSplit[1].split('/');
      // debugger;
      var exist = this.getObjects(data, "/api/" + swaggerDefSplit[1]);
      console.log(exist[4]);
    },
    err => {
      console.log('err', err);
    });
}
This console.log(exist[4]); gives me every api/path, but what I expect is for it to give me only the api/path matching my swaggerSplit, say (user) for example, instead of every path.
The JSON response I get from the console.log:
/api/User:{} // map this instead of everything
/api/Items:{}
/api/Stocks:{}
Hope I'm making sense at all.
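One thing to note: getObjects() pushes every value and never uses its key parameter, which is why every path comes back. A minimal sketch that filters by the key instead, assuming the paths live under data.paths as the logged output suggests:
getObjects(obj, key) {
  var objects = [];
  for (var i in obj) {
    // only keep the entry whose key matches the requested path
    if (obj.hasOwnProperty(i) && i === key) {
      objects.push(obj[i]);
    }
  }
  return objects;
}
// usage: pass the paths map rather than the whole response
var exist = this.getObjects(data.paths, "/api/" + swaggerDefSplit[1]);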

Convert a text from text file to array with fs [node js]

I have a txt file that contains:
{"date":"2013/06/26","statement":"insert","nombre":1}
{"date":"2013/06/26","statement":"insert","nombre":1}
{"date":"2013/06/26","statement":"select","nombre":4}
How can I convert the contents of the text file into an array like this:
statement = [
  {"date":"2013/06/26","statement":"insert","nombre":1},
  {"date":"2013/06/26","statement":"insert","nombre":1},
  {"date":"2013/06/26","statement":"select","nombre":4},
];
I use the fs module of Node.js. Thanks.
Sorry, I will explain in more detail:
I have an array:
st = [
  {"date":"2013/06/26","statement":"insert","nombre":1},
  {"date":"2013/06/26","statement":"insert","nombre":5},
  {"date":"2013/06/26","statement":"select","nombre":4},
];
If I use this code:
var arr = new LINQ(st)
  .OrderBy(function(x) { return x.nombre; })
  .Select(function(x) { return x.statement; })
  .ToArray();
I get the result I want:
insert select insert
But the problem is that my data is in a text file.
Any suggestions? Thanks again.
There is no reason not to write the file parser yourself. This will work on a file of any size:
var fs = require('fs');
var fileStream = fs.createReadStream('file.txt');
var data = "";

fileStream.on('readable', function() {
  // this function reads chunks of data and emits a newLine event when \n is found
  var chunk;
  while ((chunk = fileStream.read()) !== null) { // read() returns null once the buffer is drained
    data += chunk;
  }
  while (data.indexOf('\n') >= 0) {
    fileStream.emit('newLine', data.substring(0, data.indexOf('\n')));
    data = data.substring(data.indexOf('\n') + 1);
  }
});

fileStream.on('end', function() {
  // this function sends the last chunk of data to the newLine event and tells it
  // that the file has ended
  fileStream.emit('newLine', data, true);
});

var statement = [];
fileStream.on('newLine', function(line_of_text, end_of_file) {
  // this is the code where you handle each line
  // line_of_text = string which contains one line
  // end_of_file = true if the end of file has been reached
  if (line_of_text) { // guard against an empty trailing line
    statement.push(JSON.parse(line_of_text));
  }
  if (end_of_file) {
    console.dir(statement);
    // here you have your statement object ready
  }
});
If it's a small file, you might get away with something like this:
// specifying the encoding means you don't have to do `.toString()`
var arrayOfThings = fs.readFileSync("./file", "utf8").trim().split(/[\r\n]+/g).map(function(line) {
// this try/catch will make it so we just return null
// for any lines that don't parse successfully, instead
// of throwing an error.
try {
return JSON.parse(line);
} catch (e) {
return null;
}
// this .filter() removes anything that didn't parse correctly
}).filter(function(object) {
return !!object;
});
If it's larger, you might want to consider reading it in line-by-line using any one of the many modules on npm for consuming lines from a stream.
Wanna see how to do it with streams? This isn't a practical example, but it's fun anyway!
var stream = require("stream"),
    fs = require("fs");

var LineReader = function LineReader(options) {
  options = options || {};
  options.objectMode = true;
  stream.Transform.call(this, options);
  this._buffer = "";
};
LineReader.prototype = Object.create(stream.Transform.prototype, {constructor: {value: LineReader}});

LineReader.prototype._transform = function _transform(input, encoding, done) {
  if (Buffer.isBuffer(input)) {
    input = input.toString("utf8");
  }
  this._buffer += input;
  var lines = this._buffer.split(/[\r\n]+/);
  this._buffer = lines.pop();
  for (var i = 0; i < lines.length; ++i) {
    this.push(lines[i]);
  }
  return done();
};

LineReader.prototype._flush = function _flush(done) {
  if (this._buffer.length) {
    this.push(this._buffer);
  }
  return done();
};

var JSONParser = function JSONParser(options) {
  options = options || {};
  options.objectMode = true;
  stream.Transform.call(this, options);
};
JSONParser.prototype = Object.create(stream.Transform.prototype, {constructor: {value: JSONParser}});

JSONParser.prototype._transform = function _transform(input, encoding, done) {
  try {
    input = JSON.parse(input);
  } catch (e) {
    return done(e);
  }
  this.push(input);
  return done();
};

var Collector = function Collector(options) {
  options = options || {};
  options.objectMode = true;
  stream.Transform.call(this, options);
  this._entries = [];
};
Collector.prototype = Object.create(stream.Transform.prototype, {constructor: {value: Collector}});

Collector.prototype._transform = function _transform(input, encoding, done) {
  this._entries.push(input);
  return done();
};

Collector.prototype._flush = function _flush(done) {
  this.push(this._entries);
  return done();
};

fs.createReadStream("./file").pipe(new LineReader()).pipe(new JSONParser()).pipe(new Collector()).on("readable", function() {
  var results = this.read();
  console.log(results);
});
fs.readFileSync("myfile.txt").toString().split(/[\r\n]/)
This gets your each line as a string
You can then use UnderscoreJS or your own for loop to apply the JSON.parse("your json string") method to each element of the array.
var arr = fs.readFileSync('mytxtfile', 'utf-8').split('\n')
I think this is the simplest way of creating an array from your text file
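For completeness, a sketch of the same idea using Node's built-in readline module, which handles the line splitting for you (the file name is illustrative):
const fs = require('fs');
const readline = require('readline');

async function readStatements(filePath) {
  const rl = readline.createInterface({
    input: fs.createReadStream(filePath),
    crlfDelay: Infinity // treat \r\n as a single line break
  });
  const statement = [];
  for await (const line of rl) {
    if (line.trim()) statement.push(JSON.parse(line));
  }
  return statement;
}

readStatements('myfile.txt').then(statement => console.dir(statement));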
