I want to save my JavaScript object to a JSON file. I already did like this
const superagent = require('superagent');
const cheerio = require('cheerio');
const fs = require('fs');
var object = new Object();
var toJson = { articles: [] };
var arr = [];
// I obtain the data prior to this forEach loop.
// The data loading code is omitted, because it is too long to fit here.
data.forEach((val, index) => {
const authorName = val.authorName;
const articleDate = val.articleDate;
const relateArticle = val.relateArticle;
const relateArticleURL = val.relateArticleURL;
object.url = arr[1][index];
object.title = arr[0][index];
object.date = articleDate[0];
object.author = authorName[0];
toJson.articles.push(object);
});
var saveJson = JSON.stringify(toJson)
fs.writeFile('solution.json', saveJson, 'utf8', (err) => {
if (err) {
console.log(err)
}
})
I would expect the result to look like so:
{
"articles": [
{
"url": "...",
"title": "...",
"author": "...",
"postingDate: "..."
}
]
}
but what I get instead looks like so:
{"articles":[{"url":"...","title":"...","author":"...","postingDate":"..."}]}
How do I save an object to a JSON file, but in the desired format? Any answer would be appreciated. Thank you in advance!
Try this
const superagent = require('superagent');
const cheerio = require('cheerio');
const fs = require('fs');
var object = new Object();
var toJson = { articles:[] };
var arr = [];
// above this for each, is how i get the data. i don't put those code because it's too long.
data.forEach((val, index)=>{
const authorName = val.authorName;
const articleDate = val.articleDate;
const relateArticle = val.relateArticle;
const relateArticleURL = val.relateArticleURL;
object.url = arr[1][index];
object.title = arr[0][index];
object.date = articleDate[0];
object.author = authorName[0];
toJson.articles.push(object);
});
var saveJson = JSON.stringify(toJson, null, 4)
fs.writeFile('solution.json', saveJson, 'utf8', (err)=>{
if(err){
console.log(err)
}
})
Related
I'm trying to get the amount received from the request by going to a list and then trying to export it to another file, but I'm having trouble doing this because I tried several ways to pass the list as return and it didn't work.
const moment = require("moment");
const sslChecker = require("ssl-checker");
const express = require("express");
//const dominios = require('./server');
var dominios = [
"api-prd.koerich.com.br",
"api-prd.reservapto.com.br",
"c.btg360.com.br",
"compassouol.com",
"gmfy.compasso.com.br",
"webqplus.cbn.com.bo",
"webqplus.cerchile.cl",
"webqplus.cervepar.com.py",
"webqplus.fnc.com.uy",
"webqplus.quilmes.com.ar",
"www.efurukawa.com",
];
var get_domain = dominios;
for (var i = 0; i < get_domain.length; i++) {
sslChecker(get_domain[i])
.then((certdata) => {
var startdate = new Date(certdata.validFrom);
var enddate = new Date(certdata.validTo);
var certstart = moment(startdate);
var certend = moment(enddate);
var ssldata = [
{
domain: get_domain,
issued: certstart.format("LLLL"),
expires: certend.format("LLLL"),
daysleft: certdata.daysRemaining,
},
];
var data = JSON.parse(certdata.daysRemaining);
var nomes = (certdata.validFor[0]);
var lista = [];
lista.push(data);
lista.push(nomes);
console.log(lista); //get this values
})
.catch((err) => {
console.log(err);
});
}
//module.exports = lista; //export here
console.log:
async function getList() {
mondongo = [];
var dominios = [
"api-prd.koerich.com.br",
"api-prd.reservapto.com.br",
"c.btg360.com.br",
"compassouol.com",
"gmfy.compasso.com.br",
"webqplus.cbn.com.bo",
"webqplus.cerchile.cl",
"webqplus.cervepar.com.py",
"webqplus.fnc.com.uy",
"webqplus.quilmes.com.ar",
"www.efurukawa.com",
]
try {
for (var i = 2; i < domains.length; i++) {
mondongo.push(
sslChecker(domains[i])
.then((certdata) => {
var startdate = new Date(certdata.validFrom);
var enddate = new Date(certdata.validTo);
var certstart = moment(startdate);
var certend = moment(enddate);
var ssldata = [
{
domain: domains,
issued: certstart.format("LLLL"),
expires: certend.format("LLLL"),
daysleft: certdata.daysRemaining,
},
];
var data = certdata.daysRemaining;
var nome = certdata.validFor[0];
var lista = [];
lista.push(nome);
lista.push(data);
return lista;
})
.catch((err) => {
//console.log(err);
})
);
}
}
catch (error) {
console.log(error);
}
var arroz = await Promise.all(mondongo);
return arroz;
}
return:
create a function and use the async await methods.
I have a cloud function that "Joins" data from a list of documents in a collection.
I then return the result as an array, but I want to return the documentId as well (doc.id) in the list that i return.
How can i do that?
const restData = [];
//const userId = ctx.auth.uid;
const userId = 'dHAP1CNN6LhJWddQoTqyIkqIjhB2'; // !!! TEST ONLY
const all = await db.collection(`/customers/${userId}/lunch_cards`).listDocuments().then((snapshot) => {
snapshot.forEach(doc => {
const nextData = db.collection(`/restaurants`).doc(doc.id).get();
const newData = {...nextData, documentId: doc.id}; <-- This does not work only documentId isout in newData
console.log(util.inspect(newData));
restData.push(nextData);
console.log(doc.id);
});
});
const snaps = await Promise.all(restData);
const responseArray = snaps.map((s) => {return s.data()});
return responseArray;
I solved it!
Solution:
Just adding a new string to the array :)
const responseArray = snaps.map((s) => {
const snapData = s.data();
if (snapData) {
snapData['id'] = s.id;
}
return snapData;
});
I was wondering, how can I save generated Excel file into blob storage when running this js code.please, do you have an idea how to preform something similar like fs.safeFileSync?
const fs = require('fs');
var json2xls = require('json2xls');
var json = {
foo: 'bar',
qux: 'moo',
poo: 123,
stux: "tester"
}
var pole = [];
pole.push(json);
pole.push(json);
pole.push(json);
pole.push(json);
var xls = json2xls(pole);
console.log("done, saved", xls);
fs.writeFileSync('data.xlsx', xls, 'binary'); //this saves file to disk, but I need to save it to a blob storage with the function below
context.bindings.outputBlob = ??????
If anyone is interested, I solved it like this:
const csval = require("csval");
var json2xls = require('json2xls');
var fs = require('fs');
const { BlobServiceClient } = require('#azure/storage-blob');
const AZURE_STORAGE_CONNECTION_STRING = process.env.storageCred;
module.exports = async function (context, myQueueItem) {
context.log('JavaScript queue trigger function processed work item', myQueueItem);
const blobServiceClient = BlobServiceClient.fromConnectionString(AZURE_STORAGE_CONNECTION_STRING);
var json = {
foo: 'bar',
qux: 'moo',
poo: 123,
stux: new Date()
}
var arry = [];
arry.push(json);
arry.push(json);
var xls = json2xls(arry);
const data = Buffer.from(xls,'binary')
const blobName = "excelfile.xls";
const containerClient = blobServiceClient.getContainerClient("incontainer");
const blockBlobClient = containerClient.getBlockBlobClient(blobName);
const uploadBlobResponse = await blockBlobClient.upload(data, data.length);
context.log("done")
};
I have this file in my node app that supposed to go fetch me some data about every league champion from their official website using cheerio and its going all great but when I add all the data to my array to then return it as json data the write function runs before the map finishes so I just creating a json file with an empty array in it:
const request = require('request');
const cheerio = require('cheerio');
const fs = require('fs');
const champions = fs.readFileSync('champions.json');
const championsObj = JSON.parse(champions);
let champsList = [];
championsObj.map(champ => {
request(champ.href, (err, res, html) => {
if (!err && res.statusCode == 200) {
const $ = cheerio.load(html);
const champName = $('.style__Title-sc-14gxj1e-3 span').text();
let skins = [];
const skinsList = $('.style__CarouselItemText-sc-1tlyqoa-16').each(
(i, el) => {
const skinName = $(el).text();
skins.push = skinName;
}
);
const champion = {
champName,
skins
};
console.log(champion);
champsList.push = champion;
}
});
});
const jsonContent = JSON.stringify(champsList);
fs.writeFile('champions2.json', jsonContent, 'utf8', function(err) {
if (err) {
console.log(err);
}
});
I'm not a node expert but I tried using Promise but it didn't work but I'm not sure maybe I used it wrong.
UPDATE #1: using axios
championsObj.map(async champ => {
const html = await axios.get(champ.href);
const $ = await cheerio.load(html);
const champName = $('.style__Title-sc-14gxj1e-3 span').text();
let skins = [];
const skinsList = $('.style__CarouselItemText-sc-1tlyqoa-16').each(
(i, el) => {
const skinName = $(el).text();
skins.push = skinName;
}
);
const champion = {
champName,
skins
};
console.log(champion);
champsList.push = champion;
});
you can use await Promise.all(<array>.map(async () => {...}). it does not require any additional dependencies. however you have no guarantees about the order of asynchronous iterations (starting all the iterations in the right order, but no guarantees about iterations' endings).
Your problem here is that Array#map doesn't wait for asynchronous functions such as the request calls to finish before moving on. I recommend p-map with got. To ensure perfect execution order, I also recommend reading and writing the file asynchronously.
const got = require('got');
const pMap = require('p-map');
const cheerio = require('cheerio');
const fs = require('fs').promises;
(async () => {
const champions = JSON.parse(await fs.readFile('champions.json', 'utf8'));
let champsList = await pMap(champions, async champ => {
const {
body
} = await got(champ.href)
const $ = cheerio.load(body);
const champName = $('.style__Title-sc-14gxj1e-3 span').text();
let skins = [];
$('.style__CarouselItemText-sc-1tlyqoa-16').each(
(_, el) => {
const skinName = $(el).text();
skins.push(skinName);
}
);
const champion = {
champName,
skins
};
console.log(champion);
return champion;
})
await fs.writeFile('champions2.json', JSON.stringify(champsList));
})();
I'm downloading posts from subreddit
const electron = require('electron');
const _ = require('lodash');
const getsub = (sub, epoch) => {
let res = {};
const sub_url = `https://api.pushshift.io/reddit/search/submission/?subreddit=${sub}&limit=1000&sort=desc&before=${epoch}`;
return new Promise((resolve, reject) => {
axios
.get(sub_url)
.then(response => {
let d = response.data.data;
let e = d[d.length - 1].created_utc;
res.data = d;
res.epoch = e;
return resolve(res);
})
.catch(error => {
return reject(error.message);
});
});
};
The function in question
async function getSubreddit(sub) {
const utc = new Date().toJSON().slice(0, 10).replace(/-/g, "/");
const endEpoch = new Date(utc).getTime() / 1000;;
var dataArray = [];
let epoch;
const resp = await getsub(sub, endEpoch);
const jsondat = resp.data;
epoch = resp.epoch;
var result = _.map(dataArray, function(o) {
return _.pick(o, ['title', 'url', 'subreddit', 'created_utc', 'domain', 'full_link', 'created_utc'])
})
console.log(jsondat.length);
// console.log({result,epoch});
// downloadStuff(sub,result)
}
while jsondat shows a length of 1000. But after I parse it with lodash, the result shows length as 0.
I'm confused because this code works in regular javascript page.
EDIT: This is a silly error on my part. Here is the corrected code
async function getSubreddit(sub) {
var utc = new Date().toJSON().slice(0, 10).replace(/-/g, "/");
var endEpoch = new Date(utc).getTime() / 1000;;
var dataArray = [];
const resp = await getsub(sub, endEpoch);
const jsondat = resp.data;
dataArray = jsondat.concat(dataArray);
epoch = resp.epoch;
var result = _.map(dataArray, function(o) {
return _.pick(o, ['title', 'author','url', 'subreddit', 'created_utc', 'domain', 'full_link', 'created_utc'])
})
console.log({result,epoch});
downloadStuff(sub,result)
}