Loop a JSON call using the last result value of the previous call - JavaScript

This call takes a starting name and a result limit; the maximum limit is 1000, so I have to call it repeatedly, passing in the last value of the previous call's result, to collect all the results.
steem.api.lookupAccounts(lowerBoundName, limit, function(err, result)
I tried using a Set to save the results, then getting the last value in the Set and recalling the API with it, but I kept hitting a wall.
Here are my attempts:
<script src="https://cdn.steemjs.com/lib/latest/steem.min.js"></script>
<script>
  steem.api.lookupAccounts('a', '1000', function(err, result) {
    //var last = result[999];
    //console.log(last);
    var mySet = new Set();
    mySet.add(result); // adds the whole 1000-element array as a single Set entry
    var last = mySet.slice(-1)[0]; // TypeError: Set has no slice() method
    //var ok = mySet.last(); // Set has no last() method either
    console.log(last);
    var i;
    for (i = 0; i < 5; i++) {
      // Set has no pop() method, and the loop would not wait for each call anyway
      steem.api.lookupAccounts(mySet.pop(), '1000', function(err, result) {
      });
    }
  });
  // mySet.forEach(function(value) {
  //   console.log(value);
  // });
</script>

let all = [];
let fetchAll = (start = "a") => {
  fetchPart(start)
    .then((result) => {
      all.push(result);
      if (result.length == 1000) {
        fetchAll(result[result.length - 1]);
      }
    });
}
let fetchPart = (start) => {
  return new Promise((ok) => {
    steem.api.lookupAccounts(start, '1000', function (err, result) {
      console.log(result);
      ok(result);
    });
  });
}
fetchAll();
This might do the trick for you. all will be an array containing all the sub-results (one array per page).
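One caveat: fetchAll as written gives the caller no way to know when the last page has arrived. Below is a minimal sketch of a variant that resolves with one flat array once pagination ends. It assumes the same steem.api.lookupAccounts callback API; the duplicate-boundary handling is my addition, on the assumption that the lower bound is inclusive (which the question implies, since each next call starts from the previous last name). Not tested against the live API.

function fetchAllAccounts(start = 'a') {
  return new Promise((resolve, reject) => {
    const names = [];
    function fetchPage(lowerBound) {
      steem.api.lookupAccounts(lowerBound, 1000, function (err, result) {
        if (err) return reject(err);
        // drop the boundary name if it repeats the previous page's last entry
        const fresh = names.length && result[0] === names[names.length - 1]
          ? result.slice(1)
          : result;
        names.push(...fresh);
        if (result.length === 1000) {
          fetchPage(result[result.length - 1]); // full page: ask for the next one
        } else {
          resolve(names); // a short page means we reached the end
        }
      });
    }
    fetchPage(start);
  });
}

fetchAllAccounts().then(all => console.log(all.length, 'accounts'));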

array return value is always empty

I'm trying to build a prototype right now, but I have a problem: my return value is always an empty array.
If I put everything into one function it works, but as soon as I split it into separate functions it stops working.
As I said, I always get an empty array back.
What am I doing wrong?
async function dataLength() {
  return new Promise((resolve, reject) => {
    pdo.query(`SELECT * FROM mainsites`, function(err, result) {
      let results = result;
      let resultsLength = results.length;
      let dataIndexMainId = [];
      for (let index = 0; index < resultsLength; index++) {
        dataIndexMainId[index] = results[index]["id"];
      }
      resolve(); // resolves with undefined; dataIndexMainId is thrown away
    });
  });
}
async function getSubSitesIndex(length) {
  let dataSitesIndex = [];
  for (let i = 0; i <= length; i++) {
    await new Promise((resolve) => {
      pdo.query("SELECT * FROM subsites WHERE main = ?", [i], function(err, result) {
        dataSitesIndex[i] = result;
        resolve();
      });
    });
  }
  let filterDataSitesIndex = dataSitesIndex.filter(String);
  console.log(filterDataSitesIndex);
  return filterDataSitesIndex;
}
async function getIndex(paramIndex) {
  let indexResult = await paramIndex;
  let indexArray = [];
  for (let indexRes of indexResult) {
    for (let res of indexRes) {
      indexArray.push(res);
    }
  }
  return indexArray;
}
If I execute the code like this:
getIndex(
  await getSubSitesIndex(
    await dataLength()
  )
);
In dataLength, your pdo.query call is not properly promisified: you call resolve() with no value, so the id array you build never leaves the function.
However, you shouldn't have to write 3 functions for this at all. Do not make multiple queries to your database. Use a single query that does a JOIN - much more efficient!
function getIndex() {
  return new Promise((resolve, reject) => {
    pdo.query(`
      SELECT mainsites.id AS main_id, subsites.id AS sub_id
      FROM mainsites
      JOIN subsites ON subsites.main = mainsites.id;
    `, [], (err, result) => {
      if (err) reject(err);
      else resolve(result);
    });
  });
}
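For completeness, here is a sketch of how the result might be consumed, assuming a mysql-style driver where each row comes back as a plain object; the grouping step is my addition, in case the old nested shape is still wanted:

getIndex()
  .then(rows => {
    // rows is a flat list like [{ main_id: 1, sub_id: 7 }, ...];
    // group the sub ids under their main id
    const grouped = {};
    for (const row of rows) {
      (grouped[row.main_id] = grouped[row.main_id] || []).push(row.sub_id);
    }
    console.log(grouped);
  })
  .catch(err => console.error(err));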

Why is my for loop not working how I expect it to? Run function twice - JavaScript

So guys, I've got a scraping function where I create an object of scraped data. The scraper code is:
const axios = require('axios');
const cheerio = require('cheerio');
const db = require('../config/db.config');
const Article = db.article;

const prices = new Array();
const ids = new Array();
const descs = new Array();
const links = new Array();

for (p = 1; p < 3; p++) {
  function again() {
    const url = `https://www.olx.ba/pretraga?vrsta=samoprodaja&kategorija=23&sort_order=desc&kanton=9&sacijenom=sacijenom&stranica=${p}`;
    axios
      .get(url)
      .then((response) => {
        let $ = cheerio.load(response.data);
        $('div[class="naslov"] > a').each((i, el) => {
          const id = $(el).attr('href'); // ID, description and link are in the same div class
          const desc = id;
          const link = id;
          descs.push(desc.substring(36)); // Retrieving description with substring and pushing into array
          ids.push(id.substring(27, 35)); // Retrieving id with substring and pushing into array
          links.push(link); // Retrieving link and pushing into array
          for (var i = 0; i < descs.length; i++) {
            descs[i] = descs[i].replace('/', '').replace('-', ' ');
          }
        });
        $('div[class="datum"] > span').each((i, el) => {
          $('span[class="prekrizenacijena"]').remove();
          const price = $(el).text();
          prices.push(price); // Retrieving price and pushing into array
        });
        for (var i = prices.length - 1; i >= 0; i--) {
          if (prices[i] === 'PO DOGOVORU') {
            prices.splice(i, 1);
          }
        }
        async function asy() {
          const sqm = new Array();
          for (k = 0; k < links.length; k++) {
            const res = await axios
              .get(`${links[k]}`)
              .then((result) => {
                let $ = cheerio.load(result.data);
                const pr = $('div[class="df2 "]').first().text();
                sqm.push(pr);
                for (var i = 0; i < sqm.length; i++) {
                  sqm[i] = sqm[i].replace('m2', '');
                }
              })
              .catch((err) => {
                // handle error
                console.log(err);
              });
          }
          const object = ids.map((element, index) => {
            const ppm2 =
              parseFloat(
                prices[index].replace(/\.| ?€$/g, '').replace(',', '.')
              ) / parseFloat(sqm[index]);
            const ppm2final = Math.round(ppm2);
            return {
              id: element,
              price: prices[index],
              descr: descs[index],
              link: links[index],
              sqm: sqm[index],
              ppm2: ppm2final + ' KM',
            };
          });
          console.log(object);
          console.log(Object.keys(object).length);
          /*const ins = await Article.bulkCreate(object)
            .then(console.log('Data added to DB'))
            .catch((err) => console.log(err));*/
        }
        asy();
      })
      .catch((e) => {
        console.log(e);
      });
  }
  again();
}
Now, when I delete the first for loop and the again() function, and instead of ${p} in the URL insert e.g. 1, 2, 3, etc., it works perfectly - sqm is fetched for the correct link.
Now the problem:
I want to run this URL multiple times, because ${p} is the page number in that URL. The first problem I get:
sqm isn't correct - the sqm data is scattered all over the object and doesn't match its link (it's correct when I don't use ${p}).
The first time I get sqm data (but not correct for that link); when the function runs a second time (for the second page, ${p} = 2), sqm isn't fetched at all (it throws NaN).
I also have console.log(Object.keys(object).length); where I expect 30 the first time and 60 after the second run (each page contains 30 articles), but I get 60, then 60 again.
I've tried many things: async functions, awaiting the axios call, etc., but nothing really works - sometimes I get only 30 articles, sometimes 60 but with incorrect values.
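No answer is shown here, but the symptoms point at the shared top-level arrays: every page run pushes into the same prices/ids/descs/links, and asy() races against the next page's scrape. A rough sketch of one way to restructure it, processing pages sequentially with per-page arrays (selectors and substring offsets copied from the question, price extraction omitted for brevity, untested against the live site):

const axios = require('axios');
const cheerio = require('cheerio');

async function scrapePage(p) {
  const url = `https://www.olx.ba/pretraga?vrsta=samoprodaja&kategorija=23&sort_order=desc&kanton=9&sacijenom=sacijenom&stranica=${p}`;
  const response = await axios.get(url);
  const $ = cheerio.load(response.data);
  const articles = [];
  $('div[class="naslov"] > a').each((i, el) => {
    const href = $(el).attr('href');
    articles.push({
      id: href.substring(27, 35),
      descr: href.substring(36).replace('/', '').replace('-', ' '),
      link: href,
    });
  });
  // fetch sqm for THIS page's articles only, one at a time
  for (const article of articles) {
    const detail = await axios.get(article.link);
    const $d = cheerio.load(detail.data);
    article.sqm = $d('div[class="df2 "]').first().text().replace('m2', '');
  }
  return articles;
}

(async () => {
  let all = [];
  for (let p = 1; p < 3; p++) {
    all = all.concat(await scrapePage(p)); // pages run strictly in order
  }
  console.log(all.length); // expect 60 for two pages of 30
})().catch(console.error);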

Trying to get the function to return the arr with the pushed data in it, but for some reason it's not returning it

I have required the SQL connection in my main index file, seeded the database, and checked that the SQL query returns results.
// require sql connection from main index
const connection = require('./index');

async function tow() {
  let arr = [];
  let one = await connection.query("Select name FROM department", (err, res) => {
    if (err) throw err;
    // console.log(res);
    console.log('1', arr);
    for (let i = 0; i < res.length; i++) {
      // pushing to the array here
      arr.push(res[i].name);
    }
    console.log('2', arr);
  });
  console.log('3', arr);
  return one;
}
// I want this function to return array with the pushed elements
tow();
// I want this function to return array with the pushed elements
tow();
You need to return arr. Also, you are mixing the promise style with the callback style of async code; you can use either one, but not a mix. Try something like this.
If connection.query returns a promise:
async function tow() {
  let arr = [];
  let one = await connection.query("Select name FROM department").then(res => {
    // console.log(res);
    console.log('1', arr);
    for (let i = 0; i < res.length; i++) {
      // pushing to the array here
      arr.push(res[i].name);
    }
    console.log('2', arr);
    return arr; // need to return arr
  },
  err => console.error(err));
  console.log('3', arr);
  return one;
}
tow().then(res => console.log('result', res));
In your case, where connection.query expects a callback function, you can convert the callback to a promise like this.
function tow() {
  return new Promise((resolve, reject) => {
    let arr = [];
    connection.query("Select name FROM department", (err, res) => {
      if (err) return reject(err); // pass the error along and stop here
      // console.log(res);
      console.log('1', arr);
      for (let i = 0; i < res.length; i++) {
        // pushing to the array here
        arr.push(res[i].name);
      }
      console.log('2', arr);
      resolve(arr);
    });
  });
}
// I want this function to return array with the pushed elements
tow().then(res => console.log('result', res));
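As an aside, Node's built-in util.promisify can do this wrapping for you whenever the callback follows the (err, result) convention. A sketch, assuming connection.query follows that convention:

const util = require('util');

// bind keeps `this` pointing at the connection object
const queryAsync = util.promisify(connection.query).bind(connection);

async function tow() {
  const res = await queryAsync("Select name FROM department");
  return res.map(row => row.name); // same result as pushing in a loop
}

tow().then(names => console.log('result', names));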

Node.js, wait for all Redis queries to complete before continuing with execution

I need to go through an array of values, look each one up in Redis (to see if it exists), and then continue. For example:
var to_check = [ 1, 2, 3 ]
var found_elements = []
for (var i = 0; i < to_check.length; i++) {
  redis.EXISTS('namespace:' + to_check[i], function(err, value) {
    if (!err && value) {
      found_elements.push(to_check[i])
    }
  })
}
console.log(found_elements.join(', '))
I need to get the last line executed after all callbacks sent to Redis have been executed. What would be the best way to approach this?
Use Promise to handle complex async operations. Parallel execution is one of them.
var to_check = [ 1, 2, 3 ];
var found_elements = [];
Promise.all(to_check.map(function(item){
  return new Promise(function(resolve, reject){
    redis.EXISTS('namespace:' + item, function(err, value) {
      if (err) {
        return reject(err);
      }
      if (value) {
        found_elements.push(item);
      }
      resolve();
    })
  });
})).then(function(){
  console.log('All operations are done');
}).catch(function(err){
  console.log(err);
});
I am sure there are other ways, but this should work (not tested):
var to_check = [ 1, 2, 3 ]
var found_elements = []
for (var i = 0; i < to_check.length; i++) {
  (function(i){
    redis.EXISTS('namespace:' + to_check[i], function(err, value) {
      if (!err && value) {
        found_elements.push(to_check[i])
      }
      // note: this fires when the callback for the LAST INDEX returns,
      // which is not guaranteed to be the last callback to complete
      if (i == (to_check.length - 1)) {
        console.log(found_elements.join(', '))
      }
    })
  })(i);
}
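Since Promise.all resolves with its results in input order, a tidier variant of the first answer resolves each check with a value and filters afterwards, instead of pushing into a shared array. A sketch under the same assumed redis callback API:

var to_check = [1, 2, 3];

Promise.all(to_check.map(function (item) {
  return new Promise(function (resolve, reject) {
    redis.EXISTS('namespace:' + item, function (err, value) {
      if (err) return reject(err);
      resolve(value ? item : null); // keep the item only if the key exists
    });
  });
})).then(function (checked) {
  var found_elements = checked.filter(function (item) { return item !== null; });
  console.log(found_elements.join(', '));
}).catch(function (err) {
  console.log(err);
});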

learnyounode #9 juggling async

I am trying to go through nodeschool's learnyounode.
This problem is the same as the previous problem (HTTP COLLECT) in
that you need to use http.get(). However, this time you will be
provided with three URLs as the first three command-line arguments.
You must collect the complete content provided to you by each of the
URLs and print it to the console (stdout). You don't need to print out
the length, just the data as a String; one line per URL. The catch is
that you must print them out in the same order as the URLs are
provided to you as command-line arguments.
I'm confused as to why my solution doesn't work, since it looks the same to me (just more functional), and I'm unsure how their internal tests work:
1. ACTUAL: ""
1. EXPECTED: "As busy as a dead horse also lets get some dero. Built like a sleepout no dramas lets get some chook. She'll be right thingo my she'll be right ute. "
2. ACTUAL: "She'll be right bizzo no worries she'll be right fair dinkum. We're going aerial pingpong no worries as busy as a gyno. "
2. EXPECTED: "She'll be right bizzo no worries she'll be right fair dinkum. We're going aerial pingpong no worries as busy as a gyno. "
3. ACTUAL: "He's got a massive pretty spiffy heaps she'll be right brizzie. He hasn't got a fly wire where shazza got us some strewth. She'll be right spit the dummy with it'll be fair go. We're going gobsmacked with as stands out like arvo. He's got a massive bush bash mate she'll be right slacker. "
3. EXPECTED: "He's got a massive pretty spiffy heaps she'll be right brizzie. He hasn't got a fly wire where shazza got us some strewth. She'll be right spit the dummy with it'll be fair go. We're going gobsmacked with as stands out like arvo. He's got a massive bush bash mate she'll be right slacker. "
4. ACTUAL: ""
4. EXPECTED: ""
my code:
var http = require('http');
var bl = require('bl');
var result = [];
var urls = process.argv.slice(2);

urls.forEach(function(url, i) {
  http.get(url, function(response) {
    response.pipe(bl(function(err, data) {
      if (err) return console.error(err);
      result[i] = data.toString();
      if (i === urls.length - 1) {
        console.log(result.join('\n'));
      }
    }));
  });
});
official solution:
var http = require('http')
var bl = require('bl')
var results = []
var count = 0

function printResults () {
  for (var i = 0; i < 3; i++)
    console.log(results[i])
}

function httpGet (index) {
  http.get(process.argv[2 + index], function (response) {
    response.pipe(bl(function (err, data) {
      if (err)
        return console.error(err)
      results[index] = data.toString()
      count++
      if (count == 3)
        printResults()
    }))
  })
}

for (var i = 0; i < 3; i++)
  httpGet(i)
Basically the first test never passes (although if there is only 1 url in the iterated array (instead of 3), the first test passes but not the others). Any insight would be great. I'm not sure where to ask about this and perhaps I'm just missing some JS thing, so sorry if this is not appropriate.
You haven't made sure that all of the urls have been downloaded.
The requests don't necessarily come back in order. Consider what happens if the third URL's response arrives first: your i === urls.length - 1 check fires and you print immediately, before the other two have arrived, so they're skipped.
The demo code counts the number of responses, so it's guaranteed to have everything before it prints the answer.
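In other words, the asker's version only needs the same counting trick. A minimal sketch of that fix:

var http = require('http');
var bl = require('bl');
var result = [];
var urls = process.argv.slice(2);
var remaining = urls.length; // count completions instead of checking the index

urls.forEach(function (url, i) {
  http.get(url, function (response) {
    response.pipe(bl(function (err, data) {
      if (err) return console.error(err);
      result[i] = data.toString();
      remaining--;
      if (remaining === 0) { // only print once EVERY response has arrived
        console.log(result.join('\n'));
      }
    }));
  });
});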
I think you just need to wait until all of the requests have finished, or any one of them errors. Here is my answer, which passed:
var http = require('http');
var bl = require('bl');
var urls = process.argv.slice(2)
var count = urls.length;
var results = [];

urls.forEach((url, index) => {
  http.get(url, (res) => {
    res.pipe(bl((err, data) => {
      if (err) throw err;
      results[index] = data.toString();
      count--;
      if (count == 0) {
        results.forEach((result) => {
          console.log(result)
        });
      }
    }))
  })
})
var http = require('http');
var links = [2, 3, 4];
var buffer = [];

(function render(index) {
  http.get(process.argv[links[index]], function (response) {
    response.setEncoding('utf8');
    response.on('data', function (chunk) {
      if (buffer[index] === undefined) {
        buffer[index] = '';
      }
      buffer[index] += chunk;
    });
    response.on('end', function () {
      var newIndex = index + 1;
      if (links[newIndex] !== undefined) {
        render(newIndex);
      } else {
        return renderOutput();
      }
    });
    response.on('error', console.error);
  }).on('error', console.error);
})(0); // self-calling function

function renderOutput() {
  buffer.forEach(function (elem) {
    console.log(elem);
  });
}
I got it working without using the bufferList (bl) module, using what may be a more generic approach.
var http = require('http');
var urlList = [];
urlList.push(process.argv[2]);
urlList.push(process.argv[3]);
urlList.push(process.argv[4]);
var results = []
var count = 0

function getURLdata (index) {
  http.get(urlList[index], function (response) {
    var data = {};
    data[index] = '';
    response.setEncoding('utf-8');
    response.on('error', function (err) {
      console.log(err);
    });
    response.on('data', function (chunk) {
      data[index] += chunk;
    });
    response.on('end', function () {
      results[index] = data;
      count++;
      if (count == urlList.length) {
        for (var i = 0; i < urlList.length; i++) {
          console.log(results[i][i]);
        }
      }
    });
  });
}

for (var i = 0; i < urlList.length; i++)
  getURLdata(i);
I am a beginner, so maybe this solution has problems. It uses async/await: by making an array of promises and waiting for all of them with Promise.all, the responses come back in the order of the input URLs regardless of which completes first.
const axios = require("axios")

const getURL = async url => {
  let res = await axios.get(url)
  return res.data
}

const getUrlArray = () => {
  let args = process.argv.slice(2)
    .map(e => getURL(e))
  return Promise.all(args)
}

getUrlArray()
  .then(data => data.forEach(e => console.log(e)))
Pretty simple solution, though note that it prints each response as soon as it arrives, so the ordering requirement is only met when the responses happen to come back in request order:
const http = require('http');
const bl = require('bl');

var x;
for (x = 2; x < 5; x++) {
  http.get(process.argv[x], function (res) {
    res.pipe(bl(function (err, data) {
      if (err) { return console.error(err) }
      console.log(data.toString());
    }));
  });
};
