Ensure for loop waits until moving on to the next item in JavaScript - javascript

Firstly, I appreciate that there are many answers out there explaining this topic, but I just can't understand it at the moment.
I want to loop through a JavaScript object I have created and then perform various actions, like making a request to an API and then storing some data in Redis.
This is what I have so far:
const params = { "handle1": { "screen_name": "handle1", "hash_tag": "#hashtag1"},
                 "handle2": { "screen_name": "handle2", "hash_tag": "#hashtag2"} }

for (const k of Object.keys(params)) {
  console.log("Searching for " + params[k]['screen_name'])
  client.get('statuses/user_timeline', { screen_name: params[k]['screen_name']})
    .then(function (tweets) {
      for (const key of Object.keys(tweets)) {
        const val = tweets[key]['text'];
        if(val.includes(params[k]['hash_tag'])) {
          console.log("Found")
          r_client.hset(params[k]['screen_name'], 'tweet_id', tweets[key]['id'], 'tweet_text', tweets[key]['text'], function (err, res) {
            console.log(res)
          });
          r_client.hgetall(params[k]['screen_name'], function(err, object) {
            console.log(object);
          });
        }
      }
      r_client.quit();
    })
    .catch(function (error) {
      throw error;
    });
}
When I run this, the output is as follows:
Searching for handle1
Searching for handle2
Found
0
{ tweet_id: '123456789',
tweet_text: 'text found in tweet' }
Found
undefined
undefined
So straight away I have a problem: the first loop hasn't even finished before it has moved on to the second one.
I would like to run this in sequential order (if that's the best way), but more importantly I was hoping someone could break down my code and explain how I should be approaching this to have it run correctly.

const tweets = await client.get(...) should do the trick.
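Note that await is only valid inside an async function, so the loop has to be wrapped in one. A minimal sketch of the idea:

async function search(params) {
  for (const k of Object.keys(params)) {
    // waits for each request to resolve before moving on to the next handle
    const tweets = await client.get('statuses/user_timeline', { screen_name: params[k]['screen_name'] });
    // ... process tweets here
  }
}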

Under the condition that .get(), .hset() and .hgetall() return Promises, this should pause the execution until each call has resolved:
const params = {
  "handle1": {
    "screen_name": "handle1",
    "hash_tag": "#hashtag1"
  },
  "handle2": {
    "screen_name": "handle2",
    "hash_tag": "#hashtag2"
  }
}
async function search(params) {
  for (const k in params) { // for..in goes through the keys
    console.log("Searching for " + params[k]['screen_name'])
    const tweets = await client.get('statuses/user_timeline', {
      screen_name: params[k]['screen_name']
    });
    for (const key in tweets) { // same kind of loop
      const val = tweets[key]['text'];
      if (val.includes(params[k]['hash_tag'])) {
        console.log("Found")
        const res = await r_client.hset(params[k]['screen_name'], 'tweet_id', tweets[key]['id'], 'tweet_text', tweets[key]['text']);
        console.log(res);
        const object = await r_client.hgetall(params[k]['screen_name']);
        console.log(object);
      }
    }
  }
  // quit only after every handle has been processed
  r_client.quit();
}

search(params);
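If your Redis client is the classic callback-based node_redis, those methods don't return Promises out of the box. One option (a sketch, assuming r_client is such a callback-style client) is to wrap the methods you need with util.promisify before using them in the loop above:

const { promisify } = require('util');

// assumption: r_client is a callback-style node_redis client
const hsetAsync = promisify(r_client.hset).bind(r_client);
const hgetallAsync = promisify(r_client.hgetall).bind(r_client);

// then, inside the loop:
// const res = await hsetAsync(params[k]['screen_name'], 'tweet_id', tweets[key]['id'], 'tweet_text', tweets[key]['text']);
// const object = await hgetallAsync(params[k]['screen_name']);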

Related

How to read multiple json file using fs and bulk request

I'm using the Elasticsearch search engine with my React app. I was reading one file on the backend, as you can see in the code, and it works perfectly. But now I want to read three different JSON files into three different indexes using the "fs" package and a bulk request. Can you please help me?
The code:
// Start reading the json file
fs.readFile("DocRes.json", { encoding: "utf-8" }, function (err, data) {
  if (err) {
    throw err;
  }

  // Build up a giant bulk request for elasticsearch.
  bulk_request = data.split("\n").reduce(function (bulk_request, line) {
    var obj, ncar;

    try {
      obj = JSON.parse(line);
    } catch (e) {
      console.log("Done reading 1");
      return bulk_request;
    }

    // Rework the data slightly
    ncar = {
      id: obj.id,
      name: obj.name,
      summary: obj.summary,
      image: obj.image,
      approvetool: obj.approvetool,
      num: obj.num,
      date: obj.date,
    };

    bulk_request.push({
      index: { _index: "ncar_index", _type: "ncar", _id: ncar.id },
    });
    bulk_request.push(ncar);
    return bulk_request;
  }, []);

  // A little voodoo to simulate synchronous insert
  var busy = false;
  var callback = function (err, resp) {
    if (err) {
      console.log(err);
    }
    busy = false;
  };

  // Recursively whittle away at bulk_request, 1000 at a time.
  var perhaps_insert = function () {
    if (!busy) {
      busy = true;
      client.bulk(
        {
          body: bulk_request.slice(0, 1000),
        },
        callback
      );
      bulk_request = bulk_request.slice(1000);
      console.log(bulk_request.length);
    }

    if (bulk_request.length > 0) {
      setTimeout(perhaps_insert, 100);
    } else {
      console.log("Inserted all records.");
    }
  };

  perhaps_insert();
});
You can create a promise for each file read and feed the results to the Elasticsearch bulk_request.
const fsPromises = require('fs').promises;
const path = require('path');

const files = ['filename1', 'filename2', 'filename3'];

// read one file; make sure the path is correct
const fetchFile = (filename) =>
  fsPromises.readFile(path.join(__dirname, filename), 'utf-8');

const results = files.map((filename) => fetchFile(filename));

Promise.all(results)
  .then((data) => console.log(data))
  .catch((e) => console.log(e));
Once you get the data from all the promises, pass it to Elasticsearch.
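For example, a rough sketch of pushing each file into its own index (the filenames and index names other than ncar_index are placeholders, and client is assumed to be an Elasticsearch client whose bulk() returns a promise):

// map each file to its target index (hypothetical names except ncar_index)
const fileToIndex = {
  'DocRes.json': 'ncar_index',
  'file2.json': 'index2',
  'file3.json': 'index3',
};

async function indexAll() {
  for (const [filename, index] of Object.entries(fileToIndex)) {
    const data = await fetchFile(filename);
    // build the bulk body the same way as in the original reduce()
    const body = data.split('\n').reduce((bulk, line) => {
      try {
        const obj = JSON.parse(line);
        bulk.push({ index: { _index: index, _id: obj.id } });
        bulk.push(obj);
      } catch (e) {
        // skip blank or malformed lines
      }
      return bulk;
    }, []);
    await client.bulk({ body });
  }
}

indexAll().catch((e) => console.log(e));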

How to update a global variable with a sqlite query in javascript?

I want to update a globally declared variable after a SQLite query, but I can't get it to work.
I have read that it might be related to asynchronous functions, but I have no idea how to implement callbacks and the like in this example.
Can you guys help, please? Here is the code:
const sqlite3 = require('sqlite3').verbose();
const dbPath = './src/db/db.sqlite3';

let db = new sqlite3.Database(dbPath, (err) => {
  if (err) {
    console.error(err.message);
  }
  console.log('Connected to database.');
});

let number = null;
let rowsExist = null;

db.get("select count(*) from PRICE", [], (err, row) => {
  if (err) {
    console.error(err.message)
  }
  else {
    rowsExist = Object.values(row)[0];
    console.log(rowsExist) // this works, but outside the function it doesn't get updated
  }
});

// here rowsExist remains the same after the query
if (rowsExist === null) {
  number = 1
}
else {
  db.get("SELECT number FROM PRICE ORDER BY number DESC LIMIT 1", [], (err, row) => {
    if (err) {
      console.error(err.message)
    }
    else {
      number = Object.values(row)[0] + 1
    }
  })
};
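The callback runs later, after the synchronous if/else has already executed, so rowsExist is still null at that point. One option (a sketch, assuming the same sqlite3 setup as above) is to wrap db.get in a Promise and await the result before using it:

const { promisify } = require('util');

// promisified version of db.get (node-style callback -> Promise)
const dbGet = promisify(db.get.bind(db));

async function getNextNumber() {
  const countRow = await dbGet("select count(*) from PRICE", []);
  const rowsExist = Object.values(countRow)[0];
  if (!rowsExist) {
    return 1;
  }
  const row = await dbGet("SELECT number FROM PRICE ORDER BY number DESC LIMIT 1", []);
  return Object.values(row)[0] + 1;
}

getNextNumber()
  .then((number) => console.log(number))
  .catch((err) => console.error(err.message));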

How do I extract data from df ~ onto my website?

I am creating a pie chart which shows how much disk space is available/used on my Linux box. However, I am unsure how to expose the parsed data at a microservice URL. Help will be greatly appreciated.
Here is what I have at the moment:
Router:
router.route('/linux_disk').get(disk.get_linux_disk)
Controller:
function get_linux_disk(req, res, next) {
  try {
    var cmd = `df ~`;
    exec(cmd)
    rows = [];
    rows.push({"Command": cmd});
    if (rows.length >= 1) {
      res.status(200).json(rows);
    } else {
      res.status(404).end();
    }
  } catch (err) {
    next(err);
  }
}
You might try the approach below, where we create a row object for each entry that the df command outputs. Once you have this, you should be able to build your pie chart from it:
const { exec } = require('child_process');
const { promisify } = require('util');

const execPromise = promisify(exec);

async function get_linux_disk(req, res, next) {
  try {
    const result = await execPromise(`df ~`)
    const lines = result.stdout.split("\n");
    const keys = lines[0].split(/\s+/ig);
    // Skip the header row when assigning objects..
    const rows = lines.slice(1).map(line => {
      // Parse each line..
      const values = line.split(/\s+/ig);
      return keys.reduce((o, k, index) => {
        o[k] = values[index];
        return o;
      }, {})
    });
    res.status(200).json(rows);
  } catch (err) {
    res.status(500).send(err.message);
  }
}
The resulting JSON will look a bit like this:
[
  {
    "Filesystem": "/dev/sda1",
    "1K-blocks": "10253588",
    "Used": "7971516",
    "Available": "1741504",
    "Use%": "83%",
    "Mounted": "/"
  }
]
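On the website side, a small sketch of consuming the route shown above (assuming the router is mounted so the endpoint is reachable at /linux_disk) and pulling out the numbers you would feed into the pie chart:

// hypothetical front-end usage
fetch('/linux_disk')
  .then((res) => res.json())
  .then((rows) => {
    const disk = rows[0];
    // e.g. plot Used vs Available for the filesystem that holds your home directory
    console.log(disk['Used'], disk['Available']);
  })
  .catch((err) => console.error(err));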

rxjs subscribing late results to empty stream

I have the following piece of code. As is, with a couple of lines commented out, it works as expected. I subscribe to a stream, do some processing and stream the data to the client. However, if I uncomment the comments, my stream is always empty, i.e. count in getEntryQueryStream is always 0. I suspect it has to do with the fact that I subscribe late to the stream and thus miss all the values.
// a wrapper of the mongodb driver => returns rxjs streams
import * as imongo from 'imongo';
import * as Rx from 'rx';
import * as _ from 'lodash';
import {elasticClient} from '../helpers/elasticClient';

const {ObjectId} = imongo;

function searchElastic({query, sort}, limit) {
  const body = {
    size: 1,
    query,
    _source: { excludes: ['logbookType', 'editable', 'availabilityTag'] },
    sort
  };
  // keep the search results "scrollable" for 30 secs
  const scroll = '30s';
  let count = 0;

  return Rx.Observable
    .fromPromise(elasticClient.search({ index: 'data', body, scroll }))
    .concatMap(({_scroll_id, hits: {hits}}) => {
      const subject = new Rx.Subject();
      // subject needs to be subscribed to before adding new values
      // and therefore completing the stream => execute in next tick
      setImmediate(() => {
        if(hits.length) {
          // initial data
          subject.onNext(hits[0]._source);
          // code that breaks
          //if(limit && ++count === limit) {
          //  subject.onCompleted();
          //  return;
          //}
          const handleDoc = (err, res) => {
            if(err) {
              subject.onError(err);
              return;
            }
            const {_scroll_id, hits: {hits}} = res;
            if(!hits.length) {
              subject.onCompleted();
            } else {
              subject.onNext(hits[0]._source);
              // code that breaks
              //if(limit && ++count === limit) {
              //  subject.onCompleted();
              //  return;
              //}
              setImmediate(() =>
                elasticClient.scroll({scroll, scrollId: _scroll_id},
                  handleDoc));
            }
          };
          setImmediate(() =>
            elasticClient.scroll({scroll, scrollId: _scroll_id},
              handleDoc));
        } else {
          subject.onCompleted();
        }
      });
      return subject.asObservable();
    });
}

function getElasticQuery(searchString, filter) {
  const query = _.cloneDeep(filter);
  query.query.filtered.filter.bool.must.push({
    query: {
      query_string: {
        query: searchString
      }
    }
  });
  return _.extend({}, query);
}

function fetchAncestors(ancestorIds, ancestors, format) {
  return imongo.find('session', 'sparse_data', {
    query: { _id: { $in: ancestorIds.map(x => ObjectId(x)) } },
    fields: { name: 1, type: 1 }
  })
  .map(entry => {
    entry.id = entry._id.toString();
    delete entry._id;
    return entry;
  })
  // we don't care about the results
  // but have to wait for stream to finish
  .defaultIfEmpty()
  .last();
}

function getEntryQueryStream(entriesQuery, query, limit) {
  const {parentSearchFilter, filter, format} = query;

  return searchElastic(entriesQuery, limit)
    .concatMap(entry => {
      const ancestors = entry.ancestors || [];
      // if no parents => doesn't match
      if(!ancestors.length) {
        return Rx.Observable.empty();
      }
      const parentsQuery = getElasticQuery(parentSearchFilter, filter);
      parentsQuery.query.filtered.filter.bool.must.push({
        terms: {
          id: ancestors
        }
      });
      // fetch parent entries
      return searchElastic(parentsQuery)
        .count()
        .concatMap(count => {
          // no parents match query
          if(!count) {
            return Rx.Observable.empty();
          }
          // fetch all other ancestors that weren't part of the query results
          // and are still a string (id)
          const restAncestorsToFetch = ancestors.filter(x => _.isString(x));
          return fetchAncestors(restAncestorsToFetch, ancestors, format)
            .concatMap(() => Rx.Observable.just(entry));
        });
    });
}

function executeQuery(query, res) {
  try {
    const stream = getEntryQueryStream(query);
    // stream is passed on to another function here where we subscribe to it like:
    // stream
    //   .map(x => whatever(x))
    //   .subscribe(
    //     x => res.write(x),
    //     err => console.error(err),
    //     () => res.end());
  } catch(e) {
    logger.error(e);
    res.status(500).json(e);
  }
}
I don't understand why those few lines of code break everything or how I could fix it.
Your use case is quite complex; you can start off by building up the searchElastic method like the pattern below.
convert elasticClient.scroll to an observable first
set up the initial data for elasticClient.search()
when the search is resolved you should get your scroll id
the expand() operator lets you recursively execute the elasticClientScroll observable
use map to select the data you want to return
use takeWhile to decide when to complete this stream
The expected result is that once you do searchElastic().subscribe(), the stream will emit continuously until there's no more data to fetch.
Hope this structure is correct and can get you started.
function searchElastic({ query, sort }, limit) {
  // elasticClient.scroll uses a node-style (err, res) callback, so use fromNodeCallback
  const elasticClientScroll = Observable.fromNodeCallback(elasticClient.scroll, elasticClient)
  let obj = {
    body: {
      size: 1,
      query,
      _source: { excludes: ['logbookType', 'editable', 'availabilityTag'] },
      sort
    },
    scroll: '30s'
  }
  return Observable.fromPromise(elasticClient.search({ index: 'data', body: obj.body, scroll: obj.scroll }))
    .expand(({ _scroll_id, hits: { hits } }) => {
      // guess there is more logic here .....
      // to update the scroll id or something
      return elasticClientScroll({ scroll: obj.scroll, scrollId: _scroll_id })
        .map(res => res) // .. select the res you want to return
    }).takeWhile(res => res.hits.length)
}
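A hedged usage sketch (myQuery, mySort and the limit of 10 are placeholders) of what subscribing to the finished stream could look like:

// hypothetical usage: log each page until the stream completes
searchElastic({ query: myQuery, sort: mySort }, 10)
  .subscribe(
    res => console.log(res.hits.hits[0]._source),
    err => console.error(err),
    () => console.log('done')
  );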

NodeJS returned data sometimes changes

I'm new to NodeJS and I have a problem I don't understand.
In this function, I call several APIs one after another to retrieve some data about a movie. The result isn't always the same. Most of the time the result is correct, but sometimes it isn't complete.
I tried using then to chain the API calls, but it doesn't seem to work.
Any idea why the result isn't always the same? Any help would be appreciated.
// test fetchData(456165)
function fetchData(filmid) {
  let average = array => array.reduce((a, b) => a + b) / array.length
  var notes = []

  mdb.movieInfo({
      id: filmid,
      language: 'fr'
    },
    (err, resOmdb) => {
      notes.push(parseFloat(resOmdb.vote_average))

      imdb
        .getById(resOmdb.imdb_id, {
          apiKey: 'e9d59b68',
          timeout: 3000
        })
        .then(
          allocine.api(
            'search', {
              q: `${resOmdb.title}`,
              filter: 'movie'
            },
            function(error, resAllo) {
              if (error) {
                return
              }

              allocine.api(
                'movie', {
                  code: `${resAllo.feed.movie[0].code}`
                },
                function(error, result) {
                  if (error) {
                    return
                  }
                  notes.push(parseFloat(result.movie.statistics.userRating) * 2)
                }
              )

              // doesn't seem to execute all the time
              allocine.api(
                'showtimelist', {
                  zip: 44260,
                  movie: resAllo.feed.movie[0].code
                },
                function(error, resultCin) {
                  if (error) {
                    return
                  }
                  // sometimes doesn't appear in the result
                  resOmdb.cinemas = resultCin
                }
              )
            }
          )
        )
        .then(
          function(result) {
            notes.push(parseFloat(result.rating))
            resOmdb.vote_average = average(notes).toFixed(2)

            // check the result
            console.log(util.inspect(resOmdb, false, null))
          },
          function(error) {
            return
          }
        )
    }
  )
}
First of all, you should decide if you want to use Promises or not.
If you do, promisify all functions. The next thing you need to do is 'return' your promises if they are used inside a function.
In your case, your first imdb API call is probably not returned.
Next, you should check if your Node version supports async/await.
Then you can easily do your API calls without any distractions.
'use strict';

const Promise = require('bluebird');
const mdb = Promise.promisifyAll(require('mdb'));
const allocine = Promise.promisifyAll(require('allocine-api'));

// test fetchData(456165)
async function fetchData(filmId) {
  const notes = [];

  const resOmdb = await mdb.movieInfoAsync({ id: filmId });
  notes.push(parseFloat(resOmdb.vote_average));

  const imdbResult = await imdb.getByIdAsync(resOmdb.imdb_id, { apiKey: 'e9d59b68', timeout: 3000 });

  const resAllo = await allocine.apiAsync('search', { q: `${resOmdb.title}`, filter: 'movie' });

  // and so on ...
}
UPDATE:
To speed up your function, you can make the requests concurrently.
To do so, use Promise.join:
const [imdbResult, allocineResult] = await Promise.join(
  imdb.getByIdAsync(resOmdb.imdb_id, { apiKey: 'e9d59b68', timeout: 3000 }),
  allocine.apiAsync('search', { q: `${resOmdb.title}`, filter: 'movie' })
);
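If you'd rather not depend on Bluebird for this part, native Promise.all gives the same concurrency (a sketch, assuming the same promisified calls as above):

// destructure the results in the same order the promises are listed
const [imdbResult, allocineResult] = await Promise.all([
  imdb.getByIdAsync(resOmdb.imdb_id, { apiKey: 'e9d59b68', timeout: 3000 }),
  allocine.apiAsync('search', { q: `${resOmdb.title}`, filter: 'movie' })
]);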
