How do I make parallel concurrent requests in Node.js?

I'm trying to make this.getData(item) calls in parallel, two at a time.
However, with my approach, even if I allow 1 request at a time instead of 2, it still gobbles up my API usage limit.
I think I have a bug, but I'm not sure where.
async makeAsyncCallsInParallel(items) {
// Define the maximum number of parallel requests
const maxParallelRequests = 1;
// Use the map method to create an array of promises for each call to GetData
const promises = items.map(item => this.getData(item));
// Use a loop to process the promises in batches of up to the maximum number of parallel requests
const results = [];
for (let i = 0; i < promises.length; i += maxParallelRequests) {
const batch = promises.slice(i, i + maxParallelRequests);
const batchResults = await Promise.all(batch);
results.push(...batchResults);
}
// Return the final results
return results;
}
Here is my getData function; I think the problem may be in here too:
async getData(item) {
const me = await this.me();
const {
link,
asin,
starts_at,
ends_at,
title,
image,
deal_price,
list_price,
merchant_name,
free_shipping,
description,
category,
// tags
} = item;
const discountPercent = deal_price?.value && list_price?.value ? parseInt((((deal_price?.value - list_price?.value) / list_price?.value) * 100).toFixed(0)) : null;
const { aiTitle, aiDescription, aiPrompt, aiChoices, aiTags } = await this.getAIDescription(item);
console.log('title: ', title, 'aiTitle:', aiTitle, 'description: ', description, 'aiDescription: ', aiDescription);
const deal = {
link,
productId: asin,
startsAt: starts_at,
endsAt: ends_at,
imageUrl: image,
title: aiTitle || title,
originalTitle: title,
dealPrice: deal_price,
listPrice: list_price,
discountPercent,
merchant: merchant_name,
isFreeShipping: free_shipping,
description: aiDescription || description,
originalDescription: description,
category,
createdBy: me.id,
tags: aiTags,
aiPrompt,
aiChoices,
};
return deal;
}
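Note: the batching loop above never actually limits concurrency, because items.map(item => this.getData(item)) starts every getData call up front; Promise.all then only controls when the already-started requests are awaited. A minimal sketch of one way to defer each call until its batch runs (same method and names as above, only the map moved inside the loop):
async makeAsyncCallsInParallel(items) {
  // Allow at most this many getData calls in flight at once
  const maxParallelRequests = 2;
  const results = [];
  // Slice the items (not pre-started promises) and only call getData
  // when its batch is actually being awaited
  for (let i = 0; i < items.length; i += maxParallelRequests) {
    const batch = items.slice(i, i + maxParallelRequests);
    const batchResults = await Promise.all(batch.map(item => this.getData(item)));
    results.push(...batchResults);
  }
  return results;
}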

Related

Increase performance in a private trading terminal

I am in the process of building my own trading terminal with various functions for a school project where speed is important.
I have two functions that I would like to optimize, but I don't know how to proceed from here.
function1
const calcPromise = async () => {
const markPriceData = await restClient.getTickers({
symbol: selectedSymbol,
});
let cash = 100;
const lastPrice = markPriceData.result[0].last_price;
const quantity = Math.round((cash / lastPrice * 1.03).toFixed(3));
return { lastPrice, quantity };
};
It makes one request for the last price of the given symbol and calculates the quantity I can buy for a certain amount.
function2
export default async function placeOrder() {
try {
const orderData = await restClient.placeActiveOrder({
symbol: selectedSymbol,
order_type: 'Limit',
side: 'Buy',
qty: quantity,
price: lastPrice,
time_in_force: 'GoodTillCancel',
reduce_only: false,
close_on_trigger: false,
position_idx: LinearPositionIdx.OneWayMode
});
const endTime2 = performance.now();
const executionTime2 = endTime2 - startTime2;
console.log(`Buy order ${executionTime2}ms`);
console.log(orderData);
} catch (err) {
console.log(err);
}
}
Function2 takes the values from function1 (lastPrice and quantity), makes another request, and executes a buy order.
This process is taking too long; I'm clocking it at 2-5 seconds on average.
As it is now, function2 must sit and wait for the first request, which then does the calculations, before it can be executed.
The idea I have is to get function1 to run in the background somehow, so that the price is already available and function2 does not need to wait for function1 to be ready.
I've tried implementing this with async, but it resulted in errors, because function2 always runs faster than function1 if they run at the same time.
Is there a smart way I can solve this?
I would combine the two functions into one.
Try this:
async function placeOrder() {
try {
const markPriceData = await restClient.getTickers({
symbol: selectedSymbol,
});
let cash = 100;
const lastPrice = markPriceData.result[0].last_price;
const quantity = Math.round((cash / lastPrice * 1.03).toFixed(3));
const startTime1 = performance.now();
const orderData = await restClient.placeActiveOrder({
symbol: selectedSymbol,
order_type: "Limit",
side: "Buy",
qty: quantity,
price: lastPrice,
time_in_force: "GoodTillCancel",
reduce_only: false,
close_on_trigger: false,
position_idx: LinearPositionIdx.OneWayMode,
});
const endTime1 = performance.now();
const executionTime = endTime1 - startTime1;
console.log(`Calculations, buy ${executionTime}ms`);
return orderData;
} catch (error) {
throw error;
}
}
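If you still want the ticker fetched ahead of time (the "run function1 in the background" idea), one option is to keep the latest price cached on a timer so placeOrder only has to make the order request. This is a rough sketch under the same restClient, selectedSymbol, and LinearPositionIdx assumptions as above; latestQuote and the 1-second poll interval are illustrative, and a cached price can of course be slightly stale by the time the limit order is placed:
let latestQuote = null; // illustrative cache holding { lastPrice, quantity }

async function refreshQuote() {
  const markPriceData = await restClient.getTickers({ symbol: selectedSymbol });
  const lastPrice = markPriceData.result[0].last_price;
  const cash = 100;
  const quantity = Math.round((cash / lastPrice) * 1.03);
  latestQuote = { lastPrice, quantity };
}

// Refresh in the background, e.g. once per second (interval is illustrative)
setInterval(() => refreshQuote().catch(console.error), 1000);

export default async function placeOrder() {
  if (!latestQuote) await refreshQuote(); // fall back if nothing is cached yet
  const { lastPrice, quantity } = latestQuote;
  return restClient.placeActiveOrder({
    symbol: selectedSymbol,
    order_type: 'Limit',
    side: 'Buy',
    qty: quantity,
    price: lastPrice,
    time_in_force: 'GoodTillCancel',
    reduce_only: false,
    close_on_trigger: false,
    position_idx: LinearPositionIdx.OneWayMode,
  });
}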

Get data from multiple references from Firebase realtime database in one query javascript

In the photo below you can see my data structure.
I want to get the required data from the messages object in one request.
For example:
firebase.database().ref('messages/' + "-Me4yNAigYmM5_PhuiIS", 'messages/' + "-Me4yNoy8n971XMwnxqP").get();
expected output:
{
  "-Me4yNAigYmM5_PhuiIS": {
    text: "example"
  },
  "-Me4yNoy8n971XMwnxqP": {
    text: "example2"
  }
}
I got out of the situation like this:
useEffect(() => {
const usersMessages = firebase.database().ref("users/" + userUid + "/messages");
usersMessages.on('value', async (snapshot) => {
let usersMessages = [];
const messages = (await firebase.database().ref("messages/").get()).val();
snapshot.val().map(messageId => {
usersMessages.push(messages[messageId]);
});
});
}, []);
How correct is it to constantly load all messages of all users? Is there a way to get only the required messages from Firebase in one request, knowing a few message IDs?
To load multiple nodes based on their key, you'll need a separate call for each node/key. It's essentially a client-side join of the data from the two nodes:
const usersMessages = firebase.database().ref(`users/${userUid}/messages`);
usersMessages.on('value', async (snapshot) => {
  // Collect the keys first, then load the corresponding message nodes in parallel
  const messageIds = [];
  snapshot.forEach((child) => {
    messageIds.push(child.key);
  });
  const messageSnapshots = await Promise.all(
    messageIds.map((messageId) => firebase.database().ref(`messages/${messageId}`).get())
  );
  const usersMessages = messageSnapshots.map((messageSnapshot) => messageSnapshot.val());
});

Loop from multiple airtable bases in a Next JS page

I think this is more of a general async/await loop question, but I'm trying to do it within the bounds of an Airtable API request and within getStaticProps of Next.js, so I thought that was important to share.
What I want to do is create an array of base IDs like ["appBaseId01", "appBaseId02", "appBaseId03"] and output their contents on a page. I have it working with one base, but am failing at getting it to work for multiple.
Below is the code for one static base; if anyone can help me grok how I'd want to loop over these, I'd appreciate it. My gut says that I need to await each one individually and then push them into an array, but I'm not sure.
const records = await airtable
.base("appBaseId01")("Case Overview Information")
.select()
.firstPage();
const details = records.map((detail) => {
return {
city: detail.get("City") || null,
name: detail.get("Name") || null,
state: detail.get("State") || null,
};
});
return {
props: {
details,
},
};
EDIT
I've gotten closer to emulating it, but haven't figured out how to loop the initial requests yet.
This yields me an array of arrays that I can at least work with, but it's janky and unsustainable.
export async function getStaticProps() {
const caseOneRecords = await setOverviewBase("appBaseId01")
.select({})
.firstPage();
const caseTwoRecords = await setOverviewBase("appBaseId02")
.select({})
.firstPage();
const cases = [];
cases.push(minifyOverviewRecords(caseOneRecords));
cases.push(minifyOverviewRecords(caseTwoRecords));
return {
props: {
cases,
},
};
}
setOverviewBase is a helper that establishes the Airtable connection and sets the table name.
const setOverviewBase = (baseId) =>
base.base(baseId)("Case Overview Information");
You can map the array of base IDs and await with Promise.all. Assuming you have getFirstPage and minifyOverviewRecords defined as below, you could do the following:
const getFirstPage = (baseId) =>
airtable
.base(baseId)("Case Overview Information")
.select({})
.firstPage();
const minifyOverviewRecords = (records) =>
records.map((detail) => {
return {
city: detail.get("City") || null,
name: detail.get("Name") || null,
state: detail.get("State") || null,
};
});
export async function getStaticProps() {
const cases = await Promise.all(
["appBaseId01", "appBaseId02", "appBaseId03"].map(async (baseId) => {
const firstPage = await getFirstPage(baseId);
return minifyOverviewRecords(firstPage);
})
);
return {
props: {
cases
}
};
}
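For completeness, a hedged sketch of how the page component might consume the cases prop; the component name and rendered fields are illustrative, not part of the original question:
// Illustrative Next.js page component; each entry in `cases` is the
// minified array returned by minifyOverviewRecords for one base
export default function CasesPage({ cases }) {
  return (
    <div>
      {cases.map((caseRecords, i) => (
        <ul key={i}>
          {caseRecords.map((record, j) => (
            <li key={j}>
              {record.name} ({record.city}, {record.state})
            </li>
          ))}
        </ul>
      ))}
    </div>
  );
}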

Function ends before all asynchronous work completes

So I have a function that checks if an order is 24 hours old; if that's the case, I send a notification to the user. But it seems like it does not complete the execution for all of the users; instead it notifies some and not others. I think I have a problem returning the promise. I'm not an expert in JavaScript and I did not really understand what is happening; sometimes, instead of processing all the documents, it just finishes when one document has an empty deviceToken and does not continue with the other user documents.
exports.rememberToFinishOrder = functions.pubsub.schedule('every 3 minutes').onRun(async (context) => {
var db = admin.firestore();
const tsToMillis = admin.firestore.Timestamp.now().toMillis()
const compareDate = new Date(tsToMillis - (24 * 60 * 60 * 1000)) //24 horas
let snap = await db.collection('orders').where("timestamp","<",new Date(compareDate)).where("status", "in" ,[1,2,4,5,6]).get()
if(snap.size > 0){
snap.forEach(async(doc) => {
const userId = doc.data().uid
let userSnap = await db.collection('user').doc(userId).get()
const deviceToken = userSnap.data().deviceToken
const payload = {
notification: {
title: "¿ Did you received your order ?",
body: "We need to know if you have received your order",
clickAction: "AppMainActivity"
},
data: {
ORDER_REMINDER: "ORDER_REMINDER"
}
}
console.log("User: "+doc.data().uid)
return admin.messaging().sendToDevice(deviceToken,payload)
});
}
});
Sometimes, when the deviceToken is empty for some user, it finishes the execution of this function instead of continuing to the next user. It also does not finish this function for all the users in my orders collection; it handles some and not others, and this should be an operation that covers every document in that collection, not just some of them.
What is happening?
As andresmijares says, you are not handling the promises correctly.
When you are making several asynchronous calls, I'd suggest using Promise.all(), which will wait for all the promises to be done before continuing.
exports.rememberToFinishOrder = functions.pubsub.schedule('every 3 minutes').onRun(async (context) => {
const db = admin.firestore();
const messaging = admin.messaging();
const tsToMillis = admin.firestore.Timestamp.now().toMillis()
const compareDate = new Date(tsToMillis - (24 * 60 * 60 * 1000)) //24 horas
const snap = await db.collection('orders').where("timestamp","<",new Date(compareDate)).where("status", "in" ,[1,2,4,5,6]).get()
let allPromises = [];
if(snap.size > 0){
snap.forEach((doc) => {
const userId = doc.data().uid;
allPromises.push(db.collection('user').doc(userId).get().then(userSnapshot => {
const userData = userSnapshot.data();
const deviceToken = userData && userData.deviceToken;
if (deviceToken) {
const payload = {
notification: {
title: "¿ Did you received your order ?",
body: "We need to know if you have received your order",
clickAction: "AppMainActivity"
},
data: {
ORDER_REMINDER: "ORDER_REMINDER"
}
}
console.log("User: "+doc.data().uid)
return messaging.sendToDevice(deviceToken,payload)
} else {
return;
}
}));
});
}
return Promise.all(allPromises);
});
EDIT:
I added a check to see if the deviceToken is present on the userData before sending the notification.
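One further note, as an assumption beyond the original answer: Promise.all rejects as soon as any single sendToDevice call rejects, so the function stops waiting for the remaining sends. If you would rather log failures and still wait for every send, Promise.allSettled (Node 12.9+) is an option; a minimal sketch replacing the final return:
// Wait for every notification attempt, even if some fail
const results = await Promise.allSettled(allPromises);
results
  .filter((r) => r.status === 'rejected')
  .forEach((r) => console.error('Failed to notify a user:', r.reason));
return results;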

Import CSV Using Mongoose Schema

Currently I need to push a large CSV file into a MongoDB collection, and the order of the values needs to determine the key for the DB entry:
Example CSV file:
9,1557,358,286,Mutantville,4368,2358026,,M,0,0,0,1,0
9,1557,359,147,Wroogny,4853,2356061,,D,0,0,0,1,0
Code to parse it into arrays:
var fs = require("fs");
var csv = require("fast-csv");
fs.createReadStream("rank.txt")
.pipe(csv())
.on("data", function(data){
console.log(data);
})
.on("end", function(data){
console.log("Read Finished");
});
Code Output:
[ '9',
'1557',
'358',
'286',
'Mutantville',
'4368',
'2358026',
'',
'M',
'0',
'0',
'0',
'1',
'0' ]
[ '9',
'1557',
'359',
'147',
'Wroogny',
'4853',
'2356061',
'',
'D',
'0',
'0',
'0',
'1',
'0' ]
How do I insert the arrays through my Mongoose schema so they go into MongoDB?
Schema:
var mongoose = require("mongoose");
var rankSchema = new mongoose.Schema({
serverid: Number,
resetid: Number,
rank: Number,
number: Number,
name: String,
land: Number,
networth: Number,
tag: String,
gov: String,
gdi: Number,
protection: Number,
vacation: Number,
alive: Number,
deleted: Number
});
module.exports = mongoose.model("Rank", rankSchema);
The order of the array needs to match the order of the schema; for instance, the first number in the array, 9, always needs to be saved as the key "serverid", and so forth. I'm using Node.js.
You can do it with fast-csv by getting the headers from the schema definition which will return the parsed lines as "objects". You actually have some mismatches, so I've marked them with corrections:
const fs = require('mz/fs');
const csv = require('fast-csv');
const { Schema } = mongoose = require('mongoose');
const uri = 'mongodb://localhost/test';
mongoose.Promise = global.Promise;
mongoose.set('debug', true);
const rankSchema = new Schema({
serverid: Number,
resetid: Number,
rank: Number,
name: String,
land: String, // <-- You have this as Number but it's a string
networth: Number,
tag: String,
stuff: String, // the empty field in the csv
gov: String,
gdi: Number,
protection: Number,
vacation: Number,
alive: Number,
deleted: Number
});
const Rank = mongoose.model('Rank', rankSchema);
const log = data => console.log(JSON.stringify(data, undefined, 2));
(async function() {
try {
const conn = await mongoose.connect(uri);
await Promise.all(Object.entries(conn.models).map(([k,m]) => m.remove()));
let headers = Object.keys(Rank.schema.paths)
.filter(k => ['_id','__v'].indexOf(k) === -1);
console.log(headers);
await new Promise((resolve,reject) => {
let buffer = [],
counter = 0;
let stream = fs.createReadStream('input.csv')
.pipe(csv({ headers }))
.on("error", reject)
.on("data", async doc => {
stream.pause();
buffer.push(doc);
counter++;
log(doc);
try {
if ( counter > 10000 ) {
await Rank.insertMany(buffer);
buffer = [];
counter = 0;
}
} catch(e) {
stream.destroy(e);
}
stream.resume();
})
.on("end", async () => {
try {
if ( counter > 0 ) {
await Rank.insertMany(buffer);
buffer = [];
counter = 0;
resolve();
}
} catch(e) {
stream.destroy(e);
}
});
});
} catch(e) {
console.error(e)
} finally {
process.exit()
}
})()
As long as the schema actually lines up with the provided CSV, it's okay. These are the corrections I can see, but if you need the actual field names aligned differently then you'll have to adjust. Basically, there was a Number in a position where there is a String, and essentially an extra field, which I'm presuming is the blank one in the CSV.
The general things are getting the array of field names from the schema and passing that into the options when making the csv parser instance:
let headers = Object.keys(Rank.schema.paths)
.filter(k => ['_id','__v'].indexOf(k) === -1);
let stream = fs.createReadStream('input.csv')
.pipe(csv({ headers }))
Once you actually do that then you get an "Object" back instead of an array:
{
"serverid": "9",
"resetid": "1557",
"rank": "358",
"name": "286",
"land": "Mutantville",
"networth": "4368",
"tag": "2358026",
"stuff": "",
"gov": "M",
"gdi": "0",
"protection": "0",
"vacation": "0",
"alive": "1",
"deleted": "0"
}
Don't worry about the "types" because Mongoose will cast the values according to schema.
The rest happens within the handler for the data event. For maximum efficiency we are using insertMany() to only write to the database once every 10,000 lines. How that actually goes to the server and processes depends on the MongoDB version, but 10,000 should be pretty reasonable based on the average number of fields you would import for a single collection in terms of the "trade-off" for memory usage and writing a reasonable network request. Make the number smaller if necessary.
The important parts are to mark these calls as async functions and await the result of the insertMany() before continuing. Also we need to pause() the stream and resume() on each item otherwise we run the risk of overwriting the buffer of documents to insert before they are actually sent. The pause() and resume() are necessary to put "back-pressure" on the pipe, otherwise items just keep "coming out" and firing the data event.
Naturally the control for the 10,000 entries requires we check that both on each iteration and on stream completion in order to empty the buffer and send any remaining documents to the server.
That's really what you want to do, as you certainly don't want to fire off an async request to the server both on "every" iteration through the data event or essentially without waiting for each request to complete. You'll get away with not checking that for "very small files", but for any real world load you're certain to exceed the call stack due to "in flight" async calls which have not yet completed.
FYI, here is the package.json that was used. The mz module is optional, as it's just a modernized, Promise-enabled wrapper around the standard node "built-in" libraries that I'm simply used to using. The code is of course completely interchangeable with the fs module.
{
"description": "",
"main": "index.js",
"dependencies": {
"fast-csv": "^2.4.1",
"mongoose": "^5.1.1",
"mz": "^2.7.0"
},
"keywords": [],
"author": "",
"license": "ISC"
}
Actually, with Node v8.9.x and above, we can make this much simpler with an implementation of AsyncIterator through the stream-to-iterator module. It's still in Iterator<Promise<T>> mode, but it should do until Node v10.x becomes stable LTS:
const fs = require('mz/fs');
const csv = require('fast-csv');
const streamToIterator = require('stream-to-iterator');
const { Schema } = mongoose = require('mongoose');
const uri = 'mongodb://localhost/test';
mongoose.Promise = global.Promise;
mongoose.set('debug', true);
const rankSchema = new Schema({
serverid: Number,
resetid: Number,
rank: Number,
name: String,
land: String,
networth: Number,
tag: String,
stuff: String, // the empty field
gov: String,
gdi: Number,
protection: Number,
vacation: Number,
alive: Number,
deleted: Number
});
const Rank = mongoose.model('Rank', rankSchema);
const log = data => console.log(JSON.stringify(data, undefined, 2));
(async function() {
try {
const conn = await mongoose.connect(uri);
await Promise.all(Object.entries(conn.models).map(([k,m]) => m.remove()));
let headers = Object.keys(Rank.schema.paths)
.filter(k => ['_id','__v'].indexOf(k) === -1);
//console.log(headers);
let stream = fs.createReadStream('input.csv')
.pipe(csv({ headers }));
const iterator = await streamToIterator(stream).init();
let buffer = [],
counter = 0;
for ( let docPromise of iterator ) {
let doc = await docPromise;
buffer.push(doc);
counter++;
if ( counter > 10000 ) {
await Rank.insertMany(buffer);
buffer = [];
counter = 0;
}
}
if ( counter > 0 ) {
await Rank.insertMany(buffer);
buffer = [];
counter = 0;
}
} catch(e) {
console.error(e)
} finally {
process.exit()
}
})()
Basically, all of the stream "event" handling and pausing and resuming gets replaced by a simple for loop:
const iterator = await streamToIterator(stream).init();
for ( let docPromise of iterator ) {
let doc = await docPromise;
// ... The things in the loop
}
Easy! This gets cleaned up in later Node implementations with for await...of once it becomes more stable, but the above runs fine from the specified version and up.
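As an aside (an assumption beyond the original answer): from Node v10 onward, readable streams are async iterable themselves, so the stream-to-iterator dependency can be dropped and the same loop written directly against the parsed stream. Inside the same async IIFE as above, with the same Rank model and headers:
// Node >= 10: the piped fast-csv stream can be consumed with for await...of
const stream = fs.createReadStream('input.csv').pipe(csv({ headers }));

let buffer = [];
for await (const doc of stream) {
  buffer.push(doc);
  if (buffer.length >= 10000) {
    await Rank.insertMany(buffer); // flush a full batch
    buffer = [];
  }
}
if (buffer.length > 0) {
  await Rank.insertMany(buffer); // flush whatever remains
}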
As @Neil Lunn said, this approach needs a header line within the CSV itself.
Here is an example using the csvtojson module.
const csv = require('csvtojson');
const csvArray = [];
csv()
  .fromFile(filePath) // placeholder for the actual CSV file path
  .on('json', (jsonObj) => {
    csvArray.push({ name: jsonObj.name, id: jsonObj.id });
  })
  .on('done', (error) => {
    if (error) {
      return res.status(500).json({ error });
    }
    Model.create(csvArray)
      .then((result) => {
        return res.status(200).json({ result });
      }).catch((err) => {
        return res.status(500).json({ error: err });
      });
  });
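Note that csvtojson v2 also exposes a promise interface, so the same idea can be written without event handlers. A hedged sketch; filePath, the column names, and Model are placeholders carried over from the snippet above, and the CSV still needs a header line:
const csv = require('csvtojson');

// csvtojson v2: fromFile() resolves to an array of row objects keyed by the header line
async function importCsv(filePath) {
  const rows = await csv().fromFile(filePath);
  const csvArray = rows.map((row) => ({ name: row.name, id: row.id }));
  return Model.create(csvArray);
}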
