Why does the following code take more time locally? - javascript

I wrote the following Lambda function to move messages from queueA to queueB:
import {
    SQSClient,
    GetQueueUrlCommand,
    ReceiveMessageCommand,
    SendMessageCommand,
    DeleteMessageCommand
} from "@aws-sdk/client-sqs";

const sqs = new SQSClient({});

async function reprocess_messages(fromQueue, toQueue) {
    try {
        const response1 = await sqs.send(new GetQueueUrlCommand({ QueueName: fromQueue }));
        const response2 = await sqs.send(new GetQueueUrlCommand({ QueueName: toQueue }));
        const fromQueueUrl = response1.QueueUrl;
        const toQueueUrl = response2.QueueUrl;
        let completed = false;
        while (!completed) {
            completed = await moveMessage(toQueueUrl, fromQueueUrl);
        }
        return completed;
    } catch (err) {
        console.error(err);
    }
}
async function moveMessage(toQueueUrl, fromQueueUrl) {
    try {
        const receiveMessageParams = {
            MaxNumberOfMessages: 10,
            MessageAttributeNames: ["Messages"],
            QueueUrl: fromQueueUrl,
            VisibilityTimeout: 2,
            WaitTimeSeconds: 0,
        };
        const receiveData = await sqs.send(new ReceiveMessageCommand(receiveMessageParams));
        if (!receiveData.Messages) {
            console.log("finished");
            return true;
        }
        const messages = receiveData.Messages.map(msg => ({
            body: msg.Body,
            receiptHandle: msg.ReceiptHandle,
        }));
        const sendMsg = async ({ body, receiptHandle }) => {
            const sendMessageParams = {
                MessageBody: body,
                QueueUrl: toQueueUrl
            };
            await sqs.send(new SendMessageCommand(sendMessageParams));
            return "Success";
        };
        const deleteMsg = async ({ body, receiptHandle }) => {
            const deleteMessageParams = {
                QueueUrl: fromQueueUrl,
                ReceiptHandle: receiptHandle
            };
            await sqs.send(new DeleteMessageCommand(deleteMessageParams));
            return "Deleted";
        };
        const sent = await Promise.all(messages.map(sendMsg));
        await Promise.all(messages.map(deleteMsg));
        console.log(sent.length);
        return false;
    } catch (err) {
        console.log(err);
    }
}
export const handler = async function (event, context) {
    console.log("Invoking lambda");
    const response = await reprocess_messages("queueA", "queueB");
    console.log(response);
};
With a Lambda memory configuration of 256 MB it takes 19691 ms, and with 512 MB it takes 10171 ms, to move 1000 messages from queueA to queueB. However, on my local system, when I run reprocess_messages(qA, qB) it takes around 2 minutes to move the messages from queueA to queueB.
Does this mean that if I increase the memory limit to 1024 MB it will take only around 5000 ms, and how can I find the optimal memory limit?

It will almost always be the case that code interacting with AWS services runs slower on your local machine than on an AWS service like Lambda. When your code runs on Lambda and talks to AWS services in the same region, the only latency is usually within the AWS network, which can be drastically lower than the latency between your network and the AWS region you're working with.
In terms of finding the optimal configuration, this is trial and error. You are trying to find the sweet spot between price and performance. There are tools like AWS Compute Optimizer that can assist you with this.
A useful note to bear in mind is that as you increase Lambda memory, you also gain CPU: roughly every additional 1,769 MB of memory gives you another vCPU core. So while your current performance increase comes from the memory increase alone, if you were to cross a threshold that grants an additional vCPU core, it might have a greater effect. Unfortunately it is difficult to theorize about the results, and it is best to simply trial the different scenarios.
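Separately, each SQS call pays the round-trip latency once per request, so cutting the number of requests helps at any memory size. As a minimal sketch (untested, assuming the same AWS SDK v3 client as in the question), the batch variants of the send and delete APIs move up to 10 messages per request instead of one each:
import { SendMessageBatchCommand, DeleteMessageBatchCommand } from "@aws-sdk/client-sqs";

// Sketch: forward one received batch (up to 10 messages) using two
// requests in total, instead of one send + one delete per message.
async function moveBatch(sqs, toQueueUrl, fromQueueUrl, messages) {
    await sqs.send(new SendMessageBatchCommand({
        QueueUrl: toQueueUrl,
        Entries: messages.map((msg, i) => ({
            Id: String(i), // entry id, only needs to be unique within this request
            MessageBody: msg.Body,
        })),
    }));
    await sqs.send(new DeleteMessageBatchCommand({
        QueueUrl: fromQueueUrl,
        Entries: messages.map((msg, i) => ({
            Id: String(i),
            ReceiptHandle: msg.ReceiptHandle,
        })),
    }));
}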

Related

Cloud Function scheduler with memory allocation management

I'm trying to create a scheduled function on Firebase Cloud Functions with third-party APIs. As the data collected through the third-party API and passed to this scheduled function is huge, it returns "Function invocation was interrupted. Error: memory limit exceeded."
I have written this index.js (below) with help, but I'm still looking for the way the scheduled function should handle its large output data.
index.js
const firebaseAdmin = require("firebase-admin");
const firebaseFunctions = require("firebase-functions");

firebaseAdmin.initializeApp();
const fireStore = firebaseAdmin.firestore();

const express = require("express");
const axios = require("axios");
const cors = require("cors");

const serviceToken = "SERVICE-TOKEN";
const serviceBaseUrl = "https://api.service.com/";

const app = express();
app.use(cors());

const getAllExamples = async () => {
    var url = `${serviceBaseUrl}/examples?token=${serviceToken}`;
    var config = {
        method: "get",
        url: url,
        headers: {}
    };
    return axios(config).then((res) => {
        console.log("Data saved!");
        return res.data;
    }).catch((err) => {
        console.log("Data not saved: ", err);
        return err;
    });
}

const setExample = async (documentId, dataObject) => {
    return fireStore.collection("examples").doc(documentId).set(dataObject).then(() => {
        console.log("Document written!");
    }).catch((err) => {
        console.log("Document not written: ", err);
    });
}

module.exports.updateExamplesRoutinely = firebaseFunctions.pubsub.schedule("0 0 * * *").timeZone("America/Los_Angeles").onRun(async (context) => {
    const examples = await getAllExamples(); // This returns an object of large size, containing 10,000+ arrays
    const promises = [];
    for (var i = 0; i < examples.length; i++) {
        var example = examples[i];
        var exampleId = example["id"];
        if (exampleId && example) promises.push(setExample(exampleId, example));
    }
    return Promise.all(promises);
});
Firebase's official documentation simply shows how to set the timeout and memory allocation manually, as below, and I'm looking for the way to incorporate that into the above scheduled function.
exports.convertLargeFile = functions
    .runWith({
        // Ensure the function has enough memory and time
        // to process large files
        timeoutSeconds: 300,
        memory: "1GB",
    })
    .storage.object()
    .onFinalize((object) => {
        // Do some complicated things that take a lot of memory and time
    });
You should do as follows:
module.exports.updateExamplesRoutinely = firebaseFunctions
    .runWith({
        timeoutSeconds: 540,
        memory: "8GB",
    })
    .pubsub
    .schedule("0 0 * * *")
    .timeZone("America/Los_Angeles")
    .onRun(async (context) => {...});
However, you may still encounter the same error if you treat a huge number of "examples" in your Cloud Function. As you mentioned in the comment to the other answer, it is advisable to cut the work into chunks.
How to do that? It depends highly on your specific case (e.g. do you process 10,000+ examples on each run? Or is it only going to happen once, in order to "digest" a backlog?).
You could treat only a couple of thousand docs in the scheduled function and schedule it to run every xx seconds, as sketched below. Or you could distribute the work among several instances of the Cloud Function by using Pub/Sub-triggered versions of it.
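As a rough sketch of the chunking idea (the chunk size and the helper name are illustrative, not from the Firebase docs), you could await each slice of writes before starting the next, which caps how many promises are in flight at once:
// Illustrative helper: write `examples` to Firestore in chunks,
// awaiting each chunk before starting the next one.
async function setExamplesInChunks(examples, chunkSize = 500) {
    for (let i = 0; i < examples.length; i += chunkSize) {
        const chunk = examples.slice(i, i + chunkSize);
        await Promise.all(chunk.map((example) => setExample(example["id"], example)));
        console.log(`Wrote ${Math.min(i + chunkSize, examples.length)} of ${examples.length}`);
    }
}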

Node.js: share connection object throughout the application

I am having issues implementing generic-pool with puppeteer. Below is the relevant part of my code.
UPDATE
Thanks @Jacob for the help. I am clearer about the concept and how it works, and the code is more readable. I am still having an issue where a generic pool is created on every request. How do I ensure that the same generic pool is used every time instead of creating a new one?
browser-pool.js
const genericPool = require('generic-pool');
const puppeteer = require('puppeteer');

class BrowserPool {
    static async getPool() {
        const browserParams = process.env.NODE_ENV == 'development' ? {
            headless: false,
            devtools: false,
            executablePath: '/Applications/Google Chrome.app/Contents/MacOS/Google Chrome'
        } : {
            headless: true,
            devtools: false,
            executablePath: 'google-chrome-unstable',
            args: ['--no-sandbox', '--disable-dev-shm-usage']
        };
        const factory = {
            create: function() {
                return puppeteer.launch(browserParams);
            },
            destroy: function(instance) {
                console.log('closing browser in here.....');
                instance.close();
            }
        };
        const opts = {
            max: 5
        };
        this.myBrowserPool = genericPool.createPool(factory, opts);
    }
    static async returnPool() {
        if (this.myBrowserPool == "") {
            getPool();
        }
        return this.myBrowserPool.acquire();
    }
}
BrowserPool.myBrowserPool = null;
module.exports = BrowserPool;
process-export.js
const BrowserPool = require('./browser-pool');

async function performExport(params){
    const myPool = BrowserPool.getPool();
    const resp = BrowserPool.myBrowserPool.acquire().then(async function(client){
        try {
            const url = config.get('url');
            const page = await client.newPage();
            await page.goto(url, {waitUntil: ['networkidle2', 'domcontentloaded']});
            let gotoUrl = `${url}/dashboards/${exportParams.dashboardId}?csv_export_id=${exportParams.csvExportId}`;
            //more processing
            await page.goto(gotoUrl, {waitUntil: 'networkidle2' })
            await myPool().myBrowserPool.release(client);
            return Data;
        } catch(err) {
            try {
                const l = await BrowserPool.myBrowserPool.destroy(client);
            } catch(e) {
            }
            return err;
        }
    }).catch(function(err) {
        return err;
    });
    return resp;
}

module.exports.performExport = performExport;
My understanding is that:
1) When the application starts, I can spin up, for example, 2 Chromium instances, and then whenever I want to visit a page I can use either of the two connections. The browsers are essentially kept open, and we improve performance since browser startup can take time. Is this correct?
2) Where do I place the acquire() code? I understand this should be in app.js, so we acquire the instances right when the app boots, but my puppeteer code is in a different file. How do I pass the browser reference into the file that has my puppeteer code?
When I use the above code, a new browser instance spins up every time; the max property is not honored, and it opens as many instances as are requested.
My apologies if it's something very trivial and I might not have understood the concept fully. Any help in clarifying this would be really helpful.
When using a pool, you'll need to use .acquire() to obtain an object, and then .release() when you're done so the object is returned to the pool and made available to something else. Without using .release(), you might as well have no pool at all. I like to use this helper pattern with pools:
class BrowserPool {
    // ...
    static async withBrowser(fn) {
        const pool = BrowserPool.myBrowserPool;
        const browser = await pool.acquire();
        try {
            await fn(browser);
        } finally {
            pool.release(browser);
        }
    }
}
This can be used like this anywhere in your code:
await BrowserPool.withBrowser(async browser => {
    await browser.doSomeThing();
    await browser.doSomeThingElse();
});
The key is the finally clause makes sure that whether your tasks complete or throw an error, you'll cleanly release the browser back to the pool every time.
It sounds like you might have the concept of the max option backwards as well, expecting browser instances to be spawned up to max. Rather, max means "only create up to max number of resources." If you try to acquire a sixth resource without anything having been released, for example, the acquire(...) call will block until one item is returned to the pool.
The min option, on the other hand, means "keep at least this many items on hand at all times", which you can use to pre-allocate resources. If you want 5 items to be created in advance, set min to 5. If you want 5 items and only 5 items to be created, set both min and max to 5.
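For example (a minimal sketch; the numbers are only illustrative):
const opts = {
    min: 2, // pre-launch two browsers and always keep at least two on hand
    max: 5  // never create more than five; a sixth acquire() waits for a release
};
const pool = genericPool.createPool(factory, opts);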
Update:
I notice in your original code that you destroy in case of error and release when there isn't an error. I'd still prefer a wrapper function like mine to centralize all of the resource acquiring/releasing logic (the single-responsibility approach). Here's how it could be updated to automatically destroy on errors instead:
class BrowserPool {
    // ...
    static async withBrowser(fn) {
        const pool = BrowserPool.myBrowserPool;
        const browser = await pool.acquire();
        try {
            await fn(browser);
            pool.release(browser);
        } catch (err) {
            await pool.destroy(browser);
            throw err;
        }
    }
}
Addendum
Figuring out what's going on in your code will be easier if you embrace async functions instead of mixing async function style with Promise callback style. Here's how it can be rewritten:
async function performExport(params){
    const myPool = BrowserPool.myBrowserPool;
    const client = await myPool.acquire();
    try {
        const url = config.get('url');
        const page = await client.newPage();
        await page.goto(url, {waitUntil: ['networkidle2', 'domcontentloaded']});
        let gotoUrl = `${url}/dashboards/${exportParams.dashboardId}?csv_export_id=${exportParams.csvExportId}`;
        //more processing
        await page.goto(gotoUrl, {waitUntil: 'networkidle2' })
        await myPool.release(client);
        return Data;
    } catch(err) {
        try {
            const l = await myPool.destroy(client);
        } catch(e) {
        }
        return err; // Are you sure you want to do this? Would suggest throw err.
    }
}
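As for the update in the question (a new pool being created on every request): getPool() builds a fresh pool every time it is called. A minimal sketch of one way to fix that, assuming factory and opts are hoisted to module scope, is to create the pool lazily exactly once:
class BrowserPool {
    // ...
    static getPool() {
        // Create the pool only on the first call; every later call
        // reuses it, so all requests share one pool.
        if (!BrowserPool.myBrowserPool) {
            BrowserPool.myBrowserPool = genericPool.createPool(factory, opts);
        }
        return BrowserPool.myBrowserPool;
    }
}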

Using promises in async function

I'm trying to listen for a Stripe webhook call, then carry out some actions such as sending an email. My site is on Netlify and I've adapted some code I found in a tutorial.
This works locally, but not when I run it as a Netlify function (basically a lambda). Specifically, the part from client.getSpace onward doesn't appear to run at all. I suspect this has something to do with using these .then promise chains within an async function, but I'm not sure.
require('dotenv').config();
const stripe = require('stripe')(process.env.STRIPE_SECRET_KEY);
const contentful = require('contentful-management');
const client = contentful.createClient({
    accessToken: process.env.CONTENTFUL_CONTENT_MANAGEMENT_ACCESS_TOKEN
});
var postmark = require("postmark");
var serverToken = process.env.POSTMARK_SERVER_TOKEN;
var postmark_client = new postmark.ServerClient(serverToken);

exports.handler = async ({ body, headers }) => {
    try {
        const stripeEvent = stripe.webhooks.constructEvent(
            body,
            headers['stripe-signature'],
            process.env.STRIPE_WEBHOOK_SECRET
        );
        if (stripeEvent.type === 'checkout.session.completed') {
            console.log('confirm checkout session completed');
            const eventObject = stripeEvent.data.object;
            const entryId = eventObject.client_reference_id;
            let email = "";
            let secret = "";
            client.getSpace(process.env.WEBSITE_CONTENTFUL_SPACE_ID)
                .then(space => space.getEnvironment('master'))
                .then(environment => environment.getEntry(entryId))
                .then(entry => {
                    entry.fields.paymentStatus['en-GB'] = 'Paid';
                    email = entry.fields.email['en-GB'];
                    return entry.update();
                })
                .then(entry => entry.publish())
                .then(entry => postmark_client.sendEmailWithTemplate({
                    "From": "x@test.com",
                    "To": email,
                    "TemplateId": 12345678,
                    "TemplateModel": {
                        "abc": "xyz"
                    }
                }))
                .catch(console.error)
        }
        return {
            statusCode: 200,
            body: JSON.stringify({ received: true }),
        };
    } catch (err) {
        console.log(`Stripe webhook failed with ${err}`);
        return {
            statusCode: 400,
            body: `Webhook Error: ${err.message}`,
        };
    }
};
For what it's worth to you and anyone else who comes across this question: I had a similar issue using Next.js on Vercel. I rewrote the .then syntax using async/await and the problem seems to be solved, so far. I'm no expert, but I think in this case you would begin by replacing
client.getSpace(process.env.WEBSITE_CONTENTFUL_SPACE_ID)
    .then(space => space.getEnvironment('master'))
with something like
const space = await client.getSpace(process.env.WEBSITE_CONTENTFUL_SPACE_ID)
const environment = await space.getEnvironment('master')
and so on and so forth. I'm not sure how you would rewrite everything else, or if this will even help, but it put me back on track.
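For completeness, a sketch of what the fully awaited chain might look like (untested, reusing the identifiers from the question). Because every step is now awaited, the handler no longer returns its 200 response before the Contentful and Postmark work has finished:
if (stripeEvent.type === 'checkout.session.completed') {
    const eventObject = stripeEvent.data.object;
    const entryId = eventObject.client_reference_id;

    const space = await client.getSpace(process.env.WEBSITE_CONTENTFUL_SPACE_ID);
    const environment = await space.getEnvironment('master');
    const entry = await environment.getEntry(entryId);
    entry.fields.paymentStatus['en-GB'] = 'Paid';
    const email = entry.fields.email['en-GB'];
    const updated = await entry.update();
    await updated.publish();
    await postmark_client.sendEmailWithTemplate({
        "From": "x@test.com",
        "To": email,
        "TemplateId": 12345678,
        "TemplateModel": { "abc": "xyz" }
    });
}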

How do I make multiple fetch calls without getting 429 error?

I came across a problem in a book which I can't seem to figure out. Unfortunately, I don't have a live link for it, so if anyone could help me with my approach to this theoretically, I'd really appreciate it.
The process:
I get from a fetch call an array of string codes (["abcde", "fghij", "klmno", "pqrst"]).
I want to make a call to a link with each string code.
example:
fetch('http://my-url/abcde').then(res => res.json()).then(res => res).catch(error => new Error(`Error: ${error}`)); // result: 12345
fetch('http://my-url/fghij').then(res => res.json()).then(res => res).catch(error => new Error(`Error: ${error}`)); // result: 67891
...etc
Each of the calls is going to give me a number code, as shown.
I need to get the highest of those numbers, take its corresponding string code, and make another call with that.
"abcde" => 1234
"fghij" => 5314
"klmno" => 3465
"pqrst" => 7234 <--- winner
fetch('http://my-url/pqrst').then(res => res.json()).then(res => res).catch(error => new Error(`Error: ${error}`));
What I tried:
let codesArr = []; // array of string codes
let promiseArr = []; // array of fetches for each string code, meant to be used in Promise.all()
let codesObj = {}; // object mapping each string code to the number code from Promise.all()

fetch('http://my-url/some-code')
    .then(res => res.json())
    .then(res => codesArr = res) // now `codesArr` is ["abcde", "fghij", "klmno", "pqrst"]
    .catch(error => new Error(`Error: ${error}`));

for (let i = 0; i < codesArr.length; i++) {
    promiseArr.push(
        fetch(`http://my-url/${codesArr[i]}`)
            .then(res => res.text())
            .then(res => {
                codesObj[codesArr[i]] = res;
                // This is to get an object from which I can later get the highest number and its string code. Like this:
                // codesObj = {
                //     "abcde": 12345,
                //     "fghij": 67891
                // }
            })
            .catch(error => new Error(`Error: ${error}`)));
    // I am trying to build an array of fetches, so that I can use it later in Promise.all()
}

Promise.all(promiseArr) // I wanted this to go through all the fetches inside `promiseArr` and return all of the results at once.
    .then(res => {
        for (let i = 0; i < res.length; i++) {
            console.log(res[i]);
            // this should output the code number for each call (`12345`, `67891`...etc)
            // this is where I get lost
        }
    })
One of the problems with my approach so far seems to be that it makes too many requests and I get a 429 error. I sometimes get the number codes all right, but not often.
As you already found out, the 429 means that you sent too many requests:
429 Too Many Requests
The user has sent too many requests in a given amount of time ("rate limiting").
The response representations SHOULD include details explaining the condition, and MAY include a Retry-After header indicating how long to wait before making a new request.
For example:
HTTP/1.1 429 Too Many Requests
Content-Type: text/html
Retry-After: 3600

<html>
    <head>
        <title>Too Many Requests</title>
    </head>
    <body>
        <h1>Too Many Requests</h1>
        <p>I only allow 50 requests per hour to this Web site per
        logged in user. Try again soon.</p>
    </body>
</html>
Note that this specification does not define how the origin server identifies the user, nor how it counts requests. For example, an origin server that is limiting request rates can do so based upon counts of requests on a per-resource basis, across the entire server, or even among a set of servers. Likewise, it might identify the user by its authentication credentials, or a stateful cookie.
Responses with the 429 status code MUST NOT be stored by a cache.
To handle this issue you should reduce the number of requests made in a given amount of time. Iterate over the codes with a delay, spacing the requests out by a few seconds. If the delay is not specified in the API documentation or the 429 response, you have to find one that works by trial and error. In the example below I've spaced them out by 2 seconds (2000 milliseconds).
This can be done by using setTimeout() to execute a piece of code later, combined with a Promise to create a sleep function. When iterating over the initially returned array, make sure to await sleep(2000) to create a 2 second delay between each iteration.
An example could be:
const fetch = createFetchMock({
    "/some-code": ["abcde", "fghij", "klmno", "pqrst"],
    "/abcde": 12345,
    "/fghij": 67891,
    "/klmno": 23456,
    "/pqrst": 78912,
});

const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

(async function () {
    try {
        const url = "https://my-url/some-code";
        console.log("fetching url", url);
        const response = await fetch(url);
        const codes = await response.json();
        console.log("got", codes);
        const codesObj = {};
        for (const code of codes) {
            await sleep(2000);
            const url = `https://my-url/${code}`;
            console.log("fetching url", url);
            const response = await fetch(url);
            const value = await response.json();
            console.log("got", value);
            codesObj[code] = value;
        }
        console.log("codesObj =", codesObj);
    } catch (error) {
        console.error(error);
    }
})();

// fetch mocker factory
function createFetchMock(dataByPath = {}) {
    const empty = new Blob([], { type: "text/plain" });
    const status = {
        ok: { status: 200, statusText: "OK" },
        notFound: { status: 404, statusText: "Not Found" },
    };
    const blobByPath = Object.create(null);
    for (const path in dataByPath) {
        const json = JSON.stringify(dataByPath[path]);
        blobByPath[path] = new Blob([json], { type: "application/json" });
    }
    return function (url) {
        const path = new URL(url).pathname;
        const response = (path in blobByPath)
            ? new Response(blobByPath[path], status.ok)
            : new Response(empty, status.notFound);
        return Promise.resolve(response);
    };
}
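From there, the remaining part of the question, picking the winning code and making the final call, might look like this inside the same async function, assuming codesObj has been filled as above:
// Find the string code with the highest number and fetch it.
const winner = Object.keys(codesObj)
    .reduce((best, code) => (codesObj[code] > codesObj[best] ? code : best));
console.log("winner =", winner);
const finalResponse = await fetch(`https://my-url/${winner}`);
const finalValue = await finalResponse.json();
console.log("final value =", finalValue);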
In this case you should run and wait for each fetch to finish before starting the next one, using async/await:
const runFetch = async (codesArr) => {
    const codesObj = {};
    for (let i = 0; i < codesArr.length; i++) {
        const rawResponse = await fetch(`http://my-url/${codesArr[i]}`);
        const codeResponse = await rawResponse.json();
        console.log(codeResponse);
        codesObj[codesArr[i]] = codeResponse;
    }
    return codesObj;
}
Hope that helps you.

how to use Firebase Cloud Functions with promises and forEach?

I am trying to do TWO things here:
1) Send a notification to all employees.
2) Copy a Special ref to the employee's id ref; if no Special ref exists, I will copy the General ref.
The program runs without errors. In fact it's perfect, but sometimes I get a timed-out error from the notifications part of the code:
Error: fcm.googleapis.com network timeout. Please try again.
The code that copies one reference to another always works; I have never received an error there.
I feel the error is due to not correctly handling promises with forEach. Could you help me get this code to execute flawlessly, with correctly placed promises?
exports.myFunc = functions.https.onRequest( (request, response) => {
    admin.database().ref('/Employees').once('value').then(function(snap) {
        snap.forEach(function (snapshot) {
            var obj = snapshot.val();
            if (obj.department) { // only go ahead if dept is present
                console.log(' : ' + obj.department);
                var id, tkid, dept;
                id = obj.empId; tkid = obj.tokenId; dept = obj.department;
                var welcomeStr = "hello! Welcome to our Department";
                //================================================================ notifications
                var payload = {
                    data: {
                        greeting: welcomeStr,
                        to_who: id
                    }
                };
                admin.messaging().sendToDevice(tkid, payload)
                    .then(function(response){
                        console.log("Successfully sent message: ", response);
                    })
                    .catch(function(error){
                        console.log("Error sending message: ", error);
                    })
                //=================================================== Ref copying
                var destinationRef = admin.database().ref('/Employees/' + id); // final destination
                var option2Ref = admin.database().ref('/Company/General'); // when Special doesn't exist
                var option1Ref = admin.database().ref('/Company/Special'); // if Special exists
                option1Ref.once('value', function(snapshot1){
                    if (snapshot1.exists()) { // copy straight from option1 to Employees/id
                        option1Ref.once('value', function(snap) {
                            destinationRef.set( snap.val(), function(error) {
                                if( error && typeof(console) !== 'undefined' && console.error ) { console.error(error); }
                                console.log('DONE .... ' + id);
                            });
                        });
                    }
                    else { // we need to copy from option2 to Employees/id
                        option2Ref.once('value', function(snap) {
                            destinationRef.set( snap.val(), function(error) {
                                if( error && typeof(console) !== 'undefined' && console.error ) { console.error(error); }
                                console.log('DONE .... ' + id);
                            });
                        });
                    }
                });
            }
            else {
                console.log('No Department: ' + obj.dept);
                return;
            }
        });
    });
    response.send("WOKAY!");
});
Here I've rewritten your code in hopes of cleaning up the complicated promise chains. Dropped promises are one of the most common and difficult problems to debug; I've seen my fair share.
Important changes to your code:
- Modern async syntax, so that the promises are cleaner to organize.
- Promise.all instead of forEach, so that every promise is awaited without being forgotten and (hopefully) all of the promises are returned properly. All snapshot operations run concurrently, and the onRequest handler should wait until they're all finished before terminating.
- Promises instead of callbacks for once and set. I'm not really sure what libraries these are, but it looks like they accept promise-based usage, so I eliminated the callbacks in favor of promises.
- Please review the TODO mark: I'm not really sure what's intended for that else block, so be sure to patch that up.
async function handleSnapshot(snapshot) {
    const {empId, tokenId, department} = snapshot.val()

    // only go ahead if dept is present
    if (!department) throw new Error("no department")
    console.log("department:", department)

    //================================================================ notifications
    const payload = {
        data: {
            greeting: "Hello! Welcome to our Department",
            to_who: empId
        }
    }
    const response = await admin.messaging().sendToDevice(tokenId, payload)
    console.log("successfully sent message", response)

    //=================================================== Ref copying
    const destinationRef = admin.database().ref('/Employees/' + empId) // final destination
    const option2Ref = admin.database().ref('/Company/General') // when Special doesn't exist
    const option1Ref = admin.database().ref('/Company/Special') // if Special exists

    const snapshot1 = await option1Ref.once("value")

    // copy from option1 to Employees/id
    if (snapshot1.exists()) {
        await destinationRef.set(snapshot1.val())
        console.log("DONE1...", empId)
    }
    // TODO review this block
    // we need to copy from option2 to Employees/id
    else {
        const snapshot2 = await option2Ref.once("value")
        await destinationRef.set(snapshot2.val())
        console.log("DONE2...", empId)
    }
}
exports.myFunc = functions.https.onRequest(async (request, response) => {
    const snapshots = await admin.database().ref('/Employees').once('value')
    await Promise.all(snapshots.map(handleSnapshot))
    response.send("WOKAY!")
})
To add one very important step to @ChaseMoskal's answer:
For those using TypeScript with Firebase: since the Firebase server is not running Node v8+, there's a great chance you might get the error "TypeError: snapshots.map is not a function" on the line await Promise.all(snapshots.map(handleSnapshot)).
That's because in your tsconfig.json it's possibly "lib": ["es6"]. In that case, just add this small snippet to the accepted answer to get the Firebase DataSnapshot into an array that can be used with .map(...):
Longer Version:
exports.myFunc = functions.https.onRequest(async (request, response) => {
    const snapshots = await admin.database().ref('/Employees').once('value')
    var data_snap_arr = [];
    snapshots.forEach(function(child_Snapshot) {
        var stuff = child_Snapshot.val();
        stuff.key = child_Snapshot.key;
        data_snap_arr.push(stuff);
    });
    await Promise.all(data_snap_arr.map(handleSnapshot))
    response.send("WOKAY!")
})
Shorter Version:
exports.myFunc = functions.https.onRequest(async (request, response) => {
    const snapshots = await admin.database().ref('/Employees').once('value')
    let data_snap_arr = Object.keys(snapshots.val() || {}).map(k => snapshots.val()[k]);
    await Promise.all(data_snap_arr.map(handleSnapshot))
    response.send("WOKAY!")
})
