Make page display 10 seconds in a loop - javascript

I'm building a guessing game with Node JS. After collecting some data on the back-end, I send it to the front-end and the game starts. The data contains all 10 levels, so the game can run on a single page. Each level runs for 10 seconds. After the time is up, the user selection is sent to the server, and a result comes back. The answer is displayed, and then the content is changed to the "next level" (using the content in the big data object, therefore no refresh is needed).
I'm having some issues with having 10 levels run for 10 seconds each (or ~12 seconds with a delay for displaying the results).
This can't be done in some type of loop, since all awaits for each level will run at once. For instance:
// Promise-based delay: resolves after `ms` milliseconds.
function timeout(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
// NOTE(review): contrary to the claim above, this loop DOES run one level
// at a time when placed inside an async function (or an ES module with
// top-level await) — each `await` suspends the loop until its timer fires.
// The timeouts would only all start at once if the awaits were omitted.
for (let i = 0; i < 10; i++) {
displayPage(i);
await timeout(10000);
const result = await $.post(...) // gets results
displayResults(result);
await timeout(2000);
}
all the timeouts will run at once, and it won't work.
I thought of using a setInterval, but I'm not sure how to.. since I want to wait 10 seconds until checking the input, and then display the results for 2 seconds, and then move on.
For now the result I came up with is:
displayPage(level1);
await timeout(10000);
const result = await $.post(...)
displayResults(result);
await timeout(2000);
displayPage(level2);
await timeout(10000);
const result = await $.post(...)
displayResults(result);
await timeout(2000);
displayPage(level3);
await timeout(10000);
const result = await $.post(...)
displayResults(result);
await timeout(2000);
displayPage(level4);
await timeout(10000);
const result = await $.post(...)
displayResults(result);
await timeout(2000);
...
This does not seem efficient and I think there's a better way to do this, but I'm not sure how.
Any suggestions would be appreciated. Thanks!

I think this is what you are looking for:
// The page labels to show, in order.
const pages = [1, 2, 3, 4, 5];
run();

// Shows each page for 10 seconds and its result for 2 seconds, strictly in
// sequence: each await pauses the loop until the display promise resolves.
async function run() {
  for (let i = 0; i < pages.length; i++) {
    // Fix: pass the page label from the array (1..5) rather than the loop
    // index (0..4) — this matches the second solution, which shows
    // pages[index].
    await displayPage(pages[i]);
    const result = 'Some result';
    await displayResult(result);
  }
}

// Renders the page label, then resolves after the 10-second play window.
function displayPage(number) {
  text.innerText = 'Page ' + number;
  return new Promise(res => {
    setTimeout(res, 10000);
  });
}

// Renders the result, then resolves after the 2-second display window.
function displayResult(result) {
  text.innerText = 'Result: ' + result;
  return new Promise(res => {
    setTimeout(res, 2000);
  });
}
<div id="text"></div>
Another solution, without promises and loops:
// Pages to show, in order.
const pages = [1, 2, 3, 4, 5];
let currentPageIndex = 0;

displayPage();

// Shows the next page for 10 seconds, then hands off to displayResult();
// the two functions re-schedule each other via setTimeout (no promises,
// no loop), and the chain stops once every page has been shown.
function displayPage() {
  const page = pages[currentPageIndex];
  currentPageIndex += 1;
  if (page === undefined) return; // all pages done — end the chain
  text.innerText = 'Page ' + page;
  const result = 'Some result';
  setTimeout(() => displayResult(result), 10000);
}

// Shows a result for 2 seconds, then moves on to the next page.
function displayResult(result) {
  text.innerText = 'Result: ' + result;
  setTimeout(() => displayPage(), 2000);
}
<div id="text"></div>

Your first option seems to work assuming it is wrapped within an async function:
// Resolves after `ms` milliseconds — the awaitable building block.
function timeout(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms));
}

// Wrapped in an async function, each await pauses the loop, so the ten
// question/answer rounds run strictly one after another.
const testFunc = async () => {
  for (let round = 0; round < 10; round++) {
    console.log('page', round + 1, '- question');
    await timeout(3000);
    console.log('page', round + 1, '- answer');
    await timeout(1000);
  }
};
testFunc();

Use setInterval at roughly 1000 ms to create a worker, and add a state machine that toggles between the playing state (10s) and the waiting state (2s). You need a procedure that does the post call to the server, and an object to keep the data (levels and so on).
for example:
// Reference pseudocode (not copy-paste ready): a ~1s worker tick drives a
// state machine held in the game context object.
setInterval(funcWorker,1000,s_gameObj);
function funcWorker(f_context){
var l_dateNow = new Date();
if(f_context.is_playing){
// NOTE(review): this difference is a plain number of milliseconds;
// `totalSeconds()` does not exist on Number — divide by 1000 when
// implementing this for real.
var l_elapsed = l_dateNow.getTime() - f_context.dateLevelStart.getTime();
if(l_elapsed.totalSeconds() >= f_context.LEVEL_TIME){
f_context.end_level();
}
}else if(f_context.is_waiting_user){
//same check as above but on the waiting start
....
f_context.next_level();
}else if(f_context.is_waiting_server){
//do whatever (e.g. show a spinner while the post is in flight)
}
}
the end_level() should set the state flag in the context(game) object, to sending and send to the server. As the server returns in the response function set the state to waiting user and init the corresponding time variable to now(). The next_level() should set the state to playing and init the corresponding time variable to now() so that the timer can count. Consider the code above as a reference and not as a copy-paste resource.

Related

Wait until an external operation completes with setInterval before finishing

I got a JS code that simply activates a method. This method invokes an HTTP request which is causing some remote process to begin. I then need to check every 5 seconds if the remote process ended with a timeout of 5 minutes, after which I need to stop the waiting and throw an error if the timeout had expired, otherwise I need to simply log the result and complete the method.
What I'm not sure is how do I stop the execution of the main method until I get the response so I can have a value to log. This is what I got so far:
(async function(param)
{
...
var res = await fetch(...); //activate the remote process
var textAnswer = await res.text();
var infoObj = JSON.parse(textAnswer);
startChecks(infoObj.info.id); // this is the method which I need to await on somehow
// NOTE(review): missing a closing ')' — to invoke the IIFE this line
// should read `})("paramValue");`
}("paramValue");
// Kicks off a 5-second polling interval for the given process id.
async function startChecks(id)
{
var startTime = new Date().getTime();
// NOTE(review): `intervalId` has no var/let/const, so it becomes an
// implicit global — that is what lets checkStatus clear it later.
intervalId = setInterval(checkStatus, 5000, id, startTime);
}
// Polls the remote process once; stops the interval when the process
// reports finished or the 5-minute deadline has passed.
async function checkStatus(id, startTime)
{
//if more than 5 minutes had passed
if(new Date().getTime() - startTime > 300000)
{
clearInterval(intervalId);
// NOTE(review): this throw happens inside a timer callback, so it
// surfaces as an unhandled rejection — the outer function cannot catch it.
throw new Error("External pipeline timeout expired");
}
var res = await fetch(...); //check remote process
var ans = await res.text();
var obj = JSON.parse(ans);
if(obj.finished) clearInterval(intervalId);
}
Like I said, what I want to achieve is that my main function won't end until all intervals are done with either the error thrown or the process finishes. How can I achieve that?
You would create a helper function that executes your function in given intervals until it resolves to something different than undefined. But only for the maximum amount of time.
This could look something like this:
// Resolves after `time` milliseconds.
async function sleep(time) {
  return new Promise((wake) => setTimeout(wake, time));
}

// Polls `callback` every `interval` ms until it yields a truthy value,
// which is returned. Throws Error('timed out') once `timeout` ms have
// elapsed without a truthy result.
async function repeatedExecution(callback, interval, timeout) {
  // compute the cut-off once, up front
  const deadline = Date.now() + timeout;
  while (Date.now() < deadline) {
    // start the interval timer before invoking the callback, so the
    // callback's own runtime counts toward the interval
    const tick = sleep(interval);
    const outcome = await callback();
    if (outcome) {
      // callback produced something truthy — hand it back
      return outcome;
    }
    // wait out the remainder of the interval, then poll again
    await tick;
  }
  // deadline passed without a truthy result
  throw new Error('timed out');
}
// Drives the whole flow: start the remote process, then poll its status
// until it reports finished or the deadline passes.
async function run() {
  /*
  var res = await fetch(...); //activate the remote process
  var textAnswer = await res.text();
  var infoObj = JSON.parse(textAnswer);
  */
  try {
    const result = await repeatedExecution(() => {
      /*
      var res = await fetch(...); //check remote process
      var ans = await res.text();
      var obj = JSON.parse(ans);
      if(obj.finished) {
        return true
      }
      */
    }, 1000, 3000);
    // do something on success
  } catch (failure) {
    // handle the error (timeout case)
    console.error(failure);
  }
}
run();

How do I make a JavaScript loop wait for each iteration of the loop to finish before starting the next?

I have a loop that gets iterated through 1000 times where each iteration makes a request and then prints the result of that request.
Similar to below.
// NOTE(review): console.log returns undefined, so `start` is undefined and
// `Date.now() - start` below prints NaN; presumably the author meant
// `let start = Date.now()`.
let start = console.log(Date.now())
// NOTE(review): `i` has no let/const (implicit global), and all 1000
// callbacks are attached immediately — nothing waits between iterations.
for (i = 0; i < 1000; i++) {
request.then(data => {
console.log(Date.now() - start)
})
}
This however results in the first request taking much longer to be completed then if there was just 1 iteration of the loop.
with i < 1000:
5860
...
with i < 1:
220,
...
For the aim of this script however, I want to wait for each result to be received before starting the next iteration of the loop.
If you want to stick with the ES5 way of handling Promises, you could use the following approach:
// Fix: capture the timestamp, then log it. The original
// `const start = console.log(Date.now())` left `start` undefined (console.log
// returns undefined), making every elapsed-time measurement NaN.
const start = Date.now();
console.log(start);
// Create an instantly resolved promise, then append each request to the
// chain so the requests run strictly one after another (ES5-style sequencing).
for (let i = 0, p = Promise.resolve(); i < 1000; i++) {
  // append new promise to the chain
  p = p.then(() => request())
    .then(() => console.log(Date.now() - start));
}
If you can use ES6 methods, you could write this using the async/await pattern like so:
// Runs the 1000 requests strictly in sequence: each await suspends the
// loop until the current request resolves.
const asyncLoop = async () => {
  // Fix: store the timestamp, then log it — console.log returns undefined,
  // so the original made `Date.now() - start` evaluate to NaN.
  const start = Date.now();
  console.log(start);
  for (let i = 0; i < 1000; i++) {
    await request();
    console.log(Date.now() - start);
  }
};
asyncLoop();
The chance that you really want to make the requests one after the other is actually pretty small though, so in case you want to make the request simultaneously, but do something after all of them resolved, you can use Promise.all(...)
const start = console.log(Date.now())
const requests = [request1, request2, ...];
Promise.all(requests)
.then(() => console.log(Date.now() - start));
You can use the async-await pattern here.
You can achieve this by changing the code
// Runs 1000 requests one after another, logging the elapsed time after each.
async function iteration() {
  // Fix: console.log returns undefined — capture Date.now() directly so the
  // elapsed-time arithmetic below works instead of producing NaN.
  const start = Date.now();
  console.log(start);
  for (let i = 0; i < 1000; i++) {
    const data = await httpRequest();
    console.log(Date.now() - start);
  }
}

// Adapts the `request` promise so the caller can await its data.
// NOTE(review): if `request` is already a promise this is the
// explicit-construction anti-pattern — `return request` would suffice.
async function httpRequest() {
  return new Promise((resolve, reject) => {
    request.then(data => {
      //Do anything you want to do with data
      resolve(data);
    }).catch(error => {
      console.error(`Error in request`);
      reject(error);
    });
  });
}
Explanation:
I have moved the request code in a separate function httpRequest and this function will return promise on success
I called httpRequest in for loop using await keyword, now loop will wait till the request is completed
I have also wrapped for loop code in function since you can only use await inside an async function

NodeJs Pagination,recursive promise problem

I am scraping multiple pages with cheerio and axios in node.js
I am having a hard time with Promises, can someone help me return the JSON if I hit the last page? Thanks!
// Scrapes `url`, then schedules itself for the next page after 1 second.
const getWebsiteContent = async (url) => {
await axios.get(url).then(res => {
const $ = cheerio.load(res.data)
// NOTE(review): `pageNum` (and `indexPage`, `text`, `baseUrl` below) are
// implicit globals / defined outside this snippet.
pageNum = getTotalpages($); // Get the pagination
console.log(url);
//Some scraping here
})
indexPage++; // Increment to the next page
const nextPageLink = baseUrl + '&page=' + indexPage; // get next page
if (indexPage > pageNum) {
var editedText = text.slice(0, text.length - 1);
editedText += ']}';
editedText = JSON.parse(editedText); // I want to return this and use elsewhere
return editedText;
}
// NOTE(review): the recursive call's result is discarded inside setTimeout,
// so the outer promise resolves to undefined long before the last page —
// this is why `await getWebsiteContent(baseUrl)` yields no value.
setTimeout(async () => {
getWebsiteContent(nextPageLink); // Call itself
}, 1000);
}
var myJSON= await getWebsiteContent(baseUrl); // something like this
I would write getPages as an async generator -
// Async generator yielding { page, dom } for every page starting at
// initPage, pausing 1 second between requests.
async function* getPages (href, initPage = 0) {
  const res = await axios.get(setPage(href, initPage))
  const $ = cheerio.load(res.data)
  const pages = getTotalpages($)
  yield { page: initPage, dom: $ }
  // Fix: start at initPage + 1 — the original started at p = initPage,
  // which re-fetched and re-yielded the first page a second time.
  for (let p = initPage + 1; p < pages; p++) {
    await sleep(1000)
    const r = await axios.get(setPage(href, p))
    yield { page: p, dom: cheerio.load(r.data) }
  }
}
This depends on helper setPage that manipulates the href page number using the url module, which is much safer than hobbling together strings by hand -
// Returns `href` with its "page" query parameter set to `page`; the URL
// API handles escaping and query-string structure for us.
function setPage (href, page) {
  const target = new URL(href)
  target.searchParams.set("page", page)
  return target.toString()
}
And another helper, sleep, which prevents the mixing of setTimeout with async-based code. This allows us to easily pause between pages -
// Promise-based delay so pauses can be awaited inline instead of nesting
// setTimeout callbacks into async code.
async function sleep (ms) {
  return new Promise((wake) => setTimeout(wake, ms))
}
Finally we write scrape which is a simple wrapper around getPages. This allows us to reuse the getPages function to scrape various elements as needed. A benefit of using this approach is that the caller can determine what happens with each page. Below we push to result array, but as another example we could write each page to disk using the fs module. Obviously this for you to decide -
// Walks every page produced by getPages, collecting whatever getSomeData
// extracts from each page's DOM into one array.
async function scrape (href) {
  const collected = []
  for await (const { page, dom } of getPages(href)) {
    console.log("scraped page", page) // some status message
    collected.push(getSomeData(dom)) // get something from each page
  }
  return collected
}
scrape(myUrl).then(console.log, console.error)
You shouldn't be using then with your async / await code.
pagination should look something like this:
let response = await axios.get(url)
let $ = cheerio.load(response.data)
// do some scraping
// Follow the rel=next link until the page no longer has one: the
// assignment inside the while condition yields undefined (falsy) on the
// last page, which ends the loop.
while(url = $('[rel=next]').attr('href')){
response = await axios.get(url)
$ = cheerio.load(response.data)
// do more scraping
}

How to wait until a promise is completed before starting the next iteration of an "infinite" for loop in JavaScript

I am trying to figure out how to wait for a promise to be resolved before starting the next iteration in a for loop. Someone had suggested for me to use the setInterval() function instead of a for loop, which is fine if you can guess the time that it will take for the promise to resolve, but it obviously is not ideal.
const puppeteer = require('puppeteer-extra')
const StealPlugin = require('puppeteer-extra-plugin-stealth')
puppeteer.use(StealPlugin())
let arrayOfUrls = [
"https://google.com",
"https://facebook.com",
"https://youtube.com",
];
let initialIndex = 0;
let finalIndex = 0;
// Launches a fresh browser, screenshots `url`, then closes the browser.
async function scraper(url) {
const browser = await puppeteer.launch({headless: false});
const page = await browser.newPage();
await page.goto(url);
await page.screenshot({path: 'example' + initialIndex.toString() + '.png'});
await console.log(url + " screenshot complete!")
await browser.close();
}
// NOTE(review): setInterval does not wait for scraper()'s promise, so with
// a short period the runs overlap — each overlapping run launches another
// browser, which is what triggers the MaxListenersExceededWarning.
const interval = setInterval(() => {
if (initialIndex < arrayOfUrls.length) {
scraper(arrayOfUrls[initialIndex]);
initialIndex += 1;
} else {
clearInterval(interval);
console.log("All complete!")
loopy()
}
}, 300)
// Endlessly re-checks the URLs — again without awaiting each scrape, so
// the same overlap problem applies here.
function loopy() {
setInterval(() => {
if (finalIndex === arrayOfUrls.length) {
finalIndex = 0;
}
scraper(arrayOfUrls[finalIndex]);
finalIndex += 1;
}, 300)
}
This above code is just experimental at the moment, but what I am ultimately trying to achieve is make a series of API requests using URLs from a text file and then create an array containing an object for each URL. This is the const interval = setInterval(() => { in my code.
Then I want to be able to periodically check each request again and check if there is a change in the API request and have this be performed indefinitely. This is the loopy() function in my experimental code. If there is I want to send a notification to myself.
My current implementation works fine if I set the time for the setInterval() to something high like 5000ms, but if it is something low like 300ms then the promises cannot be fullfilled quickly enough and I end up getting this error:
(node:9652) MaxListenersExceededWarning: Possible EventEmitter memory leak detected. 11 exit listeners added to [process]. Use emitter.setMaxListeners() to increase limit
What would be the best way to implement the logic for such a program?
Edit:
After the idea in the comments from WSC I attempted the following and it seems to work.
const puppeteer = require('puppeteer-extra')
const StealPlugin = require('puppeteer-extra-plugin-stealth')
puppeteer.use(StealPlugin())
let arrayOfUrls = [
"https://google.com",
"https://facebook.com",
"https://youtube.com",
];
let initialIndex = 0;
let finalIndex = 0;
// Launches a fresh browser, screenshots `url`, then closes the browser.
async function scraper(url) {
const browser = await puppeteer.launch({headless: false});
const page = await browser.newPage();
// NOTE(review): this fixed 5s pause is an artificial delay, not required
// for sequencing — the awaits below already serialize the work.
await page.waitFor(5000)
await page.goto(url);
await page.screenshot({path: 'example' + initialIndex.toString() + '.png'});
await console.log(url + " screenshot complete!")
await browser.close();
}
// Screenshots the URLs one at a time by awaiting scraper(), then recursing.
async function initialScrape() {
if (initialIndex < arrayOfUrls.length) {
await scraper(arrayOfUrls[initialIndex]);
initialIndex += 1;
// NOTE(review): the recursive call is neither awaited nor returned, so
// errors from deeper iterations surface as unhandled rejections.
initialScrape()
} else {
console.log("All complete!")
loopy()
}
}
// Endlessly cycles through the URLs, one awaited scrape at a time.
async function loopy() {
if (finalIndex === arrayOfUrls.length) {
finalIndex = 0;
}
await scraper(arrayOfUrls[finalIndex]);
finalIndex += 1;
loopy()
}
initialScrape()
I have implemented the artificial delay into the scraper() function instead in the form of await page.waitFor(5000). However, I am not entirely sure if this particular implementation is recommended or not for the program I am trying to achieve.
The async/await syntax works fine with loops. You don't need to take a recursive approach.
// Screenshots every URL once in order, then keeps cycling through the
// list forever — one awaited scrape at a time, no timers required.
async function main() {
  let index = 0;
  while (index < arrayOfUrls.length) {
    await scraper(arrayOfUrls[index]);
    index += 1;
  }
  console.log("All complete!");
  for (;;) {
    for (let cycleIndex = 0; cycleIndex < arrayOfUrls.length; cycleIndex++) {
      await scraper(arrayOfUrls[cycleIndex]);
    }
  }
}
main().catch(console.error);
Or even easier with for … of loops:
// Screenshots every URL in order, awaiting each before the next starts.
async function screenshotAll() {
  for (const url of arrayOfUrls) {
    await scraper(url);
  }
}

// One full pass first, then keep re-checking the list forever.
async function main() {
  await screenshotAll();
  console.log("All complete!");
  for (;;) {
    await screenshotAll();
  }
}
main().catch(console.error);
Btw, for performance I would recommend to call puppeteer.launch({headless: false}); only once and then do all screenshots with the same browser instance.

Prevent recursive function running setTimeout from compounding

I wrote a Chip-8 emulator in JavaScript (source) and made a playable browser version here.
It contains an HTML select:
<select>
<option value="PONG">PONG</option>
<option value="TETRIS">TETRIS</option>
</select>
Which loads a ROM from a file every time one is selected:
// When the user picks a different ROM from the dropdown, load it.
document.querySelector('select').addEventListener('change', (event) => {
  loadRom(event.target.value);
});
This loadRom function fetches the ROM, converts it to a useful form, and loads it into an instance of a CPU class. The cpu has a fetch-decode-execute cycle that gets called with step(). I created this cycle (main) function to call itself in a setTimeout.
// Fetches the selected ROM, loads it into the CPU, and starts the
// fetch-decode-execute cycle.
const loadRom = async rom => {
const response = await fetch(`./roms/${rom}`)
const arrayBuffer = await response.arrayBuffer()
const uint8View = new Uint8Array(arrayBuffer);
const romBuffer = new RomBuffer(uint8View)
cpu.interface.clearDisplay()
cpu.load(romBuffer)
let timer = 0
// Self-rescheduling step loop; ticks the CPU timer on every 5th pass.
async function cycle() {
timer++
if (timer % 5 === 0) {
cpu.tick()
timer = 0
}
await cpu.step()
// NOTE(review): nothing ever cancels this timeout chain, so each call to
// loadRom starts another chain — the compounding speed-up described below.
setTimeout(cycle, 3)
}
cycle()
}
This works fine, until I load a new ROM with the select. Now the cycle is compounded, and the game goes twice as fast. Every time you load a new ROM, it compounds again and creates a new cycle.
How can I create an infinite loop, but stop it and start a brand new one without compounding it?
To start with, have the current timeout to be a persistent variable, and then call clearTimeout with it right before calling loadRom. If nothing has been loaded yet, the clearTimeout just won't do anything.
But because you have awaits as well, you'll need to check whether a new rom gets loaded while the awaits are going on. One way to accomplish this would be to have another persistent variable, the current romBuffer being used - if it's not the same as the romBuffer in the function closure, then another rom has started, so return immediately (and don't recursively create a timeout).
// Handle of the pending cycle timeout (so the caller can clearTimeout it
// before starting a new load, as described above).
let timeout;
// The most recently loaded ROM buffer; stale cycle chains compare against
// this to know they should stop.
let currentRomBuffer;
const loadRom = async rom => {
const response = await fetch(`./roms/${rom}`)
const arrayBuffer = await response.arrayBuffer()
const uint8View = new Uint8Array(arrayBuffer);
const romBuffer = new RomBuffer(uint8View)
currentRomBuffer = romBuffer;
cpu.interface.clearDisplay()
cpu.load(romBuffer)
let timer = 0
async function cycle() {
timer++
if (timer % 5 === 0) {
cpu.tick()
timer = 0
}
await cpu.step();
// A newer loadRom call has replaced currentRomBuffer while we awaited:
// this chain is stale, so bail out instead of rescheduling.
if (romBuffer !== currentRomBuffer) {
return;
}
timeout = setTimeout(cycle, 3);
}
cycle()
};
Try using setInterval and keep track of the handle.
Then clear it when loading the new (or the same) rom.
// Handle of the currently running cycle interval, so a new load can cancel
// the previous game's loop instead of compounding with it.
// Fix: the original stored this on `this.lastLoad`, but inside a
// module-scope arrow function `this` is undefined (in module/strict code),
// so the clearInterval guard could never run.
let lastLoad = null;

// Fetches the selected ROM, loads it into the CPU, and (re)starts the
// fetch-decode-execute cycle on a 3 ms interval.
const loadRom = async rom => {
  const response = await fetch(`./roms/${rom}`)
  const arrayBuffer = await response.arrayBuffer()
  const uint8View = new Uint8Array(arrayBuffer);
  const romBuffer = new RomBuffer(uint8View)
  cpu.interface.clearDisplay()
  cpu.load(romBuffer)
  // if a previous rom's cycle is still running, stop it
  if (lastLoad)
    clearInterval(lastLoad);
  let timer = 0
  // One emulation step; ticks the CPU timer on every 5th pass.
  async function cycle() {
    timer++
    if (timer % 5 === 0) cpu.tick()
    await cpu.step()
  }
  // keep track of the handle so the next load can cancel this loop
  lastLoad = setInterval(cycle, 3);
}

Categories

Resources