NodeJS: Abort request function inside loop - javascript

I am creating an Electron application that can download multiple pages of a website. I want to be able to stop downloading the pages whenever I hit the stop button, by instantly terminating the function. I tried stopping the function by reassigning it to a new function, but because the downloading happens in a recursive loop it is not stopped (I am also unsure whether this approach would even stop the startDownload function).
Is there a way to easily stop the execution of a function without stopping the whole script?
Edit: If there isn't a way to stop the execution of the function, is there a way to send a message to the function and stop the NodeJS request?
Edit 2: NodeJS requests have an abort method, but I am unsure how to tell the running function to abort its request.
startDownload('website.com');

function startDownload(url) {
  var startAt = 0;
  var maxPages = 15;
  download(url, startAt, maxPages);
}

function download(url, page, maxPages) {
  if (page == maxPages) { finishDownload(url); return; }
  request(url + '?p=' + page, (error, response, html) => {
    // request() reports errors through its callback
    if (error) { finishDownload(url, 'Failed to download'); return; }
    downloadPage(html);
    download(url, page + 1, maxPages); // recurse until maxPages is reached
  });
}

function finishDownload(url, error = undefined) {
  if (!error) {
    alert(url + ' finished downloading');
  } else {
    alert(url + error);
  }
}

$(document).on('click', '#stopDownload', function() {
  // attempt to "kill" the function by swapping it out and back in -
  // this does not stop the recursion that is already running
  var downloadFunction = startDownload;
  startDownload = function() { return false };
  startDownload = downloadFunction;
  alert('download stopped by killing function');
});

Yes, you can stop it: request() returns a request object, and calling abort() on that object cancels the request:
var r = request({uri: 'http://stackoverflow.com'}, function (error, response, body) {
  console.log('url requested');
  if (!error) {
    console.log(body);
  } else {
    console.log(error);
  }
});

r.abort();
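A minimal sketch of how that could be wired into the download loop from the question: keep a reference to the request currently in flight plus a stopped flag, check the flag before starting the next page, and abort the in-flight request from the click handler. The currentRequest and stopped names are illustrative, not part of the original code; downloadPage, finishDownload and the request module are assumed to be the same as above.

var currentRequest = null;
var stopped = false;

function download(url, page, maxPages) {
  if (stopped) { finishDownload(url, ' download stopped'); return; }
  if (page == maxPages) { finishDownload(url); return; }
  currentRequest = request(url + '?p=' + page, (error, response, html) => {
    if (error) { finishDownload(url, 'Failed to download'); return; } // an aborted request also lands here
    downloadPage(html);
    download(url, page + 1, maxPages); // next page
  });
}

$(document).on('click', '#stopDownload', function () {
  stopped = true;                               // prevents the next page from being requested
  if (currentRequest) currentRequest.abort();   // cancels the request that is currently in flight
});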

Related

How can I add a setTimeOut() to run a function and insert data multiple times?

A few days ago I was working on a project and ran into some problems, which were solved in a previous question; let me try to summarize it.
I need to insert multiple objects into a SQL Server database. For that, I wrote a function that loops over another function, which opens a connection, inserts and closes the connection, then repeats this over and over again.
It worked fine, until today, when it was tested on a colleague's PC against the work server and I got these errors:
Error: Requests can only be made in the LoggedIn state, not the LoggedInSendingInitialSql state
Error: Requests can only be made in the LoggedIn state, not the SentLogin7WithStandardLogin state
Here's the code we tested (the same as in my last question); it works on my PC, but not on the other one:
var config = {
  ...
};

function insertOffice(index) {
  var connection = new Connection(config);
  connection.on("connect", function (err) {
    console.log("Successful connection");
  });
  connection.connect();

  let url = `https://api.openweathermap.org/data/2.5/weather?lat=${offices[index].latjson}&lon=${offices[index].lonjson}&appid=${api_key}&units=metric&lang=sp`;

  fetch(url)
    .then((response) => { return response.json(); })
    .then(function (data) {
      var myObject = {
        Id_Oficina: offices[index].IdOficina,
        ...
      };
      const request = new Request(
        "EXEC USP_BI_CSL_insert_reg_RegistroTemperaturaXidOdicina @IdOficina, ...",
        function (err) {
          if (err) {
            console.log("Couldn't insert data (" + index + "), " + err);
          } else {
            console.log("Data with ID: " + myObject.Id_Oficina + " inserted successfully (" + index + ").");
          }
        }
      );
      request.addParameter("IdOficina", TYPES.SmallInt, myObject.Id_Oficina);
      ...
      request.on("row", function (columns) {
        columns.forEach(function (column) {
          if (column.value === null) {
            console.log("NULL");
          } else {
            console.log("Product id of inserted item is " + column.value);
          }
        });
      });
      request.on("requestCompleted", function () {
        connection.close();
      });
      connection.execSql(request);
    });
}
function functionLooper() {
  for (let i = 0; i < offices.length; i++) {
    let response = insertOffice(i);
  }
}

functionLooper();
So, I thought it would be a good idea to use setTimeout to:
Run functionLooper().
Open connection, insert and close.
Wait a few seconds.
Repeat.
So, I changed to this:
setTimeout(functionLooper, 2000);

function functionLooper() {
  for (let i = 0; i < offices.length; i++) {
    let response = insertOffice(i);
  }
}
It works, but, as you can see, it only waits before the first run, so I tried to make a function that calls setTimeout(functionLooper, 2000) the way functionLooper() loops insertOffice(), but it didn't work either.
function TimerLooper() {
  for (let i = 0; i < offices.length; i++) {
    setTimeout(functionLooper, 500);
  }
}

function functionLooper() {
  for (let i = 0; i < offices.length; i++) {
    let response = insertOffice(i);
  }
}

TimerLooper();
This shows me this error:
Error: Validation failed for parameter 'Descripcion'. No collation was set by the server for the current connection.
file:///...:/.../.../node_modules/node-fetch/src/index.js:95
reject(new FetchError(`request to ${request.url} failed, reason: ${error.message}`, 'system', error));
^ FetchError: request to https://api.openweathermap.org/data/2.5/weather?lat=XX&lon=XX&appid=XX&units=metric&lang=sp failed, reason: connect ETIMEDOUT X.X.X.X:X
So, I have some questions:
How can I use setTimeout properly? I wrote this function based on what I've seen here on SO, but I just can't get it right and I don't know what I'm doing wrong.
Why does it work on my PC but not on the other one? Do we have to change some kind of config or something?
Is using setTimeout the correct way to solve this problem? If not, what would you suggest?
Could you do something like:
// edit: not disconnect but end
connection.on("end", function () {
  functionLooper(index + 1);
});

function functionLooper(i) {
  if (i < offices.length) insertOffice(i);
}
Edit: according to the tedious docs, there is an end event emitted on connection.close():
Event: 'end'
function () { }
The connection has ended. This may be as a result of the client calling close(), the server closing the connection, or a network error.
My suggestion from above:
var config = {
  ...
};

function insertOffice(index) {
  var connection = new Connection(config);
  connection.on("connect", function (err) {
    console.log("Successful connection");
  });
  connection.connect();

  let url = `...`;

  fetch(url)
    .then((response) => { return response.json(); })
    .then(function (data) {
      ...
    });

  connection.on("end", function () {
    functionLooper(index + 1);
  });
}

function functionLooper(i) {
  if (i < offices.length) insertOffice(i);
}
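To kick the chain off you would call the looper once for the first office; every subsequent insert is then triggered by the previous connection's end event. A one-line usage sketch, assuming the code above:

functionLooper(0); // start with the first office; each 'end' event advances to the next one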

Throw custom timeout exception

I have a Google Apps Script web app ("Web App") that executes as the user, then calls individual functions from another Apps Script project ("API Executable") via the Apps Script API using UrlFetchApp.fetch() and executes them as me (see Get user info when someone runs Google Apps Script web app as me).
A limitation of this method is that UrlFetchApp.fetch() has a 60s timeout, and one of my functions often takes longer than this. The API Executable function finishes running successfully, but the web app throws a timeout exception. I would like to handle this exception by running a second "follow-up" function that finds and returns the URL of the Google Sheet successfully created by the original function. However, I'll need to pass the follow-up function one of the parameters passed to the original function, and it appears I can't do this within a standard try...catch block.
My idea was to throw an exception that contains the needed parameter, but I can't figure out how to throw my own timeout exception; since Google Apps Script is synchronous, there's no way to track how long UrlFetchApp.fetch() has been running while it's running.
Is there a way to throw your own timeout exception? Or, is there another way I can pass the needed parameter to a function that executes if there's a timeout error?
I tagged Javascript in this post as well since there's a lot of overlap with Google Apps Script and I figured it would improve my chance of connecting with someone who has an answer--hope that's okay. Below is the function I'm using in my web app to call my API Executable functions, in case that's helpful.
EDIT: Based on #TheMaster's comment, I decided to write the script as though parameters passed to executeAsMe() WERE being passed to the catch() block, to see what happened. I expected an exception regarding the fact that opt_timeoutFunction was undefined, but strangely it looks like only the first line of the catch() block is even running, and I'm not sure why.
function executeAsMe(functionName, paramsArray, opt_timeoutFunction, opt_timeoutParams) {
  try {
    console.log('Using Apps Script API to call function ' + functionName.toString() + ' with parameter(s) ' + paramsArray.toString());
    var url = 'https://script.googleapis.com/v1/scripts/Mq71nLXJPX95eVDFPW2DJzcB61X_XfA8E:run';
    var payload = JSON.stringify({"function": functionName, "parameters": paramsArray, "devMode": true});
    var params = {method: "POST",
                  headers: {Authorization: 'Bearer ' + getAppsScriptService().getAccessToken()},
                  payload: payload,
                  contentType: "application/json",
                  muteHttpExceptions: true};
    var results = UrlFetchApp.fetch(url, params);
    var jsonResponse = JSON.parse(results).response;
    if (jsonResponse == undefined) {
      var jsonResults = undefined;
    } else {
      var jsonResults = jsonResponse.result;
    }
  } catch(error) {
    console.log('error = ' + error); // I'm seeing this in the logs...
    console.log('error.indexOf("Timeout") = ' + error.indexOf("Timeout").toString); // ...but not this. It skips straight to the finally block
    if (error.indexOf('Timeout') > 0) { // If error is a timeout error, call follow-up function
      console.log('Using Apps Script API to call follow-up function ' + opt_timeoutFunction.toString() + ' with parameter(s) ' + paramsArray.toString());
      var url = 'https://script.googleapis.com/v1/scripts/Mq71nLXJPX95eVDFPW2DJzcB61X_XfA8E:run';
      var payload = JSON.stringify({"function": opt_timeoutFunction, "parameters": opt_timeoutParams, "devMode": true});
      var params = {method: "POST",
                    headers: {Authorization: 'Bearer ' + getAppsScriptService().getAccessToken()},
                    payload: payload,
                    contentType: "application/json",
                    muteHttpExceptions: true};
      var results = UrlFetchApp.fetch(url, params);
      var jsonResponse = JSON.parse(results).response;
      if (jsonResponse == undefined) {
        var jsonResults = undefined;
      } else {
        var jsonResults = jsonResponse.result;
      }
    }
  } finally {
    console.log('jsonResults = ' + jsonResults);
    return jsonResults;
  }
}
I ended up using the catch() block to throw an exception back to the client side and handle it there.
Google Apps Script:
function executeAsMe(functionName, paramsArray) {
  try {
    console.log('Using Apps Script API to call function ' + functionName.toString() + ' with parameter(s) ' + paramsArray.toString());
    var url = 'https://script.googleapis.com/v1/scripts/Mq71nLXJPX95eVDFPW2DJzcB61X_XfA8E:run';
    var payload = JSON.stringify({"function": functionName, "parameters": paramsArray, "devMode": true});
    var params = {method: "POST",
                  headers: {Authorization: 'Bearer ' + getAppsScriptService().getAccessToken()},
                  payload: payload,
                  contentType: "application/json",
                  muteHttpExceptions: true};
    var results = UrlFetchApp.fetch(url, params);
    var jsonResponse = JSON.parse(results).response;
    if (jsonResponse == undefined) {
      var jsonResults = undefined;
    } else {
      var jsonResults = jsonResponse.result;
    }
    return jsonResults;
  } catch(error) {
    console.log('error = ' + error);
    if (error.toString().indexOf('Timeout') > 0) {
      console.log('Throwing new error');
      throw new Error('timeout');
    } else {
      throw new Error('unknown');
    }
  } finally {
  }
}
Client-side JavaScript (a simplified version):
function createMcs() {
  var userFolder = getDataFromHtml().userFolder;
  google.script.run
    .withSuccessHandler(createMcsSuccess)
    .withFailureHandler(createMcsFailure)
    .withUserObject(userFolder)
    .executeAsMe('createMasterCombinedSchedule', [userFolder]);
}

function createMcsSuccess(mcsParameter) {
  if (mcsParameter == undefined) {
    simpleErrorModal.style.display = "block"; // A generic error message
  } else {
    document.getElementById("simpleAlertHeaderDiv").innerHTML = 'Master Combined Schedule Created Successfully';
    document.getElementById("simpleAlertBodyDiv").innerHTML = 'Your Master Combined Schedule was created successfully. Click here to view.';
    simpleAlertModal.style.display = "block";
  }
}

function createMcsFailure(mcsError, userFolder, counter) { // The exception I threw will activate this function
  if (!counter) { // Added a counter to increment every time checkForCreatedMcs() runs so it doesn't run indefinitely
    var counter = 0;
  }
  if (mcsError.message == 'Error: timeout' && counter < 5) { // If timeout error, wait 10s and look for MCS URL
    window.setTimeout(function() { checkForCreatedMcs(mcsError, userFolder, counter); }, 10000); // pass a function so the call is actually delayed
  } else if (mcsError.message == 'Error: timeout' && counter == 5) { // If it hasn't worked after 5 tries, show generic error message
    simpleErrorModal.style.display = "block";
  } else { // For any error that's not a timeout exception, show generic error message
    simpleErrorModal.style.display = "block";
  }
}

function checkForCreatedMcs(mcsError, userFolder, counter) {
  counter++;
  google.script.run
    .withSuccessHandler(checkForCreatedMcsSuccess)
    .withUserObject([mcsError, userFolder, counter])
    .executeAsMe('checkIfMcsExists', [userFolder]); // checkIfMcsExists() is a pre-existing function in my API Executable project I had already used elsewhere
}

function checkForCreatedMcsSuccess(mcsExistsParameter, params) {
  var mcsError = params[0];
  var userFolder = params[1];
  var counter = params[2];
  if (mcsExistsParameter == undefined) { // If function returns undefined, show generic error message
    simpleErrorModal.style.display = "block";
  } else if (mcsExistsParameter == false) { // If function returns false, wait 10s and try again
    createMcsFailure(mcsError, userFolder, counter);
  } else { // If function returns URL, show success modal with link
    document.getElementById("simpleAlertHeaderDiv").innerHTML = 'Master Combined Schedule Created Successfully';
    document.getElementById("simpleAlertBodyDiv").innerHTML = 'Your Master Combined Schedule was created successfully. Click here to view.';
    simpleAlertModal.style.display = "block";
  }
}
I am sure there has to be a tidier/less complex way to do this, but this worked!
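For what it's worth, a tidier variant might wrap google.script.run in a Promise and do the polling in one async loop on the client. This is only a sketch under assumptions: the browser supports async/await, executeAsMe and checkIfMcsExists are the functions shown above, and runServerFunction/waitForCreatedMcs are hypothetical helper names.

function runServerFunction(functionName, params) {
  return new Promise(function (resolve, reject) {
    google.script.run
      .withSuccessHandler(resolve)
      .withFailureHandler(reject)
      .executeAsMe(functionName, params);
  });
}

async function waitForCreatedMcs(userFolder, maxTries, delayMs) {
  maxTries = maxTries || 5;
  delayMs = delayMs || 10000;
  for (var attempt = 0; attempt < maxTries; attempt++) {
    await new Promise(function (r) { setTimeout(r, delayMs); });           // wait 10s between checks
    var url = await runServerFunction('checkIfMcsExists', [userFolder]);   // ask the server if the sheet exists yet
    if (url) return url;                                                   // found it, return the URL
  }
  throw new Error('timeout');                                              // still not there after maxTries checks
}

The timeout failure handler could then simply await waitForCreatedMcs(userFolder) and show the success or error modal based on the result.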

Get Status from PHP /w two ajax calls

Given two api endpoints in my php backend:
Long Function Endpoint: /_api/_long_function &
Status Endpoint: /_api/_status_long_function
In the LongFunction I write session data, and in the StatusFunction I read it and output it as JSON. I think it is not relevant to the question, but I could show the session getting and setting as well if needed.
I now wanted to start the long function with an ajax call and then periodically request the status with a second ajax call.
The JS Code
$(document).on("click", "#submitSummoner", function () {
...INIT VARS...
/**
* Periodically check Session Status
*/
var to,
clearTime = false,
called = 0,
set_delay = 1000,
callout = function () {
console.log('Called SessionCheck ' + called);
$.get($url_session, {
my_data: my_data
})
.done(function (response) {
if (called === 0) {
console.log('Called LongFunction');
$.get($url, {
my_data, my_data
}).done(function (data) {
console.log('Finished Request');
if (to) {
clearTimeout(to);
clearTime = true;
}
}).fail(function () {
if (to) {
clearTime = true;
clearTimeout(to);
}
console.log('Failed Request');
});
}
called++;
// This is the output I need
console.log('Current Status: '+response.status);
})
.always(function () {
if (clearTime === false) {
to = setTimeout(callout, set_delay);
}
});
};
callout();
});
Unfortunately what happens now is this:
Console Output
1: Called SessionCheck 0
2: Called LongFunction (immediately after)
3: Current Status: Whatever (immediatley)
4: Called SessionCheck 1 (immidately)
LONG PAUSE UNTIL LONG FUNCTION FINISHED
5: Current Status: Finished
After "Finished", the setTimeout gets cleared as expected.
But why does the setTimeout not get called every 1000 ms, although the long function takes 10000 ms+?
It seems like the second SessionCheck waits for the long function to finish, and I don't know why. Is it possible that the server "hangs"? CPU/RAM seem fine during the longFunction. Is there anything else that could "lock" it?
Even if the PHP session check function crashes, shouldn't the function still try again after 1000 ms?
Any hint appreciated!
Solution
I figured it out: the session wasn't being released in the LongFunction, so it blocked the concurrent status request: https://codingexplained.com/coding/php/solving-concurrent-request-blocking-in-php
I've rewritten your solution using jQuery promises to simulate the GET requests, and it works as you desired. I believe this proves the issue is on the back end: the "short" GET request is not resolving the second time. Please provide the code that resolves that request.
var to,
    clearTime = false,
    called = 0,
    set_delay = 1000,
    callout = function () {
      console.log('Called SessionCheck ' + called);
      var shortGet = $.Deferred();
      shortGet
        .done(function () {
          if (called === 0) {
            var longGet = $.Deferred();
            console.log('Called LongFunction');
            longGet.done(function (data) {
              console.log('Finished Request');
              if (to) {
                clearTimeout(to);
                clearTime = true;
              }
            }).fail(function () {
              if (to) {
                clearTime = true;
                clearTimeout(to);
              }
              console.log('Failed Request');
            });
            setTimeout(function () {
              longGet.resolve();
            }, 10000);
          }
          called++;
          // This is the output I need
          console.log('Current Status: still going');
        })
        .always(function () {
          if (clearTime === false) {
            to = setTimeout(callout, set_delay);
          }
        });
      setTimeout(function () {
        shortGet.resolve();
      }, 200);
    };

callout();
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>

How to repeat a "request" until success? NODEJS

I checked a few threads on Stack Overflow, but nothing works for me.
I have this request call and I need it to keep trying to send the request until it succeeds (but if it fails, it has to wait at least 3 seconds before retrying):
sortingKeywords.sortVerifiedPhrase = function(phrase) {
  var URL = "an API URL" + phrase; // <== Obviously in my program it has an actual API URL
  request(URL, function(error, response, body) {
    if (!error && response.statusCode == 200) {
      var keyword = JSON.parse(body);
      if (sortingKeywords.isKeyMeetRequirements(keyword)) { // Check if the data is up to a certain criteria
        sortingKeywords.addKeyToKeywordsCollection(keyword); // Adding to the DB
      } else {
        console.log("doesn't meet requirement");
      }
    } else {
      console.log(phrase);
      console.log("Error: " + error);
    }
  });
};
Here's the weird part: if I call the same phrases in a row from the browser, it works almost without errors (when it does fail, it usually states: rate limit time exceeded).
Appreciate your help.
Thanks in advance.
Here is a working program that I've written for this request. It sends the request via a function; if the request fails, the error handler fires and calls the function again.
If the request succeeds, the program resolves a promise and stops retrying.
NOTE: if you enter an invalid URL, the program exits right away; that's down to the request module, which I like, to be honest. It lets you know that you have an invalid URL, so you must include https:// or http:// in the URL.
var request = require('request');
var myReq;

// our request function
function sendTheReq() {
  myReq = request.get({
    url: 'http://www.google.com/',
    json: true
  }, (err, res, data) => {
    if (err) {
      console.log('Error:', err);
    } else if (res.statusCode !== 200) {
      console.log('Status:', res.statusCode);
    } else {
      // data is already parsed as JSON:
      // console.log(data);
    }
  });
}
sendTheReq();

// promise function
function resolveWhenDone(x) {
  return new Promise(resolve => {
    myReq.on('end', function () {
      resolve(x);
    });
    myReq.on('error', function (err) {
      console.log('there was an error: ---Trying again');
      sendTheReq(); // sending the request again
      f1();         // starting the promise again
    });
  });
}

// success handler
async function f1() {
  var x = await resolveWhenDone(100);
  if (x == 100) {
    console.log("request has been completed");
    // handle other stuff
  }
}
f1();
On error, run this code:
setTimeout(function(){}, 3000);
See https://www.w3schools.com/jsref/met_win_settimeout.asp
You can also write the code like this:
var func1 = function(){};
setTimeout(func1, 3000);
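Putting the two answers together, here is a minimal sketch of the retry-until-success pattern the question asks for, assuming the same request module and sortingKeywords helpers as in the question; the attempt counter and the cap of 10 retries are added for safety and are not part of the original code:

sortingKeywords.sortVerifiedPhrase = function (phrase, attempt) {
  attempt = attempt || 0;
  var URL = "an API URL" + phrase;
  request(URL, function (error, response, body) {
    if (!error && response.statusCode == 200) {
      var keyword = JSON.parse(body);
      if (sortingKeywords.isKeyMeetRequirements(keyword)) {
        sortingKeywords.addKeyToKeywordsCollection(keyword); // adding to the DB
      } else {
        console.log("doesn't meet requirement");
      }
    } else if (attempt < 10) {
      // wait at least 3 seconds, then retry the same phrase
      setTimeout(function () {
        sortingKeywords.sortVerifiedPhrase(phrase, attempt + 1);
      }, 3000);
    } else {
      console.log("Giving up on: " + phrase);
    }
  });
};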

Node.js unexpected stack overflow with multiple get requests

I have a function that GETs a JSON object from a remote server, or from a local cache on-disk.
In one use case, I have to call this function several thousand times with varying arguments, but when I do so, I get maximum call stack size exceeded errors. I must be making a recursive call somewhere, but I can't see where it could be, as my process.nextTick calls seem to be in the right place.
I see none of my log.error readouts in the console, which would appear if any of the recursive calls to retry the request were being made.
The console output shows a repeated occurrence of
(node) warning: Recursive process.nextTick detected. This will break in the next version of node. Please use setImmediate for recursive deferral.
then...
RangeError: Maximum call stack size exceeded
Then the program exits.
Can anyone offer any help regarding what I may be doing wrong? I'm completely stumped.
Below is the function that invokes the problematic function tf2inv.loadInventory():
function refreshInventories(accounts, force, callback) {
  // job executes download function, then pushes to inventories object
  var inventories = {};
  var Qinv = async.queue(function (task, invCallback) {
    tf2inv.loadInventory(
      task.force,
      task.steamid,
      function(inv, alias) {
        inventories[alias] = inv;
        process.nextTick(invCallback);
      }
    );
  }, 100);

  // when all queue jobs have finished, callback with populated inventories object
  Qinv.drain = function (err) {
    log.info('All inventories downloaded');
    callback(inventories);
  };

  // adding jobs to the queue
  for (var i = accounts.length - 1; i >= 0; i--) {
    Qinv.push({
      force: force,
      steamid: accounts[i]
    });
  }
}
Shown here is the function that either parses from the cache, or requests from the remote server.
// tf2inv
var loadInventory = function(force, sid, callback) {
  var invLoc = invFolder + sid;

  if (force) {
    if (fs.existsSync(invLoc)) {
      fs.unlinkSync(invLoc);
    }
  }

  if (fs.existsSync(invLoc)) {
    var body = fs.readFileSync(invLoc);
    try {
      var inventory = JSON.parse(body);
    } catch (e) {
      fs.unlinkSync(invLoc);
      log.error("parsing " + sid + "'s inventory");
      loadInventory(true, sid, invFolder, callback);
      return;
    }
    process.nextTick(function() { callback(inventory, sid) });
    return;
  } else {
    var urlPre = "http://api.steampowered.com/IEconItems_440/GetPlayerItems/v0001/?key=";
    var urlSidPre = "&steamid=";
    var urlInvSuf = "&inventory=yes";
    var URL = urlPre + steam_API + urlSidPre + sid + urlInvSuf;

    http.get(URL, function (res) {
      var body = '';
      res.on('data', function (data) {
        body += data;
        fs.appendFile(invLoc, data);
      });
      res.on('end', function() {
        try {
          inventory = JSON.parse(body);
        } catch (e) {
          if (fs.existsSync(invLoc)) {
            fs.unlinkSync(invLoc);
          }
          log.error("parsing " + sid + "'s downloaded inventory");
          loadInventory(force, sid, invFolder, callback);
          return;
        }
        process.nextTick(function() { callback(inventory, sid) });
        return;
      });
      res.on('error', function (e, socket) {
        log.error(sid + " inventory error");
        if (fs.existsSync(invLoc)) {
          fs.unlinkSync(invLoc);
        }
        log.debug('Retrying inventory');
        loadInventory(force, sid, invFolder, callback);
        return;
      });
      res.on('close', function () { res.emit('end'); log.error('connection closed'); });
    })
    .on('error', function(e) {
      log.error(JSON.stringify(e));
      if (fs.existsSync(invLoc)) {
        fs.unlinkSync(invLoc);
      }
      log.debug('Retrying inventory');
      loadInventory(force, sid, invFolder, callback);
      return;
    });
  }
};
It is likely to be failing to parse the body coming back from the server. It then immediately calls itself again, failing again, infinitely looping and causing a stack overflow.
I suggest you do not retry automatically on a failed parse; if it fails once, it is likely to fail again. It would be best to call back with the error and let the part of your program calling this handle it, or pass it back to the point where it can let the user know that something is wrong.
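As a sketch of that suggestion: parse once and, on failure, hand the error to the caller instead of recursing. The helper name parseInventoryBody and the error-first callback(err, inventory, sid) signature are assumptions for illustration (the original callback is callback(inv, alias)); fs and log are the same modules used in the question.

function parseInventoryBody(body, invLoc, sid, callback) {
  var inventory;
  try {
    inventory = JSON.parse(body);
  } catch (e) {
    if (fs.existsSync(invLoc)) fs.unlinkSync(invLoc);  // drop the corrupt cache file
    log.error("parsing " + sid + "'s inventory");
    process.nextTick(function () { callback(e); });    // let the caller decide whether to retry
    return;
  }
  process.nextTick(function () { callback(null, inventory, sid); });
}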
