node.js looping through GETs with promise - javascript

I'm new to promises and I'm sure there's an answer/pattern out there, but I just couldn't find one that was obvious enough to me to be the right one. I'm using Node.js v4.2.4 and https://www.promisejs.org/
This should be pretty easy, I think... I need to run multiple async blocks in a specific order, and one of the middle blocks will loop through an array of HTTP GETs.
//New Promise = asyncblock1 - FTP List, resolve the returned list array
//.then(asynchblock2(list)) - loop through list array and HTTP GET needed files
//.then(asynchblock3(list)) - update local log
I tried creating a new Promise, resolving it, passing the list to the .then, doing the GET loop, then the file update. I tried using a nested Promise.all inside asynchblock2, but it's actually running in reverse order (3, 2, then 1) due to the timing of those events. Thanks for any help.
EDIT: Ok, this is the pattern that I'm using which works, I just need a GET loop in the middle one now.
var p = new Promise((resolve, reject) => {
  setTimeout(() => {
    console.log('2 sec');
    resolve(1);
  }, 2000);
}).then(() => {
  return new Promise((resolve) => {
    setTimeout(() => {
      console.log('1.5 sec');
      // instead of this section, here I'd like to do something like:
      // for(var i = 0; i < dynamicarray.length; i++){
      //   globalvar[i] = ftpclient.getfile(dynamicarray[i])
      // }
      // after this loop is done, resolve
      resolve(1);
    }, 1500);
  });
}).then(() => {
  return new Promise((resolve) => {
    setTimeout(() => {
      console.log('1 sec');
      resolve(1);
    }, 1000);
  });
});
EDIT Here is the almost working code!
var pORecAlert = (function(){
var pa;
var newans = [];
var anstodownload = [];
var anfound = false;//anfound in log file
var nexttab;
var lastchar;
var po;
var fnar = [];
var antext = '';
//-->> This section works fine; it's just creating a JSON object from a local file
try{
console.log('trying');
porfile = fs.readFileSync('an_record_files.json', 'utf8');
if(porfile == null || porfile == ''){
console.log('No data in log file - uploaded_files_data.json being initialized!');
plogObj = [];
}
else{
plogObj = JSON.parse(porfile);
}
}
catch(jpfp){
console.log('Error parsing log file for PO Receiving Alert: ' + jpfp);
return endPORecAlertProgram();
};
if((typeof plogObj) === 'object'){
console.log('an_record_files.json log file found and parsed for PO Receiving Alert!');
}
else{
return mkError(ferror, 'pORecAlert');
};
//finish creating JSON Object
pa = new Client();
pa.connect(ftpoptions);
console.log('FTP Connection for FTP Check Acknowledgement begun...');
pa.on('greeting', function(msg){
console.log('FTP Received Greeting from Server for ftpCheckAcknowledgement: ' + msg);
});
pa.on('ready', function(){
console.log('on ready');
//START PROMISE LIST
var listpromise = new Promise((reslp, rejlp) => {
pa.list('/public_html/test/out', false, (cerr, clist) => {
if(cerr){
return mkError(ferror, 'pORecAlert');
}
else{
console.log('Resolving clist');
reslp(clist);
}
});
});
listpromise.then((reclist) => {
ftpplist:
for(var pcl = 0; pcl < reclist.length; pcl++){
console.log('reclist iteration: ' + pcl);
console.log('checking name: ', reclist[pcl].name);
if(reclist[pcl].name.substring(0, 2) !== 'AN'){
console.log('Not AN - skipping');
continue ftpplist;
}
else{//found an AN
for(var plc = 0; plc < plogObj.length; plc++){
if(reclist[pcl].name === plogObj[plc].anname){
//console.log('Found reclist[pcl].name in local log');
anfound = true;
};
};
if(anfound === false){
console.log('Found AN file to download: ', reclist[pcl].name);
anstodownload.push(reclist[pcl].name);
};
};
};
console.log('anstodownload array:');
console.dir(anstodownload);
return anstodownload;
}).then((fnar) => {
//for simplicity/transparency, here is the array being overwritten
fnar = new Array('AN_17650_37411.699.txt', 'AN_17650_37411.700', 'AN_17650_37411.701', 'AN_17650_37411.702.txt', 'AN_17650_37411.801', 'AN_17650_37411.802.txt');
return Promise.all(fnar.map((gfname) => {
var nsalertnames = [];
console.log('Getting: ', gfname);
debugger;
pa.get(('/public_html/test/out/' + gfname), function(err, anstream){//THE PROBLEM IS THAT THIS GET GETS TRIGGERED AN EXTRA TIME FOR EVERY OTHER FILE!!!
antext = '';
console.log('Get begun for: ', gfname);
debugger;
if(err){
ferror.nsrest_trace = 'Error - could not download new AN file!';
ferror.details = err;
console.log('Error - could not download new AN file!');
console.log('************************* Exiting *************************')
logError(ferror, gfname);
}
else{
// anstream.on('data', (anchunk) => {
// console.log('Receiving data for: ', gfname);
// antext += anchunk;
// });
// anstream.on('end', () => {
// console.log('GET end for: ', gfname);
// //console.log('path to update - gfname ', gfname, '|| end text.');
// fs.appendFileSync(path.resolve('test/from', gfname), antext);
// console.log('Appended file');
// return antext;
// });//end end
};
});//get end
}));//end Promise.all and map
}).then((res99) => {
// pa.end();
// return Promise(() => {
console.log('end all. res99: ', res99);
// //res4(1);
// return 1;
// });
});
});
})();
-->> What happens here:
So I added the almost-working code. What is happening is that for every other file, an additional GET request gets made (I don't know how it's being triggered), which fails with an "Unable to make data connection" error.
So for my iteration over this array of 6, there end up being 9 GET requests. Element 1 gets requested (works, expected), then 2 (works, expected), then 2 again (fails, unexpected, and I don't know why it was triggered). Then 3 (works, expected), then 4 (works, expected), then 4 again (fails, unexpected), etc.

What you need is Promise.all(). Sample code for your app:
...
}).then(() => {
return Promise.all(arry.map(item => ftpclient.getFile(item)))
}).then((resultArray) => {
...
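Applied to the flow described in the question, the whole chain might look roughly like this (a sketch, not the asker's actual code; listFiles, getFile and updateLog are hypothetical promise-returning stand-ins for the FTP list, GET, and log-update steps):
listFiles()                                  // block 1: list files, resolves with an array
  .then((list) => {
    // block 2: start one GET per file and wait until every one has finished
    return Promise.all(list.map((name) => getFile(name)));
  })
  .then((downloadedFiles) => {
    // block 3: update the local log only after all GETs are done
    return updateLog(downloadedFiles);
  })
  .catch((err) => console.error(err));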

So thanks for the help (and the negative votes with no useful direction!)
I actually reached out to a good Node.js programmer and he said that there seemed to be a bug in the ftp module I was using; even when trying to use a Bluebird .map, the quick succession of requests somehow kicked off an error. I ended up using promise-ftp, Bluebird, and promiseTaskQueue - the kicker was that I needed an interval between requests. Without it, the FTP calls would end up causing a strange, illogical error in the ftp module.
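For anyone who lands here later, a rough sketch of the throttling idea (assuming Bluebird's Promise.map with its concurrency option and a promise-returning client such as promise-ftp; the host, credentials, paths, and concurrency value are placeholders, and the interval-based queue the OP actually used is not shown):
const Promise = require('bluebird');        // Bluebird exposes Promise.map with a concurrency limit
const PromiseFtp = require('promise-ftp');

const ftp = new PromiseFtp();

ftp.connect({ host: 'ftp.example.com', user: 'user', password: 'pass' })
  .then(() => ftp.list('/public_html/test/out'))
  .then((list) => {
    const names = list
      .map((entry) => entry.name)
      .filter((name) => name.startsWith('AN'));
    // run the GETs one at a time instead of firing them all in quick succession
    return Promise.map(names, (name) => ftp.get('/public_html/test/out/' + name), { concurrency: 1 });
  })
  .then((streams) => {
    // each element is a readable stream for one downloaded file; write them out here
    return ftp.end();
  })
  .catch((err) => console.error(err));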

You need the async library. Use async.eachSeries in situations where you need to run asynchronous operations within a loop and then execute a function when all of those are complete. There are many variations depending on the flow you want, but this library does it all.
https://github.com/caolan/async
async.each(theArrayToLoop, function(item, callback) {
  // Perform async operation on item here.
  doSomethingAsync(item).then(function(){
    callback();
  })
}, function(err){
  //All your async calls are finished continue along here
});
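Since the text above recommends async.eachSeries specifically, here is the same example in series form (a sketch; doSomethingAsync is the same placeholder as above), which runs the items strictly one after another:
async.eachSeries(theArrayToLoop, function(item, callback) {
  // each item starts only after the previous one has called back
  doSomethingAsync(item).then(function() {
    callback();
  }).catch(callback);   // forward errors so the series stops early
}, function(err) {
  if (err) return console.error(err);
  // all items processed in order; continue along here
});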

Related

How can I add a setTimeOut() to run a function and insert data multiple times?

A few days ago I did a project and I had some problems, which were solved in this question; let me try to summarize it.
I need to insert multiple objects into a SQL Server database. For that, I wrote a function that loops over another function, which opens a connection, inserts, and closes the connection, then repeats it over and over again.
It worked fine until today, when it was tested on a colleague's PC against the company's server, and I get these errors:
Error: Requests can only be made in the LoggedIn state, not the LoggedInSendingInitialSql state
Error: Requests can only be made in the LoggedIn state, not the SentLogin7WithStandardLogin state
Here's the code we tested (the same as in my last question); it works on my PC, but not on the other:
var config = {
...
};
function insertOffice(index) {
var connection = new Connection(config);
connection.on("connect", function (err) {
console.log("Successful connection");
});
connection.connect();
let url = `https://api.openweathermap.org/data/2.5/weather?lat=${offices[index].latjson}&lon=${offices[index].lonjson}&appid=${api_key}&units=metric&lang=sp`;
fetch(url)
.then((response) => { return response.json(); })
.then(function (data) {
var myObject = {
Id_Oficina: offices[index].IdOficina,
...
};
const request = new Request(
"EXEC USP_BI_CSL_insert_reg_RegistroTemperaturaXidOdicina #IdOficina, ...",
function (err) {
if (err) {
console.log("Couldnt insert data (" + index + "), " + err);
} else {
console.log("Data with ID: " + myObject.Id_Oficina +" inserted succesfully(" + index + ").")
}
}
);
request.addParameter("IdOficina", TYPES.SmallInt, myObject.Id_Oficina);
...
request.on("row", function (columns) {
columns.forEach(function (column) {
if (column.value === null) {
console.log("NULL");
} else {
console.log("Product id of inserted item is " + column.value);
}
});
});
request.on("requestCompleted", function () {
connection.close();
});
connection.execSql(request);
});
}
function functionLooper() {
for (let i = 0; i < offices.length; i++) {
let response = insertOffice(i);
}
}
functionLooper();
So, I thought it would be a good idea to use a setTimeout, to:
Run functionLooper().
Open connection, insert and close.
Wait a few seconds.
Repeat.
So, I changed to this:
setTimeout(functionLooper, 2000);
function functionLooper() {
for (let i = 0; i < offices.length; i++) {
let response = insertOffice(i);
}
}
It works, but, as you can see, it only waits once when I first run it, so I tried to make a function that runs setTimeout(functionLooper, ...) in a loop, the way functionLooper() loops insertOffice(), but it didn't work either.
function TimerLooper() {
for (let i = 0; i < offices.length; i++) {
setTimeout(functionLooper, 500);
}
}
function functionLooper() {
for (let i = 0; i < offices.length; i++) {
let response = insertOffice(i);
}
}
TimerLooper();
This shows me this error:
Error: Validation failed for parameter 'Descripcion'. No collation was set by the server for the current connection.
file:///...:/.../.../node_modules/node-fetch/src/index.js:95
reject(new FetchError(request to ${request.url} failed, reason: ${error.message}, 'system', error));
^ FetchError: request to https://api.openweathermap.org/data/2.5/weather?lat=XX&lon=XX&appid=XX&units=metric&lang=sp failed, reason: connect ETIMEDOUT X.X.X.X:X
So, I have some questions:
How can I use setTimeout properly? I based this function on what I've seen here on SO, but I just can't get it right and I don't know what I'm doing wrong.
Why does it work on my PC but not on the other? Do we have to change some kind of config or something?
Is using setTimeout the correct way to solve this problem? If not, what would you suggest?
Could you do something like:
//edit: not disconnect but end
connection.on("end", function(){
  functionLooper(index + 1)
})
function functionLooper(i) {
  if(i < offices.length) insertOffice(i)
}
Edit: according to the tedious docs, there is an end event emitted on connection.close():
Event: 'end'
function () { }
The connection has ended. This may be as a result of the client calling close(), the server closing the connection, or a network error.
My suggestion from above
var config = {
...
};
function insertOffice(index) {
  var connection = new Connection(config);
  connection.on("connect", function (err) {
    console.log("Successful connection");
  });
  connection.connect();
  let url = `...`;
  fetch(url)
    .then((response) => { return response.json(); })
    .then(function (data) {
      ...
    });
  connection.on("end", function(){
    functionLooper(index + 1)
  })
}
function functionLooper(i) {
  if(i < offices.length) insertOffice(i)
}
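To kick the chain off, the looper would need to be called once with the first index (a usage note, not part of the original suggestion):
functionLooper(0); // each connection's "end" event then schedules the next office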

Best way to turn JSON change into event

I'm creating a YouTube upload notification bot for a Discord server I'm in, using the YouTube RSS feed, and I'm having problems with it. The bot keeps sending the same video twice even though I've tried everything to fix it. The bot cycles through different users in a for loop and checks each user's latest video ID against one stored in a JSON file. If they do not match, it sends a message and updates the JSON. Here is my current code:
function update(videoId, n) {
var u = JSON.parse(fs.readFileSync("./jsons/uploads.json"))
u[n].id = videoId
fs.writeFile("./jsons/uploads.json", JSON.stringify(u, null, 2), (err) => {
if (err) throw err;
// client.channels.cache.get("776895633033396284").send()
console.log('Hey, Listen! ' + n + ' just released a new video! Go watch it: https://youtu.be/' + videoId + "\n\n")
});
}
async function uploadHandler() {
try {
var u = require('./jsons/uploads.json');
var users = require('./jsons/users.json');
for (i = 0; i < Object.keys(users).length; i++) {
// sleep(1000)
setTimeout(function(i) {
var username = Object.keys(users)[i]
let xml = f("https://www.youtube.com/feeds/videos.xml?channel_id=" + users[username]).text()
parseString(xml, function(err, result) {
if (err) {} else {
let videoId = result.feed.entry[0]["yt:videoId"][0]
let isMatch = u[username].id == videoId ? true : false
if (isMatch) {} else {
if (!isMatch) {
u[username] = videoId
update(videoId, username)
}
}
}
});
}, i * 1000, i)
}
} catch (e) {
console.log(e)
}
}
My code is rather simple, but I've had the same issue with other code that uses this method; so what would be the best way to accomplish this? Any advice is appreciated.
There are a few issues with your code that I would call out right off the bat:
Empty blocks. You use this especially with your if statements, e.g. if (condition) {} else { // Do the thing }. Instead, you should negate the condition, e.g. if (!condition) { // Do the thing }.
You declare the function uploadHandler as async, but you never await anything inside it. I suspect that f is the asynchronous, Promise-returning call you're trying to handle.
You've linked the duration of the timeout to your incrementing variable, so in the first run of your for block, the timeout will wait zero seconds (i is 0, times 1000), then one second, then two seconds, then three...
Here's a swag at a refactor with some notes that I hope are helpful in there:
// Only require these values once
const u = require('./jsons/uploads.json');
const users = require('./jsons/users.json');
// This just makes the code a little more readable, I think
const URL_BASE = 'https://www.youtube.com/feeds/videos.xml?channel_id=';
function uploadHandler() {
Object.keys(users).forEach(username => {
// We will run this code once for each username that we find in users
// I am assuming `f` is a Promise. When it resolves, we'll have xml available to us in the .then method
f(`${URL_BASE}${username}`).then(xml => {
parseString(xml, (err, result) => {
if (!err) {
const [videoId] = result.feed.entry[0]['yt:videoId']; // We can use destructuring to get element 0 from this nested value
if (videoId !== u[username].id) {
// Update the in-memory value for this user's most recent video
u[username].id = videoId;
// Console.log the update
console.log(`Hey listen! ${username} just released a new video! Go watch it: https://youtu.be/${videoId}\n\n`);
// Attempt to update the json file; this won't affect the u object in memory, but will keep your app up to date
// when you restart it in the future.
fs.writeFile('./jsons/uploads.json', JSON.stringify(u, null, 2), err => {
if (err) {
console.error(`There was a problem updating uploads.json with the new videoId ${videoId} for user ${username}`);
}
});
}
}
});
})
// This .catch method will run if the call made by `f` fails for any reason
.catch(err => console.error(err));
});
}
// I am assuming that what you want is to check for updates once every second.
setInterval(uploadHandler, 1000);

promise chain is repeating itself after the call takes too long

I have a very long promise chain inside of a $http.get call that takes quite a few minutes to complete. The part that takes a long time is a for loop that iterates through some 160 array elements and runs a long series of socket connect tests. However, around the 84th iteration of the for loop, the whole promise chain (or maybe the GET call) starts all over again while the first one is still running. Then, once the first one finishes, the res.send never goes through while the new chain runs, and this repeats ad infinitum.
router.get('/', function(req, res) {
fs.readdir('C:\\Temp\\hostPorts', function(err, files) {
console.log('files', files);
chooseFile(files).then(response => {readTheFile(response).then(async (result) => {
splitText(result).then( async (final) => {
console.log('final version', final);
res.send({file: final});
})
// res.send({file: result});
}).catch(error => {
console.log(error);
}) //end catch
}); //end promise
}); //end read
}); //end get
This is my GET call; the splitText function is where it is getting stuck. I will post the source for the splitText function below, but I'm certain that it's somehow creating two instances, since every time, at the 84th iteration, my terminal reprints the initial console.log('files', files) and then runs through the other promises in the chain.
It does eventually finish the first one, because the console.log('final version', final) does print out, but the res.send never happens and the second promise chain continues to run. Then the third, etc.
here's the code from the long loop
async function splitText(file){
let tableData = "<table border=1 cellspacing=0><tr>";
let splited = file.trim().split(/\s+/);
//vars for checking connectivity
for (let i = 0; i < splited.length; i++) {
console.log(splited[i] + " " + i);
if(i < 4 ) {
tableData += "<th>" + splited[i] + "</th>";
//if its less than 4 print out table headers
}
else if (i == 4){
tableData += "</tr><tr><td>" + splited[i] + "</td>";
//if its 4 create a new row for data
}
else if (i % 3 == 0){
//if modulo 3 is 0 then its on a port, checks connectivity and adds it to the row as data after port
//then starts a new row
let host = splited[(i - 1)]; //1 index ago was host
let port = parseInt(splited[(i)]); //current array index is port
console.log('host: ' + host );
console.log('port: ' + port );
await testPort(port, host).then(async (reachable) => {
console.log(reachable);
if (reachable) {
tableData += "<td>" + splited[i] + "</td><td>" + "<font color=\"GREEN\">UP</font>" + "</tr><tr>";
}
else {
tableData += "<td>" + splited[i] + "</td><td>" + "<font color=\"RED\">DOWN</font>" + "</tr><tr>";
}
});
} //end else if
else {
tableData += "<td>" + splited[i] + "</td>";
//otherwise adds tabledata
}
} //end for
return tableData;
} //end function
And this is the async function that checks if the host/ports are up.
async function testPort(port, host){
return new Promise(resolve => {
const socket = new net.Socket();
const onError = () => {
socket.destroy();
resolve(false);
};
socket.setTimeout(10000);
socket.on('error', onError);
socket.on('timeout', onError);
socket.connect(port, host, () => {
socket.end();
resolve(true);
}); //end promise
});
I'm not sure if this is an issue of the HTTP GET restarting after it takes too long (I have the timeout set to 5 minutes), or if this is the promise chain restarting after not getting a response. Either way I'm really banging my head on this one, as I'm never getting data back to my client and, as far as I can see, I never re-called a function or created an infinite loop.
I'm not sure what framework you are using and whether it has an internal timeout that retries the request. One of the problems with the above code is that the network connection tests run serially; when you have a lot of hosts to check, that is bound to fail or time out. You can test multiple hosts in parallel. For this, the code to split, test, and build the output should be separated. Here is a rudimentary version.
function _splitText(file = '') {
let ret = {
header: [],
hosts: {}
};
if (!file.length) {
return ret;
}
//split the content
let splitted = file.trim().split(/\s+/);
if (splitted.length % 3 !== 1 && splitted.length < 4) {
console.log('Invalid data!');
return ret;
}
//get header
ret.header = splitted.splice(0, 4);
while (splitted.length) {
const [name, host, port, ...rest] = splitted;
ret.hosts[name] = {
host,
port,
isReachable: false
};
splitted = rest;
}
return ret;
}
async function testPort(name, port, host) {
return new Promise(resolve => {
const socket = new net.Socket();
const onError = () => {
socket.destroy();
resolve({
name,
isReachable: false
});
};
socket.setTimeout(10000);
socket.on('error', onError);
socket.on('timeout', onError);
socket.connect(port, host, () => {
socket.end();
resolve({
name,
isReachable: true
});
}); //end promise
});
}
async function testPortsParallel(o, nParallel = 5, timeout = 10000) {
  const hostnames = Object.keys(o.hosts);
  let temp;
  // check .length: splice returns an empty (but truthy) array once the list is exhausted
  while ((temp = hostnames.splice(0, nParallel)).length) {
    // create promises for one batch of hosts and wait for them in one go
    // (testPort takes (name, port, host), so pass port before host)
    await Promise.all(temp.map(v => testPort(v, o.hosts[v].port, o.hosts[v].host)))
      .then(values => values.forEach(v => o.hosts[v.name].isReachable = v.isReachable));
  }
}
function buildOutput(o) {
  let ret = '<table border=1 cellspacing=0>';
  // add header
  ret += '<tr><th>' + o.header.join('</th><th>') + '</th></tr>';
  // add hosts (plain objects have no .keys() method, so use Object.keys)
  ret += Object.keys(o.hosts).map(v => '<tr><td>' + [v, o.hosts[v].host, o.hosts[v].port, o.hosts[v].isReachable ? 'UP' : 'DOWN'].join('</td><td>') + '</td></tr>').join('');
  ret += '</table>';
  return ret;
}
async function splitText(s) {
let data = _splitText(s);
await testPortsParallel(data);
return buildOutput(data);
}
splitText('name host port IsReachable 1 a b 2 c d');
//console.log(JSON.stringify(output));
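For reference, that last sample call would leave _splitText's result shaped roughly like this before the port tests run (shown here for illustration; it is not output produced by the answer's code):
{
  header: ['name', 'host', 'port', 'IsReachable'],
  hosts: {
    '1': { host: 'a', port: 'b', isReachable: false },
    '2': { host: 'c', port: 'd', isReachable: false }
  }
}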
Hope this might be somewhat helpful. You can adjust the number of servers to test in parallel as per your need.
Note: there is a slight change in your testPort function as well (it now resolves with the host name and its reachability).
You are not helping yourself by mixing three different styles of async syntax. The first step should really be to simplify the code into a single style, which often reveals the issue.
At a glance, I suspect your issue is how you are chaining promises. Some of your then statements execute new promises but do not return them, which means you create multiple promise chains. You are also losing errors, as they have no catch clauses.
I would refactor into
router.get('/', function(req, res) {
  fs.readdir('C:\\Temp\\hostPorts', function(err, files) {
    sendFile(files)
      .then(result => res.send(result))
      .catch(err => {
        console.error(err);
        res.status(500).send("Boom");
      });
  });
});

async function sendFile(files) {
  const file = await chooseFile(files);
  const contents = await readTheFile(file);
  const splitContents = await splitText(contents);
  return {file: splitContents};
}
This uses async/await, which is easier to read than standard promise chains. With classic promise chains you always have to remember to return Promises from your then clauses, or you can get into trouble.
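For illustration (not from the original answer), a minimal contrast of that pitfall, where doA and doB stand for any promise-returning functions:
// broken: the inner promise is not returned, so the next .then does not
// wait for doB, and any error inside it escapes the chain
doA().then(() => {
  doB();
}).then(() => { /* may run before doB has finished */ });

// correct: returning the promise keeps the steps in order and errors catchable
doA().then(() => {
  return doB();
}).then(() => { /* runs only after doB has finished */ })
  .catch(err => console.error(err));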

JavaScript and Promise.all()

I'm trying to load a bunch of resources async using promises with Jimp.js. The loading code is a disaster trying to chain it all, and I need a cleaner solution.
What I came up with was below. This obviously doesn't do anything, because it's junk code, but I need to know if there was a failure loading any of the resources, and I need to know when they completed.
function doSomething(asdf) {
return new Promise((resolve, reject) => {
//console.log("It is done.");
// Succeed half of the time.
var x = Math.random();
if (x > .5) {
resolve(["SUCCESS",asdf,x])
} else {
reject(["Failure",asdf,x])
}
});
}
func();
function func() {
//Imagine a is actually an image loaded by doSomething
var a=null; doSomething("1").then(function (data) {a = data;},
(err) => {throw new err;});
//Imagine b is a font resource.
var b=null; doSomething("2").then(function (data) {b = data;},
(err) => {throw new err;});
Promise.all([a, b]).then(function() {
console.log(a);
console.log(b);
//Then here I expect everything to be correct, and continue on with the next function.
},
(err) => {console.log('Oops:' + err);}).
catch( (err) => {console.log('Oops:' + err);});
}
For some reason, this never outputs "Oops".
Here is a fail output:
[ 'SUCCESS', '1', 0.756461151774289 ]
null
What am I missing here?
Update
I took part of an answer I received and changed it so that it behaves exactly as I wanted:
function func() {
var a=doSomething("1").then(function (data) {a = data;});
var b=doSomething("2").then(function (data) {b = data;});
Promise.all([a, b]).then(function() {
console.log(a);
console.log(b);
},
(err) => {console.log('Reject:' + err);});
}
Update
Here is the actual code I'm using that's working great now:
LoadResources() {
var ps = [];
console.log("Loading now");
ps.push(jimp.read(this.ipath+"c4box.png").then(function (image) {obj.imBox = image;}));
ps.push(jimp.read(this.ipath+"red.png").then(function (image) {obj.imRed = image;}));
ps.push(jimp.read(this.ipath+"green.png").then(function (image) {obj.imGreen = image;}));
ps.push(jimp.read(this.ipath+"top.png").then(function (image) {obj.imTop = image;}));
ps.push(jimp.read(this.ipath+"bot.png").then(function (image) {obj.imBot = image;}));
ps.push(jimp.loadFont(jimp.FONT_SANS_32_WHITE).then(function (font) {obj.imFont = font;}));
Promise.all(ps).then( () => {
obj.loaded = true;
obj.imBg = new jimp(512, 576, function (err, image) { });
console.log("Actually loaded now.");
obj.StartGame();
});
console.log("Loading commands complete");
}
You cannot use those a and b variables for the images. You need to use variables for the promise objects that doSomething() returns. The images will only be available inside the then callback - Promise.all creates a promise that fulfills with an array of the results:
function func() {
// aPromise is a promise for an image loaded by doSomething
var aPromise = doSomething("1");
// bPromise is a promise for a font resource.
var bPromise = doSomething("2");
Promise.all([aPromise, bPromise]).then(function([a, b]) {
// ^^^^^^
console.log(a);
console.log(b);
// Then here I expect everything to be correct, and continue on with the next function.
  }, (err) => {
    console.log('Oops:' + err);
  });
}
Promise.all([a, b])
Because a and b are null, as you set them to null. Therefore Promise.all won't wait at all; it will resolve one tick afterwards, and as a and b get resolved / rejected very fast, that might have happened already, so a / b get set before execution reaches
console.log(a)
which will log the right results sometimes, but that's based on chance.
Promise.all returns a promise, and this promise resolves with the results of the previous promises:
Promise.all([ doSomething('1'), doSomething('2')])
.then(results => {
// results is an array which contains the result of the previous promises
const [a, b] = results
}).catch(err => console.log('Oops:' + err))
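If the surrounding function can itself be async, the same idea reads even more directly (a sketch using the question's doSomething, not part of either answer above):
async function func() {
  try {
    // Promise.all resolves with the results in the same order as the inputs
    const [a, b] = await Promise.all([doSomething('1'), doSomething('2')]);
    console.log(a);
    console.log(b);
  } catch (err) {
    console.log('Oops:' + err);
  }
}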

JavaScript checking if resource is reachable with fetch

I'm basically just trying to verify whether a resource is reachable from the executing client. I cannot use XHR, because the target resource doesn't allow that.
I'm pretty new to JS and am currently working with this:
var done = false;
var i = 1;
var t = "https://i.stack.imgur.com/Ya15i.jpg";
while(!done && i < 4)
{
console.log("try "+i);
done = chk(t);
sleep(1000);
i = i+1;
if (done)
{
console.log("Reachable!");
break;
}
else
{
console.log("Unreachable.");
}
}
function chk(target)
{
console.log("checking "+target)
fetch(target, {mode: 'no-cors'}).then(r=>{
return true;
})
.catch(e=>{
return false;
});
}
// busy fake sleep
function sleep(s)
{
var now = new Date().getTime();
while(new Date().getTime() < now + s){ /* busy sleep */ }
}
I was expecting this code to check for the resource, print the result, then wait for a sec. Repeat this until 3 tries were unsuccessful or one of them was successful.
Instead the execution blocks for a while, then prints all of the console.logs at once and the resource is never reachable (which it is).
I do know that the fetch operation is asynchronous, but I figured if I previously declare done and implement a sleep it should work. In the worst case, the while loop would use the previously declared done.
How do I achieve the described behavior? Any advice is welcome.
Your sleep function is blocking; what you really want is a recursive function that returns a promise, checking the URL up to n times with a delay of y milliseconds between attempts.
Something like this
function chk(target, times, delay) {
return new Promise((res, rej) => { // return a promise
(function rec(i) { // recursive IIFE
fetch(target, {mode: 'no-cors'}).then((r) => { // fetch the resourse
res(r); // resolve promise if success
}).catch( err => {
if (times === 0) // if number of tries reached
return rej(err); // don't try again
setTimeout(() => rec(--times), delay ) // otherwise, wait and try
}); // again until no more tries
})(times);
});
}
To be used like this
var t = "https://i.stack.imgur.com/Ya15i.jpg";
chk(t, 3, 1000).then( image => {
console.log('success')
}).catch( err => {
console.log('error')
});
And note that this does not fail on 404 or 500, any response is a successful request.
The main problem is that you are trying to return from a callback - that return value goes nowhere.
But fetch is Promise-based, so you can use Promises to simulate the delays as well.
Something like this should do the trick
// promise based delay
const delay = timeout => new Promise(resolve => setTimeout(resolve, timeout))
// check if target can be fetched
const check = target => fetch(target, {...})
.then(response => response.ok)
const ping = (target, times = 3, timeout = 1000) => check(target)
.then(found => {
if(!found && times) { // still can check
// wait then ping one more time
return delay(timeout).then(() => ping(target, times - 1, timeout))
}
return found
})
ping('https://i.stack.imgur.com/Ya15i.jpg')
.then(found => {
console.log(found ? 'Reachable': 'Unreachable')
})
Your chk function returns undefined; you return true/false from the promise callbacks, not from the containing function.
You should use recursion and a timeout in the catch callback.
It will be something like this:
var i = 0;
var done = false;
var t = "https://i.stack.imgur.com/Ya15i.jpg";
(function chk(target){
console.log("checking "+target)
fetch(target, {mode: 'no-cors'}).then(r=>{
done = true;
console.log("Reachable!");
})
.catch(e=>{
console.log("Unreachable.");
if(i < 4){
  i++; // count this failed attempt so the retries eventually stop
  setTimeout(function(){
    chk(target)
  }, 1000)
}
});
})(t)
You can't return within a callback. When you do, it is the callback that is returning, not the parent function. In fact, the function chk never returns anything.
What it sounds like you are intending to do is return the promise returned by fetch, and attempt the fetch up to three times.
Try this:
const numberOfTries = 3;
var t = "https://i.stack.imgur.com/Ya15i.jpg";
tryCheck(t, 1);

function tryCheck(resource, currentTry) {
  chk(resource).then(function() {
    console.log("Reachable!");
  }).catch(function(e) {
    console.log("Unreachable.");
    if (currentTry >= numberOfTries) return;
    // retry after a second without blocking the event loop
    setTimeout(function() {
      tryCheck(resource, currentTry + 1);
    }, 1000);
  });
}

function chk(resource) {
  console.log("checking " + resource);
  return fetch(resource, {mode: 'no-cors'});
}
Try this, Hope it works
var myHeaders = new Headers();
myHeaders.append('Content-Type', 'image/jpeg');
var myInit = { method: 'GET',
headers: myHeaders,
mode: 'no-cors',
cache: 'default' };
var myRequest = new Request('https://i.stack.imgur.com/Ya15i.jpg');
fetch(myRequest,myInit).then(function(response) {
...
});
