Update DOM with responses from several XMLHttpRequest - javascript

I am building a simple open source Chromium extension that retrieves some data from several URLs and then updates the DOM. I couldn't find another way to do this than by adding the line that updates the DOM inside the http1.onreadystatechange callback.
My XMLHttpRequest requests were often stuck on http1.readyState = 3, so I added a third parameter to http1.open() to make the request synchronous, like this:
http1.open("GET", url, false);
But I am still getting these errors:
results[1].join is not a function at XMLHttpRequest.http.onreadystatechange
Cannot read property 'join' of undefined at XMLHttpRequest.http.onreadystatechange
Even though they don't prevent the script from running, I think this isn't the right way to do what I want. So here is my question: how do I update the DOM with the responses from several XMLHttpRequest requests? Let's say I need to retrieve and compare all the data before updating the DOM. Is there a way to process all the data at once, after all of it has been retrieved (cf. my comment on the last line)?
Here is the relevant part of my script; the full script is available here:
var urls = [
    ["https://www.cnrtl.fr/morphologie/" + keyword, "vtoolbar", "morf_sound"], // best for plural
    ["https://www.cnrtl.fr/synonymie/" + keyword, "syno_format"],
];
// for testing, set keyword to any of these words: hibou, tribal, aller, lancer
var resultdiv = document.getElementById("result");
resultdiv.innerText = "requete en cours";
var results = [];
var errors = [];
urls.forEach((item, index) => {
    var http = new XMLHttpRequest();
    http.onreadystatechange = function () {
        if (http.readyState == 4 && http.status == 200) {
            var parser = new DOMParser();
            var ulr1response = parser.parseFromString(http.responseText, "text/html");
            if (index == 0) {
                // retrieve the data needed, save them in a list and push this list to the main list "results"
            } else if (index == 1) {
                // retrieve the data needed, save them in a list and push this list to the main list "results"
            }
            // update the DOM
            if (results[1] == "") {
                resultdiv.innerHTML = results[0].join(", ") + "</br></br>Pas de synonymes trouvés";
            } else {
                resultdiv.innerHTML = "<b>" + results[0].join(", ") + "</br></br>Synonymes:</b></br>● " + results[1].join('</br>● ');
            }
        } else {
            errors.push(index);
            resultdiv.innerText = "Erreur: " + index + " " + http.readyState + " " + http.status;
        }
    };
    http.open("GET", item[0], false);
    http.send(null); // null = no parameters
});
// it would be simpler if I could update the DOM here and not in http.onreadystatechange

If you want to execute some code once all requests have succeeded, you can try using Promise.all together with Fetch.
let keyword = "beaucoup";
let parser = new DOMParser();
let urls = [
    ["https://www.cnrtl.fr/morphologie/" + keyword, "vtoolbar", "morf_sound"], // best for plural
    ["https://www.cnrtl.fr/synonymie/" + keyword, "syno_format"]
];
let fetchPromises = urls.map(
    item => fetch(item[0])
        .then(response => response.text()) // response.text() itself returns a promise
        .then(html => parser.parseFromString(html, "text/html"))
);
Promise.all(fetchPromises).then(
    results => {
        // code in here executes once all fetchPromises have succeeded
        // "results" will be an array of parsed response documents
        console.log(results);
    }
).catch(console.error);
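Since all the parsed documents are available together inside that callback, the DOM update from the question can happen in one place. A minimal sketch, reusing the fetchPromises above and the #result element from the question; the selectors .morf_sound and .syno_format are placeholders for whatever extraction the real pages need:
Promise.all(fetchPromises).then(results => {
    // results[0] is the parsed "morphologie" page, results[1] the "synonymie" page.
    // The selectors below are placeholders; adapt them to the real page structure.
    let plurals = Array.from(results[0].querySelectorAll(".morf_sound")).map(el => el.textContent.trim());
    let synonyms = Array.from(results[1].querySelectorAll(".syno_format")).map(el => el.textContent.trim());

    let resultdiv = document.getElementById("result");
    if (synonyms.length === 0) {
        resultdiv.innerHTML = plurals.join(", ") + "<br><br>Pas de synonymes trouvés";
    } else {
        resultdiv.innerHTML = "<b>" + plurals.join(", ") + "<br><br>Synonymes:</b><br>● " + synonyms.join("<br>● ");
    }
}).catch(console.error);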

Related

API response is giving two responses when only one is requested

I'm making an API request in JS that is working, but it keeps giving two responses when I'm only expecting one. Here's the code:
CallCoinGecko();

function CallCoinGecko() {
    var cgrequest = new XMLHttpRequest();
    var url1 = 'https://api.coingecko.com/api/v3/simple/price?ids=';
    var id = 'ethereum';
    var url2 = '&vs_currencies=USD&include_market_cap=true&include_24hr_vol=true&include_24hr_change=true';
    cgrequest.open("GET", url1 + id + url2, true);
    cgrequest.send();
    cgrequest.onreadystatechange = function () {
        if (cgrequest.status == 200) {
            var cgresponse = cgrequest.responseText;
            var cgobj = JSON.parse(cgresponse);
            console.log(cgobj);
            var cgprice = (cgobj[id]['usd']);
            console.log(cgprice);
        }
    };
}
Here is a console screenshot that shows the two responses:
[console screenshot]
Any thoughts on this are appreciated, thanks.
Try verifying the value of readyState as well as the status. Example:
if (cgrequest.status === 200 && cgrequest.readyState === 4)
The problem is that, as mentioned, readyState can take multiple values, and every time it changes your code that prints the response text runs. I'm sure that if you looked in the network tab, you'd see the API request isn't actually being carried out twice; only your response text is being printed twice. Checking that readyState is 4 confirms that the request is done, so the block should only run once.
My hunch that the API isn't being called twice comes from the fact that the data is exactly the same in each console.log().
Good luck :)
You should compare the readyState value in your if statement as well.
function CallCoinGecko() {
    var cgrequest = new XMLHttpRequest();
    var url1 = 'https://api.coingecko.com/api/v3/simple/price?ids=';
    var id = 'ethereum';
    var url2 = '&vs_currencies=USD&include_market_cap=true&include_24hr_vol=true&include_24hr_change=true';
    cgrequest.open("GET", url1 + id + url2, true);
    cgrequest.onreadystatechange = function (res) {
        if (cgrequest.status === 200 && cgrequest.readyState === XMLHttpRequest.DONE) {
            var cgresponse = cgrequest.responseText;
            var cgobj = JSON.parse(cgresponse);
            console.log(cgrequest.status, cgobj);
            var cgprice = (cgobj[id]['usd']);
            console.log(cgrequest.status, cgprice);
        }
    };
    cgrequest.send();
}
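For what it's worth, a fetch-based version avoids the readyState bookkeeping entirely. This is only a sketch of the same request; the CoinGecko URL and the response shape (cgobj[id]['usd']) are taken from the question:
function callCoinGecko() {
    var id = 'ethereum';
    var url = 'https://api.coingecko.com/api/v3/simple/price?ids=' + id +
              '&vs_currencies=USD&include_market_cap=true&include_24hr_vol=true&include_24hr_change=true';
    fetch(url)
        .then(function (response) {
            if (!response.ok) throw new Error('HTTP ' + response.status);
            return response.json(); // parse the JSON body once the full response has arrived
        })
        .then(function (cgobj) {
            console.log(cgobj);
            console.log(cgobj[id]['usd']); // logged exactly once
        })
        .catch(console.error);
}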

Return multiple values from stored procedure

The idea is that each subject has multiple topics, and when I call the function getTopicsForSubject() to get this data onto a website page, it returns only one of the records from the table. I'm testing this using console.log(response) in the JavaScript file to see what is being passed in from the stored procedure/API connection. I'm thinking I need to read what's being passed by the stored procedure as if it were an array, although I'm not too sure how that is done.
Stored Procedure:
USE [Capstone]
GO
/****** Object: StoredProcedure [dbo].[getTopicsForSubject] Script Date: 2/21/2021 11:30:03 AM ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
ALTER PROCEDURE [dbo].[getTopicsForSubject]
    @SubjectID int
AS
BEGIN
    select *
    from Topic
    where SubjectID = @SubjectID
    return;
END
API Code
private static string ExecuteSPGetSubjectsForTopic(string queryString, string subjectID)
{
    string json = "";
    string connectionString = ConfigurationManager.AppSettings["dbconn"].ToString();
    using (SqlConnection conn = new SqlConnection(connectionString))
    {
        conn.Open();
        // 1. create a command object identifying the stored procedure
        SqlCommand cmd = new SqlCommand(queryString, conn);
        // 2. set the command object so it knows to execute a stored procedure
        cmd.CommandType = CommandType.StoredProcedure;
        // 3. add parameter to command, which will be passed to the stored procedure
        cmd.Parameters.Add(new SqlParameter("@SubjectID", subjectID));
        // execute the command
        using (SqlDataReader rdr = cmd.ExecuteReader())
        {
            // iterate through results, printing each to console
            while (rdr.Read())
            {
                json = (string)rdr[0].ToString() + "|" + (string)rdr[1].ToString() + "|" + (string)rdr[2].ToString() + "|" + (string)rdr[3].ToString();
            }
        }
    }
    return json;
}
JavaScript Code
function getTopicsForSubject()
{
    var postObj = {
        subjectID: localStorage.getItem('myFutureCurrentSubject')
    };
    console.log(postObj);
    var req = new XMLHttpRequest();
    req.open('POST', 'https://localhost:44303/api/JSON/getTopicsForSubject', true);
    req.setRequestHeader('Content-Type', 'application/json');
    req.onreadystatechange = function () { // Call a function when the state changes.
        if (this.readyState === XMLHttpRequest.DONE && this.status === 200) {
            console.log(req.response);
        }
    };
    req.send(JSON.stringify(postObj));
    return false;
}
You're overwriting your json variable each time a row is read, so only the last row survives. Try this:
json += (string)rdr[0].ToString() + "|" + (string)rdr[1].ToString()+ "|" + (string)rdr[2].ToString() + "|" + (string)rdr[3].ToString();
This is not the right way to return data. In JS you will still get this as a string and then parse it like this to get the actual values:
var array = req.response.split('|');
for (var i = 0; i < array.length; i++) {
    console.log(array[i]);
}
I would suggest you handle this data properly by returning an HTTP response from the API instead of a string, e.g. create a list, populate it while reading from the reader, and return it. Try this:
List<object[]> topics = new List<object[]>();
while (rdr.Read())
{
    object[] row = new object[rdr.FieldCount];
    for (int i = 0; i < rdr.FieldCount; i++)
    {
        row[i] = rdr[i];
    }
    topics.Add(row);
}
return Ok(new { Data = topics });
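On the client side the handler could then parse the body as JSON instead of splitting on '|'. A sketch, assuming the serializer keeps the Data property name from the anonymous object above:
req.onreadystatechange = function () {
    if (this.readyState === XMLHttpRequest.DONE && this.status === 200) {
        var payload = JSON.parse(req.response); // { Data: [ [col0, col1, ...], ... ] }
        payload.Data.forEach(function (row) {
            console.log(row); // one array of column values per topic
        });
    }
};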

Getting around Node's Asynchronous nature

I am writing a content scraper that scrapes information about shirts on a particular website. I have everything set up with NPM packages in Node to scrape and create a CSV file. The problem I am running into is that, as many know, Node is asynchronous in nature. The CSV file I am trying to write is written before the JSON object I create has finished being built (I iterate with an each loop to build it), so json2csv (npm package) receives my 'fields' parameter but an empty object for the data. Can anyone tell me how to make Node wait until my JSON object is built before trying to use fs.writeFile to create the CSV file? Thank you.
'use strict';
// require NPM packages
var request = require('request');
var cheerio = require('cheerio');
var fs = require('fs');
var json2csv = require('json2csv');
// Array for shirts JSON object for json2csv to write.
var ShirtProps = [];
var homeURL = "http://www.shirts4mike.com/";
// start the scraper
scraper();
// Initial scrape of the shirts link from the home page
function scraper() {
    // use the DataFolderExists function to check if data is a directory
    if (!DataFolderExists('data')) {
        fs.mkdir('data');
    }
    // initial request of the home url + the shirts.php link
    request(homeURL + "shirts.php", function (error, response, html) {
        if (!error && response.statusCode == 200) {
            var $ = cheerio.load(html);
            // scrape each of the links for its html data
            $('ul.products li').each(function (i, element) {
                var ShirtURL = $(this).find('a').attr('href');
                console.log(ShirtURL);
                // pass in each ShirtURL to be scraped and added to an object
                ShirtHTMLScraper(ShirtURL);
            });
            FileWrite();
            // end first request
        } else {
            console.error(error);
        }
    });
}
// create function to write the CSV file.
function FileWrite() {
    var fields = ['Title', 'Price', 'ImageURL', 'URL', 'Time'];
    var csv = json2csv({data: ShirtProps, fields: fields});
    console.log(csv);
    var d = new Date();
    var month = d.getMonth() + 1;
    var day = d.getDate();
    var output = d.getFullYear() + '-' +
        (('' + month).length < 2 ? '0' : '') + month + '-' +
        (('' + day).length < 2 ? '0' : '') + day;
    fs.writeFile('./data/' + output + '.csv', csv, function (error) {
        if (error) throw error;
    });
}
// function to scrape each of the shirt links and create a ShirtData object for each.
function ShirtHTMLScraper(ShirtURL) {
    request(homeURL + ShirtURL, function (error, response, html) {
        if (!error && response.statusCode == 200) {
            var $ = cheerio.load(html);
            var time = new Date().toJSON().substring(0, 19).replace('T', ' ');
            // json array for json2csv
            var ShirtData = {
                title: $('title').html(),
                price: $(".price").html(),
                imgURL: $('img').attr('src'),
                url: homeURL + ShirtURL,
                time: time.toString()
            };
            // push the shirt data scraped into the ShirtProps array
            ShirtProps.push(ShirtData);
            console.log(ShirtProps);
            // // set the fields in order for the CSV file
            // var fields = ['Title', 'Price', 'ImageURL', 'URL', 'Time'];
            // // use json2csv to write the file -
            // var csv = json2csv({data: ShirtProps, fields: fields});
            // console.log(csv);
            // // date for the filesystem to save the scrape with today's date.
            // var d = new Date();
            // var month = d.getMonth() + 1;
            // var day = d.getDate();
            // var output = d.getFullYear() + '-' +
            //     (('' + month).length < 2 ? '0' : '') + month + '-' +
            //     (('' + day).length < 2 ? '0' : '') + day;
            // // use filesystem to write the file, or overwrite if it exists.
            // fs.writeFile('./data/' + output + '.csv', csv, function (error) {
            //     if (error) throw error;
            // }); // end writeFile
        } else {
            console.error(error);
        }
    });
}
// Check if data folder exists, source: http://stackoverflow.com/questions/4482686/check-synchronously-if-file-directory-exists-in-node-js
function DataFolderExists(folder) {
    try {
        // Query the entry
        var DataFolder = fs.lstatSync(folder);
        // Is it a directory?
        if (DataFolder.isDirectory()) {
            return true;
        } else {
            return false;
        }
    } // end try
    catch (error) {
        console.error(error);
    }
}
It's not so much about node being asynchronous in nature as it is about certain functions being asynchronous. In this case, it's the calls using request that are asynchronous. You're calling FileWrite directly after the second request call (the one inside ShirtHTMLScraper) begins. Place the call to FileWrite in the callback of ShirtHTMLScraper, after populating ShirtProps.
Edit: After looking closer, that won't work either. The problem is that you are calling an asynchronous function inside a synchronous loop. You can get it to work by keeping a counter that increments in each asynchronous callback and checking whether it has reached the length of the array you're iterating over. On the last iteration, run FileWrite.
A better way to go might be to check out the Async library. You can use .each() to supply two callbacks, one to run on each iteration, and one to run when they've all finished.
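A rough sketch of the counter approach described above, adapted to the names used in the question (treat it as illustrative rather than a drop-in patch):
// inside scraper(), after cheerio has loaded the product list page
var links = $('ul.products li').map(function () {
    return $(this).find('a').attr('href');
}).get();

var completed = 0;
links.forEach(function (ShirtURL) {
    request(homeURL + ShirtURL, function (error, response, html) {
        if (!error && response.statusCode == 200) {
            // ...build ShirtData and push it onto ShirtProps, as in the question...
        }
        completed++;
        // only write the CSV once every asynchronous request has called back
        if (completed === links.length) {
            FileWrite();
        }
    });
});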

javascript xmlhttp response works is odd

onreadystatechange() gets called 3 times. The second time, the innerHTML of 'results-div' is set to 'empty', and it is then erased on the last call to the function. Also, does jQuery handle Ajax browser compatibility?
function loaddata() {
    var res = new XMLHttpRequest();
    res.onreadystatechange = function () {
        alert('change'); // for debug
        if (res.readyState == 4 && res.status == 200) {
            document.getElementById('results-div').innerHTML = res.response;
        }
        else {
            alert('nothing'); // for debug
            document.getElementById('results-div').innerHTML = '<p> empty </p>';
        }
    };
    res.open('GET', '?search=' + value);
    res.send();
}
'results-div'.innerHTML is erased on the last call because the if condition must be true then, so 'results-div'.innerHTML is set to res.response, and res.response is '' (empty). The earlier calls (readyState 2 and 3) fall into the else branch, which is why 'empty' shows up on the second call.
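One way to avoid the intermediate 'empty' flashes is to set the placeholder once before sending and only touch the DOM again when the request is actually done. A sketch along those lines, assuming the same markup and a value variable defined elsewhere, as in the question:
function loaddata() {
    var resultsDiv = document.getElementById('results-div');
    resultsDiv.innerHTML = '<p> loading... </p>'; // placeholder set exactly once

    var res = new XMLHttpRequest();
    res.onreadystatechange = function () {
        if (res.readyState !== 4) return; // ignore intermediate state changes
        if (res.status == 200 && res.response !== '') {
            resultsDiv.innerHTML = res.response;
        } else {
            resultsDiv.innerHTML = '<p> empty </p>';
        }
    };
    res.open('GET', '?search=' + value);
    res.send();
}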

ajax - request data from multiple files on one page?

I am trying to run multiple Ajax functions on load of a single page, which will get data from two different PHP pages. Both Ajax functions will then print the retrieved data onto the page from which the Ajax function was called. The problem I encountered is that the last Ajax call overrides the first one, so only the second function's result is shown.
The code for one of the Ajax functions (both of them are very similar to each other):
function favorite_track_request(str) {
    switch (str) {
        case 'next_track':
            var feed = 'require_fav_track_info';
            var offset = track_currentOffset + 5;
            if (offset > max_track_range) {
                offset -= 5;
            }
            break;
        case 'prev_track':
            var feed = 'require_fav_track_info';
            var offset = track_currentOffset - 5;
            if (offset < 0) {
                offset = 0;
            }
            break;
        default:
            var feed = 'require_fav_track_info';
            var offset = 0;
    }
    request = new ajaxRequest();
    request.open("GET", "scripts/" + feed + ".php?offset=" + offset, true);
    request.onreadystatechange = function () {
        if (this.readyState == 4) {
            if (this.status == 200) {
                if (this.responseText != null) {
                    if (request.responseText) {
                        document.getElementById('fav_tracks').innerHTML = request.responseText;
                    }
                } else alert("No data received");
            } else {
                alert("Ajax error: " + this.statusText);
            }
        }
    };
    request.send(null);
    track_currentOffset = offset;
}
This Ajax call would then print to <div id="fav_tracks"></div>; however, this gets overridden because another call (similar to the one above) is made and it overrides the previous one. Is there any way to stop this?
I built a data handler "class" to manage just such a thing. You are right, the one overrides the other. I haven't investigated it, but it's probably because you are re-assigning the on-event handler that your AJAX request uses.
Below is the class I built (I know, it's not jQuery... but it works). It uses timeouts to "know" when to fire the second and third async requests. There is probably a jQuery function that does the same thing.
You would call this by using the below for each AJAX call (giving each call a unique var name):
dataHandler = new DataHandler("[name of datafile to call]");
dataHandler.query['[myQueryName]'] = 'myValue'; // this is an Object used to build a query string, if needed, so use as many data pairs as you need
dataHandler.asynchronous(myOnReadyStateChangeFN); // pass the fn you want to use for readystatechange as a reference... do not include the ()
Here's the "class":
function DataHandler(dataFile) {
    this.dataFile = dataFile;
    dataInProgress = false;
    this.query = new Object();
    this.asynchronous = function (fn) {
        var thisFunction = this.asynchronous;
        var rand = Math.floor(Math.random() * 100001);
        var query, timeOutFunctionString;
        if (this.dataInProgress) {
            timeOutFunctionString = callingObjectName + ".asynchronous(" + fn + ")";
            this.thisTimeout = setTimeout(timeOutFunctionString, 500);
        } else {
            dataInProgress = true;
            this.assignRequestObject.xmlHttp.onreadystatechange = function () {
                fn();
                dataInProgress = false;
                return;
            };
        }
        query = this.dataFile + '?r=' + rand;
        for (var key in this.query) query = query + '&' + key + '=' + this.query[key];
        // console.info("DataHandler.asynchronous\nquery = " + query + '\nthis.dataFile = ' + this.dataFile);
        this.assignRequestObject.xmlHttp.open('GET', query, true);
        this.assignRequestObject.xmlHttp.send(null);
    };
    this.AssignRequestObject = function () {
        try { this.xmlHttp = new XMLHttpRequest() } catch (e) {
            try { this.xmlHttp = new ActiveXObject("Msxml2.XMLHTTP") } catch (e) {
                try { this.xmlHttp = new ActiveXObject("Microsoft.XMLHTTP") } catch (e) {
                    alert("Your browser does not support AJAX!");
                    return false;
                }
            }
        }
    };
    this.assignRequestObject = new this.AssignRequestObject();
}
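A much smaller change, assuming the only collision in the question's code is the shared global request variable, would be to declare the request locally so each call keeps its own XMLHttpRequest and its own handler. A sketch, with the feed/offset logic elided:
function favorite_track_request(str) {
    var feed = 'require_fav_track_info'; // as in the question
    var offset = 0;                      // ...offset logic from the question goes here

    // "var" keeps the request private to this call, so a second, similar
    // function writing to another div cannot overwrite this handler.
    var request = new XMLHttpRequest();
    request.open("GET", "scripts/" + feed + ".php?offset=" + offset, true);
    request.onreadystatechange = function () {
        if (request.readyState == 4 && request.status == 200) {
            document.getElementById('fav_tracks').innerHTML = request.responseText;
        }
    };
    request.send(null);
}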
