Below is my code
var request = require("request");

$('.xxx').siblings().each(function(i, elem) {
    if (i === $('.xxx').siblings().length - 1) {
        goToOrderDetail(true, i);
    }
});

function goToOrderDetail(status, i) {
    console.log('Processing ' + i + ' data!');
    var options = {
        method: "POST",
        url: "https://example.com"
    };
    request(options, function(error, response, body) {
        console.log(response);
    });
}
When I run this file with Node.js, I get messages like the following in my terminal:
Processing 1 data!
Processing 2 data!
Processing 3 data!
Processing 4 data!
Processing 5 data!
response message
response message
response message
response message
response message
But what I expect and want is:
Processing 1 data!
response message
Processing 2 data!
response message
Processing 3 data!
response message
Processing 4 data!
response message
Processing 5 data!
response message
I know this is a sync/async issue, but how can I fix it?
How can I wait for the goToOrderDetail function to finish before the loop moves on to the next element?
I did look at the async package but have no idea which of its functions I can use.
Your code is running fine, but your console.log calls do not reflect what is actually happening.
If you move your console.log like this:
var options = {
    method: "POST",
    url: "https://example.com"
};
request(options, function(error, response, body) {
    console.log('Processing ' + i + ' data!');
    console.log("test");
});
you will get
Processing 1 data!
test
Processing 2 data!
test
Processing 3 data!
test
Processing 4 data!
test
Processing 5 data!
test
It's not very clear what you want to do, or even how your function gets called that many times with the if (i === ...) check that you put there. One way to really ensure that your requests are made sequentially is to "chain" them. It has the added benefit of stating more clearly what you intend.
The following (untested) code illustrates what I mean. Keep in mind that you lose the benefit of running requests in parallel.
var request = require("request");
var numberOfRequests = $(".xxx").length;

function goToOrderDetail(status, index, maximum) {
    console.log('Preparing request ' + index + '!');
    var options = {
        method: "POST",
        url: "https://example.com"
    };
    request(options, function(error, response, body) {
        console.log("received response for request " + index + ":");
        console.log(response);
        // here, the request is done; maybe we want to launch another one
        if (index < maximum) {
            goToOrderDetail(status, index + 1, maximum);
        }
    });
}

// call the function once, for the first request, and let the chaining happen
goToOrderDetail(true, 1, numberOfRequests);
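Since the question mentions the async package: for reference, the same one-request-at-a-time behaviour can also be written with async.eachSeries, which only starts the next iteration once the previous callback has been called. This is just a sketch, assuming a fixed list of indexes to process and the same placeholder URL as above:

var async = require("async");
var request = require("request");

// hypothetical list of items to process, one POST request each
var indexes = [1, 2, 3, 4, 5];

async.eachSeries(indexes, function(index, done) {
    console.log('Processing ' + index + ' data!');
    request({ method: "POST", url: "https://example.com" }, function(error, response, body) {
        console.log(response && response.statusCode);
        done(error); // calling done() lets eachSeries move on to the next index
    });
}, function(err) {
    if (err) console.error("One of the requests failed:", err);
});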
I'm using jquery.csv.min.js to create an array based on a CSV. This part works fine. However, when I try to parse that 2D array into sessionStorage items, I'm getting the dreaded "Uncaught TypeError: Cannot read property '0' of undefined". Here's what I have so far:
var stationData;
var station = "";

// Gather data from CSVs
$.ajax({
    type: "GET",
    url: "document.csv",
    dataType: "text",
    success: function(response) {
        stationData = $.csv.toArrays(response);
        console.log(stationData); // This works and confirms the array in the console
        console.log("Station Variables Success!");
    }
});

// Parse array into sessionStorage items
$(document).ready(function() {
    setTimeout(() => {
        station = getUrlParameter('stn');
        var v;
        var varLen = stationData[0].length;
        for (v = 0; v < varLen; v++) {
            sessionStorage.setItem(stationData[0][v], stationData[station][v]); // Line producing error
            console.log("Setting sessionStorage:" + stationData[0][v] + " to \"" + stationData[station][v] + "\"");
        }
    }, 2000);
});
When I ran this on my local XAMPP Apache server, I was getting the same TypeError until I added the setTimeout() to space things out a bit, figuring it was trying to set the storage items before the array had finished loading, and that worked. Just a 10ms timeout is enough on my local server, which is why it's there. However, when I upload this to our live server, the TypeError comes back. I've tried increasing the timeout in increments up to 10000ms, but it still happens. I've googled around with no luck, so I'm hoping someone here might be able to share some insight!
It's throwing a TypeError because you are trying to read the value of an asynchronous operation before it has been returned by the server.
Increasing the timeout is not the solution.
You just need to slightly refactor your code like this:
$(document).ready(function() {
    function fetchCSVFromServer(successCb, errorCb) {
        return $.ajax({
            type: "GET",
            url: "document.csv",
            dataType: "text"
        })
        .done(successCb)
        .fail(errorCb);
    }

    var successCb = function(response) {
        // Parse array into sessionStorage items
        var stationData = $.csv.toArrays(response);
        console.log(stationData); // Confirms the array in the console
        console.log("Station Variables Success!");

        // No timeout is needed any more: this callback only runs once the
        // server has responded, so stationData is guaranteed to be populated.
        var station = getUrlParameter('stn');
        var varLen = stationData[0].length;
        for (let v = 0; v < varLen; v++) {
            sessionStorage.setItem(stationData[0][v], stationData[station][v]);
            console.log("Setting sessionStorage:" + stationData[0][v] + " to \"" + stationData[station][v] + "\"");
        }
    };

    var errorCb = function(jqXHR, textStatus, errorThrown) {
        console.error(jqXHR);
    };

    fetchCSVFromServer(successCb, errorCb);
});
This code has two parts: first, making the XHR with $.ajax inside the fetchCSVFromServer function; second, the code that depends on the asynchronous XHR call, which now lives in the successCb callback.
The trick is that the callback attached via .done only runs after the server has sent its response, so everything inside it can safely use the data.
This is a well-known and solved problem; for more details refer to this question.
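For what it's worth, with a Promise-aware jQuery (3.x and later) the same flow can also be written with async/await. This is only a sketch of the same idea, assuming getUrlParameter is available exactly as in your code:

$(document).ready(async function() {
    try {
        // $.ajax returns a thenable in jQuery 3.x, so it can be awaited
        const response = await $.ajax({ type: "GET", url: "document.csv", dataType: "text" });
        const stationData = $.csv.toArrays(response);
        const station = getUrlParameter('stn');
        stationData[0].forEach(function(header, v) {
            sessionStorage.setItem(header, stationData[station][v]);
        });
    } catch (jqXHR) {
        console.error(jqXHR);
    }
});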
I can't seem to break out of my each loop if the AJAX call returns an error. I've tried
return false;
and other similar things, but the $.each still continues to run.
I need to be able to do this so that I can display error messages from my back end after posting via AJAX (I know this is bad practice, but a client needed to be able to send multiple forms off at once).
Can anyone explain what I've done wrong?
var postAll = function(e)
{
    e.preventDefault();
    var form_response = [];
    var formsCollection = document.getElementsByTagName("form");
    $.each(formsCollection, function (key, value)
    {
        console.log(value.action);
        console.log(value.id);
        var url = value.action;
        var id = value.id;
        var data = ($('#' + id + '').serialize());
        if (id == 'additionalInfo')
        {
            data = {'Add_info': $('#Add_info').val()};
        }
        if (id != 'DONE')
        {
            $.ajax({
                type: "POST",
                dataType: 'json',
                url: url,
                beforeSend: function (xhr)
                {
                    xhr.setRequestHeader('X-CSRF-TOKEN', $("#token").attr('content'));
                },
                data: data,
                success: function (data)
                {
                    console.log('success');
                    form_response.push(data); // store the response from the php script.
                },
                error: function (data)
                {
                    console.log('fail');
                    display_errors(data, id); // show errors from the php script.
                    return true;
                }
            });
        }
    });
}
AJAX is asynchronous: when your $.each runs, it fires off each AJAX call and does not wait for it to finish before moving on to the next one.
To solve your problem, write a function that executes the first AJAX call and, in its success handler, calls itself again for the next one.
Example:
var data = [form1, form2 /* ...etc */];

function letsLoop(data, index = 0) {
    if (index >= data.length) return; // all forms processed, stop chaining
    $.ajax({
        // url: ... (your usual request options go here)
        success: function() {
            letsLoop(data, index + 1); // only start the next call once this one succeeded
        },
        error: function() {
            // not recursing here effectively "breaks out" of the chain
        }
    });
}

and here you call your function:

letsLoop(data, 0);
If by breaking out of the loop you mean the return in your error handler, then it won't work as you think it would.
Your loop creates the asynchronous requests 'at once'. Each of these requests is then handled by the browser (more or less simultaneously), and then the responses come in. So by the time your error handler runs, the loop has long since finished.
BTW, the return in your case relates to the error handler, not to the function inside the loop.
So, to achieve what you want, you should 'queue' your AJAX requests and perform them one by one.
One possible solution is to create an array of forms, then take the first one (removing it from the array), perform a request, and on the response repeat the whole thing, continuing until the array is empty; see the sketch below.
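A rough sketch of that queue idea, assuming plain HTML forms whose action attributes hold the POST URLs (the option names here are placeholders for your real $.ajax settings):

// Hypothetical sketch: process a queue of forms one request at a time.
function processQueue(forms) {
    if (forms.length === 0) {
        return; // queue empty, we are done
    }
    var form = forms.shift(); // take (and remove) the first form from the queue
    $.ajax({
        type: "POST",
        url: form.action,
        data: $(form).serialize(),
        dataType: 'json'
    })
    .done(function(data) {
        processQueue(forms); // success: move on to the next form
    })
    .fail(function(jqXHR) {
        console.log('fail'); // error: simply not recursing stops the queue here
    });
}

// e.g. start with all forms on the page:
processQueue($('form').toArray());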
I'm trying to process the JSON result returned from the request, so I need to expand its scope outside the request call.
To do that I declared a data variable as an empty string and assigned the result to it, but it doesn't print the result.
How can I accomplish this?
module.exports = function(callback) {
    var request = require("request");
    var url = "http://sheetsu.com/apis/94dc0db4";
    var data = "";
    request({
        url: url,
        json: true
    }, function (error, response, body) {
        if (!error && response.statusCode === 200) {
            callback(body);
            data = body;
        }
    });
    console.log(data);
};
This is classic async confusion: your console.log call runs before the HTTP request callback does.
Your script is executed in this order:
1. request() is executed
2. console.log(data)
3. the request() callback function, where you assign data a value
If you want to print data, you must do it inside the request callback function; see the sketch below. The async module is very useful when performing async tasks, especially if you need to perform tasks in a specific order and use the data from these requests.
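For illustration, a minimal sketch of the same module with the logging moved into the callback (the URL is the one from the snippet above; the error branch is just a placeholder):

module.exports = function(callback) {
    var request = require("request");
    var url = "http://sheetsu.com/apis/94dc0db4";

    request({ url: url, json: true }, function (error, response, body) {
        if (!error && response.statusCode === 200) {
            console.log(body); // the data only exists here, once the response has arrived
            callback(body);
        } else {
            callback(null);    // placeholder: signal failure however suits your caller
        }
    });
};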
I'm doing a JSON call like this:
var desc = getItemDescriptions(xxx);

function getItemDescriptions(xxx) {
    var url = "xxx";
    var info = {};
    request({
        url: url,
        json: true
    }, function (error, response, body) {
        if (!error && response.statusCode === 200) {
            info["..."] = body.result[xxx].yyy;
            info["..."] = body.result[xxx].yyy;
            info["..."] = body.result[xxx].yyy;
            info["..."] = body.result[xxx].yyy;
            info["..."] = body.result[xxx].yyy;
        }
    });
    return info;
}
My problem is that the JSON request needs some time to get a response back, and my function doesn't wait for it. The function returns the empty object without waiting.
How can I wait for the response and then return the filled object?
Thanks
It's not that the request executes too fast; it's the way JavaScript runs statement after statement. To get the data you need to use it in the callback function, because the data is only available once the server's response comes back; since this is an asynchronous call, by the time the response arrives JavaScript has already executed the next statements. A rough callback-based sketch follows.
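As a sketch (keeping the placeholder names from the question), the function could accept a callback and hand over info once the response is in:

function getItemDescriptions(xxx, done) {
    var url = "xxx"; // placeholder URL, as in the question
    request({ url: url, json: true }, function (error, response, body) {
        var info = {};
        if (!error && response.statusCode === 200) {
            info["..."] = body.result[xxx].yyy; // same placeholder fields as before
        }
        done(info); // hand the filled object to the caller instead of returning it
    });
}

// usage: the result is only available inside the callback
getItemDescriptions(xxx, function (desc) {
    console.log(desc);
});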
Juhana already linked you to the best place to get a good solution: How to return the response from an async call.
A quick and dirty hack would be (if request is a jQuery-like Ajax function) to make the request synchronous.
This can be done by adding async: false to the first parameter passed to request:
request({ url: url, json: true, async: false }, function ....
That way, return info will be executed AFTER your request has finished.
HTH
Georg
I have a little trouble with a Node application. The problem is: I have a script on a website "x", and this script calls a function that is on another server (like analytics). I'm calling the function with AJAX, and when the function returns the data something curious happens: when I check the network tab with Developer Tools in Chrome, the callback shows the response like I wanted in JSON format, but no data shows up. My code:
var xml2js = require('xml2js'),
    http = require('http'),
    request = require('request');

var parserController = function (aw) {
    console.log('Parse Controller load');
    aw.get('/ads/:keyword/:sid/:pid/:count', function (req, res) {
        res.setHeader('Content-Type', 'application/json');
        request('http://'+req.params.sid+'.'+req.params.pid+'.autoweb-xml.com/feed?&sid='+req.params.sid+'&auth=2Al5&subid=&q='+req.params.keyword+'&ip=127.0.0.1&ua=Mozilla/5.0%20(Windows%20NT%206.1;%20WOW64;%20rv:26.0)%20Gecko/20100101%20Firefox/26.0&ref=awebads.lan&count='+req.params.count+'&state=&city=', function (error, response, body) {
            if (!error && response.statusCode == 200) {
                var parser = xml2js.parseString;
                var data = '';
                parser(body, {explicitRoot: false}, function (err, result) {
                    if (!err) {
                        data = result;
                        var dataP = data.results[0];
                        var dataS = dataP.sponsored[0];
                        console.log(dataS.listing);
                        return res.send(dataS.listing);
                    } else {
                        console.log(err);
                    }
                });
            }
        }); // end of the request call
    });
};
and my calling function is:
var xhr = $.ajax({
    type: 'GET',
    url: 'http://HOST/ads/' + configParams.keyword + '/' + configParams.envSource + '/' + configParams.envPublisher + '/' + configParams.envCount,
    dataType: 'jsonp',
    async: false,
    crossDomain: true
});

xhr.done(function (data) {
    console.log(data);
    data.forEach(function (item) {
        window.collections.ads.add(item);
    });
}).fail(function (err) {
    //console.log('failed');
    //console.log(err)
});
When I log the data in the console, this part shows me the XMLHttpRequest. Thanks in advance for your help.
You are sending JSON:
the callback shows the response like I wanted in JSON format
… but the client is expecting JSON-P
dataType : 'jsonp',
Either:
Tell the client to expect JSON (or just remove the dataType line and let it use the Content-Type header) and set Access-Control-Allow-Origin on the response headers to give the site permission to access it cross-domain, or
Send JSON-P back instead: look at the callback parameter in the query string, send Content-Type: application/javascript (not JSON!), and return callback_value(your_json); as the response body.
In the case of JSONP you need a callback function, and you then return your response wrapped in that callback; a rough sketch follows.
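As a rough sketch of the second option inside the Express-style handler above (assuming the callback name arrives as the callback query parameter, which is what jQuery sends by default for dataType: 'jsonp'):

aw.get('/ads/:keyword/:sid/:pid/:count', function (req, res) {
    // ... build dataS.listing as before, then instead of res.send(dataS.listing):
    var callbackName = req.query.callback; // e.g. "jQuery1234_5678", supplied by the client
    res.setHeader('Content-Type', 'application/javascript');
    res.send(callbackName + '(' + JSON.stringify(dataS.listing) + ');');
});

If aw is an Express app, res.jsonp(dataS.listing) would do the same wrapping for you.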