The server needs to respond to an HTTP GET with all the data from several separate files. Here I am using a csv parser.
function sendFileData(req, res) {
var result = []
// Convert a csv file to JSON entries
for (var i = 0; i < 4; i++) {
var dataArray = []
csvFilePath = ...
time = '"'
value = '""'
fs.createReadStream(csvFilePath)
.pipe(csv({headers: ['date', 'time', 'value']}))
.on('data', function (data) {
date = ...
time = ...
value = ...
dataArray.push('{' + date + time + value + '}')
})
.on('end', function () {
var sensorData = '[' + dataArray + ']'
result.push(sensorData)
})
}
res.send(result)
}
Since the for loop takes some time to finish, the result is always [], so I considered adding setTimeout() and a callback function, but setTimeout feels like a bad approach.
function sendFileData(req, res, callback) {
var result = []
// Convert a csv file to JSON entries
for (var i = 0; i < 4; i++) {
var dataArray = []
csvFilePath = ...
time = '"'
value = '""'
fs.createReadStream(csvFilePath)
.pipe(csv({headers: ['date', 'time', 'value']}))
.on('data', function (data) {
date = ...
time = ...
value = ...
dataArray.push('{' + date + time + value + '}')
})
.on('end', function () {
var sensorData = '[' + dataArray + ']'
result.push(sensorData)
})
}
setTimeout(function () {
callback(res, result)
}, 1000)
}
function sendData (res, result) {
res.send(result)
}
// calling function
sendFileData(req, res, sendData)
Is there a better way to send all the data after the csv-parser has finished reading?
Send the response only when you have all the data. See the code below, which waits until the length of the result array is 4 (the number of iterations of the loop):
function sendFileData(req, res) {
var result = []
// Convert a csv file to JSON entries
for (var i = 0; i < 4; i++) {
var dataArray = []
csvFilePath = ...
time = '"'
value = '""'
fs.createReadStream(csvFilePath)
.pipe(csv({headers: ['date', 'time', 'value']}))
.on('data', function (data) {
date = ...
time = ...
value = ...
dataArray.push('{' + date + time + value + '}')
})
.on('end', function () {
var sensorData = '[' + dataArray + ']'
result.push(sensorData)
if (result.length === 4) { // all done
res.send(result)
}
})
}
}
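(As a side note, not from the original answer: the same bookkeeping can be written without a counter by wrapping each stream in a Promise and collecting the results with Promise.all, which also preserves order. A minimal sketch, assuming fs and the csv parser are required as in the question; readOneCsv is a hypothetical helper name:)
function readOneCsv (csvFilePath) {
    return new Promise(function (resolve, reject) {
        var dataArray = []
        fs.createReadStream(csvFilePath)
            .pipe(csv({headers: ['date', 'time', 'value']}))
            .on('data', function (data) {
                dataArray.push(data) // build the '{' + date + time + value + '}' string here instead, as in the question
            })
            .on('error', reject)
            .on('end', function () {
                resolve('[' + dataArray + ']')
            })
    })
}
function sendFileData (req, res) {
    var paths = [/* the four csv paths elided in the question */]
    Promise.all(paths.map(readOneCsv))
        .then(function (result) { res.send(result) })
        .catch(function (err) { res.status(500).send(err.message) })
}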
I have implemented the following code to parse a CSV file, convert it to a JSON array and send the JSON result to an Apex controller, which invokes the batch class to process the DML operation for the opportunityLineItem object. The code works fine up to a maximum of 4000 rows (the files have 22 columns with values). When there are 5000 records, the process throws an error and stops (it does not call the Apex server). Why does it stop beyond 4000 records? Is there any limit on parsing CSV records in LWC?
Code:
if (!this.csvFile) {
console.log("file not found");
return;
}
this.showprogressbar = true;
let reader = new FileReader();
let ctx = this; // to control 'this' property in an event handler
reader.readAsText(this.csvFile, "Shift_JIS");
reader.onload = function (evt) {
console.log('reader:'+evt.target.result);
let payload = ctx.CSV2JSON(evt.target.result, ctx.CSVToArray);
let json = null;
let error = null;
console.log("payload:" + payload);
setJSON({
payload: payload,
crud: crud,
csvFile:ctx.csvFile
})
.then((result) => {
json = result;
var err = json.includes("Error");
console.log('err====='+err);
if(err)
{
console.log('json==###=='+json);
ctx.error = json;
console.log("error:"+ctx.error);
// alert('error');
ctx.showloader=false;
ctx.hasError=true;
}
else{
ctx.jobinfo(json);
console.log("apex call setJSON() ===> success: " + json);
//ctx.error = undefined;
}
})
.catch((error) => {
error =ctx.error;
console.log("error:" + error.errorCode + ', message ' + error.message);
// alert('error');
ctx.showloader=false;
ctx.hasError=true;
if (error && error.message) {
json = "{'no':1,'status':'Error', 'msg':'" + error.message + "'}";
} else {
json = "{'no':1,'status':'Error','msg':'Unknown error'}";
}
});
};
reader.onerror = function (evt) {
/*ctx.mycolumns = [
{ label: "no", fieldName: "no", type: "number", initialWidth: 50 },
{
label: "status",
fieldName: "status",
type: "text",
initialWidth: 100
},
{ label: "message", fieldName: "msg", type: "text" }
];
ctx.mydata = [{ no: "1", status: "Error", msg: "error reading file" }];
*/
//$A.util.toggleClass(spinner, "slds-hide"); // hide spinner
};
// ctx.showloader=false;
console.log("mydata:===" + ctx.mydata);
alert('onerror');
}
CSV2JSON(csv, csv2array) {
let array = csv2array(csv);
//console.log("csv:::"+csv);
//console.log("csv2array:"+csv2array);
let objArray = [];
//console.log("objArray:"+objArray);
var headervar = oppheader;//'Name,BillingCity,Type,Industry';
console.log('headervar:::'+headervar);
let headerArray = headervar.split(',');
for (let i = 1; i < array.length; i++) {
objArray[i - 1] = {};
/*for (let k = 0; k < array[0].length && k < array[i].length; k++) {
let key = array[0][k];
if(key === 'DW予定日')
elseif(key === 'DW予定日')
elseif(key === 'DW予定日')
console.log("key:"+key);
this.hearder=key;
objArray[i - 1][key] = array[i][k];
}*/
for (let k = 0; k < headerArray.length; k++) {
let key = headerArray[k];
console.log("key====:"+key);
this.hearder=key;
objArray[i - 1][key] = array[i][k];
}
}
objArray.pop();
//console.log("objArray:==="+objArray.length);
this.rowCount = objArray.length;
//console.log("rowCount+++++++" + this.rowCount);
let json = JSON.stringify(objArray);
//console.log("json:==="+json.length);
let str = json.replace("/},/g", "},\r\n");
//console.log("str:======="+str);
return str;
}
CSVToArray(strData, strDelimiter) {
console.log('CSVToArray');
// Check to see if the delimiter is defined. If not,
// then default to comma.
//console.log('strData:'+strData);
//console.log("strDelimiter::" + strDelimiter);
strDelimiter = strDelimiter || ",";
//console.log("strDelimiter:" + strDelimiter);
// Create a regular expression to parse the CSV values.
var objPattern = new RegExp(
// Delimiters.
"(\\" +
strDelimiter +
"|\\r?\\n|\\r|^)" +
// Quoted fields.
'(?:"([^"]*(?:""[^"]*)*)"|' +
// Standard fields.
'([^"\\' +
strDelimiter +
"\\r\\n]*))",
"gi"
);
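// with the default "," delimiter this builds:
// /(\,|\r?\n|\r|^)(?:"([^"]*(?:""[^"]*)*)"|([^"\,\r\n]*))/gi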
// Create an array to hold our data. Give the array
// a default empty first row.
// console.log("objPattern:" + objPattern);
var arrData = [[]];
// Create an array to hold our individual pattern
// matching groups.
// console.log("arrData:" + arrData);
var arrMatches = null;
// Keep looping over the regular expression matches
// until we can no longer find a match.
while ((arrMatches = objPattern.exec(strData))) {
// Get the delimiter that was found.
var strMatchedDelimiter = arrMatches[1];
// Check to see if the given delimiter has a length
// (is not the start of string) and if it matches
// field delimiter. If it does not, then we know
// that this delimiter is a row delimiter.
if (strMatchedDelimiter.length && strMatchedDelimiter != strDelimiter) {
// Since we have reached a new row of data,
// add an empty row to our data array.
arrData.push([]);
}
// Now that we have our delimiter out of the way,
// let's check to see which kind of value we
// captured (quoted or unquoted).
if (arrMatches[2]) {
// We found a quoted value. When we capture
// this value, unescape any double quotes.
var strMatchedValue = arrMatches[2].replace(new RegExp('""', "g"), '"');
} else {
// We found a non-quoted value.
var strMatchedValue = arrMatches[3];
}
// Now that we have our value string, let's add
// it to the data array.
arrData[arrData.length - 1].push(strMatchedValue);
}
// Return the parsed data.
return arrData;
}
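For reference, a quick usage sketch (my addition, with hypothetical input) of the parser above, called from inside the component:
var rows = this.CSVToArray('a,b\n"x,1",2');
// rows is [["a", "b"], ["x,1", "2"]] -- the quoted field keeps its embedded comma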
I want to respond with a JSON object on a path (e.g. /getAbsencesWithNames). I have used the includeNames() function to read data from a JSON file, process it and save it in the global JSON object I am trying to send. The problem is that the command res.end(JSON.stringify(temp, null, "\t")); executes before includeNames() has finished, since there is asynchronous I/O code within that function. How can I make this command wait for my function to complete, in other words make it run synchronously?
function includeNames(){
members().then(membersPayload => {
// var counter = 1;
for (var i in respondJson){
var absencesID = respondJson[i].userId;
for (var j in membersPayload){
var membersID = membersPayload[j].userId;
if (absencesID == membersID){
var nameValue = membersPayload[j].name;
JSON.stringify(nameValue);
respondJson[i]["name"] = nameValue;
// console.log(counter + ": " + membersPayload[j].name);
// counter++;
break;
}
}
}
console.log("ITERATION COMPLETED");
}).catch((err) => console.error('error: ' + err.message));
return respondJson;
};
app.get('/getAbsencesWithNames', async (req, res) => {
var temp = await includeNames();
res.end(JSON.stringify(temp, null, "\t"));
console.log("RESPOND SENT");
});
Console output is:
RESPOND SENT
ITERATION COMPLETED
When I was expecting:
ITERATION COMPLETED
RESPOND SENT
You have to rewrite your function to return a Promise.
function includeNames() {
return new Promise((resolve, reject) => {
members().then(membersPayload => {
// var counter = 1;
for (var i in respondJson){
var absencesID = respondJson[i].userId;
for (var j in membersPayload){
var membersID = membersPayload[j].userId;
if (absencesID == membersID){
var nameValue = membersPayload[j].name;
JSON.stringify(nameValue);
respondJson[i]["name"] = nameValue;
// console.log(counter + ": " + membersPayload[j].name);
// counter++;
break;
}
}
}
resolve(respondJson)
console.log("ITERATION COMPLETED");
})
});
};
// now you can use it with await (inside an async function)
const result = await includeNames()
// or with a Promise chain
includeNames().then(result => console.log(result))
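(One note, my addition rather than part of the answer: since members() already returns a Promise, the explicit new Promise wrapper above is not strictly needed; returning the chain directly is equivalent and also propagates errors.)
function includeNames() {
    return members().then(membersPayload => {
        // ... same matching loop as above ...
        return respondJson;
    });
}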
I am trying to build a file of JSON data from repeated calls to a REST API. The final file to be written is the sum of the data received from all the calls. At present the file is being written with the contents of the first call, then overwritten by the contents of the first + second call (see the console output below the code).
As I have to make many calls, once the code is working I would like to write the file only once, after the requests have finished and the JSON string has been built. Does anyone know how I would go about doing this? Maybe with a callback(?), which I still don't have the hang of, that runs once the requests have finished or the JSON string has finished being built.
"use strict";
const fs = require('fs');
const request = require('request');
var parse = require('csv-parse');
const path = "../path tocsv.csv";
const pathJSON = "../pathtoJSON.json";
var shapes = "https://url";
var options = {
url: '',
method: 'GET',
accept: "application/json",
json: true,
};
var csvData = [];
var jsonData = "[";
fs.createReadStream(path)
.pipe(parse({delimiter: ','}))
.on('data', function(data) {
csvData.push(data[1]);
})
.on('end',function() {
var start = Date.now();
var records = csvData.length //2212 objects
console.log(records);
var dataLength = 2 //set low at moment
for (var i = 0; i < dataLength; i += 1) {
var url = shapes + csvData[i];
options.url = url; //set url query
request(options, function(error, response, body) {
var time = Date.now() - start;
var s = JSON.stringify(body.response);
console.log( '\n' + (Buffer.byteLength(s)/1000).toFixed(2)+
" kilobytes downloaded in: " + (time/1000) + " sec");
console.log(i)
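// note: because i is declared with var, it has already reached dataLength (2) by the
// time this callback runs -- hence the repeated "2" in the console output below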
buildJSON(s);
});
}
function buildJSON(s) {
var newStr = s.substring(1, s .length-1);
jsonData += newStr + ',';
writeFile(jsonData);
}
function writeFile(jsonData) {
fs.writeFile(pathJSON, jsonData, function(err) {
if (err) {
return console.log(err);
} else {
console.log("file complete")
}
});
}
});
128.13 kilobytes downloaded in: 2.796 sec
2
file complete
256.21 kilobytes downloaded in: 3.167 sec
2
file complete
Perhaps writing to the file after all the requests are complete will help. In the current code, the writeFile function is called each time a request completes, which overwrites the file each time.
A quick way to fix this is to count requests (and failures) and write to the file only after all the requests are complete.
"use strict";
const fs = require('fs');
const request = require('request');
var parse = require('csv-parse');
const path = "../path tocsv.csv";
const pathJSON = "../pathtoJSON.json";
var shapes = "https://url";
var options = {
url: '',
method: 'GET',
accept: "application/json",
json: true,
};
var csvData = [];
var jsonData = "[";
fs.createReadStream(path)
.pipe(parse({
delimiter: ','
}))
.on('data', function (data) {
csvData.push(data[1]);
})
.on('end', function () {
var start = Date.now();
var records = csvData.length //2212 objects
console.log(records);
var dataLength = 2 //set low at moment
var jsonsDownloaded = 0; // Counter to track complete JSON requests
var jsonsFailed = 0; // Counter to handle failed JSON requests
for (var i = 0; i < dataLength; i += 1) {
var url = shapes + csvData[i];
options.url = url; //set url query
request(options, function (error, response, body) {
if(error){
jsonsFailed++;
writeFile(jsonData);
return;
}
jsonsDownloaded++;
var time = Date.now() - start;
var s = JSON.stringify(body.response);
console.log('\n' + (Buffer.byteLength(s) / 1000).toFixed(2) +
" kilobytes downloaded in: " + (time / 1000) + " sec");
console.log(i)
buildJSON(s);
});
}
function buildJSON(s) {
var newStr = s.substring(1, s.length - 1);
jsonData += newStr + ',';
writeFile(jsonData);
}
function writeFile(jsonData) {
if(dataLength - (jsonsDownloaded + jsonsFailed) > 0){
return;
}
fs.writeFile(pathJSON, jsonData, function (err) {
if (err) {
return console.log(err);
} else {
console.log("file complete")
}
});
}
});
Note:
In my experience, firing requests in quick succession (e.g. 2000 requests in a for loop) does not work well; try batching them, as in the sketch below. Also, doing it this way does not guarantee order (if that is important in your use case).
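A minimal batching sketch (my addition; it assumes the shapes, csvData and options variables from the code above, and wraps the callback-based request in a Promise):
function requestP(opts) {
    return new Promise(function (resolve, reject) {
        request(opts, function (error, response, body) {
            if (error) return reject(error);
            resolve(body);
        });
    });
}
function runInBatches(ids, batchSize) {
    var batches = [];
    for (var i = 0; i < ids.length; i += batchSize) {
        batches.push(ids.slice(i, i + batchSize));
    }
    // run one batch at a time; requests within a batch run in parallel
    return batches.reduce(function (chain, batch) {
        return chain.then(function () {
            return Promise.all(batch.map(function (id) {
                // copy options so each request gets its own url
                return requestP(Object.assign({}, options, { url: shapes + id }));
            }));
        });
    }, Promise.resolve());
}
Calling runInBatches(csvData, 50) would then replace the for loop over dataLength.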
An alternative would be to open your file in append mode. You can do this by passing an extra options object, with flag set to 'a', to your fs.writeFile call.
fs.writeFile(pathJSON, jsonData, {
flag: 'a'
}, function (err) {
if (err) {
return console.log(err);
}
});
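One caveat (my note, not part of the original answer): jsonData accumulates every chunk in memory, so appending the whole accumulated string on each request would duplicate the earlier chunks in the file. In append mode you would append only the newly built piece, e.g.:
function buildJSON(s) {
    var newStr = s.substring(1, s.length - 1);
    fs.appendFile(pathJSON, newStr + ',', function (err) {
        if (err) console.log(err);
    });
}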
References:
fs.writeFile Docs
File system flags
I am porting an old Ruby script over to JavaScript, setting the function up as a cron instance so it will run on a schedule. The function queries our MySQL database, retrieves inventory information for our products, and then sends requests to a trading partner's API to update our inventory on their site.
Due to Node's asynchronicity I am running into issues. We need to chunk requests into 1000 items per request, and we are sending 10k products. The issue is that each request just sends the last 1000 items. The for loop inside the while loop moves on before the JSON request body has finished being built. I tried creating anonymous setTimeout functions in the while loop to handle it, as well as creating an object holding the request function and the variables to be passed and stuffing it into an array to iterate over once the while loop completes, but I get the same result. I'm not sure of the best way to handle it so that each request gets the correct batch of items. I also need to wait 3 minutes between each request of 1000 items so as not to hit the request cap.
query.on('end',()=>{
connection.release();
writeArray = itemArray.slice(0),
alteredArray = [];
var csv = json2csv({data: writeArray,fields:fields}),
timestamp = new Date(Date.now());
timestamp = timestamp.getFullYear() + '-' +(timestamp.getMonth() + 1) + '-' + timestamp.getDate()+ ' '+timestamp.getHours() +':'+timestamp.getMinutes()+':'+timestamp.getSeconds();
let fpath = './public/assets/archives/opalEdiInventory-'+timestamp+'.csv';
while(itemArray.length > 0){
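// note: splice(0, 999) removes 999 items per pass, not 1000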
alteredArray = itemArray.splice(0,999);
for(let i = 0; i < alteredArray.length; i++){
jsonObjectArray.push({
sku: alteredArray[i]['sku'],
quantity: alteredArray[i]["quantity"],
overstockquantity: alteredArray[i]["osInv"],
warehouse: warehouse,
isdiscontinued: alteredArray[i]["disc"],
backorderdate: alteredArray[i]["etd"],
backorderavailability: alteredArray[i]["boq"]
});
}
var jsonObject = {
login: user,
password: password,
items: jsonObjectArray
};
postOptions.url = endpoint;
postOptions.body = JSON.stringify(jsonObject);
funcArray.push({
    func: function (postOptions) {
        request(postOptions, (err, res, body) => {
            if (err) { console.error(err); throw err; }
            console.log(body);
        });
    },
    vars: postOptions
});
jsonObjectArray.length = 0;
}
var mili = 180000;
for(let i = 0;i < funcArray.length; i++){
setTimeout(()=>{
var d = JSON.parse(funcArray[i]['vars'].body);
console.log(d);
console.log('request '+ i);
//funcArray[i]['func'](funcArray[i]['vars']);
}, mili * i);
}
});
});
You would need async/await or Promises to handle async actions in Node.js.
I am not sure if you have a Node version which supports async/await, so I have tried a Promise-based solution.
query.on('end', () => {
connection.release();
writeArray = itemArray.slice(0),
alteredArray = [];
var csv = json2csv({ data: writeArray, fields: fields }),
timestamp = new Date(Date.now());
timestamp = timestamp.getFullYear() + '-' + (timestamp.getMonth() + 1) + '-' + timestamp.getDate() + ' ' + timestamp.getHours() + ':' + timestamp.getMinutes() + ':' + timestamp.getSeconds();
let fpath = './public/assets/archives/opalEdiInventory-' + timestamp + '.csv';
var calls = chunk(itemArray, 1000)
.map(function(chunk) {
var renameditemsArray = chunk.map((item) => new renamedItem(item, warehouse));
var postOptions = {};
postOptions.url = endpoint;
postOptions.body = JSON.stringify({
login: user,
password: password,
items: renameditemsArray
});
return postOptions;
});
sequenceBatch(calls, makeRequest)
.then(function() {
console.log('done');
})
.catch(function(err) {
console.log('failed', err)
});
function sequenceBatch (calls, cb) {
var sequence = Promise.resolve();
var count = 1;
calls.forEach(function (callOptions) {
count++;
sequence = sequence.then(()=> {
return new Promise(function (resolve, reject){
setTimeout(function () {
try {
cb(callOptions);
resolve(`callsequence${count} done`);
}
catch(err) {
reject(`callsequence ${count} failed`);
}
}, 180000);
});
})
});
return sequence;
}
function makeRequest(postOptions) {
request(postOptions, (err, res, body) => {
if (err) {
console.error(err);
throw err;
}
console.log(body)
});
}
function chunk(arr, len) {
var chunks = [],
i = 0,
n = arr.length;
while (i < n) {
chunks.push(arr.slice(i, i += len));
}
return chunks;
}
function renamedItem(item, warehouse) {
this.sku = item['sku']
this.quantity = item["quantity"]
this.overstockquantity = item["osInv"]
this.warehouse = warehouse
this.isdiscontinued = item["disc"]
this.backorderdate = item["etd"]
this.backorderavailability= item["boq"]
}
});
Could you please try this snippet and let me know if it works? I couldn't test it since I made it up on the fly. The core logic is in the sequenceBatch function. The answer is based on another question which explains how timeouts and promises work together.
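(On a Node version with async/await support, the sequencing in sequenceBatch reads more directly; a sketch, my addition:)
function delay(ms) {
    return new Promise(function (resolve) { setTimeout(resolve, ms); });
}
async function sequenceBatch(calls, cb) {
    for (const callOptions of calls) {
        await delay(180000); // wait 3 minutes before each request
        cb(callOptions);
    }
}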
Turns out this wasn't a closure or async issue at all; the request object I was building was using references to objects instead of shallow copies, resulting in all the data being linked to the same object reference in the final array.
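(A minimal illustration of that pitfall, my example rather than the original code: pushing a mutable object into an array stores a reference, so later mutations show up in every element; push a shallow copy instead.)
var postOptions = { url: 'https://example.com', body: '' };
var funcArray = [];
funcArray.push({ vars: postOptions });                     // stores a reference
funcArray.push({ vars: Object.assign({}, postOptions) }); // stores a shallow copy
postOptions.body = 'batch 2';
// funcArray[0].vars.body is now 'batch 2'; funcArray[1].vars.body is still ''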
I have a Node.js program that requests a series of XML files, parses them, and then puts the output in an array which is written to disk as a CSV file.
The program mostly works, but occasionally the files end up in the wrong order in the array.
I want the order of the results to be the same as the order of the URLs. The URLs are stored in an array, so when I get an XML file I check what the index of its URL was in the source array and insert the result at the same index in the destination array.
Can anyone see the flaw that is allowing the results to end up in the wrong order?
addResult = function (url, value, timestamp) {
data[config.sources.indexOf(url)] = {
value : value,
timestamp : timestamp,
url : url
};
numResults++;
if (numResults === config.sources.length) { //once all results are in build the output file
createOutputData();
}
}
fs.readFile("config.json", function (fileError, data) {
var eachSource, processResponse = function (responseError, response, body) {
if (responseError) {
console.log(responseError);
} else {
parseXML(body, {
explicitArray : false
}, function (xmlError, result) {
if (xmlError) {
console.log(xmlError);
}
addResult(response.request.uri.href, result.Hilltop.Measurement.Data.E.I1, moment(result.Hilltop.Measurement.Data.E.T));
});
}
};
if (fileError) {
console.log(fileError);
} else {
config = JSON.parse(data); //read in config file
for (eachSource = 0; eachSource < config.sources.length; eachSource++) {
config.sources[eachSource] = config.sources[eachSource].replace(/ /g, "%20"); //replace all " " with "%20"
request(config.sources[eachSource], processResponse); //request each source
}
}
});
var writeOutputData, createOutputData, numResults = 0, data = [], eachDataPoint, multipliedFlow = 0;
writeOutputData = function (output, attempts) {
csv.writeToPath(config.outputFile, [ output ], {
headers : false
}).on("finish", function () {
console.log("successfully wrote data to: ", config.outputFile);
}).on("error", function (err) { //on write error
console.log(err);
if (attempts < 2) { //if there has been less than 3 attempts try writing again after 500ms
setTimeout(function () {
writeOutputData(output, attempts + 1);
}, 500);
}
});
};
createOutputData = function () {
var csvTimestamp, output = [];
if (config.hasOwnProperty("timestampFromSource")) {
csvTimestamp = data.filter(function (a) {
return a.url === config.sources[config.timestampFromSource];
})[0].timestamp.format("HHmm");
console.log("timestamp from source [" + config.timestampFromSource + "]:", csvTimestamp);
} else {
csvTimestamp = data.sort(function (a, b) { //sort results from oldest to newest
return a.timestamp.unix() - b.timestamp.unix();
});
csvTimestamp = csvTimestamp[0].timestamp.format("HHmm");//use the oldest date for the timestamp
console.log("timestamp from oldest source:", csvTimestamp);
}
//build array to represent data to be written
output.push(config.plDestVar); //pl var head address first
output.push(config.sources.length + 1); //number if vars to import
output.push(csvTimestamp); //the date of the data
for (eachDataPoint = 0; eachDataPoint < data.length; eachDataPoint++) { //add each data point
if (config.flowMultiplier) {
multipliedFlow = Math.round(data[eachDataPoint].value * config.flowMultiplier); //round to 1dp and remove decimal by *10
} else {
multipliedFlow = Math.round(data[eachDataPoint].value * 10); //round to 1dp and remove decimal by *10
}
if (multipliedFlow > 32766) {
multipliedFlow = 32766;
} else if (multipliedFlow < 0) {
multipliedFlow = 0;
}
output.push(multipliedFlow);
}
console.log(output);
writeOutputData(output, 0); //write the results, 0 is signalling first attempt
};
I think that the URL-to-index code needs debugging.
Here is an example that uses an object that is pre-populated with keys in the for loop.
var http = require('http');
var fs = require("fs");
var allRequestsComplete = function(results){
console.log("All Requests Complete");
console.log(results);
};
fs.readFile("urls.json", function (fileError, data) {
var responseCount = 0;
if (fileError) {
console.log(fileError);
} else {
var allResponses = {};
config = JSON.parse(data); //read in config file
var requestComplete = function(url, fileData){
responseCount++;
allResponses[url] = fileData;
if(responseCount===config.sources.length){
allRequestsComplete(allResponses);
}
};
for (var eachSource = 0; eachSource < config.sources.length; eachSource++) {
(function(url){
allResponses[url] = "Waiting";
http.get({host: url,path: "/"}, function(response) {
response.on('error', function (chunk) {
requestComplete(url, "ERROR");
});
var str = ''
response.on('data', function (chunk) {
str += chunk;
});
response.on('end', function () {
requestComplete(url, str);
});
});
}(config.sources[eachSource].replace(/ /g, "%20").replace("http://", "")));
}
}
});
I agree with @Kevin B: you cannot assume that async callbacks will return in the same order in which you send them. However, you can ensure the order by passing an index through to processResponse.
Say you add the following index parameter to addResult:
addResult = function (index, url, value, timestamp) {
data[index] = {
value : value,
timestamp : timestamp,
url : url
};
numResults++;
if (numResults === config.sources.length) { //once all results are in build the output file
createOutputData();
}
}
and use an extra function to make your request:
function doRequest(index, url) {
request(url, function(responseError, response, body) {
if (responseError) {
console.log(responseError);
} else {
parseXML(body, {
explicitArray : false
}, function (xmlError, result) {
if (xmlError) {
console.log(xmlError);
}
addResult(index, response.request.uri.href, result.Hilltop.Measurement.Data.E.I1, moment(result.Hilltop.Measurement.Data.E.T));
});
}
});
}
Then you can change your loop to:
for (eachSource = 0; eachSource < config.sources.length; eachSource++) {
config.sources[eachSource] = config.sources[eachSource].replace(/ /g, "%20"); //replace all " " with "%20"
doRequest(eachSource, config.sources[eachSource]); //request each source
}