The UI of the application is a 4x3 grid. Each tile in the grid is populated from an external Handlebars template.
The sequence is like this:
Create a JSON object of the required data: data =
{___:"",___:"",___:""...};
Push these objects into an array: dataArray = [{},{},{},{}...];
Pass this array to a function which renders the template: createGrid(dataArray, callback);
The sequence inside the template function createGrid():
Iterate over dataArray using forEach((val) => {}).
Pass val to an AJAX call that renders and returns the template data using promises.
The data gets loaded and the template gets rendered, but the problem is that sometimes the sequence of populated tiles is not correct: tile 1 appears in the last row, or tile 3 gets switched with tile 4, and so on.
I expect the tiles to be populated according to the order of the content in dataArray, but that doesn't happen.
Previously I used an async: false AJAX call to generate the template and it worked, but I know that is deprecated and doesn't fit the pattern.
[problem 2] Another problem: I am attaching some images and sounds in the template, and I use another AJAX call to validate whether the files exist and attach the file path to the object that is pushed into dataArray. I don't know how to call that validatePath function while the object is being created. With a synchronous AJAX call I could simply return the value, but with an async call I am lost.
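For context, the pattern being asked about in problem 2 (awaiting the path check while each object is built) would look roughly like the sketch below. buildCard and buildAllCards are illustrative names only, not part of the actual code:

let buildCard = async (index) => {
    // wait for the path check so the resolved path can go straight into the object
    let image = await validatePath("img/com/", index, ".svg");
    return {
        index: index + 1,
        image: image,
        audio: "",
        status: "",
        url: ""
    };
};

// Promise.all resolves with its results in the same order as the input array,
// no matter which request finishes first.
let buildAllCards = () => Promise.all([...Array(12).keys()].map(buildCard));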
Now the code -
// file path validation method
let validatePath = (folder, index, extension) => {
let url = folder + index + extension;
return new Promise((resolve, reject) => {
let xhttp = new XMLHttpRequest();
xhttp.open('GET', url, true);
xhttp.onload = () => {
if (xhttp.status == 200) resolve(url);
else {
if (index != 11) resolve('img/' + "default" + extension);
else resolve('img/' + "11" + extension);
}
};
xhttp.onerror = () => {
reject(xhttp.statusText);
};
xhttp.send();
});
}
let getTemplateAjax = (url, cardData) => {
return new Promise((resolve, reject) => {
$.ajax({
url: url,
type: 'GET'
}).done((data) => {
let template = Handlebars.compile(data),
finalContent = template(cardData);
resolve(finalContent);
}).fail((err) => {
console.log(err + "can't find template");
reject(err);
});
});
}
let createTiles = (target, dataArray, callback) => {
dataArray.forEach((val, index) => {
getTemplateAjax('templates/card.hbs', val).then((data) => {
let tile = $('<div>', {
id: "tile_" + index,
class: "tiles-sub-grid-item"
}).css({
'grid-column': (index % 4) / (index / 3),
'grid-row': (index % 3) / (index / 4),
'border-radius': '3px',
'cursor': 'pointer',
'background': 'rgba(0, 0, 0, 0.8)',
'box-shadow': '0px 0px 10px rgba(0,0,0,0.5)'
});
tile.append(data);
tile.appendTo(target);
if (callback) callback(tile);
}).catch(() => {
console.log('sorry no template');
});
});
}
Calling these functions to render the main menu:
let contentArray = [],
imagePromises = [],
audioPromises = [];
for (let i = 0; i < 12; i++) {
imagePromises.push(validatePath("img/com/", i, ".svg "));
}
Promise.all(imagePromises)
.then((dataArray) => {
dataArray.forEach((val, index) => {
contentArray.push({
index: index + 1,
title: title[index],
description: description[index],
image: val,
audio: " ",
status: "",
url: ""
});
});
createTiles("#main-content", contentArray).then((data) => {
console.log(data);
data.on('click', () => {
let tileId = $(data).find('.card-index').text();
let tileTitle = $(data).find('.card-title').text();
if ((parseInt(tileId) - 1) == 2) {
bodyParts();
} else if ((parseInt(tileId) - 1) == 11) {
alexaCommands();
} else {
let message = $(data).find('.card-description').text();
responsiveVoice.speak(message);
}
});
});
})
.catch((e) => {
// handle errors here
console.log(e);
});
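For comparison, here is a rough sketch of an order-preserving variant of createTiles: all templates are rendered first via Promise.all (whose result array keeps the input order), and the tiles are appended to the DOM only afterwards, strictly in dataArray order. It is an illustration of the technique rather than a drop-in replacement (the grid CSS is omitted), and it returns a promise so a .then can be chained onto it:

let createTilesInOrder = (target, dataArray) => {
    return Promise.all(
        dataArray.map((val) => getTemplateAjax('templates/card.hbs', val))
    ).then((renderedTemplates) => {
        return renderedTemplates.map((html, index) => {
            let tile = $('<div>', {
                id: "tile_" + index,
                class: "tiles-sub-grid-item"
            });
            tile.append(html);
            tile.appendTo(target); // appended in the same order as dataArray
            return tile;
        });
    });
};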
Using the function below I preload the video, voice, and picture data as blobs into the user's browser:
Note that this is a simple XMLHttpRequest that fetches the files and stores the blobs in the user's browser; if you would rather not look at the code, that's fine, please continue reading the question itself:
async function Preloading() {
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
typeof definer === 'function' && definer.amd ? definer(factory) :
(global.Preload = factory());
}(this, (function () {
'use strict';
function preloadOne(url, done) {
const xhr = new XMLHttpRequest();
xhr.open('GET', url, true);
xhr.responseType = 'blob';
xhr.onprogress = event => {
if(!event.lengthComputable) return false
let item = this.getItemByUrl(event.target.responseURL);
item.completion = parseInt((event.loaded / event.total) * 100);
item.downloaded = event.loaded;
item.total = event.total;
this.updateProgressBar(item);
};
xhr.onload = event => {
let type = event.target.response.type;
let blob = new Blob([event.target.response], { type: type });
let url = URL.createObjectURL(blob);
let responseURL = event.target.responseURL;
let item = this.getItemByUrl(responseURL);
item.blobUrl = url;
item.fileName = responseURL.substring(responseURL.lastIndexOf('/') + 1);
item.type = type;
item.size = blob.size;
done(item);
};
xhr.onerror = event => {
console.log('Error Happend in Preloading');
reload();
};
xhr.send();
}
function updateProgressBar(item) {
var sumCompletion = 0;
var maxCompletion = this.status.length * 100;
for(var itemStatus of this.status) {
if(itemStatus.completion) {
sumCompletion += itemStatus.completion;
}
}
var totalCompletion = parseInt((sumCompletion / maxCompletion) * 100);
if(!isNaN(totalCompletion)) {
this.onprogress({
progress: totalCompletion,
item: item
});
}
}
function getItemByUrl(rawUrl) {
for (var item of this.status) {
if (item.url == rawUrl) return item
}
}
function fetch(list) {
return new Promise((resolve, reject) => {
this.loaded = list.length;
for (let item of list) {
this.status.push({ url: item });
this.preloadOne(item, item => {
this.onfetched(item);
this.loaded--;
if (this.loaded == 0) {
this.oncomplete(this.status);
resolve(this.status);
}
});
}
})
}
function Preload() {
return {
status: [],
loaded: false,
onprogress: () => {},
oncomplete: () => {},
onfetched: () => {},
fetch,
updateProgressBar,
preloadOne,
getItemByUrl
}
}
return Preload;
})));
const preload = Preload();
preload.fetch(preloadingData.preloadURLs).then(items => {});
preload.oncomplete = items => {
Blobs = generateBlobs(items); // HERE WE SAVE BLOBS FOR USERS
}
preload.onprogress = event => { preloadProgress(event.progress); }
preload.onfetched = item => {}
}
The issue is that when multiple users try to download their data using the above function, sometimes the function doesn't respond and the user ends up missing some of the blobs (some blobs will not be created if the server is busy).
Now I have a direct question:
What if I place my files (videos, voices, pictures, etc.) on a download server, with my Node.js app running on another, so that downloading the files does not impact the application server? Does this fix the issue and lower the workload of the application server?
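Independent of where the files live, one way to avoid silently missing blobs when the server is busy is to retry each download a few times before giving up. A rough sketch of that idea, using fetch instead of the XMLHttpRequest above; downloadBlob, the retry count, and the delay are all illustrative choices, not part of the original code:

// Download one file as a blob, rejecting on a non-2xx status.
function downloadBlob(url) {
    return fetch(url).then((response) => {
        if (!response.ok) throw new Error('HTTP ' + response.status + ' for ' + url);
        return response.blob();
    });
}

// Retry with a doubling delay; only surface the error after the last attempt fails.
function downloadBlobWithRetry(url, attempts = 3, delayMs = 500) {
    return downloadBlob(url).catch((err) => {
        if (attempts <= 1) throw err;
        return new Promise((resolve) => setTimeout(resolve, delayMs))
            .then(() => downloadBlobWithRetry(url, attempts - 1, delayMs * 2));
    });
}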
I'm new to async and await. I have a simple web app with Firebase that gets files through input fields and uploads them to Firebase on a button click, but on the first click it doesn't wait for the async function to upload the files. When I click a second time the files are uploaded and I get the expected output. How can I solve this?
Here is my code.
Upload Function
async function uploadImages() {
var storageRef = firebase.storage().ref();
var uploadImages = document.getElementsByName("fupload").forEach((element) => {
var imageRef = storageRef.child(
"projects/" + projectName + "/" + (element as HTMLInputElement).files[0].name
);
let file = (element as HTMLInputElement).files[0];
imageRef.put(file).then((snapshot) => {
snapshot.ref.getDownloadURL().then(function (downloadURL) {
paragraphUrl.push(downloadURL);
});
});
});
if (document.getElementsByName("fupload").length == paragraphUrl.length) {
return paragraphUrl;
} else {
return 1;
}
}
Button click function
async function upload(){
await uploadImages().then((data) => {
if (data != 1) {
paragraphData = paragraphData.map(
function (x, i) {
return {
Title: x.Title,
Paragraph: x.Paragraph,
Image: data[i],
};
}.bind(this)
);
console.log(paragraphData);
//dispatch("paragraphData",{data})
} else {
console.log("d");
}
});
}
Thank you all, I fixed the problem. I'll add my code below.
Upload function
async function uploadImages() {
var storageRef = firebase.storage().ref();
for (const file of document.getElementsByName("fupload")) {
// let test = (file as HTMLInputElement).files[0].name;
// console.log(test);
var imageRef = storageRef.child(
"projects/" + projectName + "/" + (file as HTMLInputElement).files[0].name
);
let test = (file as HTMLInputElement).files[0].name;
let testFile = (file as HTMLInputElement).files[0];
await imageRef.put(testFile).then((snapshot) => {
snapshot.ref.getDownloadURL().then(function (downloadURL) {
paragraphUrl.push(downloadURL);
});
});
}
return paragraphUrl;
}
Button Click function
async function submitParagraphData() {
paragraphTitles = [];
paragraphs = [];
var e = document.getElementsByName("ParagrphTitle").forEach((element) => {
paragraphTitles.push((element as HTMLInputElement).value);
});
var f = document.getElementsByName("Paragraph").forEach((element) => {
paragraphs.push((element as HTMLInputElement).value);
});
let paragraphData = paragraphTitles.map(
function (x, i) {
return { Title: x, Paragraph: paragraphs[i] };
}.bind(this)
);
await uploadImages().then((data) => {
console.log(data);
});
}
The problem I had was that when I clicked the button it displayed an empty array, because the file upload takes some time; when I clicked a second time it displayed the expected output because the file had been uploaded by then. So I added await to the line
imageRef.put(testFile) ............
That fixed my problem. Thank you all for the support.
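As a side note, the getDownloadURL call inside the loop above is still not awaited, so paragraphUrl can in principle still be incomplete when uploadImages returns. A sketch of a fully awaited version of the same loop (same names as above, TypeScript casts dropped for brevity):

async function uploadImages() {
    const storageRef = firebase.storage().ref();
    const paragraphUrl = [];
    for (const element of document.getElementsByName("fupload")) {
        const file = element.files[0];
        const imageRef = storageRef.child("projects/" + projectName + "/" + file.name);
        const snapshot = await imageRef.put(file);                 // wait for the upload itself
        const downloadURL = await snapshot.ref.getDownloadURL();   // and for the URL lookup
        paragraphUrl.push(downloadURL);
    }
    return paragraphUrl;
}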
I am trying to read some data from a file and store it in a database.
This is part of a larger transaction and I need the returned ids for further steps.
async parseHeaders(mysqlCon, ghID, csv) {
var self = this;
var hIDs = [];
var skip = true;
var idx = 0;
console.log("Parsing headers");
return new Promise(async function(resolve, reject) {
try {
var lineReader = require('readline').createInterface({
input: require('fs').createReadStream(csv)
});
await lineReader.on('close', async function () {
console.log("done: ", JSON.stringify(hIDs));
resolve(hIDs);
});
await lineReader.on('line', async function (line) {
line = line.replace(/\"/g, '');
if (line.startsWith("Variable")) { //Variable,Statistics,Category,Control
console.log("found variables");
skip = false; //Ignore all data and skip to the parameter description.
return; //Skip also the header line.
}
if (!skip) {
var data = line.split(",");
if (data.length < 2) { //Variable section done return results.
console.log("Found sub?",line);
return lineReader.close();
}
var v = data[0];
var bidx = data[0].indexOf(" [");
if (bidx > 0)
v = data[0].substring(0, bidx); //[] are disturbing mysql (E.g.; Air temperature [�C])
var c = data[2];
hIDs[idx++] = await self.getParamID(mysqlCon, ghID, v, c, data);//, function(hID,sidx) { //add data in case the parameter is not in DB, yet.
}
});
} catch(e) {
console.log(JSON.stringify(e));
reject("some error occured: " + e);
}
});
}
async getParamID(mysqlCon,ghID,variable,category,data) {
return new Promise(function(resolve, reject) {
var sql = "SELECT ID FROM Parameter WHERE GreenHouseID="+ghID+" AND Variable = '" + variable + "' AND Category='" + category + "'";
mysqlCon.query(sql, function (err, result, fields) {
if(result.length === 0 || err) { //apparently not in DB, yet ... add it (Acronym and Machine need to be set manually).
sql = "INSERT INTO Parameter (GreenHouseID,Variable,Category,Control) VALUES ("+ghID+",'"+variable+"','"+category+"','"+data[3]+"')";
mysqlCon.query(sql, function (err, result) {
if(err) {
console.log(result,err,this.sql);
reject(err);
} else {
console.log("Inserting ",variable," into DB: ",JSON.stringify(result));
resolve(result.insertId); //added, return generated ID.
}
});
} else {
resolve(result[0].ID); //found in DB .. return ID.
}
});
});
}
The functions above are in the base class and called by the following code:
let headerIDs = await self.parseHeaders(mysqlCon, ghID, filePath);
console.log("headers:",JSON.stringify(headerIDs));
The sequence of events is that everything in parseHeaders completes except for the calls to self.getParamID, and control returns to the calling function, which prints an empty array for headerIDs.
The console.log statements in self.getParamID are then printed afterward.
What am I missing?
Thank you
As you want to execute an asynchronous action for every line, we could define a helper that does just that:
const once = (target, evt) => new Promise(res => target.on(evt, res));
function mapLines(reader, action) {
const results = [];
let index = 0;
reader.on("line", line => results.push(action(line, index++)));
return once(reader, "close").then(() => Promise.all(results));
}
So now you can solve that easily:
let skip = true;
const hIDs = [];
await mapLines(lineReader, async function (line, idx) {
line = line.replace(/\"/g, '');
if (line.startsWith("Variable")) { //Variable,Statistics,Category,Control
console.log("found variables");
skip = false; //Ignore all data and skip to the parameter description.
return; //Skip also the header line.
}
if (!skip) {
var data = line.split(",");
if (data.length < 2) { //Variable section done return results.
console.log("Found sub?",line);
return lineReader.close();
}
var v = data[0];
var bidx = data[0].indexOf(" [");
if (bidx > 0)
v = data[0].substring(0, bidx); //[] are disturbing mysql (E.g.; Air temperature [�C])
var c = data[2];
hIDs[idx] = await self.getParamID(mysqlCon, ghID, v, c, data);
}
});
I have a getValidUrls function that takes a maxId param.
Within this function it loops backwards and sends a request for each URL.
Each loop iteration decrements the ID.
My issue is that I am trying to add these valid URLs to an array, but I cannot update the array from within the .then of the promise. I added a simple total variable to see whether I could increment it, and I could not.
getValidUrls = (maxId) => {
return new Promise((resolve, reject) => {
let validUrls = [];
let idCounter = maxId;
let total = 0; // test to see if updated from inside (it doesn't)
// while(validUrls.length < 10 && idCounter > 0) {
for(let i = maxId; i > 0; i--){
let newsUrl = `https://hacker-news.firebaseio.com/v0/item/${i}.json?print=pretty`;
//console.log(newsUrl); // this shows all the urls & works
getJSONObject(newsUrl)
.then((story) => {
total++;
console.log(total); // this never gets shown
return getUserObject(story.by);
}).then((user) => {
if(user.karma > 5) {
validUrls.push(story);
if(validUrls.length >= 10) {
resolve(validUrls);
}
}
});
}
});
};
The following returns a JSON object for the URL:
getJSONObject = (url) => {
return new Promise((resolve, reject) => {
console.log(url); // this works and shows all urls
https.get(url, (response) => {
response.on('data', (data) => {
console.log(JSON.parse(data)); // This never gets shown
resolve(JSON.parse(data));
}, (err) => reject(err));
}, (err) => reject(err));
});
};
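For reference, note that in the code above story is not in scope inside the second .then, so the push can never run. A sketch of one way this is sometimes restructured so that story stays visible for the karma check and the promise always settles; it is an illustration of the pattern, not a drop-in fix (error handling is deliberately minimal):

getValidUrls = (maxId) => {
    return new Promise((resolve) => {
        const validUrls = [];
        let settled = 0;
        for (let i = maxId; i > 0; i--) {
            const newsUrl = `https://hacker-news.firebaseio.com/v0/item/${i}.json?print=pretty`;
            getJSONObject(newsUrl)
                .then((story) => getUserObject(story.by).then((user) => {
                    // `story` is still visible here because the user lookup is nested
                    if (user.karma > 5) validUrls.push(story);
                }))
                .catch(() => {}) // ignore individual failures in this sketch
                .then(() => {
                    settled++;
                    // resolve once enough URLs are found or every request has settled
                    if (validUrls.length >= 10 || settled === maxId) resolve(validUrls);
                });
        }
    });
};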
I have a Node.js program that requests a series of XML files, parses them, and then puts the output into an array which is written to disk as a CSV file.
The program mostly works; however, occasionally the files end up in the wrong order in the array.
I want the order of the results to be the same as the order of the URLs. The URLs are stored in an array, so when I get an XML file I check what the index of its URL was in the source array and insert the result at the same index in the destination array.
Can anyone see the flaw that is allowing the results to end up in the wrong order?
addResult = function (url, value, timestamp) {
data[config.sources.indexOf(url)] = {
value : value,
timestamp : timestamp,
url : url
};
numResults++;
if (numResults === config.sources.length) { //once all results are in build the output file
createOutputData();
}
}
fs.readFile("config.json", function (fileError, data) {
var eachSource, processResponse = function (responseError, response, body) {
if (responseError) {
console.log(responseError);
} else {
parseXML(body, {
explicitArray : false
}, function (xmlError, result) {
if (xmlError) {
console.log(xmlError);
}
addResult(response.request.uri.href, result.Hilltop.Measurement.Data.E.I1, moment(result.Hilltop.Measurement.Data.E.T));
});
}
};
if (fileError) {
console.log(fileError);
} else {
config = JSON.parse(data); //read in config file
for (eachSource = 0; eachSource < config.sources.length; eachSource++) {
config.sources[eachSource] = config.sources[eachSource].replace(/ /g, "%20"); //replace all spaces with "%20"
request(config.sources[eachSource], processResponse); //request each source
}
}
});
var writeOutputData, createOutputData, numResults = 0, data = [], eachDataPoint, multipliedFlow = 0;
writeOutputData = function (output, attempts) {
csv.writeToPath(config.outputFile, [ output ], {
headers : false
}).on("finish", function () {
console.log("successfully wrote data to: ", config.outputFile);
}).on("error", function (err) { //on write error
console.log(err);
if (attempts < 2) { //if there has been less than 3 attempts try writing again after 500ms
setTimeout(function () {
writeOutputData(output, attempts + 1);
}, 500);
}
});
};
createOutputData = function () {
var csvTimestamp, output = [];
if (config.hasOwnProperty("timestampFromSource")) {
csvTimestamp = data.filter(function (a) {
return a.url === config.sources[config.timestampFromSource];
})[0].timestamp.format("HHmm");
console.log("timestamp from source [" + config.timestampFromSource + "]:", csvTimestamp);
} else {
csvTimestamp = data.sort(function (a, b) { //sort results from oldest to newest
return a.timestamp.unix() - b.timestamp.unix();
});
csvTimestamp = csvTimestamp[0].timestamp.format("HHmm");//use the oldest date for the timestamp
console.log("timestamp from oldest source:", csvTimestamp);
}
//build array to represent data to be written
output.push(config.plDestVar); //pl var head address first
output.push(config.sources.length + 1); //number if vars to import
output.push(csvTimestamp); //the date of the data
for (eachDataPoint = 0; eachDataPoint < data.length; eachDataPoint++) { //add each data point
if (config.flowMultiplier) {
multipliedFlow = Math.round(data[eachDataPoint].value * config.flowMultiplier); //round to 1dp and remove decimal by *10
} else {
multipliedFlow = Math.round(data[eachDataPoint].value * 10); //round to 1dp and remove decimal by *10
}
if (multipliedFlow > 32766) {
multipliedFlow = 32766;
} else if (multipliedFlow < 0) {
multipliedFlow = 0;
}
output.push(multipliedFlow);
}
console.log(output);
writeOutputData(output, 0); //write the results, 0 is signalling first attempt
};
I think that the URL-to-index code needs debugging.
Here is an example that uses an object that is pre-populated with keys in the for loop.
var http = require('http');
var fs = require("fs");
var allRequestsComplete = function(results){
console.log("All Requests Complete");
console.log(results);
};
fs.readFile("urls.json", function (fileError, data) {
var responseCount = 0;
if (fileError) {
console.log(fileError);
} else {
var allResponses = {};
config = JSON.parse(data); //read in config file
var requestComplete = function(url, fileData){
responseCount++;
allResponses[url] = fileData;
if(responseCount===config.sources.length){
allRequestsComplete(allResponses);
}
};
for (var eachSource = 0; eachSource < config.sources.length; eachSource++) {
(function(url){
allResponses[url] = "Waiting";
http.get({host: url,path: "/"}, function(response) {
response.on('error', function (chunk) {
requestComplete(url, "ERROR");
});
var str = ''
response.on('data', function (chunk) {
str += chunk;
});
response.on('end', function () {
requestComplete(url, str);
});
});
}(config.sources[eachSource].replace(/ /g, "%20").replace("http://", "")));
}
}
});
I agree with @Kevin B: you cannot assume that async callbacks will return in the same order in which you send them. However, you could ensure the order by passing an index along with each request.
Say you change addResult to the following:
addResult = function (index, url, value, timestamp) {
data[index] = {
value : value,
timestamp : timestamp,
url : url
};
numResults++;
if (numResults === config.sources.length) { //once all results are in build the output file
createOutputData();
}
}
and use an extra function to make your request:
function doRequest(index, url) {
request(url, function(responseError, response, body) {
if (responseError) {
console.log(responseError);
} else {
parseXML(body, {
explicitArray : false
}, function (xmlError, result) {
if (xmlError) {
console.log(xmlError);
}
addResult(index, response.request.uri.href, result.Hilltop.Measurement.Data.E.I1, moment(result.Hilltop.Measurement.Data.E.T));
});
}
});
}
Then you can also change your loop to:
for (eachSource = 0; eachSource < config.sources.length; eachSource++) {
config.sources[eachSource] = config.sources[eachSource].replace(/ /g, "%20"); //replace all spaces with "%20"
doRequest(eachSource, config.sources[eachSource]); //request each source
}