Knockout ObservableArray async call binding order issue - javascript

I have an observableArray. Using AJAX async calls I am fetching data to bind to it. There will be thousands of records, and each call fetches a few hundred. The problem is that, because the AJAX calls are asynchronous, the results will not arrive in order from 1 to 1000. I cannot make the calls synchronous because the browser would stop responding. How can I keep the async data sorted in Knockout?
var DataVM = ko.observableArray([]);
ko.applyBindings(DataVM, document.getElementById("ControlBlock"));

for (var i = 0; i < totalAjaxCall; i++) {
    GetData(guid, start, end, self.DataCallback);
    start = start + 100;
    end = end + 100;
}

DataCallback = function (result) {
    var temp = JSON.parse(result.d);
    var data = temp.Data;
    for (var j = 0; j < data.length; j++) {
        var tempItem_ = new Item();
        tempItem_.Number = data[j].Number;
        // Other code
        DataVM.push(tempItem_);
    }
};

You can remember the blocks as they come in, then reassemble things when you've received them all. See comments:
var DataVM = ko.observableArray([]);
ko.applyBindings(DataVM, document.getElementById("ControlBlock"));

// Remember the results in a temporary array of arrays
var received = 0;
var receivedBlocks = [];

for (var i = 0; i < totalAjaxCall; i++) {
    // Tell `DataCallback` which block it's going to get
    // (by the way, there's no need for `self` here)
    GetData(guid, start, end, self.DataCallback.bind(null, i));
    start = start + 100;
    end = end + 100;
}

DataCallback = function (i, result) {
    // Create and remember the items for this block
    receivedBlocks[i] = JSON.parse(result.d).Data.map(function (e) {
        var tempItem_ = new Item();
        tempItem_.Number = e.Number;
        return tempItem_;
    });
    ++received;
    // Do we have them all?
    if (received == totalAjaxCall) {
        // Flatten our array of arrays, now we have all the pieces
        var receivedItems = [];
        receivedBlocks.forEach(function (block) {
            receivedItems.push.apply(receivedItems, block);
        });
        // Push all of those onto DataVM as a single operation.
        // Note: You were using `DataVM.push`, so I used that here,
        // but if you wanted to *replace* the contents of `DataVM`
        // (or if you know it's empty), you'd just do:
        //     DataVM(receivedItems);
        // instead.
        DataVM.push.apply(DataVM, receivedItems);
    }
};
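If you can wrap GetData in a promise, another option is to let Promise.all hand you the results already in request order. This is only a minimal sketch, assuming GetData's fourth argument is a plain success callback as in the question's code:

// Sketch only: assumes GetData(guid, start, end, callback) invokes
// `callback` with the raw AJAX result, as in the question's code.
var blockPromises = [];
for (var i = 0; i < totalAjaxCall; i++) {
    blockPromises.push(new Promise(function (resolve) {
        GetData(guid, start, end, resolve);
    }));
    start = start + 100;
    end = end + 100;
}

// Promise.all resolves with the results in request order,
// regardless of which response arrived first.
Promise.all(blockPromises).then(function (results) {
    var items = [];
    results.forEach(function (result) {
        JSON.parse(result.d).Data.forEach(function (e) {
            var tempItem_ = new Item();
            tempItem_.Number = e.Number;
            items.push(tempItem_);
        });
    });
    DataVM.push.apply(DataVM, items);
});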

I'm not sure how far you want to deviate from your current code, but I'd like to advertise some of knockout's additional features :)
If you create a small "in-between" model for your requests, you can make use of computed values to automatically keep track of a correctly sorted list of data.
For example, if you define a new Request() like so:
var Request = function(start, end) {
    this.completed = ko.observable(false);
    this.data = [];

    getData(start, end, this.onLoad.bind(this));
};

Request.prototype.onLoad = function(data) {
    this.data = data;
    this.completed(true);
};
You can change your for loop to create those "in-between" models. This creates a Request for 0 to 100, 100 to 200, and so on. Each of these models is stored in an array, in the order of creation.
function getDataRequests(start, end, chunkSize) {
    var requests = [];
    for (var i = start; i < end; i += chunkSize) {
        requests.push(new Request(i, Math.min(i + chunkSize, end)));
    }
    return requests;
}
Now that you can create an ordered array, you can compute another ordered array of data by merging all completed requests together:
var DataVM = function(start, end, chunkSize) {
    // We keep track of a list of requests
    var requests = ko.observableArray(
        getDataRequests(start, end, chunkSize)
    );

    // Because requests have an observable `completed` prop,
    // we can automatically keep track of a list of completed
    // requests
    var completedRequests = ko.pureComputed(() =>
        requests().filter(r => r.completed()));

    // Now, whenever a request completes, we flatten the
    // `data` parts of the `completed` requests
    this.data = ko.pureComputed(() => completedRequests()
        .reduce((items, r) => items.concat(r.data), []));
};
Because you have the requests array, you can easily compute UI properties. For example, firstCompleted is a computed that returns the completed value of your first request.
Here's a complete example (ES2015):
var DataVM = function(start, end, chunkSize) {
    // We keep track of a list of requests
    var requests = ko.observableArray(
        getDataRequests(start, end, chunkSize)
    );

    // Because requests have an observable `completed` prop,
    // we can automatically keep track of a list of completed
    // requests
    var completedRequests = ko.pureComputed(() =>
        requests().filter(r => r.completed()));

    // Now, whenever a request completes, we flatten the
    // `data` parts of the `completed` requests
    this.data = ko.pureComputed(() => completedRequests()
        .reduce((items, r) => items.concat(r.data), []));

    // Shows progress
    this.loadingMsg = ko.pureComputed(() => {
        var completedCount = completedRequests().length,
            allCount = requests().length;

        return completedCount === allCount
            ? `Done loading ${end - start} items in ${allCount} steps`
            : `Loading... (${completedCount}/${allCount})`;
    });

    // Check if the first (if any) request has completed loading
    this.firstCompleted = ko.pureComputed(() =>
        requests().length && requests()[0].completed());
};

var Request = function(start, end) {
    this.completed = ko.observable(false);
    this.data = [];

    getData(start, end, this.onLoad.bind(this));
};

Request.prototype.onLoad = function(data) {
    this.data = data;
    this.completed(true);
};

var vm = new DataVM(0, 50, 5);
ko.applyBindings(vm);

// Mock async ajax stuff and data getters
function getDataRequests(start, end, chunkSize) {
    var requests = [];
    for (var i = start; i < end; i += chunkSize) {
        requests.push(new Request(i, Math.min(i + chunkSize, end)));
    }
    return requests;
}

function getData(start, end, cb) {
    setTimeout(function() {
        cb(mockData(start, end));
    }, Math.random() * 3000 + 500);
}

function mockData(from, to) {
    return Array(to - from).fill(from).map(function(_, i) {
        return from + i;
    });
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/knockout/3.2.0/knockout-min.js"></script>
<div data-bind="text: loadingMsg"></div>
<ul data-bind="foreach: data, visible: firstCompleted" style="border: 1px solid black;">
    <li data-bind="text: $data"></li>
</ul>

Related

Cannot push JSON elements to array inside for loop called from useEffect

I have an array candleRealTimeDataQueue which is not getting updated properly. Please find the code below:
let candleCurrentJSONDataWS = null;
var candleRealTimeDataQueue = [];
let tempDateTime = null;
let candleJsonData = {};

useEffect(() => {
    getDataFromAPI();
}, []);

...

const getDataFromAPI = async () => {
    let apiDataFetch = await fetch('https:/api/endpoint');
    let response = await apiDataFetch.json(); // data from api obtained correctly

    // total 4 values
    for (var i = 0; i < 4; i++) {
        tempDateTime = new Date(parseInt(response[i][0]));
        candleJsonData['time'] = tempDateTime.toString();
        candleJsonData['open'] = parseFloat(response[i][1]);
        candleJsonData['high'] = parseFloat(response[i][2]);
        candleJsonData['low'] = parseFloat(response[i][3]);
        candleJsonData['close'] = parseFloat(response[i][4]);
        console.log(candleJsonData); // this correctly prints different
                                     // data for each different i
        candleRealTimeDataQueue.push(candleJsonData);
        console.log(candleRealTimeDataQueue); // PROBLEM is here: at the end,
                                              // the candleRealTimeDataQueue
                                              // elements are all the SAME.
                                              // That's wrong. All 4 elements
                                              // are of i = 3
    }
}
The problem is that, at the end, candleRealTimeDataQueue has 4 elements and all of them are the same. This should not happen, because I am pushing DIFFERENT candleJsonData objects into the candleRealTimeDataQueue array in the for loop. Please help.
I believe the problem here is that you are reusing the candleJsonData object. When you run candleRealTimeDataQueue.push(candleJsonData), you are pushing a reference to candleJsonData into candleRealTimeDataQueue. So at the end of the loop, you have four references to the same object inside candleRealTimeDataQueue. And since you are modifying that same candleJsonData object over and over again, you'll see four identical objects in the queue when you log it, and all four elements will be those of i = 3.
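Here's a minimal standalone sketch of that aliasing (the values are hypothetical, not from your API):

const shared = {};
const queue = [];

for (let i = 0; i < 3; i++) {
    shared.value = i;   // mutates the same object each time
    queue.push(shared); // pushes another reference to that object
}

console.log(queue); // [{ value: 2 }, { value: 2 }, { value: 2 }]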
Instead, you should be creating new candleJsonData objects inside your loop. So something like
for (var i = 0; i < 4; i++) {
    tempDateTime = new Date(parseInt(response[i][0]));

    let candleJsonData = {};
    candleJsonData['time'] = tempDateTime.toString();
    candleJsonData['open'] = parseFloat(response[i][1]);
    candleJsonData['high'] = parseFloat(response[i][2]);
    candleJsonData['low'] = parseFloat(response[i][3]);
    candleJsonData['close'] = parseFloat(response[i][4]);

    candleRealTimeDataQueue.push(candleJsonData);
}
It is because the candleJsonData variable is declared outside the loop, so the latest value overwrites the previous one. In fact there is no need for that variable at all; the object can be pushed into the array directly.
var candleRealTimeDataQueue = [];

React.useEffect(() => {
    getDataFromAPI().then((data) => {
        for (let i = 0; i < 4; i++) {
            candleRealTimeDataQueue.push({
                time: new Date(parseInt(data[i][0])).toString(),
                open: parseFloat(data[i][1]),
                high: parseFloat(data[i][2]),
                low: parseFloat(data[i][3]),
                close: parseFloat(data[i][4]),
            });
        }
    });

    return () => {
        // do clean up here
    };
}, []);

const getDataFromAPI = () => {
    return fetch('https:/api/endpoint').then((res) => res.json());
};

Issues with Array Variable

app.get("/indsalesx/:store/:mm", (req, res) => {
connect();
let ddd = [];
let staffarray = [{}];
let store = req.params.store;
let mm = req.params.mm;
const SP = mongoose.model(`sales${store}`, Sales);
let num = stafflist[store].length - 1;
for (i = 0; i <= num; i++) {
let staffname = stafflist[store][i];
let calc = 0;
SP.find(
{ v_salesperson: stafflist[store][i], v_month: mm },
"v_amount",
(err, doc) => {
let t = doc.length - 1;
doc.map((res) => {
calc = calc + res.v_amount;
});
ddd.name = staffname;
ddd.amount = calc;
staffarray.push(ddd);
}
);
}
console.log(staffarray);
});
The issue I have is: why is staffarray returning an empty array? staffarray was declared as an empty array of objects, and in a loop, objects were pushed to the array. But when I console.log(staffarray), it returns the empty array of objects declared initially.
Any help on what to do?
When using find(), you can take one of two approaches:
1. Pass a callback function.
2. await the call and use the returned results.
It appears that you used the first approach, which means you are passing a callback into the find() method that handles the result once it is received.
The console.log() line executes before the results come back, since it is the next line to run after the for loop.
So, let's go through what is happening here:
1. JavaScript executes the find() line.
2. That line hands the query off to the asynchronous I/O layer (the "Web APIs" in a browser, libuv in Node), where concurrency kicks in and the call to the database server is made for us.
3. The console.log() line is executed with the still-unfilled array (the results haven't been received yet).
4. After some time, the results come back and the callback is placed in the callback queue.
5. The event loop takes the callback from the callback queue and executes it.
This is all part of the JavaScript event loop; you can read more about it here.
Mongoose documentation: Model.find()
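Here is a minimal sketch of that ordering, using setTimeout to stand in for the database call:

const staffarray = [];

for (let i = 0; i <= 2; i++) {
    // stand-in for SP.find(...): the callback runs later, on a future tick
    setTimeout(() => {
        staffarray.push({ name: `staff${i}`, amount: i * 100 });
    }, 10);
}

console.log(staffarray); // [] -- logged before any callback has run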
You can use for...of with async/await instead of a plain for loop:
app.get("/indsalesx/:store/:mm", async(req, res) => {
connect();
let ddd = [];
let staffarray = [{}];
let store = req.params.store;
let mm = req.params.mm;
const SP = mongoose.model(`sales${store}`, Sales);
let num = stafflist[store].length - 1;
var list = Array.from(Array(num).keys());
for (let i of list) {
let staffname = stafflist[store][i];
let calc = 0;
let doc = await SP.find(
{ v_salesperson: stafflist[store][i], v_month: mm },
"v_amount"
);
let t = doc.length - 1;
doc.map((res) => {
calc = calc + res.v_amount;
});
ddd.name = staffname;
ddd.amount = calc;
staffarray.push(ddd);
}
console.log(staffarray);
});
I have been able to solve it; all I needed was proper structuring with the async and await statements.
app.get("/indsalesx/:store/:mm", async (req, res) => {
connect();
let ddd = {};
let staffarray = [];
let store = req.params.store;
let mm = req.params.mm;
const SP = mongoose.model(`sales${store}`, Sales);
let num = stafflist[store].length - 1;
for (i = 0; i <= num; i++) {
let staffname = stafflist[store][i];
let calc = 0;
await SP.find(
{ v_salesperson: stafflist[store][i], v_month: mm },
"v_amount",
(err, doc) => {
let t = doc.length - 1;
doc.map((res) => {
calc = calc + res.v_amount;
});
staffarray.push({ name: staffname, amount: calc });
}
);
}
console.log(staffarray);
res.send({ data: staffarray });
});

Node.js - OData batches

I need to call the OData service for an HTTP POST operation. Currently I send the data in batches of 1000, calling all the batches at once. But there is a problem: because of the concurrent batches, the table gets locked. So I would like to send them using $batch with a change set instead, but I am not sure how to call the OData service with a change set in Node.
Current code for the batches:
const parallel = async (headers, input) => {
    let payload = [],
        items = [],
        header = {};

    const sendreq = async (headers, request, i) => {
        // sending request
    };

    // split the items into chunks of 1000
    for (var i = 0, len = input.ITEM.length; i < len; i += 1000) {
        items.push(input.ITEM.slice(i, i + 1000));
    }

    // build one payload object per chunk
    for (i = 0; i < items.length; i++) {
        header.ITEM = items[i];
        payload.push(header);
        header = {};
    }

    let j = 0;
    const batches = Array(Math.ceil(input.ITEM.length / 1000)).fill(Array(1).fill(sendreq));
    for (const batch of batches) {
        await Promise.all(batch.map(f => f(headers, payload, j++)));
    }
};

Async fetch in for loop - access to the result (finished) variable

I'm new to asynchronous JS and I have a question: how do I start working on the created array only after all queries are done? I fetch pages in a for loop. Here's my code:
var allOrgReposData = [];
var repoContributorsUrls = [];

for (var i = 1; i <= orgPageIterations; i++) {
    var orgReposUrl = 'https://api.github.com/orgs/angular/repos?page=' + i;
    fetch(orgReposUrl)
        .then(response => response.json())
        .then(orgReposData => {
            allOrgReposData = allOrgReposData.concat(orgReposData);
            console.log(allOrgReposData);
        });
}
As you can see, the allOrgReposData array is built up in the for loop, but if I try to do something with this array, the script does it on every iteration, so my operations are multiplied instead of executed a single time. For example (30 items per page): 30; 60; 90; 120; 150; 171 = 621 instead of 171.
Is it possible to "wait" for the fetching to finish and get access to the array without this "multiplication"?
Greetings!
You can use Promise.all which will wait until all promises are complete:
var allOrgReposData = [];
var repoContributorsUrls = [];
var promises = [];
let orgPageIterations = 1;

for (var i = 1; i <= orgPageIterations; i++) {
    let orgReposUrl = 'https://api.github.com/orgs/angular/repos?page=' + i;
    promises.push(fetch(orgReposUrl).then(response => response.json()));
}

Promise.all(promises)
    .then(data => {
        // merge the per-page arrays into one flat array
        allOrgReposData = [].concat(...data);
        console.log(allOrgReposData);
    })
    .catch(err => console.error(err));
Please note that I've also changed var orgReposUrl to let orgReposUrl to make use of block scoping.
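As a standalone illustration of why block scoping matters in loops with asynchronous callbacks (not part of the code above):

for (var i = 1; i <= 3; i++) {
    setTimeout(() => console.log('var:', i), 0); // logs 4, 4, 4 -- one shared i
}

for (let j = 1; j <= 3; j++) {
    setTimeout(() => console.log('let:', j), 0); // logs 1, 2, 3 -- one j per iteration
}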
You could keep track of the number of completed calls with a variable:
var allOrgReposData = [];
var repoContributorsUrls = [];
var callSuccess = 0; // Counter keeping track of completed ajax calls

for (var i = 1; i <= orgPageIterations; i++) {
    var orgReposUrl = 'https://api.github.com/orgs/angular/repos?page=' + i;
    fetch(orgReposUrl)
        .then(response => response.json())
        .then(orgReposData => {
            allOrgReposData = allOrgReposData.concat(orgReposData);
            console.log(allOrgReposData);
            callSuccess++; // Increment the counter for each completed call
            if (callSuccess == orgPageIterations) { // If every call has completed, then continue
                // DO YOUR THING HERE
            }
        });
}

Best way to iterate and make an async call during each iteration

If you have to loop and make a bunch of calls to a repository (or gateway, in my case), how do I do that asynchronously, meaning without wrapping my async calls inside a synchronous for loop?
For example, what would be a better approach (restructuring this code) to loop through a set of ids and make the call to find() below, like I'm trying to do?
The goal: I want to take an array of ids, iterate them, and during each iteration use the id to call find() on my gateway to get the object for that id, then stuff it into a final array which I'll return when all is said and done.
What I'm using:
q (for promises)
co-pg (to hit the database)
someModule.js
var _gateway = require('./database/someGateway');

var cars = [];
var car;

for (var i = 0; i < results.docs.length; i++) {
    var carId = results.docs[i].carId;

    _gateway.find(carId)
        .then(function (data) {
            console.log('data[0]: ' + data[0].id);
            cars.push(data[0]);
        })
        .done();
}

console.log("cars: " + cars.length); // length here is 0 because my async calls weren't done yet
result(cars);
someGateway.js
'use strict';

var Q = require('q');
var _carModel = require('../../models/car');

module.exports = {
    models: {
        car: _carModel
    },
    find: _find
};

function _find(carId) {
    return _carModel.find(carId);
}
carModel.js
'use strict';

var Q = require('q');
var pg = require('co-pg')(require('pg'));
var config = require('../../models/database-config');

var car = module.exports = {};

car.find = Q.async(function* (id) {
    var query = 'SELECT id, title, description FROM car WHERE id = ' + id;
    var connectionResults = yield pg.connectPromise(config.connection);
    var client = connectionResults[0];
    var done = connectionResults[1];
    var result = yield client.queryPromise(query);
    done();
    console.log("result.rows[0].id: " + result.rows[0].id);
    return result.rows;
});
So I need help understanding how to refactor my code in someModule.js to get this working properly, so that I make a call to find() for each id, stuff each found car into the array, then return the array. The carModel code is async; it goes out to a physical database to perform the actual query.
UPDATE #1
OK, after a couple more hours of trying all sorts of things (Q.all(), and a ton of other combinations of callback code, etc.), here's what I have at this point:
someModule.js
var _data;
var Q = require('q');
var _solrClient = require('../models/solr/query');
var _solrEndpoint = "q=_text&indent=true&rows=10";
var _postgreSQLGateway = require('./database/postgreSQLGateway');

module.exports = {
    data: function (data) {
        _data = data;
    },
    find: function (text, result) {
        if (!text) {
            result(null);
            return;
        }

        _solrClient.query(_solrEndpoint, function (results) {
            var carIds = [];
            var cars = [];
            var car;

            for (var i = 0; i < results.docs.length; i++) {
                carIds.push(results.docs[i].carId);
            }

            for (var i = 0; i < carIds.length; i++) {
                var car = _postgreSQLGateway.find(carIds[i], function (o) {
                    console.log("i: " + i);
                });
            }
        });
    }
};
someGateway.js
'use strict';
var Q = require('q');
var _carModel = require('../../models/postgreSQL/car');
module.exports = {
models: {
car: _carModel
},
find: _find
};
function _find(carId, foundCar)
{
console.log("CALL MADE");
_carModel.find(carId)
.then(function(car){
console.log("car: " + car[0].id);
foundCar(car);
});
};
carModel.js
[same code, has not changed]
Of course, I noticed that the for loop fires off all my function calls asynchronously, so when I console.log the i, it's 10 because the for loop has already finished; then, as we know, the rest of the console.logs happen later, after the callbacks complete.
So I still can't get this working right...
Also, when I was playing around, I started down this path, but it ended at a brick wall:
var find = Q.async(function (carIds, cars) {
    var tasks = [];
    var foundCars = [];

    for (var i = 0; i < carIds.length; i++) {
        tasks.push(_postgreSQLGateway.find(carIds[' + i + ']));
    }

    Q.all([tasks.join()]).done(function (values) {
        for (var i = 0; i < values.length; i++) {
            console.log("VALUES: " + values[0]);
            foundCars.push(values[0]);
        }
        cars(foundCars);
    });
});
I ended up with [object Promise] every time for values[i] instead of a car.
I don't know the Q promises library, but here's a solution using the generic Promises built into node.js. This runs all the requests in parallel and then, when all results have been collected, runs the final .then() handler with all the results:
var _gateway = require('./database/someGateway');

var promises = [];
for (var i = 0; i < results.docs.length; i++) {
    promises.push(_gateway.find(results.docs[i].carId).then(function (data) {
        console.log('data[0]: ' + data[0].id);
        return data[0];
    }));
}

Promise.all(promises).then(function (cars) {
    // cars will be an array of results in order
    console.log("cars: " + cars.length);
    result(cars);
});
Individual promise libraries (like Bluebird, the one I know) have features built in that let you do this kind of thing in even less code, but I've intentionally kept this answer to standard promise features.
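For instance, here is a rough sketch of the same thing with Bluebird's Promise.map, assuming Bluebird is installed (this is illustrative, not part of the solution above):

var Promise = require('bluebird');
var _gateway = require('./database/someGateway');

// Promise.map applies the mapper to each doc and resolves with the
// results in the original order, optionally limiting concurrency.
Promise.map(results.docs, function (doc) {
    return _gateway.find(doc.carId).then(function (data) {
        return data[0];
    });
}, { concurrency: 10 }).then(function (cars) {
    result(cars);
});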
This is potentially really easy with the vanilla Promise API from ES6 (also replicated by Bluebird and other libraries). First, map the IDs to an array of promises:
var promises = results.docs.map(function (doc) {
    return _gateway.find(doc.carId);
});
Then create a promise for the aggregate result:
var allDone = Promise.all(promises);
Then, inside the then() callback of the aggregate promise, you'll have a final array of results, with the same length and order as the carId array:
allDone.then(function (results) {
    // do something with "results"
});
