Access IndexedDB when a Promise gets a query error - javascript

I am building a Progressive Web App and I want to access the data I have stored in IndexedDB when the AJAX response fails. The data gets filled when the query is successful. Below is one of my attempts. In comments is the original format before I inserted the offline part (I already had base code).
// Fetches the audit rows for the given month range from the server API;
// on a failed request it falls back to the locally cached IndexedDB copy.
// NOTE(review): getCacheAudits appears to be asynchronous, so assigning its
// return value straight to `res` resolves with undefined — TODO confirm;
// this is the behavior the question is asking about.
getAudits: (startOfMonth, endOfMonth) => {
  return new Promise((resolve, reject) => {
    request
      .get(url + "api/?method=Audit.data")
      .query({
        pageLength: "all",
        format: "json",
        // Only constrain by date when a range was supplied.
        where: startOfMonth
          ? Utils.hashQuery(
              "a.date",
              "between",
              [startOfMonth, endOfMonth].toString()
            )
          : "",
        // Cache-busting timestamp.
        ts: new Date().getTime()
      })
      .end((error, response) => {
        var res;
        if (error) {
          // Offline / request failed: try the cached copy instead.
          res = getCacheAudits(startOfMonth, endOfMonth);
          //reject(error);
        } else {
          let _audits = [];
          const audits = Utils.toArray(JSON.parse(response.text).rows);
          audits.forEach(audit => {
            _audits.push(audit);
            //console.log(audit);
          });
          res = _audits;
          // Presumably pushes the fresh rows to the cache — TODO confirm.
          getServerAudits(_audits);
          //resolve(res);
        }
        resolve(res);
      });
  });
}
Does anyone have any idea about how to handle this correctly? Thanks in advance.

Not sure why you copy `Utils.toArray(JSON.parse(response.text).rows)` to audits, _audits and res; they are all the same.
Also not sure what getServerAudits is supposed to do, because it doesn't return anything and you're not doing anything with its return value (calling functions and ignoring their return is usually a sign of bad design).
You could probably do something like this:
// Suggested rework: reject with the month range attached so the catch
// handler can fall back to the IndexedDB cache, then persist and unwrap
// the resulting audit list.
getAudits: (startOfMonth, endOfMonth) => {
  return new Promise((resolve, reject) => {
    request
      .get(url + "api/?method=Audit.data")
      .query({
        pageLength: "all",
        format: "json",
        // Only filter by date when a range was supplied.
        where: startOfMonth
          ? Utils.hashQuery(
              "a.date",
              "between",
              [startOfMonth, endOfMonth].toString()
            )
          : "",
        ts: new Date().getTime()
      })
      .end((error, response) =>
        (error)
          // Carry the range along with the error for the catch handler below.
          ? reject([error,startOfMonth, endOfMonth])
          : resolve(Utils.toArray(JSON.parse(response.text).rows))
      )
  })
    .catch(
      // NOTE(review): this destructuring assumes every rejection is the
      // [error, start, end] triple built above — if JSON.parse throws, the
      // rejection is a plain Error and this handler breaks. TODO confirm.
      ([err,startOfMonth, endOfMonth]) =>
        getCacheAudits(startOfMonth, endOfMonth)
    )
    .then(
      audits => {
        //not sure what this does, you don't seem to care about its
        // result because your code doesn't do anything with it
        getServerAudits(audits);
        //probably want to save audits to local storage here
        return Promise.all([
          audits,
          saveCacheAudits(startOfMonth,endOfMonth,audits)
        ]);
      }
    )
    .then(
      // Resolve with just the audits; drop the saveCacheAudits result.
      ([audits])=>audits
    )
}

Let's make this easy with the IndexedDB library jsstore -
// Open (or create) the jsstore connection to the IndexedDB database.
var con= new JsStore.Instance(<db_name>);
// Cached lookup backed by IndexedDB through jsstore's SQL-like select API.
// NOTE(review): assumes the table stores the range as two literal columns
// named startOfMonth/endOfMonth — adjust Where to the real schema.
getCacheAudits: (startOfMonth, endOfMonth)=>{
  return con.select({
    From: "Table_Name",
    Where:
    {
      startOfMonth:startOfMonth,
      endOfMonth: endOfMonth
    }
  })
}
// Fetches audits for the month range from the server; when the request
// fails (e.g. offline), serves the cached rows from IndexedDB instead.
getAudits: (startOfMonth, endOfMonth) => {
  return new Promise((resolve, reject) => {
    request
      .get(url + "api/?method=Audit.data")
      .query({
        pageLength: "all",
        format: "json",
        // Only filter by date when a range was supplied.
        where: startOfMonth
          ? Utils.hashQuery(
              "a.date",
              "between",
              [startOfMonth, endOfMonth].toString()
            )
          : "",
        // Cache-busting timestamp.
        ts: new Date().getTime()
      })
      .end((error, response) => {
        if (error) {
          //get the data from indexeddb
          // FIX: getCacheAudits must be *invoked* with the range — the
          // original read `.then` off the function object itself
          // (getCacheAudits.then), which is a TypeError at runtime.
          getCacheAudits(startOfMonth, endOfMonth)
            .then(function (results) {
              resolve(results);
            })
            .catch(function (error) {
              reject(error._message);
            });
        } else {
          let _audits = [];
          const audits = Utils.toArray(JSON.parse(response.text).rows);
          audits.forEach(audit => {
            _audits.push(audit);
            //console.log(audit);
          });
          // Push the fresh server rows to the cache layer, then resolve.
          getServerAudits(_audits);
          resolve(_audits);
        }
      });
  });
}
You can also use where (http://jsstore.net/tutorial/where) and all others options to query the data.
If you are using angular to create the app, check out this article - http://ujjwalguptaofficial.blogspot.in/2017/10/angular4-crud-operation-in-indexeddb.html
Hope this will help you.

Just solved the problem. It wasn't actually just in this part, but in the getCacheAudits function too.
The getCacheAudits call became:
// Resolve the outer getAudits promise with whatever the IndexedDB cursor
// walk collected; getCacheAudits reports back through this callback.
getCacheAudits(startOfMonth, endOfMonth, function (res) {
  resolve(res);
})
And in the getCacheAudits function was added a callback
// Reads every cached audit row out of the "audits" object store in the
// AuditsDB IndexedDB database and delivers them to `callback` as one array.
// NOTE(review): startOfMonth/endOfMonth are currently unused here — the
// author mentions extra filtering lives elsewhere; TODO confirm.
function getCacheAudits(startOfMonth, endOfMonth, callback){
  let initOpenReq = indexedDB.open("AuditsDB");
  let agregate = [];
  // FIX: surface open failures instead of silently never invoking the
  // callback (the original had no onerror handler at all).
  initOpenReq.onerror = function () {
    console.error(initOpenReq.error);
    callback(agregate); // deliver an empty result so callers are not stuck
  };
  initOpenReq.onsuccess = function() {
    var db = initOpenReq.result;
    var transaction = db.transaction("audits","readonly");
    var obj_store = transaction.objectStore("audits");
    // Walk the store with a cursor, accumulating every record.
    obj_store.openCursor().onsuccess = function (event){
      let cursor = event.target.result;
      if(cursor){
        agregate.push(cursor.value);
        cursor.continue();
      }else{
        // Cursor exhausted — hand the collected rows back.
        callback(agregate);
      }
    };
  };
} // FIX: the original snippet was missing this closing brace
There are a few more bells and whistles in the function, but this made it work properly.
Sorry for not including the function right from the beginning.

Related

How to save thousand data in Parse Platform NodeJS

I am new to the Parse platform and I'm trying to insert 81,000 rows of data into the Parse DB; here is the code:
// Fetch the source rows and save each one to Parse as a "theClass" object.
const uri = "/the.json";
const res = await axios.get(uri);
const dataresult = Object.keys(res.data);
if (dataresult.length > 0) {
  res.data.forEach(function (datakp) {
    var kp = new Parse.Object("theClass");
    kp.save(datakp)
      .then((res) => {
        console.log('oke ' + res.id);
      })
      // FIX: in the original, the rejection handler sat OUTSIDE the .then()
      // call as a comma expression (`.then(...), (error) => {...}`), so it
      // was never attached and save failures were silently dropped.
      .catch((error) => {
        console.log('err : ' + error.message);
      });
  });
}
There is no error in console log, and no data is saved in Parse DB, but if I only insert 1000 rows, it will save to the database.
EG:
// Same loop, capped to the first 1000 rows (the threshold below which the
// inserts were observed to succeed).
if (dataresult.length > 0) {
  res.data.forEach(function (datakp, index) {
    if (index < 1000) {
      var kp = new Parse.Object("theClass");
      kp.save(datakp)
        .then((res) => {
          console.log('oke ' + res.id);
        })
        // FIX: the error handler was detached by a misplaced `),` in the
        // original (a comma expression) and never ran; the snippet's braces
        // were also unbalanced — both corrected here.
        .catch((error) => {
          console.log('err : ' + error.message);
        });
    }
  });
}
Thank You
UPDATE
I fixed this case based on the answer by #Davi Macêdo;
here a complete code
// Build all Parse objects up front and persist them with one batched
// saveAll call instead of 81k individual save requests.
const uri = "/the.json";
const res = await axios.get(uri);
const dataresult = Object.keys(res.data);
const objs = [];
const theKP = Parse.Object.extend("theClass");
if (dataresult.length > 0) {
  res.data.forEach(function (datakp) {
    var thekp = new theKP();
    thekp.set(datakp);
    objs.push(thekp);
  });
}
Parse.Object.saveAll(objs)
  .then((res) => {
    console.log('oke updated ' + dataresult.length);
  })
  // FIX: attach the rejection handler with .catch — in the original,
  // `, (error) => {...}` was a dangling comma expression that never
  // received the rejection.
  .catch((error) => {
    console.log('err : ' + error.message);
  });
The most efficient way is to use the Parse.Object.saveAll function. Something like this:
// Collect every row as a Parse object, then persist the whole set in one
// batched request.
const uri = "/the.json";
const res = await axios.get(uri);
const dataresult = Object.keys(res.data);
const objs = [];
if (dataresult.length > 0) {
  res.data.forEach(function (datakp) {
    objs.push(new Parse.Object("theClass", datakp));
  });
}
Parse.Object.saveAll(objs)
  .then((res) => {
    // NOTE(review): saveAll resolves with an ARRAY of saved objects, so
    // res.id is undefined here — log res.length or iterate for per-row ids.
    console.log('oke ' + res.id);
  })
  // FIX: the rejection handler was a dangling comma expression in the
  // original (`.then(...), (error) => {...}`) and never attached — use
  // .catch so failures are actually reported.
  .catch((error) => {
    console.log('err : ' + error.message);
  });
Anyways, since you have no error and no data currently being saved, you might be kitting some memory limit. So that's something you also need to be aware about.
You're probably hitting rate limits, I can't imagine saving 81,000 records in one shot is normal behaviour for many applications.
I looked through the documentation and couldn't find anything that might mention a save limit, however sending 1000 requests would trigger most rate limit protection

How to read multiple json file using fs and bulk request

I'm using elasticsearch search engine with my react app, I was reading one file at the backend as you see in the code and it work perfectly, but now I want to read three different JSON files to three different indexes using the "fs" package and bulk request, can you please help me?
the code:
// Start reading the json file
// NOTE(review): `bulk_request` is never declared with var/let/const, so it
// becomes an implicit global (and a ReferenceError in strict mode) —
// declare it before use.
fs.readFile("DocRes.json", { encoding: "utf-8" }, function (err, data) {
  if (err) {
    throw err;
  }
  // Build up a giant bulk request for elasticsearch.
  // The file is newline-delimited JSON: each line contributes an
  // action/metadata entry followed by the document itself, the pairing the
  // bulk API expects.
  bulk_request = data.split("\n").reduce(function (bulk_request, line) {
    var obj, ncar;
    try {
      obj = JSON.parse(line);
    } catch (e) {
      // Unparseable line (e.g. the trailing newline) ends this file's rows.
      console.log("Done reading 1");
      return bulk_request;
    }
    // Rework the data slightly
    ncar = {
      id: obj.id,
      name: obj.name,
      summary: obj.summary,
      image: obj.image,
      approvetool: obj.approvetool,
      num: obj.num,
      date: obj.date,
    };
    // Action line: route the document into ncar_index under its own id.
    bulk_request.push({
      index: { _index: "ncar_index", _type: "ncar", _id: ncar.id },
    });
    bulk_request.push(ncar);
    return bulk_request;
  }, []);
  // A little voodoo to simulate synchronous insert
  // `busy` serializes the chunked bulk calls: a new chunk is only sent once
  // the previous callback has cleared the flag.
  var busy = false;
  var callback = function (err, resp) {
    if (err) {
      console.log(err);
    }
    busy = false;
  };
  // Recursively whittle away at bulk_request, 1000 at a time.
  var perhaps_insert = function () {
    if (!busy) {
      busy = true;
      client.bulk(
        {
          body: bulk_request.slice(0, 1000),
        },
        callback
      );
      bulk_request = bulk_request.slice(1000);
      console.log(bulk_request.length);
    }
    if (bulk_request.length > 0) {
      // Poll again shortly; sends the next chunk once `busy` clears.
      setTimeout(perhaps_insert, 100);
    } else {
      console.log("Inserted all records.");
    }
  };
  perhaps_insert();
});
You can create multiple promises for each file read and feed it to the elastic search bulk_request.
// Read several files in parallel and collect their contents.
// FIXES vs. the original snippet: `await` was used inside a non-async
// Promise executor (a syntax error, with a stray `)` as well); the local
// `path` constant shadowed the `path` module it was calling (TDZ error);
// `response` was declared but `results` used; `fetchFile()` was invoked
// without its filename; and two statements sat unreachably after `return`.
const fsPromises = require('fs').promises;
const path = require('path');
const files = ['filename1', 'filename2'];

// fs.promises.readFile already returns a promise — no manual wrapper needed.
const fetchFile = (filename) =>
  fsPromises.readFile(path.join(__dirname, filename)); // make sure path is correct

// Kick off every read at once and wait for all of them.
const results = files.map((fileName) => fetchFile(fileName));
Promise.all(results)
  .then((data) => console.log(data))
  .catch((e) => console.log(e));
Once you get data from all the promises pass it to the elastic search.

Trying to send params to axios in get - react

I'm new here on the site, and new to React.
I built a function that works great in Node.js. There are rare cases where I want to run this function with specific parameters, so I try to send it the parameters, but it seems I cannot — when I try to print them, I do not see the parameters I wanted to send.
I run the function through a click on a button in React:
{/* Demo trigger: dispatches the makeMatchVer2 thunk with a sample array. */}
<Button onClick={() => {
  const result = [1,2,3,4,5,"test"];
  props.makeMatchVer2(result);
}}>
  make match ver2
</Button>
the action I run with axios:
// Thunk action creator: flags loading, queries /kmeans with the supplied
// filter array, and dispatches MAKE_MATCH with the response data — or with
// an empty list when the request (or the success dispatch) fails.
export const makeMatchVer2 = (data) => (dispatch) => {
  dispatch({ type: LOADING_DATA });

  const deliver = (payload) => dispatch({ type: MAKE_MATCH, payload });

  axios
    .get('/kmeans', { params: { filterArray: data } })
    .then((res) => deliver(res.data))
    .catch(() => deliver([]));
};
the function I built in Node.js:
// Express handler: clusters every engineer's preference vector with k-means,
// locates the group containing the requesting user, and persists that match
// list under the user's handle.
exports.addUserKmeansMatch = (req, res) => {
  console.log("addUserKmeansMatch function start:");
  console.log(req.data);
  // NOTE(review): axios sends `params` as the query string, so the filter
  // array should arrive in req.query rather than req.params — TODO confirm
  // against the route definition.
  if(req.params)
  {
    console.log(req.params);
  }
  // The requesting user is unshifted to the front of both arrays below, so
  // index 0 always refers to them.
  let userIndex = 0;
  let engineers = [];
  let engineersHandles = [];
  let engineerDetailsNumeric = {};
  db.collection("preferences").get().then(querySnapshot => {
    querySnapshot.forEach(doc => {
      const engineerDetails = doc.data();
      if (engineerDetails.handle === req.user.handle) {
        // Current user goes first; handles and vectors stay index-aligned.
        engineersHandles.unshift(engineerDetails.handle);
        delete engineerDetails.handle;
        engineerDetailsNumeric = convertObjectWithStrToNumber(engineerDetails);
        engineers.unshift(engineerDetailsNumeric);
      }
      else {
        engineersHandles.push(engineerDetails.handle);
        delete engineerDetails.handle;
        engineerDetailsNumeric = convertObjectWithStrToNumber(engineerDetails);
        engineers.push(engineerDetailsNumeric);
      }
    });
    // Partition the numeric vectors into 4 clusters.
    kmeans.clusterize(engineers, { k: 4, maxIterations: 5, debug: true }, (err, result) => {
      if (err) {
        console.error(err);
        return res.status(500).json({ error: err.code });
      } else {
        const cluster = result.clusters;
        // Shallow copies guard the originals against mutation by the helpers.
        let foundedMatches = GetUserSerialGroup(userIndex, [...cluster], [...engineers]);
        let foundedMatchesHandle = GetUserSerialGroupHandle(userIndex, [...cluster], [...engineersHandles]);
        // NOTE(review): totalTest is built but never used — debug leftover?
        let totalTest = {
          foundedMatches: foundedMatches,
          foundedMatchesHandle: foundedMatchesHandle,
          cluster: cluster,
          engineersHandles: engineersHandles,
          engineers: engineers
        };
        let userMatchHandle = reduceUserMatchHandle(foundedMatchesHandle);
        userMatchHandle.handle = req.user.handle;
        // Persist the computed match list keyed by the user's handle.
        db.doc(`/match/${req.user.handle}`)
          .set(userMatchHandle)
          .then(() => {
            return res.json({ message: "Details added successfully" });
          })
          .catch((err) => {
            console.error(err);
            return res.status(500).json({ error: err.code });
          });
      }
    })
  })
};
Through the button I send parameters to the function, but I do not see them printed, so something is probably not working — and I do not know why; I'm new to this.
makeMatchVer2 is a thunk. You should call it with dispatch: dispatch(props.makeMatchVer2(result))
The code is correct, I accidentally sent the wrong object, I have 2 objects with almost identical names, one array and the other an object. And I accidentally sent the object instead of the array, it's working right now, thank you.

Not getting result in node js, mongo db using promise in loop

I am new to Node.js and MongoDB. Using promises inside loops in Node.js is really confusing for a new developer. I need the final array or object, which then() should give me as the final result. Please correct this.
I have a controller function described below.
let League = require('../../model/league.model');
let Leaguetype = require('../../model/leagueType.model');
let Leaguecategories = require('../../model/leagueCategories.model');
// Walks types -> categories -> leagues, fetching all children of every
// parent in parallel via nested Promise.all fan-outs.
let fetchLeague = async function (req, res, next){
  let body = req.body;
  await mongo.findFromCollection(Leaguetype)
    .then(function(types) {
      // One parallel branch per league type.
      return Promise.all(types.map(function(type){
        return mongo.findFromCollection(Leaguecategories, {"league_type_id": type._id})
          .then(function(categories) {
            // One parallel branch per category within the type.
            return Promise.all(categories.map(function(category){
              return mongo.findFromCollection(League, {"league_category_id": category._id})
                .then(function(leagues) {
                  return Promise.all(leagues.map(function(league){
                    return league;
                  }))
                    .then(function(league){
                      // NOTE(review): this handler only logs and returns
                      // undefined, so undefined propagates up the chain —
                      // this is why `final` below carries no data; return
                      // the array instead of just logging it.
                      console.log(league);
                    })
                })
            }))
          });
      }))
    })
    .then(function(final){
      // `final` ends up hollow because the inner handlers above do not
      // return their arrays.
      console.log(final);
    })
    .catch (error => {
      console.log('no',error);
    })
}
mongo.findFromCollection function is looking like this.
/**
 * Runs `model_name.find(query_obj)` and adapts its Node-style callback to a
 * Promise.
 *
 * @param {Object} model_name - Mongoose model (anything exposing find(query, cb)).
 * @param {Object} [query_obj={}] - Mongo query filter; defaults to "match all".
 * @returns {Promise<Array>} resolves with the matched documents; rejects
 *   with the driver error, or with {status: 104, message} when no usable
 *   model was supplied.
 */
// FIX: declared with const — the original assigned an implicit global.
const findFromCollection = (model_name, query_obj = {}) => {
  return new Promise((resolve, reject) => {
    // FIX: also reject on null (the original's undefined/'' check let null
    // through and crashed inside the executor with a TypeError).
    if (model_name == null || model_name === '') {
      reject({ status: 104, message: `Invalid search.` });
      return;
    }
    model_name.find(query_obj, function (e, result) {
      if (!e) {
        resolve(result);
      } else {
        reject(e);
      }
    });
  });
};
and here is my model file
var mongoose = require('mongoose');

// Mongoose schema for a league category: a named, activatable grouping that
// belongs to a league type.
const league_categories = new mongoose.Schema({
  name: {
    type: String,
    required: true
  },
  active: {
    type: String,
    required: true
  },
  create_date: {
    type: Date,
    required: true,
    default: Date.now
  },
  league_type_id: {
    type: String,
    // FIX: the original object had TWO `required` keys
    // (`required: 'league_type'` then `required: true`); the second
    // silently overwrote the first, so the string form was dead code. It
    // was probably meant to be `ref: 'league_type'` — TODO confirm before
    // adding a ref.
    required: true
  }
})
module.exports = mongoose.model('Leaguecategories', league_categories)
First, I recommend you stop using callbacks wherever you can; they are a bit dated and the code is much harder to read and maintain.
I re-wrote your code a little bit to look closer to what I'm used to. This does not mean this style is better; I just personally think it's easier to understand what's going on.
// async/await rework: fetch all types, then for each type its categories,
// then for each category its leagues — each level fanned out in parallel
// with Promise.all.
async function fetchLeague(req, res, next) {
  try {
    //get types
    let types = await Leaguetype.find({});
    //iterate over all types.
    let results = await Promise.all(types.map(async (type) => {
      let categories = await Leaguecategories.find({"league_type_id": type._id});
      // Leagues for every category of this type, fetched concurrently.
      return Promise.all(categories.map(async (category) => {
        return League.find({"league_category_id": category._id})
      }))
    }));
    // results is in the form of [ [ [ list of leagues] * per category ] * per type ]
    // if a certain category or type did not have matches it will be an empty array.
    return results;
  } catch (error) {
    // Any failure anywhere in the fan-out lands here; callers get [].
    console.log('no', error);
    return []
  }
}

If condition works but the same made inline doesn't work

I have and if statement and works pretty good but i want to made the same in the inline way and doesn't works, here's my code:
const request = require('request');
const rp = require('request-promise');
// Request options for the Facebook Graph leads endpoint; form_id comes from
// the webhook payload in `data`.
let options = {
  url: "https://graph.facebook.com/v2.11/"+ data.entry[0]["changes"][0]["value"]["form_id"] +"/leads",
  json: true,
  qs: { "access_token": token },
};
let firstRequest = rp(options)
  .then( (body) => {
    // NOTE(review): `firstname` (an implicit global) is reassigned on EVERY
    // iteration, so after the loop it reflects only the LAST field — if
    // first_name is not last, the value is clobbered back to "N/D". This is
    // the bug behind the inline version "not working".
    body.data[0]["field_data"].forEach( (mydata, i, arr) => {
      firstname = ( mydata.name === "first_name" ) ? mydata.values[0] : "N/D";
    });
  })
  .then( () => {
    console.log( firstname );
  })
  .catch( (err) => {
    console.log( 'Error: ' + JSON.stringify(err) );
  });
Any idea why this behavior? By the way, i am using nodejs with request-promise.
I think I found my error: in each loop iteration the variable changes its value, and that is the main reason why I always receive "N/D" in the firstname variable.

Categories

Resources