mysql node.js query error(undefined)? how to fix it - javascript

The problem is that when the second request is executed, it returns undefined, i.e. for some reason, it does not see the result of the second request. It should work like this: We make the first request, and if there are less than two lines, then we execute the second request. What could be the error? how to fix it
// Question code: for each object name run a first query; if it matches fewer
// than two rows, run a second query to look up that object's server IP.
// NOTE(review): `err` is never checked in either callback — a failed query
// leaves `rows`/`rows2` undefined, which is the symptom being asked about.
// NOTE(review): `i`, `count` and `cb` are used but not declared here —
// presumably defined in an enclosing scope; verify against the full file.
let arr = [name1, name2 /* ... */];
let ipObject = { Objects: [] };
arr.forEach(function(elem, index) {
// First query: count matching rows for this object name.
connection.query("select 1 from i.goa where object_name = ?", elem, (err, rows) => {
// console.log (rows.length);
if (rows.length < 2) {
// console.log (elem);
// Second (nested) query: resolve the server IP for this object.
connection.query(
"SELECT ip_adress FROM i.gs where server_kod=(SELECT server_kod FROM i.gol where object_kod =(SELECT object_kod FROM i.goa where object_name=?))",
elem,
(err, rows2) => {
console.log(elem);
console.log(rows2);
// Guard against an undefined result (e.g. when the query errored).
if (undefined !== rows2 && rows2.length > 0) {
// if(rows2.length>0 ){
ipObject.Objects.push({ objectName: elem, serverIp: rows2[0].ip_adress });
}
i++;
// Fire the completion callback once every name has been processed.
if (i > count) {
cb(JSON.stringify(ipObject));
console.log(JSON.stringify(ipObject));
// fs.writeFileSync('e.json',JSON.stringify(ipObject),'utf8');
}
},
);
} else if (rows.length >= 2) {
// Two or more matches: record an error marker for this name.
// NOTE(review): `cb` fires here on the FIRST such name, likely before the
// other (asynchronous) queries finish — part of the reported bug.
ipObject.Objects.push({ objectName: elem, serverIp: "ошибка" });
cb(JSON.stringify(ipObject));
}
});
});

You're probably bumping into asynchronicity issues here.
Refactoring things to use async/await and Promise.all(), maybe this is closer to what you want:
// Promisified wrapper around connection.query(): resolves with the result
// rows, rejects with the driver error.
function queryP(connection, query, params) {
  return new Promise((resolve, reject) => {
    connection.query(query, params, (error, rows) => {
      if (error) {
        reject(error);
      } else {
        resolve(rows);
      }
    });
  });
}
// Looks up one object name: two or more matches in i.goa yields the
// "ошибка" marker; otherwise the server IP is resolved ("???" when no
// IP row exists).
async function queryForName(connection, objectName) {
  const matches = await queryP(connection, "select 1 from i.goa where object_name = ?", objectName);
  if (matches.length >= 2) {
    return { objectName, serverIp: "ошибка" };
  }
  const ipRows = await queryP(connection, "SELECT ip_adress FROM i.gs where server_kod=(SELECT server_kod FROM i.gol where object_kod =(SELECT object_kod FROM i.goa where object_name=?))", objectName);
  return ipRows.length > 0
    ? { objectName, serverIp: ipRows[0].ip_adress }
    : { objectName, serverIp: "???" };
}
// Resolves every name in parallel and wraps the results in { Objects: [...] }.
async function queryForNames(connection, names) {
  const lookups = names.map((name) => queryForName(connection, name));
  return { Objects: await Promise.all(lookups) };
}
// could as well be `const resultObject = await queryForNames(...)` if you're calling this from an async function.
// Kick off the lookups and log the aggregated result once they all settle.
queryForNames(connection, [name1, name2]).then((resultObject) => {
console.log(resultObject);
});

Related

Async and await in nodejs (Web Api)

I am trying to use promise in my loop but the loop doesn't wait but prints an empty array value which is declared above. Here is my code:
node:
// Question code: tries to fill `total_json` from per-element queries and
// then log it after Promise.all().
// NOTE(review): COMMON takes a plain callback, so each async map callback
// resolves immediately WITHOUT waiting for COMMON to finish — that is why
// `total_json` is still empty at the console.log below.
let total_json = [];
await Promise.all(results.map(async (element) => {
if (element.start_date <= d && element.end_date >= d) {
// WARNING: string-concatenated SQL — injection risk if cateID is untrusted.
let query = "select * from tb_voucher_category where id = " + "'" + element.cateID + "'";
body.query = query;
COMMON(body, (err, results) => {
if (err) { fatal_error.data = err; return res.json(fatal_error); }
if (results) {
if (results.length > 0) {
// NOTE(review): `cate` is assigned without declaration — implicit global.
cate = results[0].cateName;
} else {
cate = "";
}
let json = {
id: element.id,
catename: cate,
title: element.title,
description: element.description,
expired_date: element.expired_date,
expired_time: element.expired_time,
vStatus: element.vStatus
}
total_json.push(json);
}
});
}
}));
///i need the json printed here but it becomes null. how can i use async or promise here?
// Runs before any COMMON callback has fired, so the array is still empty.
console.log(total_json)
I have seen lots of Stack Overflow questions about this, I just couldn't implement it in my code.
The problem is in your COMMON function. In the example below you can easily figure out what the issue is: it prints an empty array because the callback function has not run yet.
// Answer demo 1: reproduces the bug. COMMON defers its callback with
// setTimeout, so the synchronous console.log at the bottom runs first and
// prints an empty array.
let total_json = [];
let results = [
{ catName: "cat", id: "1", title: "title", description: "desc", cateID: "2" }
];
results.map(element => {
let body = { query: "" };
let query =
"select * from tb_voucher_category where id = " +
"'" +
element.cateID +
"'";
body.query = query;
// Callback-style call: nothing here runs until the timer fires.
COMMON(body, (err, results) => {
if (err) {
fatal_error.data = err;
return res.json(fatal_error);
}
if (results) {
if (results.length > 0) {
// NOTE(review): `cate` is an implicit global (never declared).
cate = results[0].cateName;
} else {
cate = "";
}
let json = {
id: element.id,
catename: cate,
title: element.title,
description: element.description
};
total_json.push(json);
}
});
});
// Stand-in for the real database helper: always succeeds asynchronously.
function COMMON(data, func) {
setTimeout(() => {
func(null, [{ catName: "cat" }]);
});
}
// Prints [] — executes before any COMMON callback.
console.log(total_json);
So I just added Promise.all to resolve this issue, set element as a parameter of the COMMON function, and returned it from the callback as newElement:
// Answer demo 2: the fix. Each COMMON call is wrapped in a Promise pushed
// into `total_json`; Promise.all at the bottom waits for all of them before
// logging the collected results.
let total_json = [];
let results = [
{ catName: "cat", id: "1", title: "title", description: "desc", cateID: "2" },
{
catName: "cat2",
id: "10",
title: "title2",
description: "desc2",
cateID: "20"
}
];
results.forEach(element => {
let body = { query: "" };
let query =
"select * from tb_voucher_category where id = " +
"'" +
element.cateID +
"'";
body.query = query;
// Wrap the callback API in a Promise so completion can be awaited.
total_json.push(
new Promise((resolve, reject) => {
COMMON(body, element, (err, results, newElement) => {
if (err) {
// NOTE(review): rejects are never wired up here — on error this
// returns without settling the promise, so Promise.all would hang.
fatal_error.data = err;
return res.json(fatal_error);
}
if (results) {
if (results.length > 0) {
// NOTE(review): `cate` is an implicit global (never declared).
cate = results[0].cateName;
} else {
cate = "";
}
let json = {
id: element.id,
catename: cate,
title: element.title,
description: element.description
};
resolve(json);
}
});
})
);
});
// Stand-in database helper: echoes `element` back as the third argument.
function COMMON(data, element, func) {
setTimeout(() => {
func(null, [{ cateName: "cat" }], element);
});
}
// Resolves only after every wrapped COMMON call has completed.
Promise.all(total_json).then(res=>{
console.log(res)
})
Assuming you are using Express.js (inferred from res.json()), I suggest to isolate the database calls from the response.
/**
 * Maps a category query result set plus its source element to the response
 * shape used by the API.
 *
 * Bug fixes vs. the original: `element` was a free (undefined) variable —
 * it is now an explicit, optional parameter — and `cate` leaked as an
 * implicit global.
 *
 * @param {Array<{cateName: string}>} results - rows from the category query.
 * @param {{id: *, title: string, description: string}} [element] - source row.
 * @returns {{id: *, catename: string, title: string, description: string}}
 */
function resultsToJSON(results, element = {}) {
  const cate = results.length > 0 ? results[0].cateName : "";
  return {
    id: element.id,
    catename: cate,
    title: element.title,
    description: element.description
  };
}
// returns a single promise that resolves to a list of json results,
// or rejects as an error.
// NOTE: relies on `results`, `d`, `body` and `COMMON` from the enclosing
// scope (the request handler) — see the question code above.
function getData() {
  // One promise per in-range element; each settles with that element's
  // database rows or rejects on a database error.
  const inRange = results.filter(element => element.start_date <= d && element.end_date >= d);
  const promises = inRange.map(element => {
    const query = "select * from tb_voucher_category where id = " + "'" + element.cateID + "'";
    body.query = query;
    return new Promise((resolve, reject) => {
      COMMON(body, (err, rows) => {
        if (err) { reject(err); }
        else { resolve(rows); }
      });
    });
  });
  return Promise.all(promises);
}
Then, in your Express request handler, you can handle the error (and send the error response), or get the total_jsons array from resolving the Promise.
// mark function as `async` if you are going to use `await` in the body.
app.get("/yourAPI", async (req, res) => {
  // given that `getData()` returns a promise that resolves to a list of results,
  // `await getData()` waits for the promise to be resolved, and returns the list of results.
  try {
    const total_json = await getData();
    // Bug fix: the original computed `total_json` but never sent it, so the
    // request would hang forever on success. Return it to the client.
    res.json(total_json);
  } catch (error) {
    // handle a failure in *any* of your database requests
    fatal_error.data = error;
    res.json(fatal_error);
  }
});

How to add async/await to my functions in nodejs?

I tried to make the code asynchronous but I couldn't. What i need to do?
This is my functions:
1.
// Express route: kicks off a crawl for the posted URL and depth.
// NOTE(review): `count`, `webUrl` and `depth` are assigned without
// declaration — implicit globals shared across requests; two concurrent
// requests will clobber each other's crawl state.
router.post('/urls', (req, response) => {
count = 2;
webUrl = req.body.url;
depth = req.body.depth;
letstart(webUrl, response);
});
// Fetches the start URL, harvests its links via getLinks(), then either
// crawls the collected links or reports completion.
// NOTE(review): `ifFinishAll` and `linkslinst` are module-level globals
// mutated across functions — presumably declared elsewhere; verify.
function letstart(urlLink, response) {
request(urlLink, function (error, res, body) {
console.error('error:', error); // Print the error if one occurred
console.log('statusCode:', res && res.statusCode); // Print the response status code if a response was received
//console.log('body:', body); // Print the HTML for the Google homepage.
if (!error) {
getLinks(body);
if (!ifFinishAll) {
GetinsideLinks(linkslinst, response);
}
else {
console.log("Finish crawl");
}
}
else {
console.log("sorry");
// NOTE(review): this return value is discarded — it returns from the
// request callback, not from letstart.
return "sorry";
}
});
}
// Crawls one level deeper: requests every collected link that is not on the
// start domain, feeding each response body back into getLinks().
// NOTE(review): the inner request() calls are asynchronous, but
// `ifFinishAll = true` and the `return resArray` below run immediately —
// the response can therefore be sent before any crawl completes. This is
// the core asynchronicity problem of the question.
function GetinsideLinks(list, response) {
count++;
if (count <= depth) {
for (let i = 0; i < list.length; i++) {
const link = list[i].toString();
var includeUrl = link.includes(webUrl);
if (!includeUrl) {
request(link, function (error, res, body) {
console.error('error2:', error); // Print the error if one occurred
console.log('statusCode2:', res && res.statusCode); // Print the response status code if a response was received
if (!error) {
getLinks(body);
}
else {
console.log("sorry2");
}
});
}
}
ifFinishAll = true;
}
else {
// Depth exhausted: send whatever has been accumulated so far.
console.log("finish");
ifFinishAll = true;
response.status(200).send(resArray);
};
return resArray;
}
// Parses an HTML body with cheerio, extracts new anchor links (skipping
// tel:/mailto:, duplicates, and empties), resolves relative hrefs against
// the start URL, dedupes by link text, and appends the batch to the global
// `resArray` tagged with the current depth.
// NOTE(review): `countLinks` is assigned without declaration — implicit
// global. `linkslinst`, `textslist`, `resArray`, `webUrl` and `count` are
// globals shared with the other crawl functions.
function getLinks(body) {
const html = body;
const $ = cheerio.load(html);
const linkObjects = $('a');
const links = [];
linkObjects.each((index, element) => {
countLinks = linkObjects.length;
var strHref = $(element).attr('href');
var strText = $(element).text();
var existUrl = linkslinst.includes(strHref);
var existText = textslist.includes(strText);
// NOTE(review): `strText !== ''` and `strText !== ""` are the same check,
// duplicated (likewise for strHref).
if (strText !== '' && strText !== "" && strText !== null && strHref !== '' && strHref !== "" && strHref !== null && strHref !== undefined && !existUrl && !existText) {
var tel = strHref.startsWith("tel");
var mail = strHref.startsWith("mailto");
var linkInStart = isUrlValid(strHref);
if (!tel && !mail) {
if (linkInStart) {
// Absolute link: record as-is.
links.push({
text: $(element).text(), // get the text
href: $(element).attr('href'), // get the href attribute
});
linkslinst.push($(element).attr('href'));
textslist.push($(element).text());
}
else {
// Relative link: prefix with the start URL.
links.push({
text: $(element).text(), // get the text
href: webUrl.toString() + $(element).attr('href'), // get the href attribute
});
linkslinst.push(webUrl.toString() + $(element).attr('href'))
textslist.push($(element).text());
}
}
}
});
// Deduplicate the batch by link text using a Map as a seen-set.
const result = [];
const map = new Map();
for (const item of links) {
if (!map.has(item.text)) {
map.set(item.text, true); // set any value to Map
result.push({
text: item.text,
href: item.href
});
}
}
if (result.length > 0) {
resArray.push({ list: result, depth: count - 1 });
}
console.log('res', resArray);
return resArray;
}
I want to finally return/respond with the "resArray". I tried adding async and await to functions 1 and 2 but it didn't work. Maybe I need to add async/await to all of the functions? How can I fix that?
You can achieve your goal by using async-await.
An async function is a function declared with the async keyword, and the await keyword is permitted within them. The async and await keywords enable asynchronous, promise-based behavior to be written in a cleaner style, avoiding the need to explicitly configure promise chains.
Basic example:
// Returns a promise that fulfills immediately with `true`.
function resolveImmediately() {
  return new Promise((resolve) => resolve(true));
}
// Returns a promise that fulfills with the string 'resolved' after 2 seconds.
function resolveAfter2Seconds() {
return new Promise(resolve => {
setTimeout(() => {
resolve('resolved');
}, 2000);
});
}
// Demonstrates sequential awaits: the second promise is only started after
// the first one has fulfilled and been inspected.
async function asyncCall() {
console.log('calling');
const result = await resolveImmediately();
console.log(result);
if(result) {
const anotherResult = await resolveAfter2Seconds();
console.log(anotherResult);
}
}
// Kick off the demo (logs: calling, true, then 'resolved' after ~2 s).
asyncCall();
Note: Your code is too long to debug. As a result, to make you understand about the approach (what & how to do), i have added a simple example into my answer.

MongoDB Find queries slow while updating/inserting schema

I'm doing a big loop once a day - which updating existing documents in the database (and also inserting new documents).
this loop get executed in a separate server ( prevents from the main server to be slow ), but the main problem is that all the find queries on the Data base (while the loop is executed) are very slow (the loop slows it down significantly).
This is a very big issue in my website ( this loop must be executed once a day ) and iv'e been trying to find a solution online - but i couldn't manage to find something.
Is there any way to prevent the find queries from being so slow while inserting/updating the database??
// Bulk product sync for one company: parses raw rows into products, marks
// existing non-manual products deleted, then upserts each parsed row via
// async.each, finally stamping the company's lastUpdate.
// NOTE(review): `new Promise(async function ...)` is the async-executor
// anti-pattern — a throw inside the executor becomes an unhandled rejection
// instead of rejecting this promise.
// NOTE(review): the per-product `product.save()` calls in fetchProducts and
// the per-row updates in mergeData issue one write each — thousands of
// individual writes per run, a likely cause of the reported find-query
// slowdown; a bulkWrite would reduce the load. TODO confirm with profiling.
uploadProductsManually = async (name, products, map, valuesMap) => {
return new Promise(async function (resolve, reject) {
const company = await Company.findOne({ name }).exec();
if (!company) return reject(new errors.NotFound("Company not found"));
const rows = products;
const parsedRows = [];
// Maps a raw column header to its canonical product field name, first
// directly, then via the category option lists in `map`.
const findCorrectKey = (key) => {
const correctKey = key.trim();
if (productFields[correctKey]) return productFields[correctKey];
const category = map.find((item) => {
return item.options.some((option) => {
return option.trim().toLowerCase() === correctKey.toLowerCase();
});
});
const categoryName = category && category.name;
return productFields[categoryName];
};
// Stable identity for a product: productId, else certificateId, else a
// hash of size+color concatenated.
const hashProductValues = (product) => {
let valueToHash;
if (product.productId) {
valueToHash = product.productId;
} else if (product.certificateId) {
valueToHash = product.certificateId;
} else {
valueToHash = JSON.stringify(
product.size + product.color
);
}
return base64encode(valueToHash);
};
// Normalize each raw row into a product object; keep only rows that end
// up with a productId.
rows.forEach(function (row, i) {
var newProduct = {};
for (var key in row) {
var val = row[key];
if (val) {
let normalizedKey = findCorrectKey(key);
if (normalizedKey) {
newProduct[normalizedKey] = val;
}
// NOTE(review): this runs even when normalizedKey is undefined,
// writing to newProduct[undefined] — probably unintended.
let normalizedValue = normalizeValue(normalizedKey, val,valuesMap);
newProduct[normalizedKey] = normalizedValue;
}
}
newProduct.channels = [];
if (newProduct.productId) {
parsedRows.push(newProduct);
}
});
fetchProducts();
// Loads all existing products for the company, indexes them by identity
// hash, and soft-deletes non-manual ones before merging.
function fetchProducts() {
Product.find({ company: company._id }).exec(function (err, products) {
if (err) console.log(err);
var map = {};
if (products) {
products.forEach(function (product) {
const productIdentifier = hashProductValues(product);
map[productIdentifier] = product;
if (product.productStatus == "manual") {
// product.isAvailable = false;
// product.save();
} else {
// One write per product — executed for every non-manual product.
product.status = "deleted";
product.save();
}
});
}
mergeData(map);
});
}
// Updates products that already exist and creates the rest; resolves with
// counters when async.each drains.
async function mergeData(map) {
let created = 0;
let updated = 0;
let manual = 0;
async.each(
parsedRows,
function (row, callback) {
const productIdentifier = hashProductValues(row);
let product = map[productIdentifier];
if (product) {
map[productIdentifier] = undefined;
// BUG(review): `id` and `updatedProduct` are not defined anywhere in
// this function — presumably product._id and row were intended;
// verify. As written this throws a ReferenceError at runtime.
Product.findByIdAndUpdate(id, { $set: updatedProduct }, function (
err,
updatedProd
) {
if (err) {
// errors.push(productIdentifier);
console.log("err is:", err);
}
updated++;
callback();
});
} else {
row = new Product(row);
row.save(function (err) {
if (err) {
// errors.push(productIdentifier);
console.log(err);
}
created++;
callback();
});
}
},
(err) => {
if (err) return reject(err);
Company.findByIdAndUpdate(
company._id,
{ lastUpdate: new Date() },
function (err, comp) {
if (err) console.log(err);
}
);
// NOTE(review): `\m` in the template string is just "m" — `\n` was
// probably intended before "manual".
console.log(
`Created: ${created}\nUpdated: ${updated} \manual: ${manual}`
);
// NOTE(review): `errors` here is the imported errors module (used for
// NotFound above), not an array of failures — probably unintended.
resolve({
created,
updated,
manual,
errors,
});
}
);
}
});
};

Updating many(100k+) documents in the most efficient way MongoDB

I have a function that runs periodically, that updates the item.price of some Documents in my Prices Collection. The Price Collection has 100k+ items. The function looks like this:
//Just a helper function for multiple GET requests with request.
// Fires a GET for every URL concurrently and calls `cb` exactly once with a
// map of url -> { err, response, body } after the last response arrives.
// Bug fix: `handler` was assigned without a declaration, leaking an implicit
// global (and throwing in strict mode / ES modules).
// NOTE: if `urls` is empty, `cb` is never called (original behavior kept);
// `response.request.uri.href` will throw if the request errors with no
// response object — callers should be aware.
let _request = (urls, cb) => {
  const results = {};
  let remaining = urls.length;
  const handler = (err, response, body) => {
    const url = response.request.uri.href;
    results[url] = { err, response, body };
    if (--remaining === 0) {
      cb(results);
    }
  };
  for (const url of urls) {
    request(url, handler);
  }
};
// function to update the prices in our Prices collection.
// Question code (v1): wipes the whole collection, refetches every price
// feed, and re-inserts — leaving the collection empty mid-update, which is
// the problem being asked about.
const update = (cb) => {
Price.remove({}, (err, remove) => {
if (err) {
return logger.error(`Error removing items...`);
}
logger.info(`Removed all items... Beginning to update.`);
_request(urls, (responses) => {
let url, response, gameid;
for (url in responses) {
// NOTE(review): `id` is assigned without declaration (implicit global)
// even though `gameid` was declared above and never used.
id = url.split('/')[5].split('?')[0];
response = responses[url];
if (response.err) {
// NOTE(review): `err` here is the outer remove-callback's err (null at
// this point) — `response.err` was presumably intended.
logger.error(`Error in request to ${url}: ${err}`);
return;
}
if (response.body) {
logger.info(`Request to ${url} successful.`)
let jsonResult = {};
try {
jsonResult = JSON.parse(response.body);
} catch (e) {
logger.error(`Could not parse.`);
}
logger.info(`Response body for ${id} is ${Object.keys(jsonResult).length}.`);
let allItemsArray = Object.keys(jsonResult).map((key, index) => {
return {
itemid: id,
hash_name: key,
price: jsonResult[key]
}
});
Price.insertMany(allItemsArray).then(docs => {
logger.info(`Saved docs for ${id}`)
}, (e) => {
logger.error(`Error saving docs.`);
});
}
}
// NOTE(review): cb fires before the insertMany promises settle.
if (cb && typeof cb == 'function') {
cb();
}
})
});
}
As you can see, to avoid iterating through 100k+ Documents, and updating each and every one of them separately, I delete them all at the beginning, and just call the API that gives me these Items with prices, and use InsertMany to Insert all of them into my Prices Collection.
This updating process will happen every 30 minutes.
But I just now realised, what if some user wants to check the Prices and my Prices Collection is currently empty because it's in the middle of updating itself?
The Question
So do I have to iterate through all of them in order to not delete it? (Remember, there are MANY documents to be updated every 30 mins.) Or is there another solution?
Here's a picture of how my Prices Collection looks (there are 100k docs like these, I just want to update the price property):
Update:
I have re-written my update function a bit and now it looks like this:
// Question code (v2): same flow as v1 but replaces insertMany with an
// unordered bulk upsert per game. The author reports this is much slower —
// upserting by market_hash_name will scan unless that field is indexed;
// TODO confirm an index exists.
const update = (cb = null) => {
Price.remove({}, (err, remove) => {
if (err) {
return logger.error(`Error removing items...`);
}
logger.info(`Removed all items... Beginning to update.`);
_request(urls, (responses) => {
let url, response, gameid;
for (url in responses) {
gameid = url.split('/')[5].split('?')[0];
response = responses[url];
if (response.err) {
// NOTE(review): `err` is the outer remove-callback's err, not the
// request error — `response.err` was presumably intended.
logger.error(`Error in request to ${url}: ${err}`);
return;
}
if (response.body) {
logger.info(`Request to ${url} successful.`)
let jsonResult = {};
try {
jsonResult = JSON.parse(response.body);
} catch (e) {
logger.error(`Could not parse.`);
}
logger.info(`Response body for ${gameid} is ${Object.keys(jsonResult).length}.`);
let allItemsArray = Object.keys(jsonResult).map((key, index) => {
return {
game_id: gameid,
market_hash_name: key,
price: jsonResult[key]
}
});
// Queue one upsert per item and execute as a single unordered bulk op.
let bulk = Price.collection.initializeUnorderedBulkOp();
allItemsArray.forEach(item => {
bulk.find({market_hash_name: item.market_hash_name})
.upsert().updateOne(item);
});
bulk.execute((err, bulkers) => {
if (err) {
// BUG(review): `e` is not in scope here (the catch variable above has
// ended) — this throws a ReferenceError when err occurs; use `err`.
return logger.error(`Error bulking: ${e}`);
}
logger.info(`Updated Items for ${gameid}`)
});
// Price.insertMany(allItemsArray).then(docs => {
// logger.info(`Saved docs for ${gameid}`)
// }, (e) => {
// logger.error(`Error saving docs.`);
// });
}
}
// NOTE(review): cb fires before any bulk.execute completes.
if (cb && typeof cb == 'function') {
cb();
}
})
});
}
Notice the bulk variable now (Thanks #Rahul) — but now the collection takes ages to update. My processor is burning up and it literally takes 3+ minutes to update 60k+ documents. I honestly feel like the previous method, even though it deletes all of them and then reinserts them, was also about 10x faster.
Anyone?
From my experience (updating millions of mongo docs on a hourly basis), here's a realistic approach to very large bulk updates:
do all your API calls separately and write results in as bson into a file
invoke mongoimport and import that bson file into a new empty collection prices_new. Javascript, let alone high-level OO wrappers, are just too slow for that
rename prices_new -> prices dropTarget=true (this will be atomic hence no downtime)
Schematically, it would look like this in JS
// Schematic import pipeline: fetch everything, dump BSON to a file, then
// mongoimport into prices_new and atomically rename it over prices.
const fname = '/tmp/data.bson';
// Bug fix: the original `[...]` is not valid JavaScript (spread with no
// operand); use a placeholder array literal instead.
const apiUrls = [/* ... */];
async function doRequest(url) {
  // perform a request and return an array of records
}
const responses = await Promise.all(apiUrls.map(doRequest));
// if the data too big to fit in memory, use streams instead of this:
// Bug fix: the original line carried an unbalanced closing parenthesis.
const data = flatMap(responses, BSON.serialize).join('\n');
await fs.writeFile(fname, data);
await child_process.exec(`mongoimport --collection prices_new --drop ${fname}`);
await db.prices_new.renameCollection('prices', true);
There's no need to clear the database and do a fresh insert. You can use the bulkWrite() method for this or use the updateMany() method to do the updates.
You can refactor the existing code to
/**
 * Fetches all price URLs and upserts the results with bulkWrite batches of
 * up to 1000 operations — no need to clear the collection first.
 *
 * Bug fixes vs. the original:
 *  - `responses` is an object keyed by URL (as produced by `_request`), so
 *    `responses.forEach(...)` would throw; iterate `Object.keys(responses)`.
 *  - the 1000-op flush was checked once per URL, so a batch could jump past
 *    exactly 1000 and never flush; check after every pushed operation.
 *  - `cb` was invoked synchronously before any response arrived; it now
 *    runs after all responses have been processed.
 */
const update = (cb) => {
  _request(urls, responses => {
    let bulkUpdateOps = [];
    Object.keys(responses).forEach(url => {
      const response = responses[url];
      const gameid = url.split('/')[5].split('?')[0];
      if (response.err) {
        logger.error(`Error in request to ${url}: ${response.err}`);
        return;
      }
      if (response.body) {
        logger.info(`Request to ${url} successful.`)
        let jsonResult = {};
        try {
          jsonResult = JSON.parse(response.body);
        } catch (e) {
          logger.error(`Could not parse.`);
        }
        Object.keys(jsonResult).forEach(key => {
          bulkUpdateOps.push({
            "updateOne": {
              "filter": { market_hash_name: key },
              "update": { "$set": {
                game_id: gameid,
                price: jsonResult[key]
              } },
              "upsert": true
            }
          });
          // Flush as soon as a full batch of 1000 ops is queued.
          if (bulkUpdateOps.length === 1000) {
            Price.bulkWrite(bulkUpdateOps).then(result => {
              logger.info(`Updated Items`)
            }).catch(e => logger.error(`Error bulking: ${e}`));
            bulkUpdateOps = [];
          }
        });
      }
    });
    // Flush the final partial batch.
    if (bulkUpdateOps.length > 0) {
      Price.bulkWrite(bulkUpdateOps).then(result => {
        logger.info(`Updated Items`)
      }).catch(e => logger.error(`Error bulking: ${e}`));
    }
    // NOTE: bulkWrite promises may still be in flight here; await them if
    // the caller needs a completion guarantee.
    if (cb && typeof cb == 'function') {
      cb();
    }
  });
}
I have not tested anything but you can try this, might be helpful. I am using bluebird library for concurrency.
// Promisified single GET: resolves with the response body, rejects on error.
let _request = (url) => {
  return new Promise((resolve, reject) => {
    request(url, (err, response, body) => {
      if (err) {
        // Bug fix: the original fell through and called resolve() after
        // reject(); return so the promise is settled exactly once.
        return reject(err);
      }
      resolve(body);
    });
  });
};
// Shapes a raw response into the { query, body } pair that bulkUpsert expects.
const formatRespose = async (response) => {
  // do stuff
  const formatted = {
    query: {}, // itemid: id,
    body: {}
  };
  return formatted;
}
// Queues every item into one unordered bulk operation and executes it,
// resolving with the bulk result or rejecting with the driver error.
const bulkUpsert = (allItemsArray) => {
  const bulk = Price.collection.initializeUnorderedBulkOp();
  return new Promise((resolve, reject) => {
    for (const item of allItemsArray) {
      bulk.find(item.query).upsert().updateOne(item.body);
    }
    bulk.execute((err, bulkers) => {
      if (err) {
        reject(err);
      } else {
        resolve(bulkers);
      }
    });
  });
}
/**
 * Fetches each URL in the chunk, formats the responses, and bulk-upserts
 * them into the Prices collection.
 *
 * Bug fix: the original used `await` inside a non-async `map` callback — a
 * syntax error — and never awaited the per-URL work. The requests in one
 * chunk now run in parallel via Promise.all before the bulk upsert.
 */
const getAndUpdateData = async (urls) => {
  const allItemsArray = await Promise.all(urls.map(async (url) => {
    const requestData = await _request(url);
    return formatRespose(requestData); // resolves to { query: {}, body: {} }
  }));
  return bulkUpsert(allItemsArray);
};
/**
 * Splits `urls` into chunks of 100 and drains them with limited concurrency
 * via Bluebird.map.
 *
 * Bug fix: the original mapper used `await` inside a plain (non-async)
 * function — a syntax error; the pending promise can simply be returned.
 * NOTE: getAndUpdateData() is invoked eagerly while building `chunkUrls`,
 * so all chunks start immediately regardless of the concurrency option; to
 * actually honor it, store thunks and invoke them inside the mapper.
 */
function update() {
  // split urls into as per your need 100/1000
  const chunk = 100;
  const chunkUrls = [];
  for (let i = 0; i < urls.length; i += chunk) {
    chunkUrls.push(getAndUpdateData(urls.slice(i, i + chunk)));
  }
  Bluebird.map(chunkUrls, function (pending) {
    return pending;
  }, {
    concurrency: 1 // depends on concurrent request change 1 = 100 request get and insert in db at time
  }).then(function () {
    console.log("done");
  }).catch(function () {
    console.log("error");
  });
}

Returning empty response using ES6 promise in node.js

how can I call a query using ES6 native promises in node.js. Below is the code.
// Question code: for each row of table1, fires a second query wrapped in a
// promise and pushes the enriched row into `arr`.
// NOTE(review): console.log(b) at the bottom runs before any inner query
// settles — hence the empty response being asked about.
let arr= [];
conn.query('select * from table1', (err, b) => {
for (let i = 0; i < b.length; i++) {
console.log(b[i]["id"])
let promiseGetData = new Promise((resolve, reject) => {
conn.query('select * from table2 where id = ?', [b[i]["id"]], (err, ce) => {
if (err) {
// NOTE(review): rejecting with a plain object — an Error is preferable.
const response = {
statusCode: 500,
body: JSON.stringify({
message: err
}),
}
reject(response);
} else {
// NOTE(review): when ce is empty neither resolve nor reject is
// called, so this promise never settles.
if (ce.length != 0) {
resolve(ce);
}
}
});
});
// Each promise is consumed individually; nothing awaits the whole set.
promiseGetData .then((data) => {
b[i]['data'] = data;
arr.push(b[i])
}).catch((err) => {
console.log(err);
});
}
// Runs synchronously, before any inner query has completed.
console.log(b)
})
I see an empty response when i do console.log(b). I dont know if I have used a promise in correct way, I think for first query I should execute in the promise as well. Any help is very much appreciated
Wrapping an async callback-based function into a promise is called Promisifying.
You can of course use a library for that, but basically what it does is :
// Generic promisification of conn.query(): forwards any arguments and
// settles with the callback's result or error. A synchronous throw from
// conn.query itself is also converted into a rejection.
const queryAsPromise = function( ...args ) {
  return new Promise( function( resolve, reject ) {
    try {
      conn.query(...args, function( error, result ) {
        if ( error ) {
          reject( error );
        } else {
          resolve( result );
        }
      } );
    } catch( error ) {
      reject( error );
    }
  } );
  // Bug fix: the original ended with `} );` — an unbalanced closing
  // parenthesis that made the whole snippet a syntax error.
};
By doing this one time you will keep your code DRY and you can always use that promise for making queries now :
// Usage: fetch table1, then attach each row's table2 data in parallel, and
// log once everything has settled.
// Bug fix: the original was missing the closing `}` of the first .then
// callback — a syntax error.
queryAsPromise('select * from table1')
  .then( result => {
    return Promise.all(
      result.map( b => {
        return queryAsPromise('select * from table2 where id = ?', b["id"])
          .then( data => b["data"] = data )
      } )
    );
  } )
  .catch( err => res.send(500) )
  .then( console.log )
You got an empty response from console.log(b) because the promises from querying the database haven't finished yet. You have to wait for all of them to finish before you can get the full result.
Sample:
// Answer sample: collect one promise per row into promiseArr and use
// Promise.all to wait for all of them before assembling and logging `arr`.
let arr = [];
conn.query('select * from table1', (err, b) => {
var promiseArr = [];
for (let i = 0; i < b.length; i++) {
let promiseGetData = new Promise((resolve, reject) => {
conn.query('select * from table2 where id = ?', [b[i]["id"]], (err, ce) => {
if (err) {
const response = {
statusCode: 500,
body: JSON.stringify({
message: err
}),
}
reject(response);
} else {
// NOTE(review): if ce is empty this promise never settles, so
// Promise.all below would hang — resolve unconditionally to avoid it.
if (ce.length != 0) {
resolve(ce);
}
}
});
});
promiseArr.push(promiseGetData);
}
// Wait for every row's lookup before building the combined result.
Promise.all(promiseArr).then((resultArr) => {
//resultArr is all the resolved value returned from the promise in promiseArr
for (let i = 0; i < resultArr.length; i++) {
b[i]['data'] = resultArr[i];
arr.push(b[i]);
}
}).then(() => {
console.log(arr);
}).catch((err) => {
//if any promise throw/reject with error, it will go here directly
console.log(err);
});
})
Edit:
Ref: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise

Categories

Resources