Without a delay, 'update' won't fire - javascript

I have the following code:
myTable()
  .update(data, {
    where: criteria
  })
  .then(delay(100))
  .then((entries) => {
    ...
    ...
The .then(delay(100)) part sets a delay of 100ms.
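For reference, delay here is presumably a small promise-based helper along these lines (a minimal sketch, not the poster's actual code):
// Hypothetical helper: resolves after ms milliseconds, passing through
// whatever value the previous .then() produced.
const delay = (ms) => (value) =>
  new Promise(resolve => setTimeout(() => resolve(value), ms));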
If I don't use that delay, sometimes entries (the resulting updated rows) isn't correct, meaning the fields were not updated. But sometimes it is.
If I'm using the delay, the content of entries is always correct.
Why do I have to set a delay for it to work?
My local MySQL my.cnf file:
[mysqld]
general_log_file = /var/log/mysql.log
general_log = 1
sql_mode = STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION
innodb_file_per_table = 1
innodb_log_file_size = 100M
interactive_timeout = 32000
lock_wait_timeout = 41536000
net_read_timeout = 120
net_write_timeout = 900
wait_timeout = 32000
max_allowed_packet = 1G
innodb_buffer_pool_size = 1G
In terms of the table schema and model: it has a few double columns, a couple of datetime and char columns, one json column, and one enum column. They are defined the same way in the model.

1) Check the isolation level on the database.
2) In general, transactions may be what you need; if an isolation-level issue appears, try selecting a different isolation level for the transaction.
3) Cluster mode may also produce issues like this.

So, the .then(...) block is executed after the update promise is resolved, and it has no influence on the update query itself.
return sequelize.transaction(function (t) {
  // chain all your queries here. make sure you return them.
  return yourModel.update(updates, {
    where: { id: id },
    transaction: t // the transaction belongs in the options object
  }).then(function (entries) {
    // your logic with your entries.
  });
}).then(function (result) {
  // Transaction has been committed
  cb(null, result);
  console.log('transaction committed');
}).catch(function (err) {
  // Transaction has been rolled back
  cb(err, null);
  console.log('Transaction rolled back');
});
And if you are curious about what is happening when you add the .then(delay(100)) statement, use a catch block: when the entries are different from what you expected, it is because the query has already failed to update.
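A minimal sketch of that suggestion, applied to the original chain (hypothetical, not the answerer's exact code):
myTable()
  .update(data, { where: criteria })
  .then((entries) => {
    // work with the updated rows here
  })
  .catch((err) => {
    // If the update itself failed, the failure surfaces here instead of
    // showing up later as rows with stale field values.
    console.error(err);
  });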


Unable to log user state information to transcript

I am using TranscriptLoggerMiddleware and CosmosDB to log my chatbot transcripts. We are trying to capture the user state information (user name, account number, account type, etc.) as top-level attributes in the transcript so that specific customers can easily be queried in the DB (if that information only lives in the individual timestamped attributes of the document, it can't be queried).
Ideally I would just add the user state when I'm building the file, but I can't figure out any way to access it, since the logger is defined in index.js and TranscriptLoggerMiddleware only provides the activity to my function, not the full context. If anyone has a way to get the user state data via TranscriptLoggerMiddleware, let me know; that would solve this issue. Here is the customLogger code. Note that because the function receives both the user query and the bot response, I couldn't get retrieving and resaving the transcript to work, so I'm overwriting the transcript from a local log object. I'm not trying to come up with a new approach here, but if one would solve the overall issue I'd like to hear it.
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
const { CosmosDbPartitionedStorage } = require('botbuilder-azure');
const path = require('path');

/**
 * CustomLogger, takes in an activity and saves it for the duration of the conversation,
 * writing to an emulator compatible transcript file in the transcriptsPath folder.
 */
class CustomLogger {
    /**
     * Log an activity to the log file.
     * @param activity Activity being logged.
     */
    // Set up Cosmos Storage
    constructor(appInsightsClient) {
        this.transcriptStorage = new CosmosDbPartitionedStorage({
            cosmosDbEndpoint: process.env.COSMOS_SERVICE_ENDPOINT,
            authKey: process.env.COSMOS_AUTH_KEY,
            databaseId: process.env.DATABASE,
            containerId: 'bot-transcripts'
        });
        this.conversationLogger = {};
        this.appInsightsClient = appInsightsClient;
        this.msDelay = 250;
    }

    async logActivity(activity) {
        if (!activity) {
            throw new Error('Activity is required.');
        }
        // Log only if this is type message
        if (activity.type === 'message') {
            var logTextDb;
            if (activity.attachments) {
                try {
                    logTextDb = `${activity.from.name}: ${activity.attachments[0].content.text}`;
                } catch (err) {
                    logTextDb = `${activity.from.name}: ${activity.text}`;
                }
            } else {
                logTextDb = `${activity.from.name}: ${activity.text}`;
            }
            if (activity.conversation) {
                var id = activity.conversation.id;
                if (id.indexOf('|') !== -1) {
                    id = activity.conversation.id.replace(/\|.*/, '');
                }
                // Get today's date for datestamp
                var currentDate = new Date();
                var day = currentDate.getDate();
                var month = currentDate.getMonth() + 1;
                var year = currentDate.getFullYear();
                var datestamp = year + '-' + month + '-' + day;
                var fileName = `${datestamp}_${id}`;
                var timestamp = Math.floor(Date.now() / 1);
                // CosmosDB logging (JK)
                if (!(fileName in this.conversationLogger)) {
                    this.conversationLogger[fileName] = {};
                    this.conversationLogger[fileName]['userData'] = {};
                    this.conversationLogger[fileName]['botName'] = process.env.BOTNAME;
                }
                this.conversationLogger[fileName][timestamp] = logTextDb;
                let updateObj = {
                    [fileName]: {
                        ...this.conversationLogger[fileName]
                    }
                };
                // Add delay to ensure messages logged sequentially
                await this.wait(this.msDelay);
                try {
                    let result = await this.transcriptStorage.write(updateObj);
                } catch (err) {
                    console.log(err);
                    this.appInsightsClient.trackTrace({
                        message: `Logger Error ${err.code} - ${path.basename(__filename)}`,
                        severity: 3,
                        properties: { 'botName': process.env.BOTNAME, 'error': err.body }
                    });
                }
            }
        }
    }

    // Note: this is a busy-wait that blocks the event loop for the duration.
    async wait(milliseconds) {
        var start = new Date().getTime();
        for (var i = 0; i < 1e7; i++) {
            if ((new Date().getTime() - start) > milliseconds) {
                break;
            }
        }
    }
}
exports.CustomLogger = CustomLogger;
Not being able to get user state in this function, I decided to try a few other approaches. The most promising was creating a separate "updateTranscript" function to grab the transcript, add user state, and save it back. But I think it was catching the transcript only on the user request, and it was getting overridden again by the local object on the bot response. I added a delay to try to combat this, but it still didn't work. On my very first prompt asking for the customer number, the user state data is stored on the transcript, but at the next activity it is gone and never comes back (even though I can see it is supposedly getting written to the DB). Here is that update function.
const { CosmosDbStorage } = require('botbuilder-azure');
const path = require('path'); // needed for path.basename() in the catch block below

var updateTranscript = async (context, userData, appInsightsClient) => {
    const transcriptStorage = new CosmosDbStorage({
        serviceEndpoint: process.env.COSMOS_SERVICE_ENDPOINT,
        authKey: process.env.COSMOS_AUTH_KEY,
        databaseId: process.env.DATABASE,
        collectionId: 'bot-transcripts',
        partitionKey: process.env.BOTNAME
    });
    var id = context.activity.conversation.id;
    if (id.indexOf('|') !== -1) {
        id = context.activity.conversation.id.replace(/\|.*/, '');
    }
    // Get today's date for datestamp
    var currentDate = new Date();
    var day = currentDate.getDate();
    var month = currentDate.getMonth() + 1;
    var year = currentDate.getFullYear();
    var datestamp = year + '-' + month + '-' + day;
    var filename = `${datestamp}_${id}`;
    var msDelay = 500;
    await new Promise(resolve => setTimeout(resolve, msDelay));
    var transcript = await transcriptStorage.read([filename]);
    transcript[filename]['userData'] = userData;
    try {
        await transcriptStorage.write(transcript);
        console.log('User data added to transcript');
    } catch (err) {
        console.log(err);
        appInsightsClient.trackTrace({
            message: `Log Updater Error ${err.code} - ${path.basename(__filename)}`,
            severity: 3,
            properties: { 'botName': process.env.BOTNAME, 'error': err.body }
        });
    }
    return;
};
module.exports.updateTranscript = updateTranscript;
I realize this approach is a bit of a cluster but I've been unable to find anything better. I know the Microsoft COVID-19 bot has a really nice transcript retrieval function, but I haven't been able to get any input from them on how that was accomplished. That aside, I'm quite happy to continue with this implementation if someone can help me figure out how to get that user state into the transcript without being overwritten or running into concurrency issues.
As to why I can't query an account number even via the substring() function, here's an example of the document's data object. I have no idea which string to check for a substring, in this case 122809, and I don't know what its timestamp key would be. If this is stored at the top level (e.g. userData/accountNumber) I know exactly where to look for the value. For further context, I've shown what I see after the first prompt for the account number, where userData is populated. But it gets overridden on subsequent writes and I can't seem to get it back, even with a delay in my updateTranscript function.
"document": {
"userData": {},
"botName": "AveryCreek_OEM_CSC_Bot_QA",
"1594745997562": "AveryCreek_OEM_CSC_Bot_QA: Hi! I'm the OEM CSC Support Bot! Before we get started, can you please provide me with your 6-digit Vista number? If you don't have one, just type \"Skip\".",
"1594746003973": "You: 122809",
"1594746004241": "AveryCreek_OEM_CSC_Bot_QA: Thank you. What can I help you with today? \r\nYou can say **Menu** for a list of common commands, **Help** for chatbot tips, or choose one of the frequent actions below. \r\n \r\n I'm still being tested, so please use our [Feedback Form](https://forms.office.com/Pages/ResponsePage.aspx?id=lVxS1ga5GkO5Jum1G6Q8xHnUJxcBMMdAqVUeyOmrhgBUNFI3VEhMU1laV1YwMUdFTkhYVzcwWk9DMiQlQCN0PWcu) to let us know how well I'm doing and how I can be improved!",
"1594746011384": "You: what is my account number?",
"1594746011652": "AveryCreek_OEM_CSC_Bot_QA: Here is the informaiton I have stored: \n \n**Account Number:** 122809 \n\n I will forget everything except your account number after the end of this conversation.",
"1594746011920": "AveryCreek_OEM_CSC_Bot_QA: I can clear your information if you don't want me to store it or if you want to reneter it. Would you like me to clear your information now?",
"1594746016034": "You: no",
"1594746016301": "AveryCreek_OEM_CSC_Bot_QA: OK, I won't clear your information. You can ask again at any time."
},
"document": {
"userData": {
"accountNumber": "122809"
},
"botName": "AveryCreek_OEM_CSC_Bot_QA",
"1594746019952": "AveryCreek_OEM_CSC_Bot_QA: Hi! I'm the OEM CSC Support Bot! What can I help you with today? \r\nYou can say **Menu** for a list of common commands, **Help** for chatbot tips, or choose one of the frequent actions below. \r\n \r\n I'm still being tested, so please use our [Feedback Form](https://forms.office.com/Pages/ResponsePage.aspx?id=lVxS1ga5GkO5Jum1G6Q8xHnUJxcBMMdAqVUeyOmrhgBUNFI3VEhMU1laV1YwMUdFTkhYVzcwWk9DMiQlQCN0PWcu) to let us know how well I'm doing and how I can be improved!"
},
You had said you were encountering concurrency issues even though JavaScript is single-threaded. As strange as that sounds, I think you're right on some level. TranscriptLoggerMiddleware does have its own buffer that it uses to store activities throughout the turn and then it tries to log all of them all at once. It could easily have provided a way to get that whole buffer in your own logger function, but instead it just loops through the buffer so that you still only get to log them each individually. Also, it allows logActivity to return a promise but it never awaits it, so each activity will get logged "simultaneously" (it's not really simultaneous but the code will likely jump between function calls before waiting for them to complete). This is a problem for any operation that isn't atomic, because you'll be modifying state without knowing about its latest modifications.
while (transcript.length > 0) {
    try {
        const activity: Activity = transcript.shift();
        // If the implementation of this.logger.logActivity() is asynchronous, we don't
        // await it as to not block processing of activities.
        // Because TranscriptLogger.logActivity() returns void or Promise<void>, we capture
        // the result and see if it is a Promise.
        const logActivityResult = this.logger.logActivity(activity);
        // If this.logger.logActivity() returns a Promise, a catch is added in case there
        // is no innate error handling in the method. This catch prevents
        // UnhandledPromiseRejectionWarnings from being thrown and prints the error to the
        // console.
        if (logActivityResult instanceof Promise) {
            logActivityResult.catch(err => {
                this.transcriptLoggerErrorHandler(err);
            });
        }
    } catch (err) {
        this.transcriptLoggerErrorHandler(err);
    }
}
All in all, I don't think transcript logger middleware is the way to go here. While it may purport to serve your purposes, there are just too many problems with it. I would either write my own middleware or just put the middleware code directly in my bot logic like this:
async onTurn(turnContext) {
    const activity = turnContext.activity;
    await this.logActivity(turnContext, activity);
    turnContext.onSendActivities(async (ctx, activities, next) => {
        for (const activity of activities) {
            await this.logActivity(ctx, activity);
        }
        return await next();
    });
    // Bot code here
    // Save state changes
    await this.userState.saveChanges(turnContext);
}

async logActivity(turnContext, activity) {
    var transcript = await this.transcriptProperty.get(turnContext, []);
    transcript.push(activity);
    await this.transcriptProperty.set(turnContext, transcript);
    console.log('Activities saved: ' + transcript.length);
}
Since your transcript would be stored in your user state, that user state would also have the account number you need and hopefully you'd be able to query for it.
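For completeness, the transcriptProperty used in logActivity above is a state property accessor; a minimal sketch of the wiring, assuming it happens in the bot's constructor (MemoryStorage is a stand-in for whatever storage you actually use):
const { UserState, MemoryStorage } = require('botbuilder');

class TranscriptBot {
    constructor() {
        // Hypothetical wiring; swap MemoryStorage for your real storage provider.
        this.userState = new UserState(new MemoryStorage());
        this.transcriptProperty = this.userState.createProperty('transcript');
    }
}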
Kyle's answer did help me solve the issue, and I think it will be the most reusable piece for anyone experiencing similar issues. The key takeaway is that, if you're using Node.js, you should not be using TranscriptLoggerMiddleware; instead, use Kyle's function in your onTurn handler (repeated here for reference):
// Function provided by Kyle Delaney
async onTurn(turnContext) {
    const activity = turnContext.activity;
    await this.logActivity(turnContext, activity);
    turnContext.onSendActivities(async (ctx, activities, next) => {
        for (const activity of activities) {
            await this.logActivity(ctx, activity);
        }
        return await next();
    });
    // Bot code here
    // Save state changes
    await this.userState.saveChanges(turnContext);
}
You need to note, though, that his logActivity function is just storing the raw activities to the user state using a custom transcriptProperty. As of yet I haven't found a good method to give business/admin users access to this data in a way that is easily readable and searchable, nor to construct some sort of file output to send to a customer requesting a transcript of their conversation. As such, I continued using my CustomLogger instead. Here is how I accomplished that.
First, you must create the transcriptLogger in the constructor. If you create it inside your turn handler, you will lose the cache/buffer and it will only have the latest activity instead of the full history. This may be common sense, but it tripped me up briefly. I do this in the constructor via this.transcriptLogger = new CustomLogger(appInsightsClient);. I also modified my logActivity function to accept the userData (my state object) as a second, optional parameter. I have successfully been able to use that userData object to add the required customer information to the bot transcript. To modify Kyle's function above, you just need to replace this.logActivity with your own function call, in my case this.transcriptLogger.logActivity(context, userData);.
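A sketch of what that modified logActivity might look like inside the CustomLogger above; the userData merge is the one real change, and the key derivation is compressed (illustrative, not the poster's exact code):
// Illustrative only: accept the state object as an optional second parameter
// and merge it into the per-conversation log before each write, so userData
// survives every overwrite of the transcript document.
async logActivity(context, userData = null) {
    const activity = context.activity;
    if (!activity || activity.type !== 'message') return;
    // Same datestamp_id key derivation as the original logger above.
    const id = activity.conversation.id.replace(/\|.*/, '');
    const d = new Date();
    const fileName = `${d.getFullYear()}-${d.getMonth() + 1}-${d.getDate()}_${id}`;
    if (!(fileName in this.conversationLogger)) {
        this.conversationLogger[fileName] = { userData: {}, botName: process.env.BOTNAME };
    }
    if (userData) {
        this.conversationLogger[fileName].userData = userData;
    }
    this.conversationLogger[fileName][Date.now()] = `${activity.from.name}: ${activity.text}`;
    await this.wait(this.msDelay);
    await this.transcriptStorage.write({ [fileName]: { ...this.conversationLogger[fileName] } });
}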
While there are still some other issues with this approach, it does solve the title question of how to get user state data into the transcript.

Implementing reading through streams and inserting through streams in mysql

I have fetched the data from the table tb_project_milestones and want to insert each projectMilestoneRow into a table tb_xyz using streams. I checked the documentation, but couldn't find how to implement it.
Has anyone implemented reading through streams and inserting through streams in MySQL?
let insertProjectMilestones = [];
const getProjectMilestones = executeQueryStream.query('SELECT * FROM tb_project_milestones WHERE project_id = ? ');
getProjectMilestones
    .on('error', function(err) {
        // Handle error, an 'end' event will be emitted after this as well
    })
    .on('result', function(projectMilestoneRow) {
        // Pausing the connection is useful if your processing involves I/O
        connection.pause();
        processRow(projectMilestoneRow, function() {
            _.each(payload.projects, (project_id) => {
                _.each(projectMilestoneRow, (el) => {
                    insertProjectMilestones.push([el.project_milestone_id, el.name, el.prefix, el.short_name, el.description, el.pre_requisites, project_id,
                        el.milestone_template_id, el.generic_milestone_id, el.planned_date, el.actual_date, el.forecast_date,
                        el.planned_date_only, el.forecast_date_only, el.actual_date_only, el.planned_time_only, el.forecast_time_only, el.actual_time_only,
                        el.planned_date_formula, el.actual_date_formula, el.forecast_date_formula, el.planned_date_is_active, el.forecast_date_is_active,
                        el.actual_date_is_active, el.creation_datetime, el.allow_notes, el.forecast_date_allow_notes, el.actual_date_allow_notes,
                        el.planned_date_allow_notes, 0, el.requires_approval]);
                });
            });
            connection.resume();
        });
    })
    .on('end', function() {
        // all rows have been received
    });
EDIT
I used streams in this case because millions of records are fetched from tb_project_milestones, manipulated, accumulated into an array, and then pushed into another table.
Considering that holding that many rows in an array would increase Node's memory usage, I thought of using a stream here.
Is a stream the better choice, or could I just implement a batch insert in the DB using transactions?
You can use a knex stream and async iteration (ES2018 / Node 10) for that:
const knexClient = knex(someMysqlClientSettings);
const dbStream = knexClient("tb_project_milestones").where({ project_id: projectId }).stream();
for await (const row of dbStream) {
    const processedRowObj = process(row);
    await knexClient("tb_xyz").insert(processedRowObj);
}
Wouldn't it be much faster and simpler to perform the single SQL statement:
INSERT INTO tb_xyz (...)
    SELECT ... FROM tb_project_milestones;
That way, the data is not shoveled to the client only to be turned around and shoveled back to the server.
And you could do transformations (expressions in the SELECT) and/or filtering (WHERE in SELECT) at the same time.
MySQL will impose essentially no limits on how big the table can be.
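A sketch of that approach from Node, assuming the mysql2 promise API and guessing at a subset of the column names from the question:
const mysql = require('mysql2/promise');

async function copyMilestones(connection, projectId, newProjectId) {
    // One round trip: the server reads, transforms, and writes the rows itself.
    const [result] = await connection.query(
        `INSERT INTO tb_xyz (project_milestone_id, name, prefix, project_id)
         SELECT project_milestone_id, name, prefix, ?
         FROM tb_project_milestones
         WHERE project_id = ?`,
        [newProjectId, projectId]
    );
    return result.affectedRows;
}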

How to delay a function like this? [duplicate]

Using the Google Geocoder v3, if I try to geocode 20 addresses, I get an OVER_QUERY_LIMIT unless I time them to be ~1 second apart, but then it takes 20 seconds before my markers are all placed.
Is there any other way to do it, other than storing the coordinates in advance?
No, there is not really any other way: if you have many locations and want to display them on a map, the best solution is to:
fetch the latitude+longitude, using the geocoder, when a location is created
store those in your database, alongside the address
and use those stored latitude+longitude when you want to display the map.
This is, of course, considering that you have a lot less creation/modification of locations than you have consultations of locations.
Yes, it means you'll have to do a bit more work when saving the locations, but it also means:
You'll be able to search by geographical coordinates
i.e. "I want a list of points that are near where I'm now"
Displaying the map will be a lot faster
Even with more than 20 locations on it
Oh, and, also (last but not least): this will work ;-)
You will be less likely to hit the limit of X geocoder calls in N seconds.
And you will be less likely to hit the limit of Y geocoder calls per day.
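A minimal sketch of that flow, assuming a geocodeAddress() helper around the geocoder and a hypothetical locations table:
// Hypothetical: geocode once when the location is saved, never at display time.
async function createLocation(db, address) {
    const { lat, lng } = await geocodeAddress(address); // single geocoder call
    await db.query(
        'INSERT INTO locations (address, lat, lng) VALUES (?, ?, ?)',
        [address, lat, lng]
    );
}

// Rendering the map later reads only your own database:
async function getMapPoints(db) {
    return db.query('SELECT address, lat, lng FROM locations');
}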
You actually do not have to wait a full second for each request. I found that if I wait 200 milliseconds between each request I am able to avoid the OVER_QUERY_LIMIT response and the user experience is passable. With this solution you can load 20 items in 4 seconds.
$(items).each(function(i, item){
    setTimeout(function(){
        geoLocate("my address", function(myLatlng){
            ...
        });
    }, 200 * i);
});
Unfortunately this is a restriction of the Google maps service.
I am currently working on an application using the geocoding feature, and I'm saving each unique address on a per-user basis. I generate the address information (city, street, state, etc.) based on the information returned by Google Maps, and then save the lat/long information in the database as well. This prevents you from having to re-geocode things, and gives you nicely formatted addresses.
Another reason you want to do this is because there is a daily limit on the number of addresses that can be geocoded from a particular IP address. You don't want your application to fail for a person for that reason.
I'm facing the same problem trying to geocode 140 addresses.
My workaround was adding usleep(100000) for each loop of the next geocoding request. If the status of the request is OVER_QUERY_LIMIT, the usleep is increased by 50000 and the request is repeated, and so on.
And of course all received data (lat/long) are stored in an XML file so the request isn't run every time the page loads.
EDIT:
Forgot to say that this solution is in pure JS; the only thing you need is a browser that supports promises: https://developer.mozilla.org/it/docs/Web/JavaScript/Reference/Global_Objects/Promise
For those who still need to accomplish this, I've written my own solution that combines promises with timeouts.
Code:
/*
    class: Geolocalizer
    - Handles location triangulation and calculations.
    -- Returns various prototypes to fetch position from strings or coords or dragons or whatever.
*/
var Geolocalizer = function () {
    this.queue = []; // queue handler..
    this.resolved = [];
    this.geolocalizer = new google.maps.Geocoder();
};

Geolocalizer.prototype = {
    /*
        @fn: Localize
        @scope: resolve single or multiple queued requests.
        @params: <array> needles
        @returns: <deferred> object
    */
    Localize: function (needles) {
        var that = this;
        // Enqueue the needles.
        for (var i = 0; i < needles.length; i++) {
            this.queue.push(needles[i]);
        }
        // Return a promise and resolve it after every element has been fetched
        // (either with success or failure), then reset the queue.
        return new Promise(
            function (resolve, reject) {
                that.resolveQueueElements().then(function (resolved) {
                    resolve(resolved);
                    that.queue = [];
                    that.resolved = [];
                });
            }
        );
    },

    /*
        @fn: resolveQueueElements
        @scope: resolve queue elements.
        @returns: <deferred> object (promise)
    */
    resolveQueueElements: function (callback) {
        var that = this;
        return new Promise(
            function (resolve, reject) {
                // Loop the queue and resolve each element.
                // Prevent QUERY_LIMIT by delaying actions by one second.
                (function loopWithDelay(such, queue, i) {
                    console.log("Attempting the resolution of " + queue[i - 1]);
                    setTimeout(function () {
                        such.find(queue[i - 1], function (res) {
                            such.resolved.push(res);
                        });
                        if (--i) {
                            loopWithDelay(such, queue, i);
                        }
                    }, 1000);
                })(that, that.queue, that.queue.length);

                // Check every second if the queue has been cleared.
                var it = setInterval(function () {
                    if (that.queue.length == that.resolved.length) {
                        resolve(that.resolved);
                        clearInterval(it);
                    }
                }, 1000);
            }
        );
    },

    /*
        @fn: find
        @scope: resolve an address from string
        @params: <string> s, <fn> Callback
    */
    find: function (s, callback) {
        this.geolocalizer.geocode({
            "address": s
        }, function (res, status) {
            if (status == google.maps.GeocoderStatus.OK) {
                var r = {
                    originalString: s,
                    lat: res[0].geometry.location.lat(),
                    lng: res[0].geometry.location.lng()
                };
                callback(r);
            } else {
                callback(undefined);
                console.log(status);
                console.log("could not locate " + s);
            }
        });
    }
};
Please note that it's just a part of a bigger library I wrote to handle Google Maps stuff, hence the comments may be confusing.
Usage is quite simple; the approach, however, is slightly different: instead of looping and resolving one address at a time, you pass an array of addresses to the class and it handles the search by itself, returning a promise which, when resolved, yields an array containing all the resolved (and unresolved) addresses.
Example:
var myAmazingGeo = new Geolocalizer();
var locations = ["Italy", "California", "Dragons are thugs...", "China", "Georgia"];
myAmazingGeo.Localize(locations).then(function (res) {
    console.log(res);
});
Console output:
Attempting the resolution of Georgia
Attempting the resolution of China
Attempting the resolution of Dragons are thugs...
Attempting the resolution of California
ZERO_RESULTS
could not locate Dragons are thugs...
Attempting the resolution of Italy
Object returned:
The whole magic happens here:
(function loopWithDelay(such, queue, i) {
    console.log("Attempting the resolution of " + queue[i - 1]);
    setTimeout(function () {
        such.find(queue[i - 1], function (res) {
            such.resolved.push(res);
        });
        if (--i) {
            loopWithDelay(such, queue, i);
        }
    }, 750);
})(that, that.queue, that.queue.length);
Basically, it loops over every item with a delay of 750 milliseconds between each of them, so every 750 milliseconds an address is checked.
I've done some further testing and found that even at 700 milliseconds I was sometimes getting the QUERY_LIMIT error, while at 750 I haven't had any issue at all.
In any case, feel free to lower the 750 above if you feel you are safe handling a shorter delay.
Hope this helps someone in the near future ;)
I have just tested the Google Geocoder and got the same problem as you have. I noticed I only get the OVER_QUERY_LIMIT status once every 12 requests, so I wait for 1 second (that's the minimum delay to wait). It slows down the application, but less than waiting 1 second for every request:
info = getInfos(getLatLng(code)); // In here I call Google API
record(code, info);
generated++;
if (generated % interval == 0) {
    holdOn(delay); // Every x requests, I sleep for 1 second
}
With the basic holdOn method:
private void holdOn(long delay) {
    try {
        Thread.sleep(delay);
    } catch (InterruptedException ex) {
        // ignore
    }
}
Hope it helps
This worked well for me, after intermittent trial and error over the past couple of days. I am using React instant-search-hooks via Algolia, with Next.js and Sanity, for a new jobs site for a large company.
Postal code is a facet for filtering/sorting/query matching that is defined in the Algolia index. In another script file, I map out all of these facets (postal code, city, etc.). Now that I have 100 returned files, they can be mapped out by iterating through a mapped asynchronous import, with the lat/lng coords matched to the corresponding zip codes defining a job posting (there are ~2500 postings but only ~100 zip codes to narrow down the coordinates of).
import * as dotenv from "dotenv";
dotenv.config();
import {
    googleNetwork,
    axiosConfig as googleAxiosConfig
} from "../utils/google-axios";
import JSONData from "../../public/data/postalCode/2022/05/26.json";
import fs from "fs";
import { join } from "path";
import type { GeneratedGeolocData } from "../types/algolia";
import { timezoneHelper } from "../utils/timezone-helper";
import { Unenumerate } from "../types/helpers";

const getGeoCode = (
    record: Unenumerate<typeof JSONData.postalCodes.facetHits>
) =>
    function () {
        return JSONData.postalCodes.facetHits.map(async (data = record, u) => {
            const googleBase = process.env.NEXT_PUBLIC_GOOGLE_MAPS_BASE_PATH ?? "";
            const googleApiKey =
                process.env.NEXT_PUBLIC_TAKEDA_JOBS_GOOGLE_SERVICES ?? "";
            const params: (string | undefined)[][] = [
                ["address", data.value],
                ["key", googleApiKey]
            ];
            const query = params
                .reduce<string[]>((arr, [k, v]) => {
                    if (v) arr.push(`${k}=${encodeURIComponent(v)}`);
                    return arr;
                }, [])
                .join("&");
            return await googleNetwork("GET")
                .get(`${googleBase}geocode/json?${query}`, googleAxiosConfig)
                .then(dat => {
                    const geoloc = dat.data as GeneratedGeolocData;
                    const {
                        [0]: Year,
                        [2]: Month,
                        [4]: Day
                    } = new Date(Date.now())
                        .toISOString()
                        .split(/(T)/)[0]
                        .split(/([-])/g);
                    const localizedTimestamp = timezoneHelper({
                        dateField: new Date(Date.now()),
                        timezone: "America/Chicago"
                    });
                    return setTimeout(
                        () =>
                            fs.appendFileSync(
                                join(
                                    process.cwd(),
                                    // u is the map index, giving each facet hit its own file
                                    `public/data/geoloc/${Year}/${Month}/${Day}-${u}.json`
                                ),
                                JSON.stringify(
                                    {
                                        generated: localizedTimestamp,
                                        _geoloc: {
                                            postalCode: data.value,
                                            geolocation: geoloc
                                        }
                                    },
                                    null,
                                    2
                                )
                            ),
                        1000
                    );
                });
        });
    };

// The returned function maps over every facet hit itself, so a single
// invocation covers the whole list.
getGeoCode(JSONData.postalCodes.facetHits[0])();
It took a lot less time than anticipated -- under 4 seconds for 100 unique results to generate
Context on the Unenumerate type -- Unenumerate strips the internal repeating unit within an array:
type Unenumerate<T> = T extends Array<infer U> ? U : T;

How to collect and return aggregated data as an Array from a table in Protractor?

I am trying to aggregate a list of dates from a data table, written in Angular, in a Protractor test. I'm doing the aggregation from a PageObject class that is called in the Protractor test. I know that my code successfully grabs the text I want, but when I try to console.log the returned array, I get an empty array. I'm still new to JavaScript/TypeScript, Angular, and Protractor, and this may be a result of my newness to the asynchronous nature of this development environment.
Code is as follows,
The PageObject SpecMapper class with method:
import { browser, element, by } from 'protractor';

export class SpecMapperPage {
    getImportDateSubmittedColumnValues() {
        let stringDatesArray: Array<string> = [];
        // currently this css selector gets rows in both import and export tables
        // TODO: get better identifiers on the import and export tables and columns
        element.all(by.css('md-card-content tbody tr.ng-tns-c3-0')).each(function(row, index) {
            // check outerHTML for presence of "unclickable", the rows in the export table
            row.getAttribute('outerHTML').then(function(outerHTML: string) {
                // specifically look for rows without unclickable
                if (outerHTML.indexOf("unclickable") < 0) {
                    // grab the columns and get the third column, where the date submitted field is
                    // TODO: get better identifiers on the import and export columns
                    row.all(by.css("td.ng-tns-c3-0")).get(2).getText().then(function(text: string) {
                        stringDatesArray.push(text);
                    });
                }
            });
        });
        return stringDatesArray;
    }
}
I know it's not the prettiest code, but it's temporary place holder while my devs make me better attributes/classes/ids to grab my variables. Key things to note is that I create a string Array to hold the values I consider relevant to be returned when the method is finished.
I used WebStorm and put breakpoints at the stringDatesArray.push(text) and return stringDatesArray lines. The first breakpoint shows that the text variable holds the string value I'm looking for and that it is successfully pushed; in debug mode I can see the values accumulating in stringDatesArray. The second breakpoint, though, at the array return, shows that the local variable stringDatesArray is empty. This is echoed in the following code when I try to console.log the array:
The Protractor run Spec class with my test in it:
import { SpecMapperPage } from "./app.po";
import {browser, ExpectedConditions} from "protractor";
describe('spec mapper app', () => {
let page: SpecMapperPage;
let PROJECT_ID: string = '57';
let PROJECT_NAME: string = 'DO NOT DELETE - AUTOMATED TESTING PROJECT';
beforeEach(() => {
page = new SpecMapperPage();
});
describe('import/export page', () => {
it('verify sort order is desc', () => {
browser.waitForAngularEnabled(false);
// Step 1: Launch Map Data from Dashboard
page.navigateTo(PROJECT_ID);
browser.driver.sleep(5000).then(() => {
// Verify: Mapping Screen displays
// Verify on the specmapper page by checking the breadcrumbs
expect(page.getProjectNameBreadCrumbText()).toContain(PROJECT_NAME);
expect(page.getProjectMapperBreadCrumbText()).toEqual("MAPPER");
// Verify: Verify Latest Submitted Date is displayed at the top
// Verify: Verify the Submitted Date column is in descending order
console.log(page.getImportDateSubmittedColumnValues());
});
});
});
});
I acknowledge that this code is not actively using the niceties of Protractor, there's a known issue with our app that will not be addressed for a couple of months, so I am accessing the driver directly 99% of the time.
You'll note that I call the method I posted above as the very last line in the browser.driver.sleep().then() clause, page.getImportDateSubmittedColumnValues().
I thought maybe I was running into asynchronous issues with the call being done before the page was loaded, thus I put it in the .then() clause; but learned with debugging that was not the case. This code should work once I have the array returning properly though.
The console.log is printing an empty [] array. That is synonymous with the results I saw when debugging the above method directly in the PageObject SpecMapper class. I wish to do some verification that the strings are returned properly formatted, and then I'm going to do some date order comparisons. I feel like returning an array of data retrieved from a page is not an unusual request, but I can't seem to find a good way to Google what I'm trying to do.
My apologies if I am hitting some very obvious roadblock, I'm still learning the nuances of Typescript/Angular/Protractor. Thank you for your consideration!
My attempt to use collated promises seemed promising, but fell through in execution.
My Updated PageObject SpecMapper Class
import { browser, element, by, protractor } from 'protractor';

export class SpecMapperPage {
    getImportDateSubmittedColumnValues() {
        let promisesArray = [];
        let stringDatesArray: Array<string> = [];
        // This CSS selector grabs the import table and any cells with the label .created-date
        element.all(by.css('.import-component .created-date')).each(function(cell, index) {
            // cell.getText().then(function(text: string) {
            //     console.log(text);
            // });
            promisesArray.push(cell.getText());
        });
        return protractor.promise.all(promisesArray).then(function(results) {
            for (let result of results) {
                stringDatesArray.push(result);
            }
            return stringDatesArray;
        });
    }
}
My Updated Spec test Using The Updated SpecMapper PO Class
import { SpecMapperPage } from "./specMapper.po";
import {browser, ExpectedConditions} from "protractor";
describe('spec mapper app', () => {
let page: SpecMapperPage;
let PROJECT_ID: string = '57';
let PROJECT_NAME: string = 'DO NOT DELETE - AUTOMATED TESTING PROJECT';
beforeEach(() => {
page = new SpecMapperPage();
});
describe('import/export page', () => {
it('TC2963: ImportComponentGrid_ShouldDefaultSortBySubmittedDateInDescendingOrder_WhenPageIsLoaded', () => {
browser.waitForAngularEnabled(false);
// Step 1: Launch Map Data from Dashboard
page.navigateTo(PROJECT_ID);
browser.driver.sleep(5000).then(() => {
// Verify: Mapping Screen displays
// Verify on the specmapper page by checking the breadcrumbs
expect(page.getProjectNameBreadCrumbText()).toContain(PROJECT_NAME);
expect(page.getProjectMapperBreadCrumbText()).toEqual("MAPPER");
// Verify: Verify Latest Submitted Date is displayed at the top
// Verify: Verify the Submitted Date column is in descending order
page.getImportDateSubmittedColumnValues().then(function(results) {
for(let value of results) {
console.log("a value is: " + value);
}
});
});
});
});
});
When I breakpoint in the PO class at the return stringDatesArray; line, I have the following variables in my differing scopes. Note that the promisesArray has 3 objects, but the results array going into the protractor.promise.all() block has 0 objects. I'm not sure what my disconnect is. :/
I think I'm running into a scopes problem that I am having issues understanding. You'll note the commented out promise resolution on the getText(), and this was my POC proving that I am getting the string values I'm expecting, so I'm not sure why it's not working in the Promise Array structure presented as a solution below.
Only other related question that I could find has to do with grabbing a particular row of a table, not specifically aggregating the data to be returned for test verification in Protractor. You can find it here if you're interested.
As you've alluded to, your issue is caused by console.log printing the value of the variable before it has actually been populated.
I've taken a snippet from this answer which should allow you to solve it: Is there a way to resolve multiple promises with Protractor?
var x = element(by.id('x')).sendKeys('xxx');
var y = element(by.id('y')).sendKeys('yyy');
var z = element(by.id('z')).sendKeys('zzz');
myFun(x, y, z);
// isEnabled() is contained in the expect() function, so it'll wait for
// myFun() promise to be fulfilled
expect(element(by.id('myButton')).isEnabled()).toBe(true);

// in a common function library
function myFun(Xel, Yel, Zel) {
    return protractor.promise.all([Xel, Yel, Zel]).then(function(results) {
        var xText = results[0];
        var yText = results[1];
        var zText = results[2];
    });
}
So in your code it would be something like
getImportDateSubmittedColumnValues() {
    let promisesArray = [];
    let stringDatesArray: Array<string> = [];
    // currently this css selector gets rows in both import and export tables
    // TODO: get better identifiers on the import and export tables and columns
    element.all(by.css('md-card-content tbody tr.ng-tns-c3-0')).each(function(row, index) {
        // check outerHTML for presence of "unclickable", the rows in the export table
        row.getAttribute('outerHTML').then(function(outerHTML: string) {
            // specifically look for rows without unclickable
            if (outerHTML.indexOf("unclickable") < 0) {
                // grab the columns and get the third column, where the date submitted field is
                // TODO: get better identifiers on the import and export columns
                promisesArray.push(row.all(by.css("td.ng-tns-c3-0")).get(2).getText());
            }
        });
    });
    return protractor.promise.all(promisesArray).then(function(results) {
        // In here you'll have access to the results
    });
}
There are quite a few different ways you could do it. You could process the data in that method at the end, or I think you could return the array within that "then" and access it like so:
page.getImportDateSubmittedColumnValues().then((res) => {
    // And then here you will have access to the array
});
I don't do the Typescript, but if you're just looking to get an array of locator texts back from your method, something resembling this should work...
getImportDateSubmittedColumnValues() {
    let stringDatesArray: Array<string> = [];
    // Note: the outer promise must be returned so callers can .then() on it
    return $$('.import-component .created-date').each((cell, index) => {
        cell.getText().then(text => {
            stringDatesArray.push(text);
        });
    }).then(() => {
        return stringDatesArray;
    });
}
The answer ended up related to the answer posted on How do I return the response from an asynchronous call?
The final PageObject class function:
import { browser, element, by, protractor } from 'protractor';

export class SpecMapperPage {
    getImportDateSubmittedColumnValues() {
        let stringDatesArray: Array<string> = [];
        let promisesArray = [];
        // return a promise promising that stringDatesArray will have an array of dates
        return new Promise((resolve, reject) => {
            // This CSS selector grabs the import table and any cells with the label .created-date
            element.all(by.css('.import-component .created-date')).map((cell) => {
                // Gather all the getText's we want the text from
                promisesArray.push(cell.getText());
            }).then(() => {
                protractor.promise.all(promisesArray).then((results) => {
                    // Resolve the getText's values and shove into array we want to return
                    for (let result of results) {
                        stringDatesArray.push(result);
                    }
                }).then(() => {
                    // Set the filled array as the resolution to the returned promise
                    resolve(stringDatesArray);
                });
            });
        });
    }
}
The final test class:
import { SpecMapperPage } from "./specMapper.po";
import {browser, ExpectedConditions} from "protractor";
describe('spec mapper app', () => {
let page: SpecMapperPage;
let PROJECT_ID: string = '57';
let PROJECT_NAME: string = 'DO NOT DELETE - AUTOMATED TESTING PROJECT';
beforeEach(() => {
page = new SpecMapperPage();
});
describe('import/export page', () => {
it('TC2963: ImportComponentGrid_ShouldDefaultSortBySubmittedDateInDescendingOrder_WhenPageIsLoaded', () => {
browser.waitForAngularEnabled(false);
// Step 1: Launch Map Data from Dashboard
page.navigateTo(PROJECT_ID);
browser.driver.sleep(5000).then(() => {
// Verify: Mapping Screen displays
// Verify on the specmapper page by checking the breadcrumbs
expect(page.getProjectNameBreadCrumbText()).toContain(PROJECT_NAME);
expect(page.getProjectMapperBreadCrumbText()).toEqual("MAPPER");
// Verify: Verify Latest Submitted Date is displayed at the top
// Verify: Verify the Submitted Date column is in descending order
page.getImportDateSubmittedColumnValues().then((results) => {
console.log(results);
});
});
});
});
});
The biggest thing was waiting for the different calls to finish running and then waiting for stringDatesArray to be filled. That required the Promise (resolve, reject) structure I found in the SO post noted above. I ended up using lambda (() => {}) function calls instead of declared (function() {}) ones for a cleaner look; the method works the same either way. None of the other proposed solutions successfully propagated the array of strings back to my test. I'm working in TypeScript, with Protractor.
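As a footnote, Protractor's ElementArrayFinder.map resolves the promises its callback returns, so the same method can likely be collapsed to a sketch like this (same selector as above):
getImportDateSubmittedColumnValues() {
    // map() waits for each getText() and resolves to a plain array of strings
    return element.all(by.css('.import-component .created-date'))
        .map(cell => cell.getText());
}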

Why is my node-oracledb execute Promise steadily increasing in the amount of time it takes to resolve?

I'm writing an ETL tool that interacts with an Oracle database, using node-oracledb 1.10 and RxJS to handle all of the various asynchronous data streams that I'm tossing around. I'm running into an issue where the longer my application runs, the longer a call to node-oracledb's .execute() takes, and the running time seems to increase linearly. Hopefully you can spot the mistake in the code below and correct it.
First let me show how I'm running Oracle queries. I created my own .execute() function that acts as a wrapper around node-oracledb's .execute().
import oracledb from 'oracledb';

var oraConnPool;

export function execute(sql, bind, opts) {
    if (!oraConnPool) {
        createOraPool();
    }
    return oraConnPool
        .then(pool => pool.getConnection())
        .then(conn => conn.execute(sql, bind, opts));
}

function createOraPool() {
    let oraPool = oracledb.createPool(config.database.oracle);
    oraConnPool = oraPool;
    return oraPool;
}
And my config.database.oracle (without the credentials):
{
    "poolTimeout": 60,
    "poolMin": 10,
    "poolMax": 25,
    "queueRequests": true,
    "queueTimeout": 600000,
    "_enableStats": true
}
Below is an example of me invoking my .execute() function. As you can see, there's a lot happening here, so let me try to annotate it a bit for clarity. rnd is used to create a unique id for console.time(), so I can keep track of the time it takes for the .execute() Promise to resolve. Let me know if there's a flaw in this time measuring technique. The bind input variable passed to the SELECT statement is a csv string of ssid identifiers, and a list of matches will be returned. This allows me to batch process records instead of creating a single query for each individual row, hopefully saving some execution time. The first .then() makes every key in the resulting array of objects lowercase. The second .then(), obviously, ends the console.time() tracking.
const rnd = Math.random() * 100;
console.time(rnd);
return execute(`
    SELECT
        ssid_input.ssid AS ssid,
        students.id AS student_id,
        students.student_number AS student_number
    FROM (
        SELECT REGEXP_SUBSTR(
            :ssids,
            '[^,]+', 1, level) AS ssid
        FROM dual
        CONNECT BY REGEXP_SUBSTR(
            :ssids,
            '[^,]+', 1, level) IS NOT NULL
    ) ssid_input
    LEFT JOIN students ON students.state_studentnumber = ssid_input.ssid`, {
    ssids: {
        val: ssids.join(','),
        dir: orawrap.BIND_IN,
        type: orawrap.STRING
    }
}, {
    outFormat: orawrap.OBJECT,
    maxRows: ssids.length
})
    .then(results => {
        return results.rows.map(result => {
            let newObj = {};
            Object.keys(result).forEach(key => {
                newObj[key.toLowerCase()] = result[key];
            });
            return newObj;
        });
    })
    .then(result => {
        console.timeEnd(rnd);
        return result;
    });
Below is the console.time() output, which increases steadily until it hits the 60000 ms queueTimeout limit.
97.24179652744425: 12226.930ms
38.14057213652584: 14583.518ms
46.19793585774834: 16024.785ms
16.12600313565251: 17820.694ms
87.73720584788988: 20809.461ms
54.711100085462604: 22652.638ms
42.474404414891744: 24037.868ms
49.09845121453702: 26521.596ms
87.70258724764568: 29461.480ms
1.0731996619882223: 31210.875ms
90.33430329792829: 32259.944ms
37.4829457960367: 34076.824ms
9.731832830291932: 35292.281ms
/home/nathanjones/Projects/test-forge/node_modules/@reactivex/rxjs/dist/cjs/util/subscribeToResult.js:41
root_1.root.setTimeout(function () { throw err; });
^
Error: NJS-040: connection request timeout
I've tried to include most of the relevant code, please let me know if you need more context.
EDIT:
I added a console.log(pool._logStats()) statement every time the .execute() function is called. I've included the output of the last time it was printed before the NJS-040 error:
Pool statistics:
...total up time (milliseconds): 62823
...total connection requests: 1794
...total requests enqueued: 1769
...total requests dequeued: 0
...total requests failed: 0
...total request timeouts: 0
...max queue length: 1769
...sum of time in queue (milliseconds): 0
...min time in queue (milliseconds): 0
...max time in queue (milliseconds): 0
...avg time in queue (milliseconds): 0
...pool connections in use: 25
...pool connections open: 25
Related pool attributes:
...queueRequests: true
...queueTimeout (milliseconds): 60000
...poolMin: 10
...poolMax: 25
...poolIncrement: 1
...poolTimeout (seconds): 60
...stmtCacheSize: 30
Related environment variables:
...process.env.UV_THREADPOOL_SIZE: undefined
undefined
(This is a duplicate of node-oracledb Issue 474).
You need to make sure you close connections.
You probably need to increase UV_THREADPOOL_SIZE, see the node-oracledb documentation on Connections and Number of Threads.
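Putting those two points together, a sketch of the wrapper with the connection released (names from the question; whether release() returns a promise when called without a callback is per the node-oracledb docs for 1.x):
export function execute(sql, bind, opts) {
    if (!oraConnPool) {
        createOraPool();
    }
    return oraConnPool
        .then(pool => pool.getConnection())
        .then(conn =>
            conn.execute(sql, bind, opts)
                // Release the connection back to the pool on success or failure;
                // otherwise the pool drains ("pool connections in use: 25") and
                // later requests sit in the queue until they hit queueTimeout,
                // which is the NJS-040 error above.
                .then(
                    result => conn.release().then(() => result),
                    err => conn.release().then(() => { throw err; })
                )
        );
}
And since UV_THREADPOOL_SIZE must be set before Node's thread pool starts, it is typically exported in the shell (e.g. UV_THREADPOOL_SIZE=32 node app.js) rather than set inside the script.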
