store agent SessionsClient of dialogflow api - javascript

I'm integrating Dialogflow in my chat, but I'm having a problem: I can't figure out how I can store the session with the agent.
async startSession({projectId, google_app_credencials}) {
process.env.GOOGLE_APPLICATION_CREDENTIALS = google_app_credencials;
const sessionId = uuid.v4();
const sessionClient = new dialogflow.SessionsClient();
const sessionPath = await sessionClient.projectAgentSessionPath(projectId, sessionId)
await sessionClient.initialize();
return {
sessionId: sessionId,
sessionClient: sessionClient,
sessionPath: sessionPath,
};
}
async sendMessageAndGetResponse(message, {sessionPath, sessionClient}) {
const request = {
session: sessionPath,
queryInput: {
text: {
text: message,
languageCode: 'pt-BR',
},
},
};
const responses = await sessionClient.detectIntent(request);
const result = responses[0].queryResult;
return {
response: result.fulfillmentText,
fields: result.parameters.fields,
}
}
I need the return of startSession in every call of sendMessageAndGetResponse, and I need to store it on my Node.js server, but my attempts to store the SessionsClient in Redis failed. Now I want to know if there is a way to re-establish the connection with the agent using just the SessionPath in future calls.
//attempt to save in redis
// NOTE(review): the `await` below assumes this snippet runs inside an async
// function — confirm the enclosing context.
let dialogflowBot = {
projectId: dialogflow.project_name,
google_app_credencials: DialogflowHelper.getCredentialsPath(dialogflow)
}
dialogflowBot.bot = await DialogflowHelper.startSession(dialogflowBot);
// JSON.stringify throws here: the gRPC SessionsClient object contains
// circular references, so the whole bot object cannot be serialized.
redisClient.set(filaChat.id_registro, JSON.stringify(dialogflowBot));
//error: Converting circular structure to JSON
//Can't save the sessionClient
How can I save the SessionClient to call it again later, or save just the SessionPath to re-establish the connection again?

I solved this by just saving the SessionId and passing that same id in future calls instead of generating a new one.
async reestablishSession({projectId, google_app_credentials, sessionId}) {
process.env.GOOGLE_APPLICATION_CREDENTIALS = google_app_credentials;
const sessionClient = new dialogflow.SessionsClient();
const sessionPath = await sessionClient.projectAgentSessionPath(projectId, sessionId)
await sessionClient.initialize();
return {
sessionId: sessionId,
sessionClient: sessionClient,
sessionPath: sessionPath,
};
}

Related

How to get Cognito Identity Pool ID for a new user

After importing the AWS module I go ahead and declare the cognitoidentityserviceprovider variable:
// AWS / Cognito configuration — module-level constants.
const AWS = require("aws-sdk");
const AWS_REGION = "us-east-1";
const USER_POOL_ID = 'us-east-1_4RQvUuPkX';
const AWS_COGNITO_CLIENT_ID = 'l703om838lkem323m04tiparls';
const AWS_COGNITO_IDENTITY_POOL_ID = 'us-east-1:112e122-bdd5-1234-983b-0afff8de2b3f';

AWS.config.update({
  region: AWS_REGION,
});

// Admin-level Cognito client used by create_user / confirm_user below.
const cognitoidentityserviceprovider = new AWS.CognitoIdentityServiceProvider({
  apiVersion: "2016-04-19",
  region: AWS_REGION,
});
After the initial configuration is set I am ready to define the create_user function that I can use to create a new Cognito user supplying this functions with the user's email and password:
/**
 * Creates a Cognito user via the admin flow, delivering the temporary
 * password by email and marking the email address as already verified.
 * @param {string} email - used as the Cognito username.
 * @param {string} password - set as the temporary password.
 * @returns {Promise<Object>} the AdminCreateUser API response.
 */
async function create_user(email, password) {
  const params = {
    UserPoolId: USER_POOL_ID,
    Username: email,
    DesiredDeliveryMediums: ["EMAIL"],
    TemporaryPassword: password,
    UserAttributes: [
      { Name: "email", Value: email },
      { Name: "email_verified", Value: "true" },
    ],
  };
  return await cognitoidentityserviceprovider.adminCreateUser(params).promise();
}
Next, I define another function confirm_user which I am going to use to confirm the new user by setting the user's password:
/**
 * Confirms a freshly created user by setting a permanent password,
 * so the user never has to go through the sign-up confirmation step.
 * @param {string} sub_id - the user's Cognito username (sub).
 * @param {string} password - the permanent password to set.
 * @returns {Promise<Object>} the AdminSetUserPassword API response.
 */
async function confirm_user(sub_id, password) {
  return await cognitoidentityserviceprovider
    .adminSetUserPassword({
      Password: password,
      UserPoolId: USER_POOL_ID,
      Username: sub_id,
      Permanent: true,
    })
    .promise();
}
With both create_user and confirm_user functions defined I can now create a new Cognito user and confirm it on spot without a need for the user to confirm the sign-up process:
/**
 * Creates a Cognito user and immediately confirms it with a permanent password.
 * @param {string} email
 * @param {string} password
 */
async function main(email, password) {
  const user_data = await create_user(email, password);
  // NOTE(review): assumes the "sub" attribute is always first in the
  // Attributes array — TODO confirm ordering against the API response.
  const sub_id = user_data.User.Attributes[0].Value;
  await confirm_user(sub_id, password);
}

// BUG FIX: the email literal used '#' where '@' belongs (mangled in the paste).
const EMAIL = 'foo@bar.com';
const PASSWORD = 'MY_PASSWORD';

// BUG FIX: the original left the promise floating with no rejection handler;
// report failures explicitly instead of an unhandled rejection.
main(EMAIL, PASSWORD).catch((err) => console.error(err));
Since I want to get the user's Identity Pool ID (as identityID variable here) I need to define a third function that I name as authenticate_user(email, password):
const AmazonCognitoIdentity = require('amazon-cognito-identity-js');

/**
 * Signs a user in with email/password and returns the federated
 * Identity Pool IdentityId for that user.
 * @param {string} email
 * @param {string} password
 * @returns {Promise<string>} the user's IdentityId.
 * @throws when authentication or credential resolution fails.
 */
async function authenticate_user(email, password) {
  const authenticationData = {
    Username: email,
    Password: password,
  };
  const auth_details = new AmazonCognitoIdentity.AuthenticationDetails(authenticationData);
  const pool = new AmazonCognitoIdentity.CognitoUserPool({
    UserPoolId: USER_POOL_ID,
    ClientId: AWS_COGNITO_CLIENT_ID,
  });
  const cognito_user = new AmazonCognitoIdentity.CognitoUser({
    Username: email,
    Pool: pool,
  });

  // BUG FIX: the original resolved onFailure with the error message, which
  // left jwt_token undefined and made the later credentials call fail
  // obscurely. Reject instead so callers see the real authentication error.
  const jwt_token = await new Promise((resolve, reject) => {
    cognito_user.authenticateUser(auth_details, {
      onSuccess: (result) => resolve(result.getIdToken().getJwtToken()),
      onFailure: (err) => reject(err instanceof Error ? err : new Error(JSON.stringify(err))),
    });
  });

  const logins = {
    [`cognito-idp.${AWS_REGION}.amazonaws.com/${USER_POOL_ID}`]: jwt_token,
  };
  const creds = new AWS.CognitoIdentityCredentials({
    IdentityPoolId: AWS_COGNITO_IDENTITY_POOL_ID,
    Logins: logins,
  });

  // BUG FIX: the original ignored `err` in the get() callback, so a failed
  // credential fetch crashed while reading creds.data.
  // NOTE(review): creds.identityId may be the canonical property; keeping
  // creds.data.IdentityId as the original used — confirm against the SDK docs.
  return await new Promise((resolve, reject) => {
    creds.get((err) => {
      if (err) return reject(err);
      resolve(creds.data.IdentityId);
    });
  });
}
Apparently it is a very long and complex way of getting the user's identityID. Is there a simpler way to get the user federated Identity Pool ID? Aside from the complexity, we need to know the user password to get the user's Identity Pool ID. Is there a way to get it without knowing the user's password?

Reusing SQL server Database Connections With Azure Functions Using Javascript

I cannot find clear information on how to manage SQL server database connections from an Azure function written in Javascript.
I am using a connection pool code -
// Shared mssql connection pool; connecting starts at module load so later
// queries only have to await `poolConnect`.
const pool = new sql.ConnectionPool(config);
const poolConnect = pool.connect();
// Surface pool-level errors (e.g. dropped connections) instead of crashing.
pool.on('error', err => {
// ... error handler
})
and I am using the poolConnect object from the function which is executing the query
// BUG FIX: the original used `await` inside a non-async function expression,
// which is a SyntaxError. The function is now async, and the query result is
// returned instead of being discarded.
export const selectQuery = async function () {
  // NOTE(review): mssqlDBPoolConnect is presumably the shared pool-connect
  // promise (`poolConnect` above) — confirm the intended binding.
  const connectionPool = await mssqlDBPoolConnect;
  const request = connectionPool.request();
  return await request.query('select query');
};
So how can I use the same connection pool across all Azure functions?
Create two folders named config and toolkit under your root path. Put your db.js in the config folder, and in the toolkit folder create a SQL helper file named sqltools.js that exports the helper functions.
This lets you use the same connection pool by calling sqltools in each function's code, which helps you avoid repeating the same code in every function.
Try use the db.js code below:
const sql = require('mssql')
const config = {
user: 'yourusername',
password: 'yourpassword',
server: 'yoursqlserver.database.windows.net', // You can use 'localhost\\instance' to connect to named instance. Do not use TCP.
database: 'yourdb',
"options": {
"encrypt": true,
"enableArithAbort": true
}
}
const poolPromise = new sql.ConnectionPool(config)
.connect()
.then(pool => {
console.log('Connected to MSSQL')
return pool
})
.catch(err => console.log('Database Connection Failed! Bad Config: ', err))
module.exports = {
sql, poolPromise
}
The sqltools.js class:
const { poolPromise } = require('../config/db')

// Shared SQL helpers; every Azure Function reuses the single pool from db.js.
module.exports.sqltools = {
  // Runs an arbitrary SQL string; resolves with the result set, or null on error.
  ExecSqlQuery: async function (arg) {
    const pool = await poolPromise
    //SELECT *FROM SYSOBJECTS WHERE xtype = \'U\'
    let result = null;
    try {
      result = await pool.request().query(arg)
    } catch (error) {
      console.log(error.message);
    }
    return result;
  },
  // Placeholder for stored-procedure execution.
  ExecProce: function (arg2, arg3, arg4) {
    console.log(arg2, arg3, arg4);
  }
}
Here is my HttpTrigger1 index.js code, call ExecSqlQuery to exec sqlstrings:
const { sqltools } = require('../toolkit/sqltools');
module.exports = async function (context, req) {
context.log('JavaScript HTTP trigger function processed a request.');
var result=null;
try {
// call ExecSqlQuery func
result = await sqltools.ExecSqlQuery('SELECT *FROM SYSOBJECTS WHERE xtype = \'U\'');
} catch (error) {
console.log(error.message);
}
const responseMessage ="Func 1 Result : TableName= " + result.recordset[0].name;
context.res = {
// status: 200, /* Defaults to 200 */
body: responseMessage
};
}

Using google oauth2 node library, can't get new access token with refresh token

With the google oauth2 library, I can successfully authenticate a user on their first pass through, get their refresh token and first access token. Until the token expires, everything works as expected.
However, when the access token expires, I need to get a new access token and store these tokens in my data store using the existing refresh token. I am aware the documentation states tokens should re-fetch themselves when they expire, but as I am creating a new client for each call (to ensure tokens are not re-used between users), I think the client gets torn down before a token gets chance to refresh itself.
Inspecting what the library does calling the actual google api, I should be able to get new access tokens by calling the client.refreshAccessToken() method, the response from this call gives me the invalid_grant Bad Request error. I have compared the actual api request this method makes to the one on google oauth2 playground and the two calls are identical - although their call for refreshing their token works and mine does not.
Attached is my code as it currently stands. Please send help — I don't have any hair left to pull out!
const { google } = require('googleapis')
const scopes = [
'https://www.googleapis.com/auth/spreadsheets.readonly',
'https://www.googleapis.com/auth/userinfo.email',
'https://www.googleapis.com/auth/drive.readonly'
]
module.exports = (env, mongo) => {
const getBaseClient = () => {
const { OAUTH_CLIENT_ID, OAUTH_CLIENT_SECRET, OAUTH_CALLBACK_URL } = env.credentials
return new google.auth.OAuth2(
OAUTH_CLIENT_ID, OAUTH_CLIENT_SECRET, OAUTH_CALLBACK_URL
)
}
const getNewAccessTokens = async (authId, refreshToken) => {
const { tokens } = await getBaseClient().getToken(refreshToken)
await mongo.setAccessTokensForAuthUser(authId, { ...tokens, refresh_token: refreshToken })
return tokens
}
const getAuthedClient = async (authId) => {
let tokens = await mongo.getAccessTokensForAuthUser(authId)
if (!tokens.access_token) {
tokens = await getNewAccessTokens(authId, tokens.refresh_token)
}
const client = getBaseClient()
client.setCredentials(tokens)
if (client.isTokenExpiring()) {
const { credentials } = await client.refreshAccessToken()
tokens = { ...credentials, refresh_token: tokens.refreshToken }
await mongo.setAccessTokensForAuthUser(authId, tokens)
client.setCredentials(tokens)
}
return client
}
const generateAuthUrl = (userId) => {
return getBaseClient().generateAuthUrl({
access_type: 'offline',
scope: scopes,
state: `userId=${userId}`
})
}
const getUserInfo = async (authId) => {
const auth = await getAuthedClient(authId)
return google.oauth2({ version: 'v2', auth }).userinfo.get({})
}
const listSheets = async (authId) => {
const auth = await getAuthedClient(authId)
let nextPageToken = null
let results = []
do {
const { data } = await google
.drive({ version: 'v3', auth })
.files.list({
q: 'mimeType = \'application/vnd.google-apps.spreadsheet\'',
includeItemsFromAllDrives: true,
supportsAllDrives: true,
corpora: 'user',
orderBy: 'name',
pageToken: nextPageToken
})
nextPageToken = data.nextPageToken
results = results.concat(data.files)
} while (nextPageToken)
return results
}
return {
generateAuthUrl,
getUserInfo,
listSheets
}
}
I solved my own problem.
I was conflating access_codes with refresh_tokens, and believed the code you receive from the auth url was the refresh_token, storing it, and attempting to reuse it to get more access_tokens. This is wrong. Don't do this.
You get the access_code from the authentication url, and the first time you use that with the client.getToken(code) method, you receive the refresh_token and access_token.
I've attached updated and working code should anyone wish to use it.
I should also mention that I added prompt: 'consent' to the auth url so that you always receive an access_code you can use to get a refresh_token when someone re-authenticates (as if you don't, then a call to client.getToken() does not return a refresh_token (part of what was confusing me in the first place).
// Working version: persist the refresh_token obtained from the FIRST
// getToken(access_code) exchange, and let refreshAccessToken() mint new
// access tokens from it on later requests.
const { google } = require('googleapis')
const scopes = [
'https://www.googleapis.com/auth/spreadsheets.readonly',
'https://www.googleapis.com/auth/userinfo.email',
'https://www.googleapis.com/auth/drive.readonly'
]
module.exports = (env, mongo) => {
// Fresh OAuth2 client per call so tokens are never shared between users.
const getBaseClient = () => {
const { OAUTH_CLIENT_ID, OAUTH_CLIENT_SECRET, OAUTH_CALLBACK_URL } = env.credentials
return new google.auth.OAuth2(
OAUTH_CLIENT_ID, OAUTH_CLIENT_SECRET, OAUTH_CALLBACK_URL
)
}
// Loads the user's stored tokens, refreshes them if near expiry, persists
// the new set (carrying the refresh_token over), and returns an authed client.
const getAuthedClient = async (authId) => {
let tokens = await mongo.getAccessTokensForAuthUser(authId)
const client = getBaseClient()
client.setCredentials(tokens)
if (client.isTokenExpiring()) {
const { credentials } = await client.refreshAccessToken()
// refreshAccessToken() does not echo the refresh_token back; keep the old one.
tokens = { ...credentials, refresh_token: tokens.refresh_token }
await mongo.setAccessTokensForAuthUser(authId, tokens)
client.setCredentials(tokens)
}
return client
}
// prompt:'consent' forces Google to hand back a code that yields a
// refresh_token on every re-authentication (see the explanation above).
const generateAuthUrl = (userId) => {
return getBaseClient().generateAuthUrl({
access_type: 'offline',
prompt: 'consent',
scope: scopes,
state: `userId=${userId}`
})
}
// First-time exchange: swap the access code for tokens and return the
// user's profile together with the tokens (caller persists them).
const getUserInfo = async (accessCode) => {
const auth = getBaseClient()
const { tokens } = await auth.getToken(accessCode)
auth.setCredentials(tokens)
const { data } = await google.oauth2({ version: 'v2', auth }).userinfo.get({})
return { ...data, tokens }
}
// Lists every spreadsheet the user can see, following Drive pagination.
const listSheets = async (authId) => {
const auth = await getAuthedClient(authId)
let nextPageToken = null
let results = []
do {
const { data } = await google
.drive({ version: 'v3', auth })
.files.list({
q: 'mimeType = \'application/vnd.google-apps.spreadsheet\'',
includeItemsFromAllDrives: true,
supportsAllDrives: true,
corpora: 'user',
orderBy: 'name',
pageToken: nextPageToken
})
nextPageToken = data.nextPageToken
results = results.concat(data.files)
} while (nextPageToken)
return results
}
return {
generateAuthUrl,
getUserInfo,
listSheets
}
}

Exporting data in a node file after it is returned from an API call to AWS Secrets Manager

Background
I am storing the database information for a RDS in AWS in the secrets manager. I am using the AWS-SDK to retrieve the password and other data so I can create a secrets object at run time. When I try and create this object and then export it, the object that is exported is always lacking the data that I expect to be returned from the aws-sdk.
What I Have Tried -
I have tried using async await but it is still exporting the object before all of the data is correctly populated.
Example
const AWS = require('aws-sdk');
const region = 'us-west-2';
const secretName = 'example/example/example';
// Static defaults; the DB fields are filled in asynchronously below.
let secrets = {
username: '',
password: '',
host: '',
port: '',
database: '',
email: 'example#example.com',
emailPassword: 'SomePassword'
};
const client = new AWS.SecretsManager({
region: region
});
// NOTE(review): this callback runs AFTER module.exports below has already
// handed out the half-empty object — that ordering is the bug being asked about.
client.getSecretValue({ SecretId: secretName }, async (err, data) => {
if (err) {
throw err;
} else {
// JSON.parse is synchronous; the `await` here has no effect.
const res = await JSON.parse(data.SecretString);
secrets.username = res.username;
secrets.password = res.password;
secrets.host = res.host;
secrets.port = res.port;
secrets.database = res.database;
}
});
// Exported synchronously, before the secret values have arrived.
module.exports = secrets;
Question
The obvious problem here is not creating the promise correctly, but I am not sure why my attempts complete the promise after the file gets exported. If I console.log(secrets) in another file, in some cases it will output the object missing data, then show the data returned by the promise a few seconds later when I console.log(secrets) inside of the function.
What is the proper way to build this object secrets and export it once the data is returned from AWS and added to the object secrets?
According to the docs, the second argument to getSecretValue is a callback function, so there's no need to use async/await since async/await is meant to work with promises.
Removing async/await should work.
// Plain callback style — no async/await needed since JSON.parse is synchronous.
client.getSecretValue({ SecretId: secretName }, (err, data) => {
  if (err) {
    throw err;
  }
  // Fill in only the DB fields; email/emailPassword keep their defaults.
  const parsed = JSON.parse(data.SecretString);
  secrets.username = parsed.username;
  secrets.password = parsed.password;
  secrets.host = parsed.host;
  secrets.port = parsed.port;
  secrets.database = parsed.database;
});
However, you're exporting the secrets object synchronously, and its properties are getting set asynchronously.
Instead, you can return a promise for your other modules to consume.
const AWS = require('aws-sdk');
const region = 'us-west-2';
const secretName = 'example/example/example';

// Static fields; the DB fields are added once the secret has been fetched.
let secrets = {
  email: 'example#example.com',
  emailPassword: 'SomePassword'
};

const client = new AWS.SecretsManager({
  region: region
});

// Wrap the callback API in a promise so consumers can await the full object.
const promise = new Promise((resolve, reject) => {
  // FIX: the original callback was needlessly `async` and awaited JSON.parse,
  // which is synchronous — both removed for clarity.
  client.getSecretValue({ SecretId: secretName }, (err, data) => {
    if (err) {
      reject(err);
    } else {
      const res = JSON.parse(data.SecretString);
      secrets.username = res.username;
      secrets.password = res.password;
      secrets.host = res.host;
      secrets.port = res.port;
      secrets.database = res.database;
      resolve(secrets);
    }
  });
})

module.exports = promise;
Then, in some other module that consumes this one, you can use async/await since we now have a promise.
// BUG FIX: the module does `module.exports = promise` (a CommonJS default
// export), so there is no named `promise` binding to import — a named import
// would be undefined. Require the module's export directly instead.
const promise = require("./this-module");

(async () => {
  const secrets = await promise;
  console.log(secrets);
})();
Update
I'm not sure if this will work, but it's worth a shot. Here, set module.exports only after secrets is set. If this doesn't work, then I would ask a new question on StackOverflow about how to export resolved promises with CommonJS (which is the module format that you're using).
const AWS = require('aws-sdk');
const region = 'us-west-2';
const secretName = 'example/example/example';
let secrets = {
email: 'example#example.com',
emailPassword: 'SomePassword'
};
const client = new AWS.SecretsManager({
region: region
});
// Promise wrapper around the callback-style SecretsManager API.
const promise = new Promise((resolve, reject) => {
client.getSecretValue({ SecretId: secretName }, async (err, data) => {
if (err) {
reject(err);
} else {
// NOTE(review): JSON.parse is synchronous — the `await` here is a no-op.
const res = await JSON.parse(data.SecretString);
secrets.username = res.username;
secrets.password = res.password;
secrets.host = res.host;
secrets.port = res.port;
secrets.database = res.database;
resolve(secrets);
}
});
});
// NOTE(review): this re-assigns module.exports asynchronously, AFTER
// consumers have already require()d the module, so they still see the old
// export object — this pattern is unlikely to work with CommonJS. Verify.
(async () => {
module.exports = await promise;
})();
// NOTE(review): AWS SDK v2 request objects expose .promise()/.send(), not
// .subscribe() — this looks like RxJS-style code and would throw at runtime.
// Presumably .promise().then(...) was intended; confirm against the SDK docs.
client.getSecretValue({ SecretId: secretName }).subscribe(data=>{
const res =JSON.parse(data.SecretString);
secrets.username = res.username;
secrets.password = res.password;
secrets.host = res.host;
secrets.port = res.port;
secrets.database = res.database;
});

PostgreSql results printing to console but not displaying in browser

I'm trying to get some data from a pg database to my api endpoint , I can print the results to the console but I can't get them to display in the browser with res.send. I'm guessing the problem is with global and local scope however I've not been able to figure it out. I'm using ES6 but transpiling with babel. Here's a snippet.
app.get('/', (request, response) => {
const { Pool, Client } = require('pg');
// NOTE(review): `port` has no value — as pasted this object is a syntax error.
const config = {
user: '',
host: '',
database: '',
password: '',
port: ,
}
// Both a Pool and a Client are created, but only the Client is ever used.
const pool = new Pool(config);
const client = new Client(config);
let whole = [];
client.connect();
const text = "SELECT * FROM entries where id='1'";
client.query(text)
.then(res => {
console.log(res.rows[0]);
whole.push(res.rows[0]);
})
.catch(e => console.error(e.stack));
// BUG (the subject of the question): this runs before the .then above
// resolves, so `whole` is still empty when it is sent to the browser.
response.send(whole);
// NOTE(review): missing parentheses — this references client.end without
// calling it, so the connection is never closed.
client.end;
});
This logs to the console
{ id: 1, title: 'First title', body: 'beautiful body' }
However the browser only displays []
This is what babel transpiles it to which is the script I run in node.
// Babel-transpiled output — same ordering bug as the source: response.send
// runs before the query's .then callback has pushed the row into `whole`.
var whole = [];
client.connect();
var text = "SELECT * FROM entries where id='1'";
client.query(text).then(function (res) {
console.log(res.rows[0]);
whole.push(res.rows[0]);
}).catch(function (e) {
return console.error(e.stack);
});
response.send(whole);
// NOTE(review): client.end is referenced but never called (no parentheses).
client.end;
response.send is called outside of the async promise .then resolver, and is therefore executed before you push the row data into the array. Moving response.send into the promise resolver should fix it.
// Send the response from inside the resolver, after the row is collected.
client
  .query(text)
  .then((result) => {
    whole.push(result.rows[0]);
    client.end();
    response.send(whole);
  })
  .catch((err) => {
    console.error(err.stack);
  });
Alternatively, you can use async/await depending on your babel version and presets/plugins.
const { Client } = require("pg");
const config = {...};
const queryText = "SELECT * FROM entries where id='1'";
app.get("/", async (request, response) => {
const client = new Client(config);
await client.connect();
try {
const queryResponse = await client.query(queryText);
// Send response without pushing to array
response.send(queryResponse.rows[0]);
client.end();
} catch (e) {
console.error(e.stack);
}
});

Categories

Resources