Mocking Firebase's auth as an object and a function at the same time?

I am trying to write minimal mocks for Firebase's auth to satisfy use cases like this:
facebookSignIn = async () => {
  try {
    const user = await FacebookLogIn.logIn();
    const credential = firebase.auth.FacebookAuthProvider.credential(user.accessToken);
    await firebase.auth().signInAndRetrieveDataWithCredential(credential);
  } catch (e) {
    Alert.alert("Error", e.message);
  }
};
I've got this far
const firebase = {
  auth: {
    onAuthStateChange: callback => {
      callback({
        user: {
          uid: "123",
          email: "test@email.com"
        }
      });
    },
    FacebookAuthProvider: {
      credential: accessToken => ({ param: "1" })
    }
  }
};
My main concern now is the difference between doing
const credential = firebase.auth.FacebookAuthProvider.credential(user.accessToken);
and
await firebase.auth().signInAndRetrieveDataWithCredential(credential);
You can see that auth here is an object and a function at the same time; I'm not entirely sure how I can mock that.

In JavaScript, functions are objects too. You can simply add the properties you need to the function.
const firebase = {
  auth: function () {
    console.log('foo');
  }
};
firebase.auth.bar = 'bar';
firebase.auth();
console.log(firebase.auth.bar);
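Applied to the question's use case, a minimal sketch could look like the following. The canned credential object and the signInAndRetrieveDataWithCredential stub are placeholders I am assuming for illustration, not the real Firebase behaviour:
// `auth` is a plain function (so firebase.auth() works) that also carries
// static properties (so firebase.auth.FacebookAuthProvider.credential() works).
const auth = () => ({
  signInAndRetrieveDataWithCredential: async credential => ({ user: { uid: "123" } })
});
auth.FacebookAuthProvider = {
  credential: accessToken => ({ param: "1" }) // canned credential object
};
const firebase = { auth };

// Both call styles from the question now resolve:
const credential = firebase.auth.FacebookAuthProvider.credential("some-token");
firebase.auth().signInAndRetrieveDataWithCredential(credential);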


Proxyquire not calling inner functions (npm modules) and does not work with classes properly

Where am I going wrong here?
I am using Mocha, Chai, Sinon, and Proxyquire for an Express server and the Sequelize ORM linked with a Postgres database.
I am trying to test a login controller route from my Express server.
Before I show the file I want to run my test on, here is what the "../services/authService.js" file looks like:
../services/authService
const UserService = require("./userService");
module.exports = class AuthService extends UserService {
};
// so UserService will have the method findByEmail
// The UserService class looks like this; it comes from another file, require("./userService.js"), as stated above
/*
class UserService {
async findByEmail(email) {
try {
const user = await User.findOne({ where: { email: email }});
if (user) {
return user;
}
throw new Error("User not found");
} catch (err) {
err.code = 404;
throw err
}
}
}
*/
And here is the auth-controller.js file which I want to run the test on
auth-controller.js
const bcrypt = require('bcryptjs');
const AuthService = require("../services/authService"); // is a class which extends from another calls see the code above
const authService = new AuthService();
const jwtGenerator = require('../utils/jwtGenerator');
const createError = require("http-errors");
exports.loginRoute = async (req, res, next) => {
try {
req.body.password = String(req.body.password);
// db query trying to force a sinon.stub to resolve a fake value. But code wont pass here hence 500 error
const userQuery = await authService.findByEmail(req.body.email);
const compare = await bcrypt.compare(req.body.password, userQuery.password);
if (!compare) throw createError(401, 'Incorrect password.');
const user = {
id: userQuery.id, role: userQuery.is_admin ? "Administrator" : "User", email: userQuery.email, Authorized: true
}
const token = jwtGenerator(user);
return res
.cookie("access_token", token, {
httpOnly: true,
secure: process.env.NODE_ENV === "production",
}).status(200).json({ message: "Logged in successfully 😊 👌", user, token });
} catch (error) {
next(error);
}
}
This code works in production, but I cannot seem to test it. I used Proxyquire to require the modules that the function uses. I have a big problem making Proxyquire work with my class AuthService; here is my test file. Proxyquire does not seem to work with classes somehow: it is not using my AuthServiceMock at all, and I can't figure out why.
First of all, these are the helper variables which I will use in the test file:
../test-utils/user-helper
const createAccessToken = (payload) => jwt.sign(payload, TOKEN, {expiresIn: "1h"});
let loginDetail = {
email: "admin#test.com",
password: "123456"
};
let loginAdminUser = {
id: 1,
email: "admin#test.com",
password: "123456",
is_admin: true
}
const loginUser = {
id: 1,
email: "admin#test.com",
password: "123456",
is_admin: true
}
const adminUser = {
id: 1,
email: 'admin@test.com',
password: '123456',
is_admin: true,
first_name: 'john',
last_name: 'doe',
created_at: "2020-06-26T09:31:36.630Z",
updated_at: "2020-06-26T09:31:49.627Z"
}
module.exports = {
createAccessToken,
loginDetail,
loginAdminUser,
loginUser,
adminUser
}
And here is the test file. I placed comments, especially around Proxyquire where I am trying to use it, as it is giving me some issues when used with classes. It is also not calling the mocked/stubbed npm modules for some reason.
auth-controller.spec.js
"use strict";
const _ = require("lodash");
const path = require("path");
const proxyquire = require("proxyquire").noCallThru().noPreserveCache();
const chai = require("chai");
const { expect } = chai;
const sinon = require("sinon");
const sinonChai = require("sinon-chai");
chai.use(sinonChai);
// const AuthServiceOriginalClass = require("../../services/authService"); If i use this directly in proxyquire it calls the original class
const { createAccessToken, loginDetail, loginAdminUser, loginUser, adminUser } = require("../test-utils/user-helper");
const controllerPath = path.resolve('./controllers/authController.js');
describe("login route", () => {
let proxy, authService, bcryptStub, fakeCallback, fakeReq, fakeRes, fakeNext, resolveFn, token;
let result, bcryptStubbing, response;
class UserServiceMock {
async findByEmail(email) {
try {
if (email) {
return loginAdminUser;
}
} catch (error) {
throw error;
}
}
}
class AuthServiceMock extends UserServiceMock {};
bcryptStub = {
compare: function() { return true }
};
let tokeen = (kk) => {
return createAccessToken(kk);
}
// token = sinon.mock(createAccessToken(loginAdminUser)); // ?? which 1 to use?
token = sinon.spy(createAccessToken); // ?? which 1 to use?
// token = sinon.stub(createAccessToken) ?? which 1 to use?
proxy = proxyquire(controllerPath, {
"../services/authService.js": AuthServiceMock, // seems like this is not called at all
// "../services/authService.js": AuthServiceOriginalClass, // commented out if use this instead it calls the original class instant
"bcryptjs": bcryptStub,
"../utils/jwtGenerator": token,
// "#noCallThru": true // keep on or off?
});
before("Stub my methods", () => {
authService = new AuthServiceMock();
// If I call the entire loginRoute I want this stub authTry to be called inside of it and resolve that object value
authTry = sinon.stub(authService, "findByEmail").withArgs(loginDetail.email).resolves(loginAdminUser);
sinon.stub(bcryptStub, "compare").resolves(true); // force it to return true as that seems to be like the code of authController.js
// sinon.stub(token, "createAccessToken")
});
before("call the function loginRoute", async () => {
// fakeCallback = new Promise((res, rej) => {
// resolveFn = res
// });
fakeReq = {
body: {
email: loginDetail.email,
password: loginDetail.password
}
};
fakeRes = {
cookie: sinon.spy(),
status: sinon.spy(),
json: sinon.spy()
}
fakeNext = sinon.stub();
await proxy.loginRoute(fakeReq, fakeReq, fakeNext).then((_result) => {
result = _result;
});
console.log("result")
console.log(result) // undefined
console.log("result")
});
it("login route test if the stubs are called", async () => {
expect(authService.findByEmail).to.have.been.called // never called
// expect(bcryptStubbing).to.have.been.called // never called
// expect(response.status).to.deep.equal(200); // doesn't work
}).timeout(10000);
after(() => {
sinon.reset()
});
});
Where am I going wrong here in the test?
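For reference, here is a minimal, self-contained sketch of substituting a class with Proxyquire; the file names and paths are hypothetical, not the ones from the question. The key in the stubs object has to match the exact string the module under test passes to require, and the stub value is the class itself:
// controller.js (hypothetical module under test)
// const AuthService = require("./authService");
// const authService = new AuthService();
// exports.login = email => authService.findByEmail(email);

// controller.spec.js (hypothetical test)
const proxyquire = require("proxyquire").noCallThru();
class AuthServiceMock {
  async findByEmail(email) {
    return { id: 1, email }; // canned user instead of a database lookup
  }
}
const controller = proxyquire("./controller", {
  "./authService": AuthServiceMock // same string controller.js uses in its require call
});
controller.login("admin@test.com").then(user => console.log(user));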

Asynchronous code works independently, but not together. Not getting any exceptions, just timing out

I am writing a Lambda function to add hosts to an SQS queue for a rolling restart. The code I have written works individually, but not together, even when I hard-code values in the constructor. This doesn't appear to be a memory/CPU issue. I tried running the function with 1 GB of memory, even though it only uses about 80 MB. The average execution time for the individual functions is about 0.5 seconds (it shouldn't take more than about 1.5 seconds to execute in total). I did try running this function with a 30-second timeout, but it still timed out.
I work behind a corporate proxy and have to hand-jam the code. I don't have an IDE or IntelliSense on my internet-facing network. There may be typos here, but not in the actual code. I have omitted my module imports and variable declarations to save time; they aren't relevant to the issue at hand.
EDIT: I added the module imports and variable declarations to the first example to hopefully alleviate some confusion.
Here are just a few things I have tried. This does not work (timing out):
// Custom lambda layer
const { marklogic, aws } = require('nodejs-layer-lib');
const { HOSTS, DOMAIN, PORT, USERNAME, PASSWORD, RESTART_QUEUE_NAME } = process.env;
const params = [
'format=json'
];
const options = {
port: PORT,
params: params,
httpOptions: {
headers: {
'Authorization': `Basic ${Buffer.from(`${USERNAME}:${PASSWORD}`).toString('base64')}`
},
method: 'GET'
}
};
const taskServers = (HOSTS.split(',') || []).map(host => {
const _host = host.split(':');
return {
id: _host[0],
name: `http://${_host[1].toLowerCase()}.${DOMAIN}`
};
});
exports.handler = async () => {
let hosts, queueUrl, addToQueueResults;
try {
hosts = (await marklogic.hosts.getHosts(taskServers, options) || []);
} catch (e) { console.error('hosts', e); }
try {
queueUrl = await aws.sqs.getQueueUrlByName(RESTART_QUEUE_NAME);
} catch (e) { console.error('queueUrl ', e); }
try {
addToQueueResults = await aws.sqs.addMessages(queueURL, hosts);
} catch (e) { console.error('addToQueueResults ', e); }
return {
status: 200,
body: addToQueueResults
};
}
This does not work (timing out):
// Modules imports and variable declarations here...
exports.handler = async () => {
const hosts = (await marklogic.hosts.getHosts(taskServers, options) || []);
const queueUrl = await aws.sqs.getQueueUrlByName(RESTART_QUEUE_NAME);
const addToQueueResults = await aws.sqs.addMessages(queueURL, hosts);
return {
status: 200,
body: addToQueueResults
};
}
This does not work (timing out):
// Modules imports and variable declarations here...
exports.handler = async () => {
const hosts = (await marklogic.hosts.getHosts(taskServers, options) || []);
const queueUrl = await aws.sqs.getQueueUrlByName('my-queue-name');
const addToQueueResults = await aws.sqs.addMessages('http://queueurl.com', ['anything', 'in', 'here']); // Doesn't even need the queueUrl or hosts anymore
return {
status: 200,
body: addToQueueResults
};
}
This works. It will return the host objects I am expecting in the response:
// Modules imports and variable declarations here...
exports.handler = async () => {
const hosts = (await marklogic.hosts.getHosts(taskServers, options) || []);
return {
status: 200,
body: hosts
};
}
This works. It will get the queue url, then add messages to my SQS queue and return the SQS response:
// Modules imports and variable declarations here...
exports.handler = async () => {
const queueUrl = await aws.sqs.getQueueUrlByName(RESTART_QUEUE_NAME);
const addToQueueResults = await aws.sqs.addMessages(queueUrl , ['anything', 'in', 'here']);
return {
status: 200,
body: addToQueueResults
};
}
I tried implementing the async handler pattern from "AWS Lambda function handler in Node.js" and reviewed many AWS Lambda execution troubleshooting documents. The MarkLogic Management API runs on port 8002 by default, and I think the aws-sdk module uses HTTP/HTTPS (80/443), so I don't think the ports are getting tied up.
What am I missing here?
EDIT 2: This has something to do with how promises are handled with AWS Lambda. I cannot find much information about this. Even following the instructions in AWS Lambda function handler in Node.js for "Async Handlers" I cannot get this to work. It works perfectly fine locally with or without my custom lambda layer.
Node.js runtime: 12.x (I didn't mention this before)
This also doesn't work (timing out):
// Modules imports and variable declarations here...
exports.handler = async function (event) {
const promise = function () {
return new Promise(async function (resolve, reject) {
try {
const hosts = await marklogic.hosts.getHosts(taskServers, options) || [];
const queueUrl = await aws.sqs.getQueueUrlByName(RESTART_QUEUE_NAME);
const addToQueueResults = await aws.sqs.addMessages(queueUrl, hosts);
resolve({
status: 200,
body: addToQueueResults
});
} catch (error) {
reject({
status: 500,
error: error
});
}
});
};
return promise(); // Throws error without constructor despite the AWS doc example
}
Unless some AWS Lambda genius has run into a similar issue before with Node.js, I am just going to convert it into two Lambda functions and use Step Functions to process them.
There was a typo in queueUrl (I imagine it's not that, but worth a try!).
Please run:
// Custom lambda layer
const { marklogic, aws } = require('nodejs-layer-lib');
const { HOSTS, DOMAIN, PORT, USERNAME, PASSWORD, RESTART_QUEUE_NAME } = process.env;
const params = [
'format=json'
];
const options = {
port: PORT,
params,
httpOptions: {
headers: {
Authorization: `Basic ${Buffer.from(`${USERNAME}:${PASSWORD}`).toString('base64')}`
},
method: 'GET'
}
};
const taskServers = (HOSTS.split(',') || []).map(host => {
const _host = host.split(':');
return {
id: _host[0],
name: `http://${_host[1].toLowerCase()}.${DOMAIN}`
};
});
exports.handler = async () => {
let hosts, queueUrl, addToQueueResults;
try {
hosts = (await marklogic.hosts.getHosts(taskServers, options) || []);
} catch (e) { console.error('hosts', e); }
try {
queueUrl = await aws.sqs.getQueueUrlByName(RESTART_QUEUE_NAME);
} catch (e) { console.error('queueUrl ', e); }
try {
addToQueueResults = await aws.sqs.addMessages(queueUrl, hosts);
} catch (e) { console.error('addToQueueResults ', e); }
return {
status: 200,
body: JSON.stringify(addToQueueResults)
};
};
// keeping the same format.. ^^
If no luck: what is on my mind is that, since aws-sdk is included out of the box in Lambda, it's not customary to require it separately via a layer. Although it may not look to be imported at the top level by marklogic, it may be bundled deep within marklogic, and then when you import AWS and change the config (in the layer) it overwrites it.
Let's find out..:
Step 1:
So, you say this should work... if we ignore the AWS import and just import marklogic?
// Custom lambda layer
// const { marklogic, aws } = require('nodejs-layer-lib'); // ignoring AWS for now
const { marklogic } = require('nodejs-layer-lib');
const { HOSTS, DOMAIN, PORT, USERNAME, PASSWORD, RESTART_QUEUE_NAME } = process.env;
const params = [
'format=json'
];
const options = {
port: PORT,
params,
httpOptions: {
headers: {
Authorization: `Basic ${Buffer.from(`${USERNAME}:${PASSWORD}`).toString('base64')}`
},
method: 'GET'
}
};
const taskServers = (HOSTS.split(',') || []).map(host => {
const _host = host.split(':');
return {
id: _host[0],
name: `http://${_host[1].toLowerCase()}.${DOMAIN}`
};
});
exports.handler = async () => {
// let hosts, queueUrl, addToQueueResults;
let hosts;
try {
hosts = (await marklogic.hosts.getHosts(taskServers, options) || []);
console.log('hosts => ', hosts);
// queueUrl = await aws.sqs.getQueueUrlByName(RESTART_QUEUE_NAME);
// addToQueueResults = await aws.sqs.addMessages(queueUrl, hosts);
return {
status: 200,
body: JSON.stringify(hosts)
};
} catch (error) {
console.log('error => ', error);
throw error;
}
};
Ok, so if that works..:
Step 2 (Please set the region for SQS and also hard code in the queueUrl):
// Custom lambda layer
// const { marklogic, aws } = require('nodejs-layer-lib');
const { marklogic } = require('nodejs-layer-lib');
const AWS = require('aws-sdk');
AWS.config.update({ region: 'eu-west-1' }); // Please set region accordingly
const sqs = new AWS.SQS({ apiVersion: '2012-11-05' });
const { HOSTS, DOMAIN, PORT, USERNAME, PASSWORD, RESTART_QUEUE_NAME } = process.env;
const params = [
'format=json'
];
const options = {
port: PORT,
params,
httpOptions: {
headers: {
Authorization: `Basic ${Buffer.from(`${USERNAME}:${PASSWORD}`).toString('base64')}`
},
method: 'GET'
}
};
const taskServers = (HOSTS.split(',') || []).map(host => {
const _host = host.split(':');
return {
id: _host[0],
name: `http://${_host[1].toLowerCase()}.${DOMAIN}`
};
});
exports.handler = async () => {
let hosts, addToQueueResults;
try {
hosts = (await marklogic.hosts.getHosts(taskServers, options) || []);
console.log('hosts => ', hosts);
const queueUrl = 'Please hard code the queueUrl for now';
const sqsParams = {
MessageBody: JSON.stringify(hosts), // SQS message bodies must be strings
QueueUrl: queueUrl
};
addToQueueResults = await sqs.sendMessage(sqsParams).promise();
console.log('addToQueueResults => ', addToQueueResults);
return {
status: 200,
body: JSON.stringify(addToQueueResults)
};
} catch (error) {
console.log('error => ', error);
throw error;
}
};
If that doesn't work, then Step 3: move the require of marklogic to below the require of AWS and the region setting in this last example (so any deeply nested marklogic AWS logic we're unaware of now overwrites your AWS require), re-run it, and fingers crossed :-)
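A quick sketch of that Step 3 reordering, reusing the names from the steps above (nothing new here apart from the require order):
// Require and configure aws-sdk first...
const AWS = require('aws-sdk');
AWS.config.update({ region: 'eu-west-1' }); // set your region accordingly
const sqs = new AWS.SQS({ apiVersion: '2012-11-05' });
// ...and only then require marklogic from the layer, as described in Step 3.
const { marklogic } = require('nodejs-layer-lib');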

Mocking Twilio Client from twilio-node package

I currently use the Twilio Node Helper Library to make various API calls, whether to create assistants/services, list them, or remove them, as well as various other things like uploading tasks, samples, and fields to create a chatbot on Twilio Autopilot.
Examples of some of these functions include:
async function createAssistant(name, client){
var assistantUid = ""
await client.autopilot.assistants
.create({
friendlyName: name,
uniqueName: name
})
.then(assistant => assistantUid = assistant.sid);
return assistantUid
}
async function getAccount(client){
var valid = true
try {
await client.api.accounts.list()
} catch (e) {
valid = false
}
return valid
}
async function connectToTwilio(twilioAccountSid, twilioAuthToken) {
try{
var client = twilio(twilioAccountSid, twilioAuthToken);
} catch (e){
throw new TwilioRequestError(e)
}
var valid = await getAccount(client)
if(valid && client._httpClient.lastResponse.statusCode === 200){
return client
} else{
throw new Error("Invalid Twilio Credentials")
}
}
where client is the client object returned from require("twilio")(twilioAccountSid, twilioAuthToken).
I was wondering what would be the best way of mocking this API to allow me to emulate creating assistants, returning their uniqueNames, etc.
I was wondering whether I might just define some class like
class TwilioTestClient{
constructor(sid, token){
this.sid = sid
this.token = token
this.assistants = TwilioAssistant()
this.services = TwilioServices()
}
}
Where TwilioAssistant and TwilioServices will be additional classes.
Any help would be greatly appreciated!
I struggled with mocking Twilio for a long time. In fact I previously architected my application such that I could mock a wrapper around the Twilio Node Helper just to avoid mocking the actual library. But recent changes to the architecture meant that was no longer an option. This morning I finally got a mock of the Twilio Node Helper Library working. I'm not familiar with the portions of the Twilio library you are using, but I'm hopeful the example here will help you.
We have a function to check if a phone number is mobile, call it isMobile.js.
const Twilio = require("twilio");
const isMobile = async (num) => {
const TwilioClient = new Twilio(process.env.TWILIO_SID, process.env.TWILIO_AUTH_TOKEN);
try {
const twilioResponse = await TwilioClient.lookups.v1
.phoneNumbers(num)
.fetch({ type: "carrier", mobile_country_code: "carrier" });
const { carrier: { type } = {} } = twilioResponse;
return type === "mobile";
} catch (e) {
return false;
}
};
module.exports = isMobile;
Then build a mock for Twilio in __mocks__/twilio.js
const mockLookupClient = {
v1: { phoneNumbers: () => ({ fetch: jest.fn(() => {}) }) }
};
module.exports = class Twilio {
constructor(sid, token) {
this.lookups = mockLookupClient;
}
};
In the test file isMobile.test.js
jest.mock("twilio");
const Twilio = require("twilio");
const isMobile = require("./isMobile");
const mockFetch = jest.fn();
const mockPhoneNumbers = jest.fn(() => ({
fetch: mockFetch
}));
describe("isMobile", () => {
const TwilioClient = new Twilio(process.env.TWILIO_SID, process.env.TWILIO_AUTH_TOKEN);
const lookupClient = TwilioClient.lookups.v1;
lookupClient.phoneNumbers = mockPhoneNumbers;
beforeEach(() => {
mockFetch.mockReset();
});
test("is a function", () => {
expect(typeof isMobile).toBe("function");
});
test("returns true for valid mobile number", async () => {
const validMobile = "+14037007492";
mockFetch.mockReturnValueOnce({
carrier: { type: "mobile", mobile_country_code: 302 }, // eslint-disable-line camelcase
phoneNumber: validMobile
});
expect(await isMobile(validMobile)).toBe(true);
});
test("returns false for non-mobile number", async () => {
const invalidMobile = "+14035470770";
mockFetch.mockReturnValueOnce({
carrier: { type: "not-mobile", mobile_country_code: null }, // eslint-disable-line camelcase
phoneNumber: invalidMobile
});
expect(await isMobile(invalidMobile)).toBe(false);
});
});
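If you also want to cover the failure branch (isMobile returns false when the lookup throws), a small addition to the same test file could make the mocked fetch reject; the phone number and error message below are made up for illustration:
test("returns false when the Twilio lookup throws", async () => {
  // Make the stubbed fetch reject so isMobile falls into its catch block.
  mockFetch.mockRejectedValueOnce(new Error("lookup failed"));
  expect(await isMobile("+15550000000")).toBe(false);
});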

Adaptation of Firebase Stripe sample Cloud Functions returning an unspecified error

I am implementing the Firebase Stripe Cloud Functions from this sample: https://github.com/firebase/functions-samples/blob/master/stripe/functions/index.js with some adaptations (mainly running stripe.confirmCardSetup() in the cloud function instead of the front end, because I had a lot of trouble trying to import Stripe correctly outside of the functions files) in my Vue web app.
The code as is, however, produces an error with no type specification, which is why it sets the field "error" to 'An error occurred, developers have been alerted', as defined in the function userFacingMessage().
What is causing these errors?
By the way, I am still running Stripe in test mode and using the test cards for testing; however, I don't imagine that causes the errors.
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp();
// const { Logging } = require('@google-cloud/logging');
// const logging = new Logging({
// projectId: "",
// });
const stripe = require('stripe')(functions.config().stripe.secret, {
apiVersion: '2020-03-02',
});
exports.createStripeCustomerSA = functions
.region('southamerica-east1')
.auth.user().onCreate(async (user) => {
const customer = await stripe.customers.create({ email: user.email });
const intent = await stripe.setupIntents.create({
customer: customer.id,
});
await admin.firestore().collection('users').doc(user.email).set({
customer_id: customer.id,
setup_secret: intent.client_secret
});
return;
});
exports.createStripePaymentSA = functions
.region('southamerica-east1')
.firestore
.document('users/{userId}/payments/{pushId}')
.onCreate(async (snap, context) => {
const { amount, payment_method } = snap.data();
try {
// Look up the Stripe customer id.
const customer = (await snap.ref.parent.parent.get()).data().customer_id;
const setup_secret = (await snap.ref.parent.parent.get()).data().setup_secret;
const { setupIntent, error } = await stripe.confirmCardSetup(
await setup_secret,
{
payment_method
}
);
if (error) {
await snap.ref.set({"confirmCardSetupError": 1});
return;
}
// Create a charge using the pushId as the idempotency key
// to protect against double charges.
const idempotencyKey = context.params.pushId;
const payment = await stripe.paymentIntents.create(
{
amount: amount,
currency: 'BRL',
customer: customer,
payment_method: setupIntent.payment_method,
off_session: false,
confirm: true,
confirmation_method: 'manual',
},
{ idempotencyKey }
);
// If the result is successful, write it back to the database.
await snap.ref.set(payment);
// CREATE THE COLLECTION OF PAID ITEMS
await snap.ref.parent.parent.collection("wantedCourses").map(function (course) {
return course.set("paid", 1);
})
} catch (error) {
// We want to capture errors and render them in a user-friendly way, while
// still logging an exception with StackDriver
await snap.ref.set({ error: userFacingMessage(error) }, { merge: true });
// await reportError(error, { user: context.params.userId });
}
});
exports.confirmStripePaymentSA = functions
.region('southamerica-east1')
.firestore
.document('stripe_customers/{userId}/payments/{pushId}')
.onUpdate(async (change, context) => {
if (change.after.data().status === 'requires_confirmation') {
const payment = await stripe.paymentIntents.confirm(
change.after.data().id
);
change.after.ref.set(payment);
}
});
function userFacingMessage(error) {
return error.type
? error.message
: 'An error occurred, developers have been alerted';
}
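As a generic debugging step (my suggestion, not part of the original sample), you can log the raw error before masking it, so the real type and message show up in the Cloud Functions logs instead of only the generic string. A hypothetical helper:
// Logs the full error (type, message, stack) with console.error, then returns
// the same user-facing text as userFacingMessage() above.
function reportAndMask(error) {
  console.error('createStripePaymentSA error:', error);
  return error.type
    ? error.message
    : 'An error occurred, developers have been alerted';
}
// Usage inside the catch block of createStripePaymentSA:
// await snap.ref.set({ error: reportAndMask(error) }, { merge: true });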

Using google oauth2 node library, can't get new access token with refresh token

With the google oauth2 library, I can successfully authenticate a user on their first pass through, get their refresh token and first access token. Until the token expires, everything works as expected.
However, when the access token expires, I need to get a new access token and store these tokens in my data store using the existing refresh token. I am aware the documentation states tokens should re-fetch themselves when they expire, but as I am creating a new client for each call (to ensure tokens are not re-used between users), I think the client gets torn down before a token gets a chance to refresh itself.
Inspecting what the library does when calling the actual Google API, I should be able to get new access tokens by calling the client.refreshAccessToken() method, but the response from this call gives me the invalid_grant Bad Request error. I have compared the actual API request this method makes to the one on the Google OAuth2 playground and the two calls are identical, although their call for refreshing their token works and mine does not.
Attached is my code as it currently stands. Please send help; I don't have any hair left to pull out!
const { google } = require('googleapis')
const scopes = [
'https://www.googleapis.com/auth/spreadsheets.readonly',
'https://www.googleapis.com/auth/userinfo.email',
'https://www.googleapis.com/auth/drive.readonly'
]
module.exports = (env, mongo) => {
const getBaseClient = () => {
const { OAUTH_CLIENT_ID, OAUTH_CLIENT_SECRET, OAUTH_CALLBACK_URL } = env.credentials
return new google.auth.OAuth2(
OAUTH_CLIENT_ID, OAUTH_CLIENT_SECRET, OAUTH_CALLBACK_URL
)
}
const getNewAccessTokens = async (authId, refreshToken) => {
const { tokens } = await getBaseClient().getToken(refreshToken)
await mongo.setAccessTokensForAuthUser(authId, { ...tokens, refresh_token: refreshToken })
return tokens
}
const getAuthedClient = async (authId) => {
let tokens = await mongo.getAccessTokensForAuthUser(authId)
if (!tokens.access_token) {
tokens = await getNewAccessTokens(authId, tokens.refresh_token)
}
const client = getBaseClient()
client.setCredentials(tokens)
if (client.isTokenExpiring()) {
const { credentials } = await client.refreshAccessToken()
tokens = { ...credentials, refresh_token: tokens.refreshToken }
await mongo.setAccessTokensForAuthUser(authId, tokens)
client.setCredentials(tokens)
}
return client
}
const generateAuthUrl = (userId) => {
return getBaseClient().generateAuthUrl({
access_type: 'offline',
scope: scopes,
state: `userId=${userId}`
})
}
const getUserInfo = async (authId) => {
const auth = await getAuthedClient(authId)
return google.oauth2({ version: 'v2', auth }).userinfo.get({})
}
const listSheets = async (authId) => {
const auth = await getAuthedClient(authId)
let nextPageToken = null
let results = []
do {
const { data } = await google
.drive({ version: 'v3', auth })
.files.list({
q: 'mimeType = \'application/vnd.google-apps.spreadsheet\'',
includeItemsFromAllDrives: true,
supportsAllDrives: true,
corpora: 'user',
orderBy: 'name',
pageToken: nextPageToken
})
nextPageToken = data.nextPageToken
results = results.concat(data.files)
} while (nextPageToken)
return results
}
return {
generateAuthUrl,
getUserInfo,
listSheets
}
}
I solved my own problem.
I was conflating access_codes with refresh_tokens: I believed the code you receive from the auth URL was the refresh_token, so I was storing it and attempting to reuse it to get more access_tokens. This is wrong. Don't do this.
You get the access_code from the authentication url, and the first time you use that with the client.getToken(code) method, you receive the refresh_token and access_token.
I've attached updated and working code should anyone wish to use it.
I should also mention that I added prompt: 'consent' to the auth URL so that you always receive an access_code you can use to get a refresh_token when someone re-authenticates (because if you don't, a call to client.getToken() does not return a refresh_token, which is part of what was confusing me in the first place).
const { google } = require('googleapis')
const scopes = [
'https://www.googleapis.com/auth/spreadsheets.readonly',
'https://www.googleapis.com/auth/userinfo.email',
'https://www.googleapis.com/auth/drive.readonly'
]
module.exports = (env, mongo) => {
const getBaseClient = () => {
const { OAUTH_CLIENT_ID, OAUTH_CLIENT_SECRET, OAUTH_CALLBACK_URL } = env.credentials
return new google.auth.OAuth2(
OAUTH_CLIENT_ID, OAUTH_CLIENT_SECRET, OAUTH_CALLBACK_URL
)
}
const getAuthedClient = async (authId) => {
let tokens = await mongo.getAccessTokensForAuthUser(authId)
const client = getBaseClient()
client.setCredentials(tokens)
if (client.isTokenExpiring()) {
const { credentials } = await client.refreshAccessToken()
tokens = { ...credentials, refresh_token: tokens.refresh_token }
await mongo.setAccessTokensForAuthUser(authId, tokens)
client.setCredentials(tokens)
}
return client
}
const generateAuthUrl = (userId) => {
return getBaseClient().generateAuthUrl({
access_type: 'offline',
prompt: 'consent',
scope: scopes,
state: `userId=${userId}`
})
}
const getUserInfo = async (accessCode) => {
const auth = getBaseClient()
const { tokens } = await auth.getToken(accessCode)
auth.setCredentials(tokens)
const { data } = await google.oauth2({ version: 'v2', auth }).userinfo.get({})
return { ...data, tokens }
}
const listSheets = async (authId) => {
const auth = await getAuthedClient(authId)
let nextPageToken = null
let results = []
do {
const { data } = await google
.drive({ version: 'v3', auth })
.files.list({
q: 'mimeType = \'application/vnd.google-apps.spreadsheet\'',
includeItemsFromAllDrives: true,
supportsAllDrives: true,
corpora: 'user',
orderBy: 'name',
pageToken: nextPageToken
})
nextPageToken = data.nextPageToken
results = results.concat(data.files)
} while (nextPageToken)
return results
}
return {
generateAuthUrl,
getUserInfo,
listSheets
}
}
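For context, here is a rough usage sketch of the module above; the Express route names, the file name, the way env and mongo are supplied, and the use of the Google user id as the authId are assumptions on my part, not part of the answer:
const express = require('express');
const createGoogleClient = require('./googleClient'); // assumed filename for the module above

const app = express();
const google = createGoogleClient(env, mongo); // env and mongo come from your own app setup

// 1. Send the user to Google's consent screen.
app.get('/auth/google', (req, res) => {
  res.redirect(google.generateAuthUrl(req.query.userId));
});

// 2. On the callback, exchange the access code for tokens and persist them.
app.get('/auth/google/callback', async (req, res) => {
  const info = await google.getUserInfo(req.query.code); // { ...userinfo, tokens }
  await mongo.setAccessTokensForAuthUser(info.id, info.tokens); // store for later refreshes
  res.sendStatus(200);
});

// 3. Later calls reuse the stored tokens (and refresh them when they are expiring).
app.get('/sheets/:authId', async (req, res) => {
  res.json(await google.listSheets(req.params.authId));
});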
