Cannot write using the Twitter API (unsupported authentication) - javascript

I'm trying to use twitter-api-v2 to query twitter using their [rate limit example]
// Query Twitter API v2 rate-limit state via the twitter-api-v2 rate-limit plugin.
// Fixes vs. the posted snippet: the plugin package is '@twitter-api-v2/plugin-rate-limit'
// (the '#' was a scraper-mangled '@'), and the BEARER_TOKEN const is now actually used.
import dotenv from 'dotenv';
import { TwitterApi } from 'twitter-api-v2';
import { TwitterApiRateLimitPlugin } from '@twitter-api-v2/plugin-rate-limit';

// Load TWITTER_API_KEY / TWITTER_API_SECRET / BEARER_TOKEN from .env
// before reading process.env.
dotenv.config();

const API_KEY = process.env.TWITTER_API_KEY;
const API_SECRET = process.env.TWITTER_API_SECRET;
const BEARER_TOKEN = process.env.BEARER_TOKEN;

const rateLimitPlugin = new TwitterApiRateLimitPlugin();

// Instantiate with desired auth type (here: OAuth 2.0 app-only Bearer auth).
// NOTE(review): per the error quoted below, /2/users/me rejects app-only auth —
// it requires a user context (OAuth 1.0a or OAuth 2.0 user token), which is
// what the corrected snippet further down supplies.
const twitterClient = new TwitterApi(BEARER_TOKEN, { plugins: [rateLimitPlugin] });
//const twitterClient = new TwitterApi({ appKey: API_KEY, appSecret: API_SECRET }, { plugins: [rateLimitPlugin] });

// Hitting the endpoint once lets the plugin record the rate-limit headers.
await twitterClient.v2.me();

const currentRateLimitForMe = await rateLimitPlugin.v2.getRateLimit('users/me');
console.log(currentRateLimitForMe.limit); // 75
console.log(currentRateLimitForMe.remaining); // 74
I'm getting an error:
'Unsupported Authentication: Authenticating with OAuth 2.0 Application-Only is forbidden for this endpoint. Supported authentication types are [OAuth 1.0a User Context, OAuth 2.0 User Context].',
I'm guessing it has an issue with how I'm logging in, I've tried BEARER and using my API Keys, neither seem to work.
How can I obtain rate limit information?

I'm not sure if this is the case but when I logged in to the developer portal I saw there was another section of keys I could create and everything I was doing before was operating under read only mode.
Here is the full code.
// OAuth 1.0a user-context client: app key/secret plus the user's access
// token/secret. Unlike the app-only bearer token, this auth type is accepted
// by /2/users/me (see the "Supported authentication types" error above).
// NOTE(review): ACCESS_TOKEN_KEY / ACCESS_TOKEN_SECRET are not defined in the
// visible snippet — presumably read from process.env like the other keys.
const twitterClient = new TwitterApi({
appKey: API_KEY,
appSecret: API_SECRET,
accessToken: ACCESS_TOKEN_KEY,
accessSecret: ACCESS_TOKEN_SECRET
}, { plugins: [rateLimitPlugin] })
// One real request populates the plugin's cache for this endpoint.
await twitterClient.v2.me()
const currentRateLimitForMe = await rateLimitPlugin.v2.getRateLimit('users/me')
console.log(`rate limit: ${currentRateLimitForMe.limit} remaining: ${currentRateLimitForMe.remaining}`)

Related

MERN - store user in backend after successful google login

I am pretty new to Programming and have some questions regarding the MERN stack.
I am building an app and trying to realize the log in via google. I was successful with integrating the google auth to my frontend and now I want to store the user after a successful login in the backend.
The first question I have is do I need a database to store the user, or is it common to just store them on the express backend?
In the auth process I do get the JWT from google and try sending it to the backend, but it does not work.
I do get the following error: "SyntaxError: Unexpected token o in JSON at position 1".
How can I send the JWT to the backend and check, if the user already exists in MongoDB and if he does not exist, make a new entry for the user. And when he is logged in, make a session so he does not have to log in again after every refresh.
At the moment I got the following code for the frontend:
import './App.css';
import { useEffect, useState } from 'react'
import jwt_decode from 'jwt-decode';
import Survey from './components/survey';
function App() {
const [ user, setUser] = useState({});
const [backendData, setBackendData] = useState([{}]);
// fetch backend API, we can define relative route, as proxy is defined in package.json
useEffect(() => {
fetch("http://localhost:5000/api").then(
response => response.json()
).then(
data => {
setBackendData(data)
}
)
}, [])
// store the JWT and decode it
function handleCallBackResponse(response){
console.log("Encoded JWT ID token: " + response.credential);
var userObject = jwt_decode(response.credential);
console.log(userObject);
setUser(userObject);
document.getElementById("signInDiv").hidden = true;
fetch("http://localhost:5000/user", {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: userObject,
})
}
// logout the user and show sign in button, google.accounts.id.disableAutoSelect is recording the status in cookies. This prevents a UX dead loop.
function handleSignOut(event){
setUser({});
document.getElementById("signInDiv").hidden = false;
google.accounts.id.disableAutoSelect();
}
useEffect(() => {
/* global google */
// The google.accounts.id.initialize method initializes the Sign In With Google client based on the configuration object.
google.accounts.id.initialize({
client_id: "CLIENT_ID",
callback: handleCallBackResponse
});
// The google.accounts.id.renderButton method renders a Sign In With Google button in your web pages
google.accounts.id.renderButton(
document.getElementById("signInDiv"),
// if only want to display icon
//{theme: "outline", size: "medium", type: "icon"}
{theme: "outline", size: "medium", text: "signin", shape: "square"}
);
// The google.accounts.id.prompt method displays the One Tap prompt or the browser native credential manager after the initialize() method is invoked.
google.accounts.id.prompt();
}, [])
// If we have no user: show sign in button
// if we have a user: show the log out button
return (
<div className="App">
<div id = "signInDiv"/>
{ Object.keys(user).length !== 0 &&
<button className ='signout' onClick={ (e) => handleSignOut(e)}>
{ user &&
<img className='logout' alt="googleprofile" src={user.picture} width='30px' height='30px'></img>
}
</button>
}
<Survey></Survey>
{(typeof backendData.users === 'undefined') ? (
<p>Loading</p>
) : (
backendData.users.map((user, i) => (
<p key={i}>{user}</p>
))
)}
</div>
);
}
export default App;
Backend:
// Minimal Express backend: connects to MongoDB, configures middleware, and
// listens on port 5000.
// NOTE(review): OAuth2Client and jwt are required but never used in the code
// shown; and no POST /user route is registered here, so the frontend's
// fetch("http://localhost:5000/user") has nothing to hit.
const express = require('express')
const morgan = require('morgan')
const cors = require("cors")
const mongoose = require("mongoose")
const { OAuth2Client } = require("google-auth-library");
const jwt = require("jsonwebtoken");
const app = express()
const uri = "MONGODBURL"
// connect to mongoDB (fire-and-forget: the promise is not awaited below, so
// the server starts listening even if the connection fails)
async function connect() {
try {
await mongoose.connect(uri)
console.log("Connected to MongoDB")
} catch (error) {
// connection failure is logged but not fatal
console.error(error)
}
}
connect()
// setup view engine, file in "views" folder needs to have ending .ejs
app.set('view engine', 'ejs')
// log requests in the terminal for troubleshooting
app.use(morgan('combined'))
// allow cross-origin requests from the React dev server and Google domains
app.use(cors({
origin: ['http://localhost:3000', 'https://play.google.com', 'https://accounts.google.com'],
methods: ["GET", "POST", "PUT", "DELETE"],
credentials: true
}))
// parse JSON request bodies (must be registered before any routes that read req.body)
app.use(express.json());
// start app on port 5000 and log a message
app.listen(5000, () => {console.log("Server started on port 5000") })
Hope this makes clear what I want to achieve. I really appreciate any help.
Kind Regards

Google Drive API: 404 File not found error [custom company domain]

I've been trying to GET google drive file by file id using a service account in NodeJS, but requests are failed with the following error (meaning the lack of access):
code: 404,
errors: [
{
message: 'File not found: XXX.',
domain: 'global',
reason: 'notFound',
location: 'fileId',
locationType: 'parameter'
}
]
Scope
I've tried to play around with the scope by adding extra scopes, but essentially https://www.googleapis.com/auth/drive have been always in place.
// OAuth scopes requested for the service account. NOTE(review): the broad
// 'auth/drive' scope already subsumes the narrower file/metadata/readonly
// scopes listed after it, so the extra entries are redundant.
const scopes = [
'https://www.googleapis.com/auth/drive',
'https://www.googleapis.com/auth/drive.appdata',
'https://www.googleapis.com/auth/drive.file',
'https://www.googleapis.com/auth/drive.metadata',
'https://www.googleapis.com/auth/drive.metadata.readonly',
'https://www.googleapis.com/auth/drive.photos.readonly',
'https://www.googleapis.com/auth/drive.readonly',
];
Service account
I've created a service account, following the same conventional flow showed in different resources/docs/tutorials (1, 2, 3, etc)
https://console.cloud.google.com/iam-admin/serviceaccounts?project=XXXX
Enabled Google Drive API
https://console.cloud.google.com/marketplace/product/google/drive.googleapis.com
Enabled Domain-wide Delegation in admin google panel with the exact same scope as listed above (also had tested without additionally enabling this)
https://admin.google.com/ac/owl/domainwidedelegation
Source code
There's a Google Node.js quickstart out there for accessing the Drive API that works by way of a user granting permissions through an OAuth2 modal (example); that's not acceptable in my case, since it must work using a service account (a.k.a. a daemon, machine-to-machine) without any real user interaction.
I've tried out many ways:
using google-auth-library package:
// Build a JWT client directly from inlined service-account key material.
// NOTE(review): the '#' in the client_email strings looks like a scraper-mangled
// '@' — left as-is here because these are runtime string literals in the quoted
// snippet. Also note no `subject` (impersonated user) is set, so the request
// runs as the bare service account; per the question, Drive responds 404 when
// the caller simply cannot see the file — TODO confirm against Drive API docs.
const { auth } = require('google-auth-library');
const client = auth.fromJSON({
type: 'service_account',
project_id: 'XXX',
private_key_id: 'XXX',
private_key: 'XXX',
client_email: 'X#Y.iam.gserviceaccount.com',
client_id: 'XXXX',
auth_uri: 'https://accounts.google.com/o/oauth2/auth',
token_uri: 'https://oauth2.googleapis.com/token',
auth_provider_x509_cert_url: 'https://www.googleapis.com/oauth2/v1/certs',
client_x509_cert_url:
'https://www.googleapis.com/robot/v1/metadata/x509/X%40Y.iam.gserviceaccount.com',
});
// also tested with exact same scopes listed above; scopes are assigned after
// construction because auth.fromJSON does not take them in the key object
const scopes = ['https://www.googleapis.com/auth/drive'];
client.scopes = scopes;
// tested both options for `supportsAllDrives`: true/false
const url = `https://www.googleapis.com/drive/v3/files/XXX?fields=name&supportsAllDrives=true`;
client.request({ url }).then(console.log).catch(console.error);
using ts-google-drive package:
// Same service-account access attempted via the ts-google-drive wrapper.
import { TsGoogleDrive } from 'ts-google-drive';
const tsGoogleDrive = new TsGoogleDrive({
credentials: {
client_email: 'X#Y.iam.gserviceaccount.com',
private_key: '',
},
});
// Fetch one file by id and report whether it is a folder.
// NOTE(review): getFile resolving to `undefined` is treated here as "error",
// i.e. the file is not visible to the service account.
async function getSingleFile(fileId: string): Promise<void> {
// returns `undefined`, meaning an error
const file = await tsGoogleDrive.getFile(fileId);
console.log('file', file);
if (file) {
const isFolder = file.isFolder;
console.log('isFolder', isFolder);
}
}
getSingleFile('XXX');
using googleapis
const { google } = require('googleapis');
const auth = new google.auth.GoogleAuth({
keyFile: 'service-account.json', // file properly located
scopes: ..., // same scope
});
const drive = google.drive({ version: 'v3', auth });
const driveResponse = await drive.files.list({
fields: '*',
});
const file = await drive.files.get({
fileId: 'XXX',
fields: 'name',
supportsAllDrives: true,
});
console.log(file); // error!
using googleapis with jwtClient
const google = require('googleapis');
const fs = require('fs');
const key = require('./service-account.json');
const scopes = ... // same
const drive = google.google.drive('v3');
const jwtClient = new google.google.auth.JWT(
key.client_email,
null,
key.private_key,
scopes,
null,
);
jwtClient.authorize(async (authErr) => {
if (authErr) {
console.log(authErr); // NO error here
return;
}
const drive = google.google.drive({ version: 'v3', auth: jwtClient });
console.log('jwtClient.getCredentials()', jwtClient.getCredentials());
console.log('jwtClient.apiKey', jwtClient.apiKey);
console.log('jwtClient.credentials', jwtClient.credentials);
console.log('jwtClient.gtoken', jwtClient.gtoken);
// errors occur down below when actually requesting the api
const file = await drive.files.get({
fileId: 'XXX',
fields: 'name',
supportsAllDrives: true,
});
console.log(file);
});
./service-account file structure:
{
"type": "service_account",
"project_id": "X",
"private_key_id": "XXXX",
"client_email": "X#Y.iam.gserviceaccount.com",
"client_id": "XXX",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://oauth2.googleapis.com/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/X%Y.iam.gserviceaccount.com"
}
Google Drive resources permissions
Since I'm trying to access my internal company's google drive files, I'm running into issues of giving possibly required file permissions to my service account:
attempt to share a folder/drive with my service account ended up being unsuccessful
It's also important to note that I've failed accessing a file that had been shared with my service account granularly (file was in "Shared with me").
Even though any public file in my company's drive can be accessed with no problem.
Expected result
To have it able to access the company drive files using a service account (no real user interaction) — to highlight, in case it's important — those are located in "Shared with me" and in "Shared drives".

aws javascript sdk v3 - signature mismatch error

I can generate the presigned url following the steps as described in this section, so I wanted to test uploading a specific image marble.jpg and I tried to use postman to test the upload. So, I copied the presigned url and hit the endpoint with a PUT request, and I got this error:
<?xml version="1.0" encoding="UTF-8"?>
<Error>
<Code>SignatureDoesNotMatch</Code>
<Message>The request signature we calculated does not match the signature you provided. Check your key and signing method.</Message>
<Key>records/marble_cave.jpg</Key>
<BucketName>bucket</BucketName>
<Resource>/bucket/records/marble.jpg</Resource>
<RequestId>17E3999B521ABB65</RequestId>
<HostId>50abb07a-2ad0-4948-96e0-23403f661cba</HostId>
</Error>
The following resources are setup:
I'm using the min.io server to test this locally.
I'm using aws-sdk version 3 of the nodejs sdk for aws
I've triple checked my credentials, simple minio creds with no special characters also, I'm definitely making a PUT request.
So, The question is:
How to set the signatureVersion using the new javascript aws sdk version 3. (
The getSignedUrl is used to generate a presigned url in v3 of the sdk, import { getSignedUrl } from '@aws-sdk/s3-request-presigner';)
what causes might be there such that this error is occurring?
The code I use for presigned url generation is:
// Generate a presigned PUT URL against a local MinIO server with AWS SDK v3.
// Fixes vs. the posted snippet: the '@' in the package specifiers was mangled
// to '#', and `endpoint` must be a *string* literal — it was pasted unquoted,
// which is a syntax error.
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
import { PutObjectCommand, S3Client } from '@aws-sdk/client-s3';

const s3Client = new S3Client({
  region: 'us-east-1',
  credentials: {
    accessKeyId: 'minioadmin',
    secretAccessKey: 'minioadmin',
  },
  endpoint: 'http://172.21.0.2:9000',
  forcePathStyle: true, // required for MinIO (no virtual-hosted buckets)
});

const bucketParams = {
  Bucket: 'myBucket',
  Key: `marbles.jpg`,
};

// Presign a PUT for the object; the URL expires after `expiresIn` seconds.
const command = new PutObjectCommand(bucketParams);
const signedUrl = await getSignedUrl(s3Client, command, {
  expiresIn: 10000,
});
I stumbled on this issue myself a year ago, the new V3 SDK has a bug, it doesn't take the port into consideration when signing a URL.
see here https://github.com/aws/aws-sdk-js-v3/issues/2726
the work around I ended up implemented overrides getSignedUrl in my code and add the missing port as follows:
import {BuildMiddleware, MetadataBearer, RequestPresigningArguments} from '#aws-sdk/types';
import {Client, Command} from '#aws-sdk/smithy-client';
import {HttpRequest} from '#aws-sdk/protocol-http';
import {formatUrl} from '#aws-sdk/util-format-url';
import {S3RequestPresigner} from '#aws-sdk/s3-request-presigner';
export const getSignedUrl = async <
InputTypesUnion extends object,
InputType extends InputTypesUnion,
OutputType extends MetadataBearer = MetadataBearer
>(
client: Client<any, InputTypesUnion, MetadataBearer, any>,
command: Command<InputType, OutputType, any, InputTypesUnion, MetadataBearer>,
options: RequestPresigningArguments = {}
): Promise<string> => {
const s3Presigner = new S3RequestPresigner({ ...client.config });
const presignInterceptMiddleware: BuildMiddleware<InputTypesUnion, MetadataBearer> =
(next, context) => async (args) => {
const { request } = args;
if (!HttpRequest.isInstance(request)) {
throw new Error('Request to be presigned is not an valid HTTP request.');
}
// Retry information headers are not meaningful in presigned URLs
delete request.headers['amz-sdk-invocation-id'];
delete request.headers['amz-sdk-request'];
// User agent header would leak sensitive information
delete request.headers['x-amz-user-agent'];
delete request.headers['x-amz-content-sha256'];
delete request.query['x-id'];
if (request.port) {
request.headers['host'] = `${request.hostname}:${request.port}`;
}
const presigned = await s3Presigner.presign(request, {
...options,
signingRegion: options.signingRegion ?? context['signing_region'],
signingService: options.signingService ?? context['signing_service'],
});
return {
// Intercept the middleware stack by returning fake response
response: {},
output: {
$metadata: { httpStatusCode: 200 },
presigned,
},
} as any;
};
const middlewareName = 'presignInterceptMiddleware';
client.middlewareStack.addRelativeTo(presignInterceptMiddleware, {
name: middlewareName,
relation: 'before',
toMiddleware: 'awsAuthMiddleware',
override: true,
});
let presigned: HttpRequest;
try {
const output = await client.send(command);
//#ts-ignore the output is faked, so it's not actually OutputType
presigned = output.presigned;
} finally {
client.middlewareStack.remove(middlewareName);
}
return formatUrl(presigned);
};
The solution is probably the same as in my other question, so simply copying the answer:
I was trying and changing ports, and the put command seems to work when I use only local host for url generation
so, in this above:
// The problematic configuration: endpoint includes an explicit port.
// Fix vs. the posted snippet: `endpoint` must be a quoted string literal
// (it was pasted unquoted, a syntax error).
new S3Client({
  region: 'us-east-1',
  credentials: {
    accessKeyId: 'minioadmin',
    secretAccessKey: 'minioadmin',
  },
  endpoint: 'http://172.21.0.2:9000',
  forcePathStyle: true,
});
I use:
// The working configuration: no explicit port, so the default (80) is used
// and the signed Host header matches.
// Fix vs. the posted snippet: `endpoint` must be a quoted string literal.
new S3Client({
  region: 'us-east-1',
  credentials: {
    accessKeyId: 'minioadmin',
    secretAccessKey: 'minioadmin',
  },
  endpoint: 'http://172.21.0.2', // or 127.0.0.1
  forcePathStyle: true,
});
Note, I haven't used any port number, so the default is 80
If you're using docker-compose add this config:
.
.
.
ports:
- 80:9000
and it works fine.

id_token undefined google api nodejs

I am having an issue with my google APi using a JWT token, I have been having trouble impersonating users for checking calendar avaliability, and I started back tracing and found when I output the JWT token after an authorize, I get the following:
{ access_token: '****',
token_type: 'Bearer',
expiry_date: 1589294984000,
id_token: undefined,
refresh_token: 'jwt-placeholder' }
the id_token comes back undefined. I have set up everything in my cloud services and I call the authorization with a config file
// Authorize the preconfigured JWT (service-account) client and dump the
// returned token object for inspection.
var googleAuthorization = require('../config/calendarConfig.js').jwtClient;
const {google} = require('googleapis');
const calendar = google.calendar('v3');
googleAuthorization.authorize(function(err, token) {
if(err) {
// NOTE(review): `reject` is not defined in this scope — this callback was
// presumably lifted from inside a Promise executor; as shown, an auth
// failure would throw a ReferenceError instead of rejecting.
reject(console.log({status: 1, message: 'Google Authorization Failed: ' + err}));
} else {
// token: { access_token, token_type, expiry_date, id_token, refresh_token }
console.log(token);
}
});
and the config file:
// Config module exporting a JWT client for calendar/directory access.
// NOTE(review): `googleAauth` is required but never used. The 5th JWT
// constructor argument is the user to impersonate (domain-wide delegation
// "subject"); the '#' in that email looks like a scraper-mangled '@' — left
// as-is because it is a runtime string in the quoted snippet.
var google = require('googleapis');
var googleAauth = require('google-auth-library');
var scopes = ['https://www.googleapis.com/auth/calendar',
'https://www.googleapis.com/auth/admin.directory.user'];
var key = require('./intranet-google-service-account.json');
var jwtClient = new google.google.auth.JWT(key.client_email, null, key.private_key, scopes,
"service#accountemail.com");
exports.jwtClient = jwtClient;
Can anyone see what I might be doing wrong to not get an ID token back?

Passport.js / Google OAuth2 strategy - How to use token on login for API access

I am logging users in via their domain Google accounts using passport.js. This works great, but now I need to give this application access to a few Google API's (drive, sheets, etc).
When a user logs in, a message appears in the logs, that makes it seem like passport has all the required info:
info: [06/Jun/2019:21:24:37 +0000] "302 GET /auth/callback?code=** USER ACCESS TOKEN HERE **&scope=email%20profile%20https://www.googleapis.com/auth/drive.file%20https://www.googleapis.com/auth/spreadsheets%20https://www.googleapis.com/auth/userinfo.email%20https://www.googleapis.com/auth/userinfo.profile%20https://www.googleapis.com/auth/drive HTTP/1.1" [46]
This is achieved by passing the appended scopes via passport.authenticate(), which presents the user with the "Grant access to these things on your Google account to this app?" screen :
// Initial auth call to Google: redirects the user to the consent screen
// requesting profile/email plus Drive and Sheets access.
// hd restricts account selection to the given Google Workspace domain.
router.get('/',
passport.authenticate('google', {
hd: 'edmonds.wednet.edu',
scope: [
'email',
'profile',
'https://www.googleapis.com/auth/drive',
'https://www.googleapis.com/auth/drive.file',
'https://www.googleapis.com/auth/spreadsheets'
],
// always show the account chooser rather than silently reusing a session
prompt: 'select_account'
})
);
However, when I go and try to call an API with something like:
// Attempted Sheets API call.
// NOTE(review): `auth` is not defined in the code shown — google.sheets()
// needs an authorized OAuth2 client here; passing something that is not a
// client is consistent with the reported "authClient.request is not a
// function" error. TODO confirm what `auth` resolves to in the real file.
const {google} = require('googleapis');
const sheets = google.sheets({version: 'v4', auth});
router.post('/gsCreate', function(req,res,next){
sheets.spreadsheets.create({
// Details here.....
});
});
I get nothing but errors (the current one is debug: authClient.request is not a function)
My question is: Is it possible for me to use a setup like this, asking the user to log in and grant permissions once, and then somehow save that to their user session via passport?
I had the same question, but I was able to access Google Gmail API functionalities along with Passport.js user authentication by specifying 'scopes' using the following process.
First, create a file to setup the passport-google-strategy in nodejs as follows.
passport_setup.js
// passport_setup.js — configures the passport-google-oauth20 strategy and
// session (de)serialization, persisting the received OAuth tokens to disk.
const passport = require('passport')
const GoogleStrategy = require('passport-google-oauth20')
const fs = require("fs");
const path = require('path');
//make OAuth2 Credentials file using Google Developer console and download it (credentials.json)
//replace the 'web' key with 'installed' in the downloaded file
var pathToJson = path.resolve(__dirname, './credentials.json');
const config = JSON.parse(fs.readFileSync(pathToJson));
// store only the user id in the session cookie
passport.serializeUser((user, done) => {
done(null, user.id)
})
// look the full user record back up on each request
// NOTE(review): `Users` (the model) and `res` are not defined in this file as
// shown — deserialization would throw a ReferenceError; the error branch
// should call done(err) rather than touch an Express response here.
passport.deserializeUser((id, done) => {
const query = { _id: id }
Users.findOne(query, (err, user) => {
if (err) {
res.status(500).json(err);
} else {
done(null, user)
}
})
})
//create a google strategy including the following details
passport.use(
new GoogleStrategy({
clientID: config.installed.client_id,
clientSecret: config.installed.client_secret,
callbackURL: config.installed.redirect_uris[0]
}, (accessToken, refreshToken,otherTokenDetails, user, done) => {
//in here you can access all token details for the granted API scope
//and a file is created from those details
let tokens = {
access_token: accessToken,
refresh_token: refreshToken,
scope: otherTokenDetails.scope,
token_type: otherTokenDetails.token_type,
expiry_date:otherTokenDetails.expires_in
}
// NOTE(review): writing tokens to a single shared file means concurrent
// logins overwrite each other's tokens — fine for a demo, not for production.
let data = JSON.stringify(tokens);
fs.writeFileSync('./tokens.json', data);
//you will get a "user" object which will include the google id, name details,
//email etc, using that details you can do persist user data in your DB or can check
//whether the user already exists
//after persisting user data to a DB call done
//better to use your DB user objects in the done method
done(null, user)
})
)
Then create your index.js file in nodejs for API route management and to call send method of Gmail API.
Also, run the following command to install "google-apis"
npm install googleapis@39 --save
index.js
// index.js — Express app wiring passport Google auth and a redirect route
// that uses the stored tokens to send mail via the Gmail API.
const express = require("express")
//import passport_setup.js (side effect: registers the Google strategy)
const passportSetup = require('./passport_setup')
const cookieSeesion = require('cookie-session');
const passport = require("passport");
//import google api
const { google } = require('googleapis');
//read credentials file you obtained from google developer console
const fs = require("fs");
const path = require('path');
var pathToJson_1 = path.resolve(__dirname, './credentials.json');
const credentials = JSON.parse(fs.readFileSync(pathToJson_1));
//get Express functionalities to app
const app = express();
// **Middleware Operations**//
//cookie-based session, signed/encrypted with the listed keys, 1 hour lifetime
app.use(cookieSeesion({
name:'Reserve It',
maxAge: 1*60*60*1000,
keys: ['ranmalc6h12o6dewage']
}))
//initialize passport session handling
app.use(passport.initialize())
app.use(passport.session())
app.use(express.json());
//**API urls**//
//route to authenticate users using google by calling google strategy in passport_setup.js
//mention access levels of the API you want in the scope;
//accessType 'offline' + prompt 'consent' ensure a refresh_token is returned
app.get("/google", passport.authenticate('google', {
scope: ['profile',
'email',
'https://mail.google.com/'
],
accessType: 'offline',
prompt: 'consent'
}))
//redirected route after obtaining 'code' from user authentication with API scopes
app.get("/google/redirect", passport.authenticate('google'), (req, res) => {
try {
//read token file you saved earlier in passport_setup.js
var pathToJson_2 = path.resolve(__dirname, './tokens.json');
//get token details as an object
const tokens = JSON.parse(fs.readFileSync(pathToJson_2));
//extract credential details
const { client_secret, client_id, redirect_uris } = credentials.installed
//make OAuth2 object
const oAuth2Client = new google.auth.OAuth2(client_id,
client_secret,
redirect_uris[0])
// set token details to OAuth2 object
oAuth2Client.setCredentials(tokens)
//create gmail object to call APIs
const gmail = google.gmail({ version: 'v1', auth: oAuth2Client })
//call gmail API's message send method
// NOTE(review): `raw:` below is a placeholder — it must be set to a
// base64url-encoded RFC 2822 message; as pasted, the object is incomplete.
gmail.users.messages.send({
userId: 'me',//'me' indicates the currently logged in user
resource: {
raw: //<email content>
}
}, (err, res) => {
if (err) {
console.log('The API returned an error: ' + err)
throw err
}
console.log('Email Status : ' + res.status)
console.log('Email Status Text : ' + res.statusText)
})
res.status(200).json({ status:true })
} catch (err) {
res.status(500).json(err)
}
})
app.listen(3000, () => { console.log('Server Satrted at port 3000') })
You can separate the routes in the index.js file to different files for clarity using express.Router()
If you want to call another Google API service just change this code segment and code below that;
const gmail = google.gmail({ version: 'v1', auth: oAuth2Client })
gmail.users.messages.send(....Send Method internal implementation given above....)
For Google Drive:
const drive = google.drive({version: 'v3', auth: oAuth2Client});
drive.files.list(...Refer "Google Drive API" documentation for more details....)
I believe you can't use passport.js for three-legged oauth for APIs like Sheets or Drive.
Have a look at the Using OAuth for web servers documentation instead.
user835611 has the correct answer, as that page explains everything quite nicely. However, if you still need more, the below link really helped me to understand how this works.
https://github.com/googleapis/google-auth-library-nodejs#oauth2

Categories

Resources