loopback send email with attachment not working - javascript

I have a career PersistedModel for storing data in the database, and an attachment model (file storage) for storing files in some location. Now I want to send an email with the data. I am able to send only the career data, but I want to send the attachment with the same email as well. I cannot fetch the file name because it is not in the career model — it is in the attachment model. How do I get the file name and send it? Please help me out.
career.js
const app = require('../../server/server');
module.exports = function(Career) {
Career.afterRemote('create', function(context, remoteMethodOutput, next) {
next();
console.log(remoteMethodOutput)
Career.app.models.Email.send({
to: 'lakshmipriya.l#gmail.com',
from: 'lakshmipriya.l#gmail.com',
subject: 'my subject',
html: 'Hello-world',
attachments: [
{
path: '../files/resume/'+remoteMethodOutput.resume,
}
],
}, function(err, mail) {
// console.log(context.result.email)
console.log('email sent!');
cb(err);
});
});
};
attachment.json
{
"name": "attachment",
"base": "Model",
"idInjection": true,
"options": {
"validateUpsert": true
},
"properties": {},
"validations": [],
"relations": {},
"acls": [],
"methods": {}
}
My project structure where i used to store the files is

Using absolute path for your files is always more robust than relative path. Use __dirname for that :
const filePath = __dirname + '/files/resume/' + remoteMethodOutput.resume;
If you need to go up one level, then enter the files directory, you need Node's path module to resolve it :
const path = require("path"),
filePath = path.normalize(__dirname + '/../files/resume/' + remoteMethodOutput.resume)

Related

Loopback passport-facebook for multiple user models

I'm using passport to authenticate users for my application using Facebook.
In my application, I have 2 different users: customers and professionals (transitioning to "experts").
I followed the Loopback docs to implement the loopback-component-passport package and passport-facebook and have it working for my customers.
Because I use custom User models, I followed this SO post on how to get that fixed (note: I still use the built-in AccessToken model).
Angular Frontend
I use an Angular frontend. To redirect the user back to my frontend, I created a bootscript that looks like this:
'use strict';
module.exports = function(app) {
var router = app.loopback.Router();
var cookieParser = require('cookie');
// Redirect user to page set in env.APP_HOST
router.get('/auth/customer/success', function(req, res, next) {
const cookie = req.headers.cookie;
var result = cookieParser.parse(cookie);
res.redirect(process.env.APP_HOST + '/auth/facebook/?token=' +
result['access_token'] + '&id=' + result['userId'] +
'&err=&fbsr=');
});
router.get('/auth/expert/success', function(req, res, next) {
const cookie = req.headers.cookie;
var result = cookieParser.parse(cookie);
res.redirect(process.env.APP_HOST + '/expert/auth/facebook/?token='
+ result['access_token'] + '&id=' + result['userId'] +
'&err=&fbsr=');
});
app.use(router);
};
When I authenticate as a customer, I should be redirected to https://example.com/auth/customer/success
When I authenticate as a professional/expert, I should be redirected to https://example.com/auth/expert/success.
Setup a second passport-facebook
I have to load the configuration for both the customer and professional/expert which I'm doing in server.js. I created 2 providers.json files (providers.customer.json and provider.professional.json) containing their specific info.
This is how I load both configurations (perhaps a little messy):
// Two PassportConfigurator instances are built from the same
// loopback-component-passport module; both wrap the same `app`.
var passportCustomer = require('loopback-component-passport');
var PassportConfiguratorCustomer = passportCustomer.PassportConfigurator;
var passportConfiguratorCustomer = new PassportConfiguratorCustomer(app);
var passportProfessional = require('loopback-component-passport');
var PassportConfiguratorProfessional = passportProfessional.PassportConfigurator;
var passportConfiguratorProfessional = new PassportConfiguratorProfessional(app);
// Setup passport configuration
var passportCustomerConfig = {};
var passportProfessionalConfig = {};
try {
passportCustomerConfig = require('./providers.customer.json');
passportProfessionalConfig = require('./providers.professional.json');
} catch(err) {
// Missing/invalid provider config is unrecoverable, so exit immediately.
console.log('Please configure the passport strategy in providers.customer.json');
console.error(err);
process.exit(1); // Fatal error
}
// Init passport for customer
passportConfiguratorCustomer.init(false);
passportConfiguratorProfessional.init(false);
// Setup passport models
// Each configurator is bound to its own user/identity/credential models.
passportConfiguratorCustomer.setupModels({
userModel: app.models.Customer,
userIdentityModel: app.models.UserIdentityCustomer,
userCredentialModel: app.models.UserCredentialsCustomer
});
passportConfiguratorProfessional.setupModels({
userModel: app.models.Professional,
userIdentityModel: app.models.UserIdentityProfessional,
userCredentialModel: app.models.UserCredentialsProfessional
});
// Configure passport for customer
for(var s in passportCustomerConfig) {
var c = passportCustomerConfig[s];
// Providers default to session support unless explicitly disabled.
c.session = c.session !== false;
passportConfiguratorCustomer.configureProvider(s, c);
}
// Configure passport for professional/expert
for(var s in passportProfessionalConfig) {
var c = passportProfessionalConfig[s];
c.session = c.session !== false;
// passportConfiguratorProfessional.configureProvider(s, c);
// NOTE(review): professional providers are registered on the *customer*
// configurator below, so both JSON files configure the same instance and
// later registrations with the same strategy name can clobber earlier ones —
// presumably the cause of every login redirecting to the expert endpoint.
// Verify against loopback-component-passport's configureProvider behavior.
passportConfiguratorCustomer.configureProvider(s, c);
}
The actual problem
I have 2 different apps in Facebook (1 for customers, 1 for professionals/experts). When I authenticate using localhost:3000/auth/customer/facebook or localhost:3000/auth/expert/facebook I see that both apps are used for the correct endpoint. But no matter what endpoint I use, after authentication I'm always redirected to http://example.com/expert/auth/facebook
So my question is: How can I fix this issue so that customers are redirected to the customer endpoint and experts/professionals are redirected to their expert endpoint?
Additional information
Registration works fine, I can find customers in customer table and expert in expert table in my database
For reference: providers.customer.json
{
"facebook-login": {
"provider": "facebook",
"module": "passport-facebook",
"clientID": "OhOh, I removed it :)",
"clientSecret": "Supa Dupa secret",
"callbackURL": "/auth/customer/facebook/callback",
"authPath": "/auth/customer/facebook",
"callbackPath": "/auth/customer/facebook/callback",
"successRedirect": "/auth/customer/success",
"failureRedirect": "/auth/customer/failure",
"scope": ["email"],
"failureFlash": true,
"profileFields" : ["locale", "name", "email"]
},
"facebook-link": {
"provider": "facebook",
"module": "passport-facebook",
"clientID": "OhOh, I removed it :)",
"clientSecret": "Supa Dupa secret",
"callbackURL": "/link/customer/facebook/callback",
"authPath": "/link/customer/facebook",
"callbackPath": "/link/customer/facebook/callback",
"successRedirect": "/auth/customer/success",
"failureRedirect": "/auth/customer/failure",
"scope": ["email"],
"link": true,
"failureFlash": true
}
}
For reference: providers.professional.json
{
"facebook-login": {
"provider": "facebook",
"module": "passport-facebook",
"clientID": "Aaaaand",
"clientSecret": "It's gone",
"callbackURL": "/auth/expert/facebook/callback",
"authPath": "/auth/expert/facebook",
"callbackPath": "/auth/expert/facebook/callback",
"successRedirect": "/auth/expert/success",
"failureRedirect": "/auth/expert/failure",
"scope": ["email"],
"failureFlash": true,
"profileFields" : ["locale", "name", "email"]
},
"facebook-link": {
"provider": "facebook",
"module": "passport-facebook",
"clientID": "Aaaaand",
"clientSecret": "It's gone",
"callbackURL": "/link/expert/facebook/callback",
"authPath": "/link/expert/facebook",
"callbackPath": "/link/expert/facebook/callback",
"successRedirect": "/auth/expert/success",
"failureRedirect": "/auth/expert/failure",
"scope": ["email"],
"link": true,
"failureFlash": true
}
}
So I was able to fix this by using the PassPortConfigurator#configureProvider method. Instead of using the JSON files to setup the facebook authentication, I did it in JS.
var passportCustomer = require('loopback-component-passport');
var PassportConfiguratorCustomer = passportCustomer.PassportConfigurator;
var passportConfiguratorCustomer = new PassportConfiguratorCustomer(app); // Note the first letter is not a capital one
// Init passport for customer
passportConfiguratorCustomer.init(false);
// Setup passport models
passportConfiguratorCustomer.setupModels({
  userModel: app.models.Customer,
  userIdentityModel: app.models.UserIdentityCustomer,
  userCredentialModel: app.models.UserCredentialsCustomer
});
// Load configuration here instead of the JSON file.
// Bug fix: the original called `passportConfiguratorProfessional.configureProvider`,
// but that variable is never defined in this snippet (ReferenceError at runtime);
// use the configurator instantiated above.
passportConfiguratorCustomer.configureProvider('facebook-login-expert', {
  module: 'passport-facebook',
  clientID: XXX,
  clientSecret: XXX,
  callbackURL: '/auth/expert/facebook/callback',
  authPath: '/auth/expert/facebook',
  callbackPath: '/auth/expert/facebook/callback',
  successRedirect: '/auth/expert/success',
  failureRedirect: '/auth/expert/failure',
  scope: ['email'],
  failureFlash: true,
  profileFields: ['locale', 'name', 'email']
});

How to get real name from Google API via Node Client

My web app allows signing up / signing in with a Google account. I am using the following code to obtain user info from Google:
var scopes = ['profile', 'email'];
var url = oauth2Client.generateAuthUrl({ access_type: 'offline', scope: scopes });
router.route('/authorize').post((req, res) => {
  // Bug fix: `code` was assigned without a declaration, creating an implicit
  // global that would be shared (and clobbered) across concurrent requests.
  const code = req.body.code;
  oauth2Client.getToken(code, (err, tokens) => {
    if (err) return // error handler
    oauth2Client.verifyIdToken(tokens.id_token, clientId, (err, login) => {
      if (err) return // error handler
      console.log(login.getPayload()); // this gives me the JSON object below
    });
  });
});
I've tried adding different scopes, but I always just get the same info, which doesn't include the user's real name:
{ azp: 'stuffblahblah',
aud: 'stuffblahblah',
sub: 'google-id-here',
email: 'email@address.com',
email_verified: true,
at_hash: 'some-hash',
iss: 'accounts.google.com',
iat: 1234567890,
exp: 1234567890 }
Despite some documentation suggesting it's possible to get info like real name in the id_token (see https://developers.google.com/identity/sign-in/android/backend-auth), I couldn't get it to return that info with the .getToken method. However, I was able to get it by requesting the info in a separate request via the access token:
let url = 'https://www.googleapis.com/oauth2/v3/userinfo?access_token=' + access_token;
request(url, (err, response, body) => {
  if (err) {
    // Bug fix: the original fell through and logged `body` even on error.
    console.log('error');
    return;
  }
  console.log(body);
});
And the body looks like this:
{
"sub": "4319874317893142",
"name": "My Real name",
"given_name": "My First Name",
"family_name": "My Last Name",
"profile": "https://plus.google.com/link_to_my_profile",
"picture": "https://lh4.googleusercontent.com/link_to_my_pic.jpg",
"email": "email@address.com",
"email_verified": true,
"gender": "male",
"locale": "en"
}
Still wishing there was a way to grab the real name in my initial request, rather than having to make a separate one, but this works well enough.

Google drive API get thumbnail

I want to get a thumbnail that google drive creates for the stored pdf files. With this function I am listing all the files:
listFiles: function (folderId, onData, onError) {
drive.files.list({
auth: jwtClient,
q: "'" + folderId + "' in parents"
}, function (err, res) {
console.log(res.files)
}
});
The output of a console log for each file looks like this:
{
kind: 'drive#file',
id: '0BapkdhpPsqtgf01YbEJRRlhuaVUf',
name: 'file-name.pdf',
mimeType: 'application/pdf'
}
When I check the google's documentation, it says that metadata of a file should contain all of these properties: https://developers.google.com/drive/v3/reference/files
and there I found: contentHints.thumbnail.image How do I access it?
Ok, the thing is to get the metadata of a file I needed to use a files.get function, not files.list. Another thing is that in the call, field parameter needs to be set. For example:
drive.files.get({
auth: jwtClient,
fileId: fileId,
fields : "thumbnailLink"
})
You can use the fields parameter to change which metadata fields are returned:
drive.files.list({
auth: jwtClient,
q: "'" + folderId + "' in parents",
fields: "files(id, name, mimeType, thumbnailLink)"
}, function (err, res) {
console.log(res.files)
});
You can get essential metadata with following api call.
Workaround for python:
self.__results = drive_service.files().list(pageSize=10, supportsAllDrives = True, fields = "files(kind, id, name, mimeType, webViewLink, hasThumbnail, thumbnailLink, createdTime, modifiedTime, owners, permissions, iconLink, imageMediaMetadata)").execute()
or
self.__results = drive_service.files().list(pageSize=10, supportsAllDrives = True, fields = "*").execute()
to get complete metadata.
Reference: https://developers.google.com/drive/api/v3/reference/files#resource

How to add image path in user profile using multer-gridfs-storage ,express, mongodb

below is my file upload code
/** Setting up storage using multer-gridfs-storage */
var storage = GridFsStorage({
  gfs: gfs,
  filename: function (req, file, cb) {
    var datetimestamp = Date.now();
    // Keep the original extension while making the stored name unique.
    cb(null, file.fieldname + '-' + datetimestamp + '.' + file.originalname.split('.')[file.originalname.split('.').length - 1]);
  },
  /** With gridfs we can store additional meta-data along with the file */
  metadata: function (req, file, cb) {
    cb(null, { originalname: file.originalname });
  },
  root: 'ctFiles' // root name for collection to store files into
});
var upload = multer({ // multer settings for single upload
  storage: storage
}).single('file');
/** API path that will upload the files */
app.post('/upload', function (req, res) {
  upload(req, res, function (err) {
    if (err) {
      res.json({ error_code: 1, err_desc: err });
      return;
    }
    // Bug fix: multer attaches the uploaded file to req.file, not res.file;
    // the original also read `res[0].file`, which throws (res is not an array).
    console.log(req.file);
    res.json({ error_code: 0, err_desc: null });
  });
});
i want to store user name, email and file path in user collection
this is my UserSchema
// Mongoose schema for application users; `filepath` stores the location of
// the user's uploaded file.
var UserSchema = new Schema({
name: String,
email: {
type: String,
lowercase: true // normalize emails so lookups are case-insensitive
},
filepath: String,
});
this is how image has stored in collection
{
"_id" : ObjectId("58fb894111387b23a0bf2ccc"),
"filename" : "file-1492879681306.PNG",
"contentType" : "image/png",
"length" : 67794,
"chunkSize" : 261120,
"uploadDate" : ISODate("2017-04-22T16:48:01.350Z"),
"aliases" : null,
"metadata" : {
"originalname" : "Front.PNG"
},
"md5" : "404787a5534d0479bd55b2793f2a74b5"
}
this is my expectation result: in user collection i should get data like this
{
"name" :"asdf",
"email" : "asdf@gmail.com",
"filepath":"file/file-1492879681306.PNG"
}
There is a difference between storing in GridFS and storing in an ordinary MongoDb collection. The first is designed to efficiently store files with optionally any additional information while the later allows you to set any schema and store any information. You cannot deal with both the same way.
If what you want is to establish a relationship between a file and a schema in your application, you can do it like this.
Store the desired schema information in the metadata of the file
Pro: All additional data is stored within the file and deleting the file automatically
cleans the additional information.
Con: Queries could become complex because all of them must be prefixed by a metadata
field and all information is mixed together.
This could be the output of one of your files
{
"_id" : ObjectId("58fb894111387b23a0bf2ccc"),
"filename" : "file-1492879681306.PNG",
"contentType" : "image/png",
"length" : 67794,
"chunkSize" : 261120,
"uploadDate" : ISODate("2017-04-22T16:48:01.350Z"),
"aliases" : null,
"metadata" : {
"originalname" : "Front.PNG",
"name" :"asdf",
"email" : "asdf@gmail.com",
"filepath":"file/file-1492879681306.PNG"
},
"md5" : "404787a5534d0479bd55b2793f2a74b5"
}
Setting information like this is easy, just change the metadata function a little bit.
....
/** With gridfs we can store aditional meta-data along with the file */
metadata: function(req, file, cb) {
var metadata = {
originalname: file.originalname,
// get this information somehow
name :"asdf",
email : "asdf@gmail.com",
filepath:"file/file-1492879681306.PNG"
};
cb(null, metadata);
},
....
and this is how you should access them although you could use db.collection or Mongoose to the same purpose.
const mongodb = require('mongodb');
const GridFSBucket = mongodb.GridFSBucket;
const MongoClient = mongodb.MongoClient;
MongoClient.connect('mongodb://yourhost:27017/database').then((db) => {
  const bucket = new GridFSBucket(db, {bucketName: 'ctFiles'});
  bucket
    // Bug fix: use dot notation. `{metadata: {email: ...}}` is an exact
    // subdocument match and would miss files whose metadata contains any
    // other field (originalname, name, filepath, ...).
    .find({'metadata.email': 'asdf@gmail.com'})
    .toArray()
    .then((fileInfoArr) => {
      console.log(fileInfoArr);
    });
}).catch((err) => {
  // The original left the connection promise without a rejection handler.
  console.error('MongoDB connection failed:', err);
});
Then you can use the fileInfo array to create streams and read the file.
Store the schema independently and add an ObjectId field that points to the id of the stored file.
Pro: Queries and updates over your UserSchema are easier to compose and understand because schema definitions are stored in different collections.
Con: Now you have two collections to worry about and have to manually keep
both in sync; when a file is deleted you should delete the User data and vice versa.
This is how your UserSchema could look like
// User schema variant that keeps a reference to the GridFS file so the two
// collections can be joined manually.
var UserSchema = new Schema({
name: String,
email: {
type: String,
lowercase: true // normalize emails so lookups are case-insensitive
},
filepath: String,
fileId: Schema.Types.ObjectId // _id of the stored GridFS file
});
and this is how you access the files
// Look up a user by email, then stream that user's file out of GridFS.
UserModel.findOne({'email' : 'asdf@gmail.com'}, function (err, user) {
  // deal with error
  // store and get the db object somehow
  const bucket = new GridFSBucket(db, {bucketName: 'ctFiles'});
  // A download stream reads the file from GridFs.
  // Bug fix: the original had an unbalanced `))` here — a syntax error.
  const readStream = bucket.openDownloadStream(user.fileId);
  readStream.pipe(/* some writable stream */);
});
Remember that GridFs stores files, therefore you should use streams whenever possible to read and write data handling backpressure correctly.
PD:
Stuff like root belong to GridStore which is obsolete. You should use GridFSBucket whenever possible. The new version of multer-gridfs-storage also deals with bucketName instead of root.

Strongloop app does not load local datasource

I want to use different environment specific datasource configurations in a Strongloop app. I saw at https://docs.strongloop.com/display/public/LB/Environment-specific+configuration that the priority of configurations are:
Environment-specific configuration, based on the value of NODE_ENV;
for example, server/config.staging.json.
Local configuration file;
for example, server/config.local.json.
Default configuration file;
for example, server/config.json.
I have declared three datasource conf files:
datasources.json:
{}
datasources.local.json:
{
"db": {
"name": "db",
"connector": "loopback-connector-mongodb",
"host":"127.0.0.1",
"port": "27017",
"database": "woowDev"
}
}
and datasources.staging.js:
// Staging datasource: connection details come from OpenShift-provided
// environment variables instead of being hard-coded.
module.exports = {
db: {
connector: 'mongodb',
// NOTE(review): loopback-connector-mongodb documents the `host` setting;
// verify that `hostname` is actually honored by the connector.
hostname: process.env.OPENSHIFT_MONGODB_DB_HOST,
port: process.env.OPENSHIFT_MONGODB_DB_PORT,
user: process.env.OPENSHIFT_MONGODB_DB_USERNAME,
password: process.env.OPENSHIFT_MONGODB_DB_PASSWORD,
database: 'woow'
}
};
Now unless I put the configuration of datasources.local.json in datasources.json it does not work. I keep getting the error: AssertionError: User is referencing a dataSource that does not exist: "db"
I tried also to add the local conf to staging conf and defined the variable NODE_ENV, but it would not load neither datasource.staging.js. I defined the NODE_ENV by doing:
export NODE_ENV=staging
I used node-debug to track down the issue. And it came in this particular source strongloop file:
node_modules/loopback-boot/lib/config-loader.js
the function:
// (Quoted from loopback-boot's config-loader.) Merges environment-specific
// datasource settings from `config` into `target`.
function mergeDataSourceConfig(target, config, fileName) {
// Iterates only over keys already present in `target` (datasources.json),
// so datasources defined solely in an environment-specific file are never
// merged in — which is why each datasource must at least be declared
// (even as an empty object) in the master datasources.json.
for (var ds in target) {
var err = applyCustomConfig(target[ds], config[ds]);
if (err) {
throw new Error('Cannot apply ' + fileName + ' to `' + ds + '`: ' + err);
}
}
}
will not merge configs if "db" key is not defined in the master file i.e. datasources.json.
So, I just modified the datasources.json to:
{
"db": {}
}
and it worked!
Maybe it is my fault but the documentation is not clear enough.
Trick is to add all the datasources(memory/redis/mongo/postgres) in datasources.json and then override parameters in datasources.local.js or datasources.staging.js or datasources.production.js
Sample file configuration:
datasources.json
{
"db": {
"name": "db",
"connector": "memory"
},
"redisDS": {
"name": "redisDS",
"connector": "redis"
},
"testPostgress": {
"port": 5432,
"name": "localPostgress",
"user": "akumar",
"connector": "postgresql"
}
}
datasources.staging.js
// Staging overrides: every datasource declared in datasources.json appears
// here; only the parameters that differ need to be specified, since the
// files are merged key by key.
module.exports = {
db:{
connector: 'memory'
},
redisDS:{
connector: 'redis'
},
testPostgress:{
// Overrides only the database name; port/user/connector fall through
// from datasources.json.
database:'stagingPostgress'
}
};
Loopback will override database name in this case similarly you can override other datasource parameters like port and user

Categories

Resources