Nodemailer - cb is not a function?

I created a function to send email with Nodemailer, but when I run it my console throws:
TypeError: cb is not a function
at tryHandleCache (C:\Users\Maciek\Desktop\GoParty\backend\node_modules\ejs\lib\ejs.js:226:12)
at Object.exports.renderFile (C:\Users\Maciek\Desktop\GoParty\backend\node_modules\ejs\lib\ejs.js:437:10)
at Object.fn (C:\Users\Maciek\Desktop\GoParty\backend\api\controllers\user\create.js:47:28)
at <anonymous>
at process._tickDomainCallback (internal/process/next_tick.js:229:7)
My helper, sendEmails.js:
const transporter = require('nodemailer').createTransport(sails.config.custom.email)

module.exports = {
  inputs: {
    to: { type: 'string', required: true },
    subject: { type: 'string', required: true },
    html: { type: 'string', required: true }
  },
  exits: {
    success: {
      description: 'All done.'
    }
  },
  fn: async function (inputs, exits) {
    const options = {
      from: sails.config.custom.email.auth.user,
      to: inputs.to,
      subject: inputs.subject,
      html: inputs.html
    }
    transporter.sendMail(options, (err, info) => {
      if (err) {
        return exits.error(err)
      } else {
        return exits.success(info.response)
      }
    })
  }
}
My create.js, where I need to send the email with the correct variables:
const ejsVariable = {
  activeCode: inputs.activateCode
}

// const html = await ejs.renderFile(templatePath, ejsVariable)
// const subject = 'EventZone - potwierdzenie rejestracji'
// const res = await sails.helpers.email.sendEmail(inputs.email, subject, html)
// if (!res) {
//   return this.res.badRequest('Confirmation email has not been send.')
// }
Thanks for any help.

ejs.renderFile takes 4 parameters, the last one is a function. Example usage:
ejs.renderFile(filename, data, options, function (err, str) {
  // str => Rendered HTML string
});
It doesn't return a promise, so you can't await it.
Try replacing
const html = await ejs.renderFile(templatePath, ejsVariable)
const subject = 'xxx'
const res = await sails.helpers.email.sendEmail(inputs.email, subject, html)
with
ejs.renderFile(templatePath, ejsVariable, async (err, html) => {
  const subject = 'xxx'
  const res = await sails.helpers.email.sendEmail(inputs.email, subject, html)
})
UPDATE
You can use util.promisify to make ejs.renderFile return a promise, so it works with async/await, like so:
const util = require('util') //first import `util`
....
const asyncEjsRenderFile = util.promisify(ejs.renderFile)
const html = await asyncEjsRenderFile(templatePath, ejsVariable)
const subject = 'xxx'
const res = await sails.helpers.email.sendEmail(inputs.email, subject, html)

Related

Why mongoose is not awaiting for await new MODEL.save()

I have two servers. One works fine, but the second (a modified variant of the first) does not.
This does not work:
router.post("/", async (req, res, next) => {
  const newBriefAppeal = await new BriefAppeal(req.body);
  let appealId;
  let target;
  let goals;
  let brand;
  let ***;
  try {
    const savedBriefAppeal = await newBriefAppeal.save(function (err, appeal) {
      appealId = appeal.id;
      target = appeal.step01target;
      goals = appeal.step02goals;
      brand = appeal.step03brand;
      *** = appeal.***
    });
    res.status(200).json(savedBriefAppeal);
  } catch (err) {
    res.status(500).json(err);
  }
});
and I got this error:
node:events:491
throw er; // Unhandled 'error' event
^
TypeError: Cannot read properties of undefined (reading 'id')
But this variant in a similar project works fine:
router.post("/", async (req, res, next) => {
  const newAppeal = await new Appeal(req.body);
  let appealId;
  let name;
  let email;
  let phone;
  let subject;
  let message;
  let attachments = [];
  try {
    const savedAppeal = await newAppeal.save(function (err, appeal) {
      appealId = appeal.id;
      name = appeal.name;
      email = appeal.email;
      phone = appeal.phone;
      subject = appeal.subject;
      message = appeal.text;
      attachments = appeal.appealAttach.map((attachment) => ({
        filename: attachment,
        path: "./uploads/media/mailAttachments/" + attachment,
      }));
    });
    res.status(200).json(savedAppeal);
  } catch (err) {
    res.status(500).json(err);
  }
});
Where am I wrong, and why is my appeal undefined?
Because you're passing in a callback. As it says in the documentation, save only returns a promise when you don't pass in a callback:
Returns:
...Returns undefined if used with callback or a Promise otherwise.
Either use the old-style callback signature or use the promise feature.
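As a minimal sketch (reusing the model and field names from the question), the promise-based version of the failing handler could look like this:
router.post("/", async (req, res, next) => {
  try {
    // No callback is passed to save(), so it returns a promise we can await
    const savedBriefAppeal = await new BriefAppeal(req.body).save();
    const appealId = savedBriefAppeal.id;
    const target = savedBriefAppeal.step01target;
    const goals = savedBriefAppeal.step02goals;
    const brand = savedBriefAppeal.step03brand;
    // use appealId, target, goals, brand as needed (e.g. for the email)
    res.status(200).json(savedBriefAppeal);
  } catch (err) {
    res.status(500).json(err);
  }
});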

How to save in Mongoose with await?

The code below works, but I would like to use only async/await, so my question is: how can I turn
cat.save().then(() => console.log('Saved in db'));
into using await instead?
The reason I have mongoose.connection.once() is to only send commands once MongoDB is connected. If this could use await as well, then it would be really great =)
import mongoose from 'mongoose';
import { connectDb } from './modules/connectDb';

const { Schema } = mongoose;
const catSchema = new Schema({ name: String });

(async () => {
  connectDb('testDB');
  mongoose.connection.once('open', () => {
    console.log('MongoDB is connected');
    mongoose.connection.db.listCollections().toArray(function (err, names) {
      console.log(names);
    });
    const catModel = mongoose.model('testColl', catSchema);
    const cat = new catModel({ name: 'Zildjian' });
    cat.save().then(() => console.log('Saved in db'));
  });
})();
connectDb.ts
import mongoose from 'mongoose';
import { strict as assert } from 'assert';
import { readToml } from './readToml';

const db = readToml('./config/database.toml');

export function connectDb(
  database: string = db.database,
  uri: string = db.uri,
  username: string = db.username,
  password: string = db.password,
) {
  assert(typeof uri === 'string');
  assert(typeof database === 'string');
  assert(typeof username === 'string');
  assert(typeof password === 'string');
  const URI = `mongodb+srv://${username}:${password}@${uri}/${database}?retryWrites=true&w=majority`;
  try {
    mongoose.connect(URI);
  } catch (err) {
    console.error(err);
  }
}
Try the code below, and remember that any function containing the await keyword must itself be declared async (the async keyword goes before the function name). In your case the surrounding IIFE is already defined as an async function.
Just change the saving part to the following and you are good to go.
try {
  const catModel = mongoose.model('testColl', catSchema);
  const cat = new catModel({ name: 'Zildjian' });
  const response = await cat.save(); // if error while saving, catch will get executed
  console.log(response); // saved record
  // return success response
} catch (err) {
  console.log('err' + err);
  // return error response
}
First you need to make connectDb async, and then get rid of mongoose.connection.once(), since otherwise all your Mongoose code would need to live inside that callback.
main().catch((err) => console.log(err));

async function main() {
  await connectDb('testDB');
  const catSchema = new mongoose.Schema({ name: String });
  const catDocument = mongoose.model('testColl', catSchema);
  const catObj = new catDocument({ name: 'Zildjian' });
  await catObj.save();
}
connectDb
import mongoose from 'mongoose';
import { strict as assert } from 'assert';
import { readToml } from './readToml';

const db = readToml('./config/database.toml');

export async function connectDb(
  database: string = db.database,
  uri: string = db.uri,
  username: string = db.username,
  password: string = db.password,
) {
  assert(typeof uri === 'string');
  assert(typeof database === 'string');
  assert(typeof username === 'string');
  assert(typeof password === 'string');
  const URI = `mongodb+srv://${username}:${password}@${uri}/${database}?retryWrites=true&w=majority`;
  const options = {
    bufferCommands: false,
    autoCreate: false,
  };
  try {
    await mongoose.connect(URI, options);
  } catch (err: any) {
    throw err.message;
  }
}
In order to use await you have to await a promise.
cat.save() returns a promise, so this should work. However, you can only use await inside an async function, so you should also declare the callback for the open event as async:
(async () => {
  connectDb('testDB');
  mongoose.connection.once('open', async () => {
    console.log('MongoDB is connected');
    mongoose.connection.db.listCollections().toArray(function (err, names) {
      console.log(names);
    });
    const catModel = mongoose.model('testColl', catSchema);
    const cat = new catModel({ name: 'Zildjian' });
    await cat.save();
    console.log('Saved in db');
  });
})();

How to call a javascript function inside another function in node js

I have a file index.js as below, where I am trying to call an async function getConn from another function, createThumbnails. But I am getting the error "failed to connect to DEDC: 1433 - self signed certificate" in the catch block.
const sharp = require('sharp');
const sql = require('mssql')

// CONNECTION CONFIGURATION OF BASE DB
async function getConn() {
  try {
    const config = {
      user: 'sa_user',
      password: '*******',
      server: 'DEDC',
      database: 'DEMO_BASE'
    }
    const pool = await new sql.ConnectionPool(config)
    const req = await pool.connect()
    const conn = await req.request()
    return conn;
  } catch (err) {
    return err;
  }
};

const createThumbnails = async () => {
  try {
    var conn = await getConn();
    const query = `exec DBBASE.get_client_info`
    var clientusers = await conn.query(query);
  } catch (err) {
    return err;
  }
}

createThumbnails()
How exactly do I call the function getConn inside createThumbnails? Please help. Thanks in advance.
It's because you are using a variable with the same name as the function.
Try a different name:
var conn = await getConn();
const query = `exec DBBASE.get_client_info`
var clientusers = await conn.query(query);
You have encountered what is called hoisting. Kyle Simpson has a great explanation of this topic.
var getConn = await getConn();
which means getConn will be initialized first, before the assignment, which is equivalent to
var getConn // initialized
getConn = await getConn() // assignment
That is why you got the error.
The solution here is to store the result under a different variable name, like
var conn = await getConn();
async function getConn() {
  return {
    query: async () => {
      console.log("query called");
    },
  };
}

const createThumbnails = async () => {
  try {
    var conn = await getConn();
    const query = `exec DBBASE.get_client_info`;
    var clientusers = await conn.query(query);
  } catch (err) {
    console.log(err);
  }
};

createThumbnails();
We need to set trustServerCertificate: true in the DB configuration, i.e. in const config.
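As a rough sketch of what that could look like with the mssql connection details from the question (the encrypt flag here is an assumption; adjust it for your environment):
const sql = require('mssql');

// Sketch only: same connection details as in the question, with TLS options added
const config = {
  user: 'sa_user',
  password: '*******',
  server: 'DEDC',
  database: 'DEMO_BASE',
  options: {
    encrypt: true,                 // use TLS for the connection (assumption)
    trustServerCertificate: true   // accept the server's self-signed certificate
  }
};

async function getConn() {
  // Connect a pool and hand back a request object, as in the question
  const pool = await new sql.ConnectionPool(config).connect();
  return pool.request();
}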

DynamoDB update does not console.log any output

I have the following code. It is supposed to receive an SQS message, read the body, then update a DynamoDB record with the information contained in that body. The update is not working, which is one issue, but even stranger, I'm not getting any output from the DynamoDB update. The last line of output is the console.log that prints the SQS message, and then the function ends.
How is this possible? Shouldn't DynamoDB return some kind of output?
console.log('Loading function');
const util = require('util')
const AWS = require('aws-sdk');
var documentClient = new AWS.DynamoDB.DocumentClient();

exports.handler = async (event) => {
  //console.log('Received event:', JSON.stringify(event, null, 2));
  for (const { messageId, body } of event.Records) {
    //const { body } = event.Records[0];
    //console.log(body)
    console.log('SQS message %s: %j', messageId, body);
    const JSONBody = JSON.parse(body)
    //const message = JSON.parse(test["Message"]);
    const id = JSONBody.id;
    const city = JSONBody.City;
    const address = JSONBody.Address;
    const params = {
      TableName: 'myTable',
      Key: {
        ID: ':id',
      },
      UpdateExpression: 'set address = :address',
      ExpressionAttributeValues: {
        ':id': id,
        ':address': address,
        ':sortKey': "null"
      }
      //ReturnValues: "UPDATED_NEW"
    };
    documentClient.update(params, function (err, data) {
      if (err) console.log(err);
      else console.log(data);
    });
  }
  return `Successfully processed ${event.Records.length} messages.`;
};
There are a couple of ways to do this, but I'm not sure about your use case: are the operations critical? Do failed items need to be handled? Does performance need to scale to a large dataset? etc.
// I don't recommend this implementation
const { DynamoDB } = require('aws-sdk');
const documentClient = new DynamoDB.DocumentClient();

exports.handler = async (event) => {
  for (const { messageId, body } of event.Records) {
    console.log('SQS message %s: %j', messageId, body);
    // Parsing JSON without knowing the structure is dangerous;
    // remember to handle the case where an error occurs
    const JSONBody = JSON.parse(body)
    const id = JSONBody.id;
    const address = JSONBody.Address;
    const params = {
      TableName: 'myTable',
      Key: {
        ID: ':id',
      },
      UpdateExpression: 'set address = :address',
      ExpressionAttributeValues: {
        ':id': id,
        ':address': address,
        ':sortKey': "null"
      },
      ReturnValues: "UPDATED_NEW"
    };
    // Wait for each update operation to finish;
    // IO time will be extended
    await documentClient.update(params)
      .promise()
      .then(res => {
        console.log(res)
      })
      .catch(err => {
        console.error(err);
      })
  }
  // Even if an update operation failed, this message is still returned by the Lambda handler
  return `Successfully processed ${event.Records.length} messages.`;
};
// My recommended way
const AWS = require('aws-sdk');
const documentClient = new AWS.DynamoDB.DocumentClient();

exports.handler = async (event) => {
  // All the update operations are fired nearly concurrently,
  // so IO time is reduced
  return Promise.all(event.Records.map(({ messageId, body }) => {
    console.log('SQS message %s: %j', messageId, body);
    // Parsing JSON without knowing the structure is dangerous;
    // remember to handle the case where an error occurs
    const JSONBody = JSON.parse(body)
    const id = JSONBody.id;
    const address = JSONBody.Address;
    const params = {
      TableName: 'myTable',
      Key: {
        ID: ':id',
      },
      UpdateExpression: 'set address = :address',
      ExpressionAttributeValues: {
        ':id': id,
        ':address': address,
        ':sortKey': "null"
      },
      ReturnValues: "UPDATED_NEW"
    };
    return documentClient.update(params)
      .promise()
      .then(res => {
        console.log(res)
      })
  }))
    // When the Lambda handler has finished all the updates, it returns a string
    .then(() => {
      return `Successfully processed ${event.Records.length} messages.`
    })
    // If any update operation fails, the chain rejects here
    // and the Lambda handler returns undefined
    .catch(error => {
      console.error(error);
      // return some error for the lambda response
    })
};
P/S: My two cents: before you do any kind of Lambda development with the Node.js runtime, you should understand the differences between callbacks, promises, and async/await in JavaScript.
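To illustrate the difference, here is a minimal sketch of the same DocumentClient.update call in all three styles (the table name and params below are placeholders, not the question's real data):
const AWS = require('aws-sdk');
const documentClient = new AWS.DynamoDB.DocumentClient();

// Placeholder params for illustration only
const params = {
  TableName: 'myTable',
  Key: { ID: 'some-id' },
  UpdateExpression: 'set address = :address',
  ExpressionAttributeValues: { ':address': 'some address' }
};

// 1) Callback style: update() starts the request and calls back later;
//    an async handler that does not wait for it may finish first.
documentClient.update(params, (err, data) => {
  if (err) console.error(err);
  else console.log(data);
});

// 2) Promise style: .promise() returns a promise you can chain.
documentClient.update(params).promise()
  .then((data) => console.log(data))
  .catch((err) => console.error(err));

// 3) async/await style: must run inside an async function.
(async () => {
  try {
    const data = await documentClient.update(params).promise();
    console.log(data);
  } catch (err) {
    console.error(err);
  }
})();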
Fixed it by making the method synchronous, i.e. I removed async from the function definition.

How can I generate a `V1Job` object for the Kubernetes nodejs API client from a yaml file?

I've done this previously in python using:
with open(path.join(path.dirname(__file__), "job.yaml")) as f:
    body = yaml.safe_load(f)
try:
    api_response = api_instance.create_namespaced_job(namespace, body)
Looking at the source of the Node.js API client:
public createNamespacedJob (namespace: string, body: V1Job, includeUninitialized?: boolean, pretty?: string, dryRun?: string, options: any = {}) : Promise<{ response: http.IncomingMessage; body: V1Job; }> {
How can I generate that V1Job?
I've tried the below but get back a very verbose error message / response:
const k8s = require('@kubernetes/client-node');
const yaml = require('js-yaml');
const fs = require('fs');

const kc = new k8s.KubeConfig();
kc.loadFromDefault();

const k8sApi = kc.makeApiClient(k8s.BatchV1Api);

var namespace = {
  metadata: {
    name: 'test123',
  },
};

try {
  var job = yaml.safeLoad(fs.readFileSync('job.yaml', 'utf8'));
  k8sApi.createNamespacedJob(namespace, job).then(
    (response) => {
      console.log('Created namespace');
      console.log("Success!")
    },
    (err) => {
      console.log(err);
      console.log(job);
      console.log("Err")
    },
  );
} catch (e) {
  console.log(e);
}
V1Job seems to be an ordinary object, so the below worked.
The namespace had to be a string rather than an object:
const k8s = require('@kubernetes/client-node');
const yaml = require('js-yaml');
const fs = require('fs');

const kc = new k8s.KubeConfig();
kc.loadFromDefault();

const k8sApi = kc.makeApiClient(k8s.BatchV1Api);

try {
  var job = yaml.safeLoad(fs.readFileSync('job.yaml', 'utf8'));
  k8sApi.createNamespacedJob("default", job).then(
    (response) => {
      console.log("Success")
    },
    (err) => {
      console.log(e);
      process.exit(1);
    },
  );
} catch (e) {
  console.log(e);
  process.exit(1);
}
This is the same as chris-stryczynski's example with two slight modifications. Also note that, with Node.js 8, chris-stryczynski's example results in the following (at least on my side, upon execution of k8sApi.createNamespacedJob):
TypeError [ERR_INVALID_ARG_TYPE]: The "original" argument must be of type function at promisify
This error does not occur with Node.js 12.
Here is the modified version:
const k8s = require('@kubernetes/client-node');
const yaml = require('js-yaml');
const fs = require('fs');

const kc = new k8s.KubeConfig();
kc.loadFromDefault(); // You might consider using kc.loadFromFile(...) here

const k8sApi = kc.makeApiClient(k8s.BatchV1Api);

try {
  var job = yaml.load(fs.readFileSync('job.yaml', 'utf8')); // Change#1: safeLoad -> load
  k8sApi.createNamespacedJob("default", job).then(
    (response) => {
      console.log("Success")
    },
    (err) => {
      console.log(err); // Change#2: e -> err
      process.exit(1);
    },
  );
} catch (e) {
  console.log(e);
  process.exit(1);
}
