Triggering Cloud Dataflow pipeline from Cloud Function - function times out - javascript

I am trying to trigger a Dataflow pipeline from a Cloud Function which itself is triggered upon upload of a new file in a GCS bucket.
When I upload a file, the Cloud Function gets triggered properly but times out after a few seconds without any Dataflow job being triggered.
Below is my function code:
const google = require('googleapis');
const projectId = "iot-fitness-198120";

exports.moveDataFromGCStoPubSub = function(event, callback) {
  const file = event.data;
  if (file.resourceState === 'exists' && file.name) {
    google.auth.getApplicationDefault(function (err, authClient, projectId) {
      if (err) {
        throw err;
      }
      if (authClient.createScopedRequired && authClient.createScopedRequired()) {
        authClient = authClient.createScoped([
          'https://www.googleapis.com/auth/cloud-platform',
          'https://www.googleapis.com/auth/userinfo.email'
        ]);
      }
      console.log("File exists and client function is authenticated");
      console.log(file);
      const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });
      console.log(`Incoming data: ${file.name}`);
      dataflow.projects.templates.create({
        projectId: projectId,
        resource: {
          parameters: {
            inputFile: `gs://${file.bucket}/${file.name}`,
            outputTopic: `projects/iot-fitness-198120/topics/MemberFitnessData`
          },
          jobName: 'CStoPubSub',
          gcsPath: 'gs://dataflow-templates/latest/GCS_Text_to_Cloud_PubSub',
          staginglocation: 'gs://fitnessanalytics-tmp/tmp'
        }
      }, function(err, response) {
        if (err) {
          console.error("problem running dataflow template, error was: ", err);
        }
        console.log("Dataflow template response: ", response);
        callback();
      });
    });
  }
};
The execution doesn't even log the console.log("File exists and client function is authenticated") line, which tells me it is not even getting that far.
Here's the log output during execution:
2018-03-20 04:56:43.283 GST
DataflowTriggeringFunction
52957909906492
Function execution took 60097 ms, finished with status: 'timeout'
2018-03-20 04:55:43.188 GST
DataflowTriggeringFunction
52957909906492
Function execution started
Any idea why it's not triggering the Dataflow job and yet not throwing an error message?

I have finally modified the code. Got some help from GCP support. Below is the right syntax that works:
var {google} = require('googleapis');

exports.moveDataFromGCStoPubSub = (event, callback) => {
  const file = event.data;
  const context = event.context;

  console.log(`Event ${context.eventId}`);
  console.log(` Event Type: ${context.eventType}`);
  console.log(` Bucket: ${file.bucket}`);
  console.log(` File: ${file.name}`);
  console.log(` Metageneration: ${file.metageneration}`);
  console.log(` Created: ${file.timeCreated}`);
  console.log(` Updated: ${file.updated}`);

  google.auth.getApplicationDefault(function (err, authClient, projectId) {
    if (err) {
      throw err;
    }
    console.log(projectId);
    const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });
    console.log(`gs://${file.bucket}/${file.name}`);

    dataflow.projects.templates.create({
      gcsPath: 'gs://dataflow-templates/latest/GCS_Text_to_Cloud_PubSub',
      projectId: projectId,
      resource: {
        parameters: {
          inputFilePattern: `gs://${file.bucket}/${file.name}`,
          outputTopic: 'projects/iot-fitness-198120/topics/MemberFitnessData2'
        },
        environment: {
          tempLocation: 'gs://fitnessanalytics-tmp/tmp'
        },
        jobName: 'CStoPubSub',
        //gcsPath: 'gs://dataflow-templates/latest/GCS_Text_to_Cloud_PubSub',
      }
    }, function(err, response) {
      if (err) {
        console.error("problem running dataflow template, error was: ", err);
      }
      console.log("Dataflow template response: ", response);
      callback();
    });
  });

  callback();
};

I guess your Cloud Function execution fails because it doesn't satisfy your if statement,
if (file.resourceState === 'exists' && file.name)
I had a similar issue when I started working on Cloud Functions. Modify your index.js file to use var {google} = require('googleapis'); as shown in the working solution above.
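
For context, here is a minimal sketch of the import difference (the exact version boundary is an assumption; newer releases of the googleapis package export a namespace object rather than the client itself):

// Older googleapis releases exported the client directly:
// const google = require('googleapis');

// Newer releases export a { google } namespace object, so destructure it:
const { google } = require('googleapis');

// After that, the Dataflow client is built the same way as before:
// const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });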

Related

Node js error, "myFunction" is not a function

So I am using the fcm-node package in order to send notifications from an Express API route to the app using a registration token.
The function is:
const FCM = require('fcm-node');
const serverKey = ...
const fcm = new FCM(serverKey);

function sendNotification(registrationToken, title, body, dataTitle, dataBody) {
  const message = {
    to: registrationToken,
    notification: {
      title: title,
      body: body
    },
    data: {
      title: dataTitle,
      body: dataBody
    }
  };
  fcm.send(message, (err, response) => {
    if (err) console.log('Error ', err)
    else console.log('response ', response)
  });
};

module.exports = {
  sendNotification
};
I made sure that, when called outside the function, the notification system works. Now, in the API endpoint:
const sendNotification = require('../sendNotification');

router.get('/test', async (req, res, next) => {
  sendNotification('...', 'hi', 'bye', '1', '2');
  return res.send(200);
});
I keep on getting the error "sendNotification" is not a function. What is the cause of this?
The expression require('../sendNotification') gives you an object (because you exported an object from that file), so extract what you need from it:
const { sendNotification } = require('../sendNotification');
try this:
module.exports = sendNotification
and use it like this:
const sendNotification = require('../sendNotification');
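
In other words, the shape of module.exports has to match the way you require it. A short side-by-side sketch (file and function names taken from the question):

// Option 1: export an object, then destructure on import
// in sendNotification.js:
// module.exports = { sendNotification };
// in the route file:
// const { sendNotification } = require('../sendNotification');

// Option 2: export the function itself, then import it directly
// in sendNotification.js:
// module.exports = sendNotification;
// in the route file:
// const sendNotification = require('../sendNotification');

// Either way, sendNotification('token', 'hi', 'bye', '1', '2') is then callable.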

DynamoDB updateItem in Lambda fails silently

I'm attempting to implement a simple counter with a Lambda function, but whenever I test it, the updateItem below simply does not work: none of the log statements in the callback are run at all, and of course the relevant counter in the table is never updated. Here's my lambda function:
'use strict';

const AWS = require('aws-sdk');
const dynamodb = new AWS.DynamoDB({ apiVersion: '2012-08-10' });

let params = {
  TableName: 'Counters',
  Key: {
    'name': { S: 'global' }
  },
  UpdateExpression: 'SET val = val + :inc',
  ExpressionAttributeValues: {
    ':inc': { N: '1' }
  },
  ReturnValues: 'ALL_NEW'
};

exports.handler = async(event) => {
  console.log("Invoked counter-test");
  dynamodb.updateItem(params, function(err, data) {
    console.log("In updateItem callback");
    if (err)
      console.log(err, err.stack);
    else
      console.log(data);
  });
  console.log("Updated counter");
  const response = {
    statusCode: 200,
    body: JSON.stringify('Counter updated'),
  };
  return response;
};
And here's the output of the test:
Response:
{
"statusCode": 200,
"body": "\"Counter updated\""
}
Request ID:
"80e92299-2eea-45e4-9c68-54ccf87199c5"
Function Logs:
START RequestId: 80e92299-2eea-45e4-9c68-54ccf87199c5 Version: $LATEST
2019-05-07T11:34:21.931Z 80e92299-2eea-45e4-9c68-54ccf87199c5 Invoked counter-test
2019-05-07T11:34:21.934Z 80e92299-2eea-45e4-9c68-54ccf87199c5 Updated counter
END RequestId: 80e92299-2eea-45e4-9c68-54ccf87199c5
REPORT RequestId: 80e92299-2eea-45e4-9c68-54ccf87199c5 Duration: 275.91 ms Billed Duration: 300 ms Memory Size: 128 MB Max Memory Used: 67 MB
As you can see, no log statements from the updateItem callback ran.
If I try to update the counter from the command line using aws dynamodb it does work, however:
$ aws dynamodb update-item \
--table-name Counters \
--key '{"name": { "S": "global" }}' \
--update-expression 'SET val = val + :inc' \
--expression-attribute-values '{":inc": {"N": "1"}}' \
--return-values ALL_NEW \
--output json
{
"Attributes": {
"name": {
"S": "global"
},
"val": {
"N": "129"
}
}
}
This is due to the asynchronous nature of JavaScript.
The updateItem method is asynchronous and you don't wait for its callback to fire before returning (you start the updateItem operation and then immediately return a response).
If you want to maintain the callback pattern, you should do:
exports.handler = (event, context, callback) => {
  console.log("Invoked counter-test");
  dynamodb.updateItem(params, function(err, data) {
    console.log("In updateItem callback");
    if (err) {
      console.log(err, err.stack);
      callback(err);
    } else {
      console.log(data);
      console.log("Updated counter");
      const response = {
        statusCode: 200,
        body: JSON.stringify('Counter updated'),
      };
      callback(null, response);
    }
  });
};
Using promises:
exports.handler = (event, context, callback) => {
  console.log("Invoked counter-test");
  dynamodb.updateItem(params).promise()
    .then((data) => {
      console.log(data);
      console.log("Updated counter");
      const response = {
        statusCode: 200,
        body: JSON.stringify('Counter updated'),
      };
      callback(null, response);
    })
    .catch((err) => {
      console.log(err, err.stack);
      callback(err);
    });
};
Using await (recommended):
exports.handler = async (event) => {
  try {
    console.log("Invoked counter-test");
    const data = await dynamodb.updateItem(params).promise();
    console.log(data);
    console.log("Updated counter");
    const response = {
      statusCode: 200,
      body: JSON.stringify('Counter updated'),
    };
    return response;
  } catch (err) {
    console.log(err, err.stack);
    throw err;
  }
};
See also Understanding Asynchronous JavaScript, Deeply Understanding JavaScript Async and Await with Examples and AWS SDK for Javascript - Using JavaScript Promises.

aws upload object to S3 bucket and pass details of data to lambda

Working my way through tutorials for AWS... So I've created an S3 bucket which, when a file is dropped into it, calls my Lambda 'testHelloWorld', which sends an email... this all works fine (see below):
'use strict';
console.log('Loading function');
var aws = require('aws-sdk');
var ses = new aws.SES({
region: 'us-west-2'
});
exports.handler = function(event, context) {
console.log("Incoming: ", event);
// var output = querystring.parse(event);
var eParams = {
Destination: {
ToAddresses: ["johnb#hotmail.com"]
},
Message: {
Body: {
Text: {
Data: "Hey! What is up?"
}
},
Subject: {
Data: "Email Subject!!!"
}
},
Source: "johnb#hotmail.com"
};
console.log('===SENDING EMAIL===');
var email = ses.sendEmail(eParams, function(err, data){
if(err) console.log(err);
else {
console.log("===EMAIL SENT===");
console.log(data);
console.log("EMAIL CODE END");
console.log('EMAIL: ', email);
context.succeed(event);
}
});
};
but I want to extend the email to include data on the file that was uploaded to the bucket. I have found How to trigger my Lambda Function once the file is uploaded to s3 bucket, which gives a Node.js code snippet that should capture the data. I have tried to import this into my existing Lambda:
'use strict';
console.log('Loading function');
var aws = require('aws-sdk');
var ses = new aws.SES({
region: 'us-west-2'
});
var s3 = new aws.S3({ apiVersion: '2006-03-01', accessKeyId: process.env.ACCESS_KEY, secretAccessKey: process.env.SECRET_KEY, region: process.env.LAMBDA_REGION });
exports.handler = function(event, context, exit){
console.log("Incoming: ", event);
// var output = querystring.parse(event);
// Get the object from the event and show its content type
// const bucket = event.Records[0].s3.bucket.name;
// const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
const params = {
Bucket: 'bucketName',
Key: 'keyName',
Source : 'SourceName',
Destination : 'DestinationName',
Message : 'MessageName'
};
s3.getObject(function(err, data){
if (err) {
console.log('ERROR ' + err);
// exit(err);
} else {
// the data has the content of the uploaded file
var eParams = {
Destination: {
ToAddresses: ["johnboy#hotmail.com"]
},
Message: {
Body: {
Text: {
Data: data
}
},
Subject: {
Data: "Email Subject!!!"
}
},
Source: "johnboy#hotmail.com"
};
}
});
console.log('===SENDING EMAIL===');
var email = ses.sendEmail(eParams, function(err, data){
if(err) console.log(err);
else {
console.log("===EMAIL SENT===");
console.log(data);
console.log("EMAIL CODE END");
console.log('EMAIL: ', email);
context.succeed(event);
}
});
};
but this is failing on the params
message: 'There were 3 validation errors:
* MissingRequiredParameter: Missing required key \'Source\' in params
* MissingRequiredParameter: Missing required key \'Destination\' in params
* MissingRequiredParameter: Missing required key \'Message\' in params',
code: 'MultipleValidationErrors',
errors:
The Source, Destination and Message are listed in the params; are they not correctly formatted, so it isn't picking them up?
I can't find much online... any help appreciated.
UPDATE
OK, I've got it working without failing... if I use the test function in the Lambda with the following code...
'use strict';
console.log('Loading function');
var aws = require('aws-sdk');
var ses = new aws.SES({
region: 'us-west-2'
});
var s3 = new aws.S3({ apiVersion: '2006-03-01', accessKeyId: process.env.ACCESS_KEY, secretAccessKey: process.env.SECRET_KEY, region: process.env.LAMBDA_REGION });
exports.handler = function(event, context) {
console.log("Incoming: ", event);
// var output = querystring.parse(event);
var testData = null;
// Get the object from the event and show its content type
// const bucket = event.Records[0].s3.bucket.name;
// const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
const params = {
Bucket: 'bucket',
Key: 'key',
};
s3.getObject(params, function(err, data){
if (err) {
console.log('ERROR ' + err);
exit(err);
} else {
testData = data;
}
});
var eParams = {
Destination: {
ToAddresses: ["jim#him.com"]
},
Message: {
Body: {
Text: { Data: 'testData2' + testData}
},
Subject: {
Data: "Email Subject!!!"
}
},
Source: "jim#him.com"
};
console.log('===SENDING EMAIL===');
var email = ses.sendEmail(eParams, function(err, data){
if(err) console.log(err);
else {
console.log("===EMAIL SENT===");
console.log(data);
console.log("EMAIL CODE END");
console.log('EMAIL: ', email);
context.succeed(event);
}
});
};
I get the email with the body: testData2null
So I tried uploading an image through the s3 bucket and I still get the email with the body testData2null
Is there any way to debug this further, or does anyone know why it is saying null? I never actually tested the code from the other post which passes the data over to the email, I just assumed it would work. Does anyone else know how to obtain the data from the upload, please? Thanks.
You are declaring the var eParams within the callback of s3.getObject, but then you run ses.sendEmail outside of the callback. I think that's why!
You also need to move the ses.sendEmail to inside the callback of s3.getObject if you want to send the data from your object inside the email.
Try this:
// pass the params (Bucket/Key) defined earlier to getObject
s3.getObject(params, function(err, objectData) {
  if (err) {
    console.log('Could not fetch object data: ', err);
  } else {
    console.log('Data was successfully fetched from object');
    var eParams = {
      Destination: {
        ToAddresses: ["johnboy#hotmail.com"]
      },
      Message: {
        Body: {
          Text: {
            Data: objectData
          }
        },
        Subject: {
          Data: "Email Subject!!!"
        }
      },
      Source: "johnboy#hotmail.com"
    };
    console.log('===SENDING EMAIL===');
    var email = ses.sendEmail(eParams, function(err, emailResult) {
      if (err) console.log('Error while sending email', err);
      else {
        console.log("===EMAIL SENT===");
        console.log(objectData);
        console.log("EMAIL CODE END");
        console.log('EMAIL: ', emailResult);
        context.succeed(event);
      }
    });
  }
});
You need to read up on how Node.js works. It is event-based and depends on callbacks and promises. You should do:
s3.getObject(params, function(err, data){
  //This is your callback for s3 API call. DO stuff here
  if (err) {
    console.log('ERROR ' + err);
    exit(err);
  } else {
    testData = data;
    // Got your data. Send the mail here
  }
});
I have added my comments in the code above. Since Node.js is single-threaded, it starts the S3 API call and moves on. When it sends the mail, the S3 API call has not completed yet, so data is null. It is better to use promises here.
Anyway, read up on callbacks and promises in Node.js and how they work. But I hope this explains your logical error.
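
As a rough sketch of that promise-based approach (the aws-sdk v2 .promise() helper and an async handler are assumed; the addresses are placeholders):

const aws = require('aws-sdk');
const s3 = new aws.S3({ apiVersion: '2006-03-01' });
const ses = new aws.SES({ region: 'us-west-2' });

exports.handler = async (event) => {
  // Read the bucket and key from the S3 event record
  const bucket = event.Records[0].s3.bucket.name;
  const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));

  // Wait for the object before building the email body
  const object = await s3.getObject({ Bucket: bucket, Key: key }).promise();

  const eParams = {
    Destination: { ToAddresses: ["someone@example.com"] },
    Message: {
      Body: { Text: { Data: 'Uploaded ' + key + ': ' + object.Body.toString() } },
      Subject: { Data: "Email Subject!!!" }
    },
    Source: "someone@example.com"
  };

  // sendEmail only runs once the object data is actually available
  return ses.sendEmail(eParams).promise();
};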

How to return a documentDB document with Azure functions?

I created an Azure Function as well as a DocumentDB database with a users collection; however, I am stuck at connecting the two of them. I want to just send a username, and the function queries the database and returns the user with that unique username.
I am using node js. Any ideas?
Thanks
First of all, you'd need to install the documentdb module via npm. Use the following command:
npm install documentdb --save
After that, you've finished setting up. Now you can start writing some code to query the collection in the database. The following is an example of querying a family collection with an HTTP-triggered Azure Function.
The folder structure:
node_modules/
.gitignore
config.js
function.json
index.js
package.json
CONFIG.JS
var config = {}
config.endpoint = "https://<documentdb name>.documents.azure.com:443/";
config.primaryKey = "<primary key>";
config.database = {
"id": "FamilyDB"
};
config.collection = {
"id": "FamilyColl"
};
module.exports = config;
INDEX.JS
var documentClient = require("documentdb").DocumentClient;
var config = require("./config");
var databaseUrl = `dbs/${config.database.id}`;
var collectionUrl = `${databaseUrl}/colls/${config.collection.id}`;
var client = new documentClient(config.endpoint, { "masterKey": config.primaryKey });
module.exports = function (context, req) {
context.log('JavaScript HTTP trigger function processed a request.');
if (req.query.name || (req.body && req.body.name)) {
var name = req.query.name || req.body.name;
queryCollectionByName(name).then((result) => {
context.log('result: ', result);
res = {
body: "Result: " + JSON.stringify(result)
};
context.done(null, res);
}, (err) => {
context.log('error: ', err);
res = {
body: "Error: " + JSON.stringify(err)
};
context.done(null, res);
});
}
else {
res = {
status: 400,
body: "Please pass a name on the query string or in the request body"
};
context.done(null, res);
}
};
function queryCollectionByName(name) {
return new Promise((resolve, reject) => {
client.queryDocuments(
collectionUrl,
`SELECT VALUE r.children FROM root r WHERE r.lastName = "${name}"`
).toArray((err, results) => {
if (err) reject(err)
else {
resolve(results);
}
});
});
};
Tested result:
For more details, please refer to https://learn.microsoft.com/en-us/azure/documentdb/documentdb-nodejs-get-started.
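
Adapting the query to the users collection from the question is mostly a matter of changing the SQL. A sketch only: the property name r.username and the collection link are assumptions based on the question, and the parameterized-query form of queryDocuments is used to avoid string concatenation:

function queryUserByUsername(username) {
  return new Promise((resolve, reject) => {
    client.queryDocuments(
      collectionUrl, // e.g. dbs/UsersDB/colls/UsersColl (assumed names)
      {
        query: "SELECT * FROM root r WHERE r.username = @username",
        parameters: [{ name: "@username", value: username }]
      }
    ).toArray((err, results) => {
      if (err) reject(err);
      else resolve(results[0]); // the unique user, if found
    });
  });
}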

Using Node.js to connect to a REST API

Is it sensible to use Node.js to write a standalone app that will connect two REST APIs?
One end will be a POS - Point of sale - system
The other will be a hosted eCommerce platform
There will be a minimal interface for configuration of the service. nothing more.
Yes, Node.js is perfectly suited to making calls to external APIs. Just like everything in Node, however, the functions for making these calls are based around events, which means doing things like buffering response data as opposed to receiving a single completed response.
For example:
// get walking directions from central park to the empire state building
var http = require("http");
url = "http://maps.googleapis.com/maps/api/directions/json?origin=Central Park&destination=Empire State Building&sensor=false&mode=walking";
// get is a simple wrapper for request()
// which sets the http method to GET
var request = http.get(url, function (response) {
// data is streamed in chunks from the server
// so we have to handle the "data" event
var buffer = "",
data,
route;
response.on("data", function (chunk) {
buffer += chunk;
});
response.on("end", function (err) {
// finished transferring data
// dump the raw data
console.log(buffer);
console.log("\n");
data = JSON.parse(buffer);
route = data.routes[0];
// extract the distance and time
console.log("Walking Distance: " + route.legs[0].distance.text);
console.log("Time: " + route.legs[0].duration.text);
});
});
It may make sense to find a simple wrapper library (or write your own) if you are going to be making a lot of these calls.
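For example, a minimal hand-rolled wrapper might look like this (just a sketch, using the built-in http module and assuming the API returns JSON):

var http = require("http");

// Fetch a URL and resolve with the parsed JSON body
function getJSON(url) {
  return new Promise(function (resolve, reject) {
    http.get(url, function (response) {
      var buffer = "";
      response.on("data", function (chunk) {
        buffer += chunk;
      });
      response.on("end", function () {
        try {
          resolve(JSON.parse(buffer));
        } catch (err) {
          reject(err);
        }
      });
    }).on("error", reject);
  });
}

// Usage:
// getJSON("http://maps.googleapis.com/maps/api/directions/json?...").then(function (data) { ... });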
Sure. The node.js API contains methods to make HTTP requests:
http.request
http.get
I assume the app you're writing is a web app. You might want to use a framework like Express to remove some of the grunt work (see also this question on node.js web frameworks).
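As a very small sketch of that (Express is assumed here, and the upstream URL is a placeholder), a route that relays data fetched from another REST API might look like:

var express = require("express");
var http = require("http");
var app = express();

// Relay a JSON resource from an upstream REST API to the caller
app.get("/relay", function (req, res) {
  http.get("http://api.example.com/resource", function (upstream) {
    var buffer = "";
    upstream.on("data", function (chunk) {
      buffer += chunk;
    });
    upstream.on("end", function () {
      res.type("application/json").send(buffer);
    });
  }).on("error", function (err) {
    res.status(502).send({ error: err.message });
  });
});

app.listen(3000);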
/* The sample GET API below covers:
   - DB connection created in a class
   - a common function to execute the query
   - logging through the bunyan library */
// routes file (the require paths added below are assumed)
const express = require('express');
const routes = express.Router();
const { APIResponse } = require('./../commonFun/utils');
const createlog = require('./../lib/createlog');
const { selectQueryData } = require('./queryTransaction');
const DB = require('./db'); // the DB class defined below
//Test API
routes.get('/testapi', (req, res) => {
  res.status(201).json({ message: 'API microservices test' });
});
//create DB instance
const dbObj = new DB();
routes.get('/getStore', (req, res) => {
  try {
    const store_id = req.body.storeID;
    const promiseReturnwithResult = selectQueryData('tablename', store_id, dbObj.conn);
    promiseReturnwithResult.then((result) => {
      APIResponse(200, 'Data fetched successfully', result).then((result) => {
        res.send(result);
      });
    }).catch((err) => { console.log(err); throw err; });
  } catch (err) {
    console.log('Exception caught in getuser API', err);
    const e = new Error();
    if (err.errors && err.errors.length > 0) {
      e.Error = 'Exception caught in getuser API';
      e.message = err.errors[0].message;
      e.code = 500;
      res.status(404).send(APIResponse(e.code, e.message, e.Error));
      createlog.writeErrorInLog(err);
    }
  }
});
module.exports = routes;
//create connection
"use strict"
const mysql = require("mysql");
class DB {
constructor() {
this.conn = mysql.createConnection({
host: 'localhost',
user: 'root',
password: 'pass',
database: 'db_name'
});
}
connect() {
this.conn.connect(function (err) {
if (err) {
console.error("error connecting: " + err.stack);
return;
}
console.log("connected to DBB");
});
}
//End class
}
module.exports = DB
//queryTransaction.js File
selectQueryData= (table,where,db_conn)=>{
return new Promise(function(resolve,reject){
try{
db_conn.query(`SELECT * FROM ${table} WHERE id = ${where}`,function(err,result){
if(err){
reject(err);
}else{
resolve(result);
}
});
}catch(err){
console.log(err);
}
});
}
module.exports= {selectQueryData};
//utils.js file
APIResponse = async (status, msg, data = '',error=null) => {
try {
if (status) {
return { statusCode: status, message: msg, PayLoad: data,error:error }
}
} catch (err) {
console.log('Exception caught in getuser API', err);
}
}
module.exports={
logsSetting: {
name: "USER-API",
streams: [
{
level: 'error',
path: '' // log ERROR and above to a file
}
],
},APIResponse
}
//createlogs.js File
var bunyan = require('bunyan');
const dateFormat = require('dateformat');
const {logsSetting} = require('./../commonFun/utils');
module.exports.writeErrorInLog = (customError) => {
let logConfig = {...logsSetting};
console.log('reached in writeErrorInLog',customError)
const currentDate = dateFormat(new Date(), 'yyyy-mm-dd');
const path = logConfig.streams[0].path = `${__dirname}/../log/${currentDate}error.log`;
const log = bunyan.createLogger(logConfig);
log.error(customError);
}
An easier and more useful tool is a library like Unirest; unirest is a package on npm that is very easy to use, like so:
const unirest = require('unirest');

app.get('/any-route', function(req, res){
  unirest.get("https://rest.url.to.consume/param1/paramN")
    .header("Any-Key", "XXXXXXXXXXXXXXXXXX")
    .header("Accept", "text/plain")
    .end(function (result) {
      res.render('name-of-the-page-according-to-your-engine', {
        layout: 'some-layout-if-you-want',
        markup: result.body['any-property'],
      });
    });
});
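(This assumes an existing Express app; unirest needs to be installed first, e.g. with npm install unirest.)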
