Node.js 6.10
The process is as follows:
1) An XML file is dropped into an S3 bucket,
2) which triggers a Lambda,
3) which calls a Step Function, which calls another Lambda.
4) That Lambda fetches the XML from the bucket with
var s3Bucket = new aws.S3( { params: {Bucket: 'bucketName'} } );
var xmlFile = s3Bucket.getObject('fileName.xml');
5) and sends an email with the contents of the XML as a string:
let index = function index(event, context, callback) {
    var fileName = event.fileName;
    var bucketName = event.bucketName;
    var todaysDate = event.todaysDate;

    var eParams = {
        Destination: {
            ToAddresses: ["emailAddress"]
        },
        Message: {
            Body: {
                //Text: { Data: 'file: ' + fileName + ' bucketName: ' + bucketName + ' todaysDate: ' + todaysDate}
                Text: { Data: 'file: ' + JSON.stringify(xmlFile)}
            },
            Subject: {
                Data: "Email Subject!!!"
            }
        },
        Source: "emailAddress"
    };

    console.log('===SENDING EMAIL===');
    var email = ses.sendEmail(eParams, function(err, data){
        if(err) console.log(err);
        else {
            console.log("===EMAIL SENT===");
            console.log(data);
            console.log("EMAIL CODE END");
            console.log('EMAIL: ', email);
            context.succeed(event);
        }
    });
};

module.exports.init = (event, context, callback) => {
};

exports.handler = index;
I know that sending the email works, because if I uncomment the line
Text: { Data: 'fileName: ' + fileName + ' bucketName: ' + bucketName + ' todaysDate: ' + todaysDate}
and comment out Text: { Data: 'file: ' + JSON.stringify(xmlFile)},
it sends the email with the correct fileName, bucketName, and date.
But when I try to include Text: { Data: 'file: ' + JSON.stringify(xmlFile)},
the logs show the error:
TypeError: Converting circular structure to JSON at Object.stringify (native) at index (/var/task/index.js:39:51)
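For context on that error: in the v2 aws-sdk, calling getObject without a callback returns an AWS.Request object rather than the file contents, and a Request contains circular references, which is exactly what JSON.stringify rejects. A minimal sketch of the failure mode (bucket and key names are the placeholders from above):

var aws = require('aws-sdk');
var s3Bucket = new aws.S3({ params: { Bucket: 'bucketName' } });

// Without a callback, this returns an AWS.Request, not the XML payload
var xmlFile = s3Bucket.getObject({ Key: 'fileName.xml' });
JSON.stringify(xmlFile); // TypeError: Converting circular structure to JSON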
UPDATE
Thanks @Michael and @Khand for the replies. I have tried what you suggested:
var params = {
    Bucket: "bucketName",
    Key: "fileName.xml"
};

s3.getObject(params, function(err, data) {
    if (err) {
        console.log(err, err.stack); // an error occurred
    } else {
        console.log("Returned data object " + data); // successful response
        console.log("Returned xml " + data.body);
    }
});
The console is returning:
Returned data object [object Object]
Returned xml undefined
and yes, the bucket does contain the named file. The [object Object] is populated, but the body tag is undefined.
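For what it's worth, the v2 SDK returns the object payload under Body with a capital B, as a Buffer; data.body is undefined simply because JavaScript property names are case-sensitive. A minimal sketch of reading it as text (same params as above):

s3.getObject(params, function(err, data) {
    if (err) return console.log(err, err.stack);
    // Note the capital B: the payload is a Buffer under data.Body
    console.log("Returned xml " + data.Body.toString('utf-8'));
});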
Related
I want to send an SMS from an AWS SNS function, where we provide the mobile number and text message and use SNS to deliver that message, but I got this error:
{"Error":{"message":"Missing required key 'Message' in
params","code":"MissingRequiredParameter","time":"2022-04-11T16:14:53.306Z"}}
Can anyone suggest a solution?
router.post('/send-text', (req, res) => {
    var dynamoDBConfiguration = {
        "accessKeyId": "X...",
        "secretAccessKey": "l...",
        "region": "eu-west-3"
    };
    AWS.config.update(dynamoDBConfiguration);

    console.log("Message = " + req.body.message);
    console.log("Number = " + req.body.number);
    console.log("Subject = " + req.body.subject);

    var params = {
        Message: req.query.message,
        PhoneNumber: '+' + req.query.number,
        MessageAttributes: {
            'AWS.SNS.SMS.SenderID': {
                'DataType': 'String',
                'StringValue': req.query.subject
            }
        }
    };

    var publishTextPromise = new AWS.SNS({ apiVersion: '2010-03-31' }).publish(params).promise();
    publishTextPromise.then(
        function (data) {
            res.end(JSON.stringify({ MessageID: data.MessageId }));
        }).catch(
        function (err) {
            res.end(JSON.stringify({ Error: err }));
        });
});
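One thing stands out in the snippet itself: the log statements read from req.body, but params is built from req.query. If the values are actually posted in the body, req.query.message will be undefined, which would produce exactly this MissingRequiredParameter error. A minimal sketch assuming the values arrive in the POST body, as the logs suggest:

var params = {
    Message: req.body.message,           // was req.query.message
    PhoneNumber: '+' + req.body.number,  // was req.query.number
    MessageAttributes: {
        'AWS.SNS.SMS.SenderID': {
            'DataType': 'String',
            'StringValue': req.body.subject
        }
    }
};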
I have a strange thing happening when running a Google Cloud Function. The function starts and logs the user ID and job ID as expected. Then it calls the Firestore DB and basically sits there for one minute, sometimes two, before it executes the first call... It was even timing out at 240 seconds.
const AWS = require('aws-sdk');
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp();
exports.run = functions.https.onCall((data, context) => {
    var id = data.id;
    var userid = data.uid;
    var retry = data.retry;
    var project;
    var db = admin.firestore();
    var storage = admin.storage();

    console.log("Starting Collect");
    console.log("UID: " + userid);
    console.log("id ID: " + id);

    // Times out on this call
    db.collection("users").doc(userid).collection("ids").doc(id).get().then(function(doc) {
        console.log("Loaded DB");
        project = doc.data();
        createexport();
    }).catch(function(err) {
        console.log(err);
        error('Loading DB Error, ' + err, false);
    });
    function createexport() {
        db.collection("exports").doc(id).set({
            status: 'Collecting',
            stage: 'Export Checker',
            percent: 0,
            id: id,
        }).then(function() {
            console.log("Creating Export");
            setdb();
        }).catch(function(err) {
            error("Error creating export in database :" + err, true)
        });
    }

    function setdb() {
        db.collection("users").doc(userid).collection("ids").doc(id).update({
            status: 'Analyzing Files',
            stage: 'Collecting'
        }).then(function() {
            getaudio();
        }).catch(function(err) {
            error("Error updating users id in database :" + err, true)
        });
    }

    function getaudio() {
        const from = userid + '/projects/' + project.originalproject.id + '/audio.' + project.originalproject.extension;
        const to = userid + '/' + id + '/audio.' + project.originalproject.extension;
        storage.bucket('---------').file(from).copy(storage.bucket('---------').file(to)).then(function() {
            console.log("Collecting files");
            copy2();
        }).catch(function(err) {
            error('Collecting Audio Error, ' + err, true);
        });
    }

    function copy2() {
        const from = userid + '/projects/' + project.originalproject.id + '/overlay.png';
        const to = userid + '/' + id + '/overlay.png';
        storage.bucket('--------.appspot.com').file(from).copy(storage.bucket('---------').file(to)).then(function() {
            updateexport();
        }).catch(function(err) {
            error('Collecting Overlay Error, ' + err, true);
        });
    }

    function updateexport() {
        db.collection("exports").doc(id).update({ status: "Waiting" }).then(function() {
            console.log("All files collected");
            return { status: 'Success' };
        }).catch(function(err) {
            error("Error creating export entry in database :" + err, true)
        });
    }
    function error(evt, evt2) {
        AWS.config.update({ region: "us-east-1" });
        var html;
        var sub = 'Error with id ' + id;
        console.log(evt);
        if (evt2) {
            db.collection('users').doc(userid).collection('ids').doc(id).update({
                status: 'Error'
            }).catch(function(err) {
                console.log(err);
            });
            db.collection("exports").doc(id).update({
                status: 'Error',
                stage: 'Collecting',
                error: evt,
            }).catch(function(err) {
                console.log(err);
            });
            html = `
Username: ${project.username} <br>
UserID: ${userid} <br>
Email: ${project.email} <br>
id: ${id}
`
        } else {
            html = `id: ${id}<br>
UserID: ${userid} <br>
Message: Error logged was: ${evt}
`
        }
        var params = {
            Destination: {
                ToAddresses: [
                    'errors@mail.com'
                ]
            },
            Message: {
                Body: {
                    Html: {
                        Charset: "UTF-8",
                        Data: html
                    },
                },
                Subject: {
                    Charset: 'UTF-8',
                    Data: sub
                }
            },
            Source: 'errors@mail.com',
            ReplyToAddresses: [
                project.email
            ],
        };
        var sendPromise = new AWS.SES({
            apiVersion: "2010-12-01",
            "accessKeyId": "-----------",
            "secretAccessKey": "------------------------",
            "region": "--------",
        }).sendEmail(params).promise();
        sendPromise.then(function(data) {
            return { data: data };
        }).catch(function(err) {
            return { err: err };
        });
    }
});
This seems way too long for a database call of only a few KB. I will attach the cloud log to show the time difference. After this initial slump it then performs as expected.
[Cloud log image]
Does anyone have any ideas as to why this could be happening? Many thanks...
Your function appears to hang because it isn't handling promises correctly. Also, it doesn't appear to be sending a specific response to the client app. The main point of callable functions is to send a response.
I suggest reviewing the documentation, where you will learn that callable functions are required to return a promise that resolves with an object to send to the client app, after all the async work is complete.
Minimally, it will take a form like this:
return db.collection("users").doc(userid).collection("files").doc(id).get().then(function(doc) {
    console.log("Loaded DB");
    project = doc.data();
    return { "data": "to send to the client" };
});
Note that the promise chain is being returned, and the promise itself resolves to an object to send to the client.
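Applied to the code in the question, that means every helper returns its promise and the handler returns the whole chain. A condensed sketch (helper names are from the question; error handling omitted for brevity):

exports.run = functions.https.onCall((data, context) => {
    var db = admin.firestore();
    var project;
    // Return the chain so the function stays alive until all work completes
    return db.collection("users").doc(data.uid).collection("ids").doc(data.id).get()
        .then(function(doc) {
            project = doc.data();
            return createexport(); // each helper must also return its promise
        })
        .then(function() {
            return { status: 'Success' }; // this resolved object is sent to the client
        });
});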
I have a function called postGirl that posts an image of an anime girl to my Twitter account. The images all download fine (the folder contains 400 generated images named girl1.jpg through girl400.jpg), so that doesn't seem to be the issue.
let postGirl = (girl, girlNum) => {
    let numOfGirl = girlNum + 1;
    let girlImage = './img/girl' + numOfGirl + '.jpg';
    var girlImageFinal = girl[3];
    console.log(girlImage);
    let author = girl[0];
    let tags = girl[1];
    let hashtags = girl[2];
    console.log('posting?');

    var b64content = fs.readFileSync(girlImage, { encoding: 'base64' })
    OtakuBot.post('media/upload', { media_data: b64content }, function (err, data, response) {
        var mediaIdStr = data.media_id_string
        var altText = 'Cute Girl'
        var meta_params = { media_id: mediaIdStr, alt_text: { text: altText } }
        console.log('inside media/upload');
        OtakuBot.post('media/metadata/create', meta_params, function (err, data, response) {
            if (!err) {
                let postMessage = '';
                console.log(tags[0]);
                postMessage = 'Cute picture with ' + tags[0] + ' and ' + tags[1] + ' by ' + author + ' #animegirls #' + author + ' #' + hashtags[0] + ' #' + hashtags[1] + ' #' + hashtags[2];
                var params = { status: postMessage, media_ids: [mediaIdStr] }
                OtakuBot.post('statuses/update', params, function (err, data, response) {
                    console.log(data)
                })
            } else {
                console.log(err);
            }
        })
    })
}
I keep getting this error:
Error: media_id field must be provided.
at Object.exports.makeTwitError (C:\Users\Atlow\Documents\otakubot\node_modules\twit\lib\helpers.js:74:13)
at onRequestComplete (C:\Users\Atlow\Documents\otakubot\node_modules\twit\lib\twitter.js:344:25)
at Request.<anonymous> (C:\Users\Atlow\Documents\otakubot\node_modules\twit\lib\twitter.js:364:7)
at Request.emit (events.js:327:22)
at Gunzip.<anonymous> (C:\Users\Atlow\Documents\otakubot\node_modules\request\request.js:1076:12)
at Object.onceWrapper (events.js:421:28)
at Gunzip.emit (events.js:315:20)
at endReadableNT (_stream_readable.js:1327:12)
at processTicksAndRejections (internal/process/task_queues.js:80:21) {
code: null,
allErrors: [
{
request: '/1.1/media/metadata/create.json',
error: 'media_id field must be provided.'
}
],
twitterReply: {
request: '/1.1/media/metadata/create.json',
error: 'media_id field must be provided.'
},
statusCode: 400
}
Did you check that data.media_id_string is not undefined in the callback function?
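In other words, guard the media/upload callback before touching data.media_id_string; something along these lines:

OtakuBot.post('media/upload', { media_data: b64content }, function (err, data, response) {
    if (err || !data || !data.media_id_string) {
        console.log('media/upload failed:', err || data);
        return; // don't call media/metadata/create without a media_id
    }
    var mediaIdStr = data.media_id_string;
    // ...proceed with media/metadata/create as before
});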
My Node.js project converts text to an MP3, saves it to my file system, then uploads it to S3. I want this function to run consecutive times in a loop until the queue is complete, but only the first file uploads successfully. Subsequent file uploads return this error: 400 Your socket connection to the server was not read from or written to within the timeout period. Idle connections will be closed.
Here is my code:
function uploadit () {
    console.log('uploading the verse')
    AWS.config.update({ accessKeyId: '...', secretAccessKey: '...' });
    var s3 = new AWS.S3();
    s3.putObject({
        Bucket: 'myverses',
        Key: book.replace(/ /g, "") + reference.replace(/ /g, "") + ".mp3",
        Body: myvariable,
        ACL: 'public-read'
    }, function (resp) {
        console.log(arguments);
        console.log('Successfully uploaded the verse.');
        addanother();
    });
}
EDIT:
The addanother() function checks whether there are any more files to generate and upload. If so, the following code is executed:
function addverse() {
    connection.query('SELECT versetext, book, mp3, id, reference FROM myverses where mp3 = "empty" limit 1',
        function (error, results, fields) {
            console.log(error);
            var scripture = results[0].versetext;
            var book = results[0].book;
            var reference = results[0].reference.replace(":", " verse ");
            console.log(scripture + " " + book.replace("1", "first").replace("2", "second").replace("3", "third") + " " + reference);
            var myverse = scripture + " " + book.replace("1", "first").replace("2", "second").replace("3", "third") + " " + reference;
            var link = "https://s3.amazonaws.com/myverses/" + book.replace(/ /g, "") + reference.replace(/ /g, "") + ".mp3"

            function linkit() {
                connection.query('update myverses set mp3 = ? where mp3 = "empty" limit 1', [link],
                    function (error, results, fields) {
                        console.log(error)
                    })
            }

            txtomp3.getMp3(myverse, function(err, binaryStream){
                if (err) {
                    console.log(err);
                    return;
                }

                function writeit() {
                    var file = fs.createWriteStream("myverse.mp3");
                    console.log('recording the verse')
                    file.write(binaryStream);
                    file.end();
                }

                function uploadit () {
                    console.log('uploading the verse')
                    AWS.config.update({ accessKeyId: '...', secretAccessKey: '...' });
                    var s3 = new AWS.S3();
                    s3.putObject({
                        Bucket: 'myverses',
                        Key: book.replace(/ /g, "") + reference.replace(/ /g, "") + ".mp3",
                        Body: myvalue,
                        ACL: 'public-read'
                    }, function (resp) {
                        console.log(arguments);
                        console.log('Successfully uploaded the verse.');
                        addanother();
                    });
                }

                writeit();
                var myvalue = fs.createReadStream("myverse.mp3");
                setTimeout(uploadit, 3000)
            });
        });
}
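One pattern worth ruling out here: writeit() returns before the write stream has flushed to disk, and the fixed three-second setTimeout merely guesses when the file is ready. A sketch that waits for the stream's 'finish' event instead (same names as above; uploadit is assumed, for illustration, to take the stream as a parameter rather than reading it from a closure):

function writeit(done) {
    var file = fs.createWriteStream("myverse.mp3");
    console.log('recording the verse');
    file.write(binaryStream);
    file.end();
    file.on('finish', done); // signal only once the bytes are on disk
}

writeit(function () {
    // create the read stream only after the file is fully written
    var myvalue = fs.createReadStream("myverse.mp3");
    uploadit(myvalue);
});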
Working my way through tutorials for AWS... so I've created an S3 bucket which, when a file is dropped into it, calls my Lambda 'testHelloWorld', which sends an email. This all works fine (see below):
'use strict';
console.log('Loading function');

var aws = require('aws-sdk');
var ses = new aws.SES({
    region: 'us-west-2'
});

exports.handler = function(event, context) {
    console.log("Incoming: ", event);
    // var output = querystring.parse(event);

    var eParams = {
        Destination: {
            ToAddresses: ["johnb@hotmail.com"]
        },
        Message: {
            Body: {
                Text: {
                    Data: "Hey! What is up?"
                }
            },
            Subject: {
                Data: "Email Subject!!!"
            }
        },
        Source: "johnb@hotmail.com"
    };

    console.log('===SENDING EMAIL===');
    var email = ses.sendEmail(eParams, function(err, data){
        if(err) console.log(err);
        else {
            console.log("===EMAIL SENT===");
            console.log(data);
            console.log("EMAIL CODE END");
            console.log('EMAIL: ', email);
            context.succeed(event);
        }
    });
};
but I want to extend the email to include data about the file that was uploaded to the bucket. I have found How to trigger my Lambda Function once the file is uploaded to s3 bucket, which gives a Node.js code snippet that should capture the data. I have tried to import this into my existing Lambda:
'use strict';
console.log('Loading function');

var aws = require('aws-sdk');
var ses = new aws.SES({
    region: 'us-west-2'
});
var s3 = new aws.S3({ apiVersion: '2006-03-01', accessKeyId: process.env.ACCESS_KEY, secretAccessKey: process.env.SECRET_KEY, region: process.env.LAMBDA_REGION });

exports.handler = function(event, context, exit){
    console.log("Incoming: ", event);
    // var output = querystring.parse(event);

    // Get the object from the event and show its content type
    // const bucket = event.Records[0].s3.bucket.name;
    // const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
    const params = {
        Bucket: 'bucketName',
        Key: 'keyName',
        Source : 'SourceName',
        Destination : 'DestinationName',
        Message : 'MessageName'
    };

    s3.getObject(function(err, data){
        if (err) {
            console.log('ERROR ' + err);
            // exit(err);
        } else {
            // the data has the content of the uploaded file
            var eParams = {
                Destination: {
                    ToAddresses: ["johnboy@hotmail.com"]
                },
                Message: {
                    Body: {
                        Text: {
                            Data: data
                        }
                    },
                    Subject: {
                        Data: "Email Subject!!!"
                    }
                },
                Source: "johnboy@hotmail.com"
            };
        }
    });

    console.log('===SENDING EMAIL===');
    var email = ses.sendEmail(eParams, function(err, data){
        if(err) console.log(err);
        else {
            console.log("===EMAIL SENT===");
            console.log(data);
            console.log("EMAIL CODE END");
            console.log('EMAIL: ', email);
            context.succeed(event);
        }
    });
};
but this is failing on the params:
message: 'There were 3 validation errors:
* MissingRequiredParameter: Missing required key \'Source\' in params
* MissingRequiredParameter: Missing required key \'Destination\' in params
* MissingRequiredParameter: Missing required key \'Message\' in params',
code: 'MultipleValidationErrors',
errors:
Source, Destination and Message are listed in the params; are they not correctly formatted, so it isn't picking them up?
I can't find much online... any help appreciated.
UPDATE
OK, I've got it working without failing... if I use the test function in the Lambda with the following code...
'use strict';
console.log('Loading function');

var aws = require('aws-sdk');
var ses = new aws.SES({
    region: 'us-west-2'
});
var s3 = new aws.S3({ apiVersion: '2006-03-01', accessKeyId: process.env.ACCESS_KEY, secretAccessKey: process.env.SECRET_KEY, region: process.env.LAMBDA_REGION });

exports.handler = function(event, context) {
    console.log("Incoming: ", event);
    // var output = querystring.parse(event);
    var testData = null;

    // Get the object from the event and show its content type
    // const bucket = event.Records[0].s3.bucket.name;
    // const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
    const params = {
        Bucket: 'bucket',
        Key: 'key',
    };

    s3.getObject(params, function(err, data){
        if (err) {
            console.log('ERROR ' + err);
            exit(err);
        } else {
            testData = data;
        }
    });

    var eParams = {
        Destination: {
            ToAddresses: ["jim@him.com"]
        },
        Message: {
            Body: {
                Text: { Data: 'testData2' + testData}
            },
            Subject: {
                Data: "Email Subject!!!"
            }
        },
        Source: "jim@him.com"
    };

    console.log('===SENDING EMAIL===');
    var email = ses.sendEmail(eParams, function(err, data){
        if(err) console.log(err);
        else {
            console.log("===EMAIL SENT===");
            console.log(data);
            console.log("EMAIL CODE END");
            console.log('EMAIL: ', email);
            context.succeed(event);
        }
    });
};
I get the email with the body: testData2null
So I tried uploading an image through the S3 bucket, and I still get the email with the body testData2null.
Is there any way to debug this further, or does anyone know why it is saying null? I never actually tested the code from the other post which passes the data over to the email; I just assumed it would work. Does anyone know how to obtain the data from the upload, please? Thanks.
You are declaring the var eParams within the callback of s3.getObject, but then you run ses.sendEmail outside of the callback. I think that's why!
You also need to move the ses.sendEmail call inside the callback of s3.getObject if you want to send the data from your object inside the email.
Try this:
s3.getObject(params, function(err, objectData) {
    if (err) {
        console.log('Could not fetch object data: ', err);
    } else {
        console.log('Data was successfully fetched from object');
        var eParams = {
            Destination: {
                ToAddresses: ["johnboy@hotmail.com"]
            },
            Message: {
                Body: {
                    Text: {
                        Data: objectData
                    }
                },
                Subject: {
                    Data: "Email Subject!!!"
                }
            },
            Source: "johnboy@hotmail.com"
        };

        console.log('===SENDING EMAIL===');
        var email = ses.sendEmail(eParams, function(err, emailResult) {
            if (err) console.log('Error while sending email', err);
            else {
                console.log("===EMAIL SENT===");
                console.log(objectData);
                console.log("EMAIL CODE END");
                console.log('EMAIL: ', emailResult);
                context.succeed(event);
            }
        });
    }
});
You need to read up on how Node.js works. It is event-based and depends on callbacks and promises. You should do:
s3.getObject(params, function(err, data){
    // This is your callback for the S3 API call. Do stuff here.
    if (err) {
        console.log('ERROR ' + err);
        exit(err);
    } else {
        testData = data;
        // Got your data. Send the mail here.
    }
});
I have added my comments in the code above. Since Node.js is single-threaded, it makes the S3 API call and keeps going; when it sends the mail, the S3 API call has not yet completed, so the data is null. It is better to use promises here; see the sketch below.
Anyway, read up on callbacks and promises in Node.js and how they work. I hope this explains the logical error.
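For completeness, a promise-based sketch of the same flow using the v2 SDK's .promise() helper (bucket, key and addresses are the placeholders from the question):

s3.getObject({ Bucket: 'bucket', Key: 'key' }).promise()
    .then(function (data) {
        // data is available here; build and send the email now
        var eParams = {
            Destination: { ToAddresses: ["jim@him.com"] },
            Message: {
                Body: { Text: { Data: 'testData2' + data.Body.toString('utf-8') } },
                Subject: { Data: "Email Subject!!!" }
            },
            Source: "jim@him.com"
        };
        return ses.sendEmail(eParams).promise();
    })
    .then(function (result) {
        console.log('===EMAIL SENT===', result);
    })
    .catch(function (err) {
        console.log(err);
    });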