Node.js - S3 Timing Out With Multiple Files - javascript

My node.js project converts text to an mp3, saves it to my file system, then uploads it to S3. I want this function to run consecutively in a loop until the queue is complete, but only the first file uploads successfully. Subsequent file uploads return this error: 400 Your socket connection to the server was not read from or written to within the timeout period. Idle connections will be closed.
Here is my code:
function uploadit() {
  console.log('uploading the verse');
  AWS.config.update({ accessKeyId: '...', secretAccessKey: '...' });
  var s3 = new AWS.S3();
  s3.putObject({
    Bucket: 'myverses',
    Key: book.replace(/ /g, "") + reference.replace(/ /g, "") + ".mp3",
    Body: myvariable,
    ACL: 'public-read'
  }, function (resp) {
    console.log(arguments);
    console.log('Successfully uploaded the verse.');
    addanother();
  });
}
EDIT:
The addanother() function checks whether there are any more files to generate and upload. If so, the following code is executed:
function addverse() {
  connection.query('SELECT versetext, book, mp3, id, reference FROM myverses where mp3 = "empty" limit 1',
    function (error, results, fields) {
      console.log(error);
      var scripture = results[0].versetext;
      var book = results[0].book;
      var reference = results[0].reference.replace(":", " verse ");
      console.log(scripture + " " + book.replace("1", "first").replace("2", "second").replace("3", "third") + " " + reference);
      var myverse = scripture + " " + book.replace("1", "first").replace("2", "second").replace("3", "third") + " " + reference;
      var link = "https://s3.amazonaws.com/myverses/" + book.replace(/ /g, "") + reference.replace(/ /g, "") + ".mp3";

      function linkit() {
        connection.query('update myverses set mp3 = ? where mp3 = "empty" limit 1', [link],
          function (error, results, fields) {
            console.log(error);
          });
      }

      txtomp3.getMp3(myverse, function (err, binaryStream) {
        if (err) {
          console.log(err);
          return;
        }

        function writeit() {
          var file = fs.createWriteStream("myverse.mp3");
          console.log('recording the verse');
          file.write(binaryStream);
          file.end();
        }

        function uploadit() {
          console.log('uploading the verse');
          AWS.config.update({ accessKeyId: '...', secretAccessKey: '...' });
          var s3 = new AWS.S3();
          s3.putObject({
            Bucket: 'myverses',
            Key: book.replace(/ /g, "") + reference.replace(/ /g, "") + ".mp3",
            Body: myvalue,
            ACL: 'public-read'
          }, function (resp) {
            console.log(arguments);
            console.log('Successfully uploaded the verse.');
            addanother();
          });
        }

        writeit();
        var myvalue = fs.createReadStream("myverse.mp3");
        setTimeout(uploadit, 3000);
      });
    });
}
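One pattern worth noting here (a sketch only, reusing the book, reference, and binaryStream variables above): instead of relying on a fixed setTimeout, the upload can be started from the write stream's 'finish' event, so the read stream is only opened once the mp3 is fully on disk.
function writeAndUploadIt() {
  var file = fs.createWriteStream("myverse.mp3");
  file.on('finish', function () {
    // The file has been fully flushed to disk, so it is safe to stream it to S3 now.
    var s3 = new AWS.S3();
    s3.putObject({
      Bucket: 'myverses',
      Key: book.replace(/ /g, "") + reference.replace(/ /g, "") + ".mp3",
      Body: fs.createReadStream("myverse.mp3"),
      ACL: 'public-read'
    }, function (err, data) {
      if (err) return console.log(err);
      console.log('Successfully uploaded the verse.');
      addanother();
    });
  });
  file.write(binaryStream);
  file.end();
}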

Related

aws node.js pass contents of xml as JSON stringify

Node.js 6.10
Process is as follows...
1) Dropped an XML file into an S3 bucket.
2) Triggered a lambda,
3) Called a step function, which calls another lambda
4) This lambda captures the XML from the bucket with
var s3Bucket = new aws.S3( { params: {Bucket: 'bucketName'} } );
var xmlFile = s3Bucket.getObject('fileName.xml');
5) send an email with the contents of the XML as a string
let index = function index(event, context, callback) {
  var fileName = event.fileName;
  var bucketName = event.bucketName;
  var todaysDate = event.todaysDate;
  var eParams = {
    Destination: {
      ToAddresses: ["emailAddress"]
    },
    Message: {
      Body: {
        //Text: { Data: 'file: ' + fileName + ' bucketName: ' + bucketName + ' todaysDate: ' + todaysDate}
        Text: { Data: 'file: ' + JSON.stringify(xmlFile) }
      },
      Subject: {
        Data: "Email Subject!!!"
      }
    },
    Source: "emailAddress"
  };
  console.log('===SENDING EMAIL===');
  var email = ses.sendEmail(eParams, function (err, data) {
    if (err) console.log(err);
    else {
      console.log("===EMAIL SENT===");
      console.log(data);
      console.log("EMAIL CODE END");
      console.log('EMAIL: ', email);
      context.succeed(event);
    }
  });
};
module.exports.init = (event, context, callback) => {
};
exports.handler = index;
I know that sending an email works, because if I uncomment the line
Text: { Data: 'fileName: ' + fileName + ' bucketName: ' + bucketName + ' todaysDate: ' + todaysDate}
and comment out Text: { Data: 'file: ' + JSON.stringify(xmlFile)},
it sends the email with the correct fileName, bucketName, and date.
So when I try to include Text: { Data: 'file: ' + JSON.stringify(xmlFile)},
the logs show the error:
TypeError: Converting circular structure to JSON at Object.stringify (native) at index (/var/task/index.js:39:51)
UPDATE
Thanks @Michael & @Khand for the replies. I have tried what you suggested:
var params = {
  Bucket: "bucketName",
  Key: "fileName.xml"
};
s3.getObject(params, function (err, data) {
  if (err) {
    console.log(err, err.stack); // an error occurred
  } else {
    console.log("Returned data object " + data); // successful response
    console.log("Returned xml " + data.body);
  }
});
The console is returning:
Returned data object [object Object]
Returned xml undefined
And yes, the bucket does contain the named file. The [object Object] is populated, but the body property is undefined.
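(A note on the undefined value: in the AWS SDK for JavaScript the object contents are exposed as data.Body, with a capital B, and it is a Buffer. A minimal sketch of printing the XML as text, keeping the same placeholder bucket and key:)
s3.getObject({ Bucket: "bucketName", Key: "fileName.xml" }, function (err, data) {
  if (err) {
    console.log(err, err.stack);
  } else {
    // data.Body is a Buffer, so convert it to a string to see the XML.
    console.log("Returned xml " + data.Body.toString('utf-8'));
  }
});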

aws upload object to S3 bucket and pass details of data to lambda

Working my way through tutorials for AWS... So I've created an S3 bucket which, when a file is dropped into it, calls my lambda 'testHelloWorld', which sends an email... this all works fine (see below).
'use strict';
console.log('Loading function');
var aws = require('aws-sdk');
var ses = new aws.SES({
  region: 'us-west-2'
});
exports.handler = function (event, context) {
  console.log("Incoming: ", event);
  // var output = querystring.parse(event);
  var eParams = {
    Destination: {
      ToAddresses: ["johnb@hotmail.com"]
    },
    Message: {
      Body: {
        Text: {
          Data: "Hey! What is up?"
        }
      },
      Subject: {
        Data: "Email Subject!!!"
      }
    },
    Source: "johnb@hotmail.com"
  };
  console.log('===SENDING EMAIL===');
  var email = ses.sendEmail(eParams, function (err, data) {
    if (err) console.log(err);
    else {
      console.log("===EMAIL SENT===");
      console.log(data);
      console.log("EMAIL CODE END");
      console.log('EMAIL: ', email);
      context.succeed(event);
    }
  });
};
But I want to extend the email to include data on the file that was uploaded to the bucket. I have found "How to trigger my Lambda Function once the file is uploaded to s3 bucket", which gives a Node.js code snippet that should capture the data. I have tried to import this into my existing lambda:
'use strict';
console.log('Loading function');
var aws = require('aws-sdk');
var ses = new aws.SES({
  region: 'us-west-2'
});
var s3 = new aws.S3({ apiVersion: '2006-03-01', accessKeyId: process.env.ACCESS_KEY, secretAccessKey: process.env.SECRET_KEY, region: process.env.LAMBDA_REGION });
exports.handler = function (event, context, exit) {
  console.log("Incoming: ", event);
  // var output = querystring.parse(event);
  // Get the object from the event and show its content type
  // const bucket = event.Records[0].s3.bucket.name;
  // const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
  const params = {
    Bucket: 'bucketName',
    Key: 'keyName',
    Source: 'SourceName',
    Destination: 'DestinationName',
    Message: 'MessageName'
  };
  s3.getObject(function (err, data) {
    if (err) {
      console.log('ERROR ' + err);
      // exit(err);
    } else {
      // the data has the content of the uploaded file
      var eParams = {
        Destination: {
          ToAddresses: ["johnboy@hotmail.com"]
        },
        Message: {
          Body: {
            Text: {
              Data: data
            }
          },
          Subject: {
            Data: "Email Subject!!!"
          }
        },
        Source: "johnboy@hotmail.com"
      };
    }
  });
  console.log('===SENDING EMAIL===');
  var email = ses.sendEmail(eParams, function (err, data) {
    if (err) console.log(err);
    else {
      console.log("===EMAIL SENT===");
      console.log(data);
      console.log("EMAIL CODE END");
      console.log('EMAIL: ', email);
      context.succeed(event);
    }
  });
};
but this is failing on the params
message: 'There were 3 validation errors:
* MissingRequiredParameter: Missing required key \'Source\' in params
* MissingRequiredParameter: Missing required key \'Destination\' in params
* MissingRequiredParameter: Missing required key \'Message\' in params',
code: 'MultipleValidationErrors',
errors:
Source, Destination, and Message are listed in the params; are they not correctly formatted, so it isn't picking them up?
I can't find much online... any help appreciated.
UPDATE
OK, I've got it working without failing... if I use the test function in the lambda with the following code:
'use strict';
console.log('Loading function');
var aws = require('aws-sdk');
var ses = new aws.SES({
  region: 'us-west-2'
});
var s3 = new aws.S3({ apiVersion: '2006-03-01', accessKeyId: process.env.ACCESS_KEY, secretAccessKey: process.env.SECRET_KEY, region: process.env.LAMBDA_REGION });
exports.handler = function (event, context) {
  console.log("Incoming: ", event);
  // var output = querystring.parse(event);
  var testData = null;
  // Get the object from the event and show its content type
  // const bucket = event.Records[0].s3.bucket.name;
  // const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
  const params = {
    Bucket: 'bucket',
    Key: 'key',
  };
  s3.getObject(params, function (err, data) {
    if (err) {
      console.log('ERROR ' + err);
      exit(err);
    } else {
      testData = data;
    }
  });
  var eParams = {
    Destination: {
      ToAddresses: ["jim@him.com"]
    },
    Message: {
      Body: {
        Text: { Data: 'testData2' + testData }
      },
      Subject: {
        Data: "Email Subject!!!"
      }
    },
    Source: "jim@him.com"
  };
  console.log('===SENDING EMAIL===');
  var email = ses.sendEmail(eParams, function (err, data) {
    if (err) console.log(err);
    else {
      console.log("===EMAIL SENT===");
      console.log(data);
      console.log("EMAIL CODE END");
      console.log('EMAIL: ', email);
      context.succeed(event);
    }
  });
};
I get the email with the body: testData2null
So I tried uploading an image through the S3 bucket, and I still get the email with the body testData2null.
Is there any way to debug this further, or does anyone know why it is saying null? I never actually tested the code from the other post which passes the data over to the email; I just assumed it would work. Does anyone else know how to obtain the data from the upload, please? Thanks.
You are declaring the var eParams within the callback of s3.getObject, but then you run ses.sendEmail outside of that callback. I think that's why!
You need to move the ses.sendEmail call inside the callback of s3.getObject if you want to send the data from your object inside the email.
Try this:
s3.getObject(params, function (err, objectData) {
  if (err) {
    console.log('Could not fetch object data: ', err);
  } else {
    console.log('Data was successfully fetched from object');
    var eParams = {
      Destination: {
        ToAddresses: ["johnboy@hotmail.com"]
      },
      Message: {
        Body: {
          Text: {
            Data: objectData
          }
        },
        Subject: {
          Data: "Email Subject!!!"
        }
      },
      Source: "johnboy@hotmail.com"
    };
    console.log('===SENDING EMAIL===');
    var email = ses.sendEmail(eParams, function (err, emailResult) {
      if (err) console.log('Error while sending email', err);
      else {
        console.log("===EMAIL SENT===");
        console.log(objectData);
        console.log("EMAIL CODE END");
        console.log('EMAIL: ', emailResult);
        context.succeed(event);
      }
    });
  }
});
You need to read up on how Node.js works. It is event-based and depends on callbacks and promises. You should do:
s3.getObject(params, function (err, data) {
  // This is your callback for the S3 API call. Do stuff here.
  if (err) {
    console.log('ERROR ' + err);
    exit(err);
  } else {
    testData = data;
    // Got your data. Send the mail here.
  }
});
I have added my comments in the code above. Since Node.js is single-threaded and non-blocking, it kicks off the S3 API call and moves on. By the time it sends the mail, the S3 API call has not completed, so the data is still null. It is better to use promises here; a promise-based sketch follows below.
Anyway, read up on callbacks and promises in Node.js and how they work, but I hope this explains your logical error.
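For illustration, a rough promise-based version (a sketch only, using the .promise() helper of the AWS SDK v2; the bucket, key, and addresses are placeholders):
const params = { Bucket: 'bucket', Key: 'key' };

s3.getObject(params).promise()
  .then(function (data) {
    // The object is available here, so its contents can go into the email body.
    const eParams = {
      Destination: { ToAddresses: ["jim@him.com"] },
      Message: {
        Body: { Text: { Data: 'testData2 ' + data.Body.toString('utf-8') } },
        Subject: { Data: "Email Subject!!!" }
      },
      Source: "jim@him.com"
    };
    return ses.sendEmail(eParams).promise();
  })
  .then(function (emailResult) {
    console.log("===EMAIL SENT===", emailResult);
    context.succeed(event);
  })
  .catch(function (err) {
    console.log('ERROR ', err);
  });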

Amazon S3 upload image - using angular js directly from browser

I am trying to upload images from my browser to Amazon S3 directly, using angular js.
Below is my code.
function _upload($files) {
  $scope.file = $files[0];
  $scope.creds = {
    access_key: '***',
    secret_key: '***',
    bucket: 'sabari-test'
  };
  var bucket = new AWS.S3({
    params: {
      Bucket: $scope.creds.bucket
    }
  });
  AWS.config.accessKeyId = $scope.creds.access_key;
  AWS.config.secretAccessKey = $scope.creds.secret_key;
  AWS.config.region = 'us-west-2';
  // AWS.
  if ($scope.file) {
    // Perform File Size Check First
    var fileSize = Math.round(parseInt($scope.file.size));
    if (fileSize > $scope.sizeLimit) {
      console.log('Sorry, your attachment is too big.');
      return false;
    }
    // Prepend Unique String To Prevent Overwrites
    var uniqueFileName = 'hai' + '-' + $scope.file.name;
    var params = {
      Key: uniqueFileName,
      ContentType: $scope.file.type,
      Body: $scope.file,
      ServerSideEncryption: 'AES256'
    };
    bucket.putObject(params, function (err, data) {
      if (err) {
        console.log(err.message);
        return false;
      } else {
        // Upload Successfully Finished
        console.log('File Uploaded Successfully');
      }
    });
  } else {
    // No File Selected
    console.log('Please select a file to upload');
  }
}
I get the below error:
"Missing credentials in config"
Please let me know what the missing credential is.
Thanks.
You need to replace these lines:
var bucket = new AWS.S3({
  params: {
    Bucket: $scope.creds.bucket
  }
});
AWS.config.accessKeyId = $scope.creds.access_key;
AWS.config.secretAccessKey = $scope.creds.secret_key;
AWS.config.region = 'us-west-2';
With this:
var bucket = new AWS.S3({
  region: 'us-west-2',
  credentials: new AWS.Credentials($scope.creds.access_key, $scope.creds.secret_key)
});
And then move the Bucket into your params:
var params = {
  Bucket: $scope.creds.bucket,
  Key: uniqueFileName,
  ContentType: $scope.file.type,
  Body: $scope.file,
  ServerSideEncryption: 'AES256'
};
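The likely reason the original code reported missing credentials is that the S3 client was constructed before the keys were set on AWS.config, so it never picked them up; passing the credentials to the constructor as above avoids that. A rough assembled sketch of the upload with these changes (same placeholder credentials and file as in the question):
var bucket = new AWS.S3({
  region: 'us-west-2',
  credentials: new AWS.Credentials($scope.creds.access_key, $scope.creds.secret_key)
});

var params = {
  Bucket: $scope.creds.bucket,
  Key: uniqueFileName,
  ContentType: $scope.file.type,
  Body: $scope.file,
  ServerSideEncryption: 'AES256'
};

bucket.putObject(params, function (err, data) {
  if (err) {
    console.log(err.message);
  } else {
    console.log('File Uploaded Successfully');
  }
});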

Uploading files using Skipper with Sails.js v0.10 - how to retrieve new file name

I am upgrading to Sails.js version 0.10 and now need to use Skipper to manage my file uploads.
When I upload a file I generate a new name for it using a UUID, and save it in the public/files/ folder (this will change when I've got this all working but it's good for testing right now)
I save the original name, and the uploaded name + path into a Mongo database.
This was all quite straightforward under Sails v0.9.x but using Skipper I can't figure out how to read the new file name and path. (Obviously if I could read the name I could construct the path though so it's really only the name I need)
My Controller looks like this
var uuid = require('node-uuid'),
    path = require('path'),
    blobAdapter = require('skipper-disk');

module.exports = {
  upload: function (req, res) {
    var receiver = blobAdapter().receive({
          dirname: sails.config.appPath + "/public/files/",
          saveAs: function (file) {
            var filename = file.filename,
                newName = uuid.v4() + path.extname(filename);
            return newName;
          }
        }),
        results = [];

    req.file('docs').upload(receiver, function (err, files) {
      if (err) return res.serverError(err);

      async.forEach(files, function (file, next) {
        Document.create({
          name: file.filename,
          size: file.size,
          localName: // ***** how do I get the `saveAs()` value from the uploaded file *****,
          path: // *** and likewise how do I get the path ******
        }).exec(function (err, savedFile) {
          if (err) {
            next(err);
          } else {
            results.push({
              id: savedFile.id,
              url: '/files/' + savedFile.localName
            });
            next();
          }
        });
      }, function (err) {
        if (err) {
          sails.log.error('caught error', err);
          return res.serverError({ error: err });
        } else {
          return res.json({ files: results });
        }
      });
    });
  },
  _config: {}
};
How do I do this?
I've worked this out now and thought I'd share my solution for the benefit of others struggling with similar issues.
The solution was to not use skipper-disk at all but to write my own custom receiver. I've created this as a Sails Service object.
So in file api/services/Uploader.js
// Uploader utilities and helper methods
// designed to be relatively generic.
var fs = require('fs'),
    Writable = require('stream').Writable;

exports.documentReceiverStream = function (options) {
  var defaults = {
    dirname: '/dev/null',
    saveAs: function (file) {
      return file.filename;
    },
    completed: function (file, done) {
      done();
    }
  };

  // I don't have access to jQuery here so this is the simplest way I
  // could think of to merge the options.
  var opts = defaults;
  if (options.dirname) opts.dirname = options.dirname;
  if (options.saveAs) opts.saveAs = options.saveAs;
  if (options.completed) opts.completed = options.completed;
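  // (An aside: Object.assign({}, defaults, options) would do the same merge
  // more concisely, assuming Node.js 4 or later.)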

  var documentReceiver = Writable({ objectMode: true });

  // This `_write` method is invoked each time a new file is received
  // from the Readable stream (Upstream) which is pumping filestreams
  // into this receiver. (filename === `file.filename`).
  documentReceiver._write = function onFile(file, encoding, done) {
    var newFilename = opts.saveAs(file),
        fileSavePath = opts.dirname + newFilename,
        outputs = fs.createWriteStream(fileSavePath, encoding);
    file.pipe(outputs);

    // Garbage-collect the bytes that were already written for this file.
    // (called when a read or write error occurs)
    function gc(err) {
      sails.log.debug("Garbage collecting file '" + file.filename + "' located at '" + fileSavePath + "'");
      fs.unlink(fileSavePath, function (gcErr) {
        if (gcErr) {
          return done([err].concat([gcErr]));
        } else {
          return done(err);
        }
      });
    }

    file.on('error', function (err) {
      sails.log.error('READ error on file ' + file.filename, '::', err);
    });
    outputs.on('error', function failedToWriteFile(err) {
      sails.log.error('failed to write file', file.filename, 'with encoding', encoding, ': done =', done);
      gc(err);
    });
    outputs.on('finish', function successfullyWroteFile() {
      sails.log.debug("file uploaded");
      opts.completed({
        name: file.filename,
        size: file.size,
        localName: newFilename,
        path: fileSavePath
      }, done);
    });
  };

  return documentReceiver;
};
and then my controller just became (in api/controllers/DocumentController.js)
var uuid = require('node-uuid'),
    path = require('path');

module.exports = {
  upload: function (req, res) {
    var results = [],
        streamOptions = {
          dirname: sails.config.appPath + "/public/files/",
          saveAs: function (file) {
            var filename = file.filename,
                newName = uuid.v4() + path.extname(filename);
            return newName;
          },
          completed: function (fileData, next) {
            Document.create(fileData).exec(function (err, savedFile) {
              if (err) {
                next(err);
              } else {
                results.push({
                  id: savedFile.id,
                  url: '/files/' + savedFile.localName
                });
                next();
              }
            });
          }
        };

    req.file('docs').upload(Uploader.documentReceiverStream(streamOptions),
      function (err, files) {
        if (err) return res.serverError(err);
        res.json({
          message: files.length + ' file(s) uploaded successfully!',
          files: results
        });
      }
    );
  },
  _config: {}
};
I'm sure it can be improved further but this works perfectly for me.
The uploaded file object contains all data you need:
req.file('fileTest').upload({
  // You can apply a file upload limit (in bytes)
  maxBytes: maxUpload,
  adapter: require('skipper-disk')
}, function whenDone(err, uploadedFiles) {
  if (err) {
    var error = { "status": 500, "error": err };
    res.status(500);
    return res.json(error);
  } else {
    for (var u in uploadedFiles) {
      // "fd" contains the actual file path (and name) of your file on disk
      fileOnDisk = uploadedFiles[u].fd;
      // I suggest you stringify the object to see what it contains and might be useful to you
      console.log(JSON.stringify(uploadedFiles[u]));
    }
  }
});
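To connect this back to the original controller, the fd value could be used to fill in the localName and path fields; a rough sketch, assuming skipper-disk's fd holds the full path of the file it wrote:
var path = require('path');

// Inside the upload callback, for each uploaded file object `file`:
Document.create({
  name: file.filename,
  size: file.size,
  localName: path.basename(file.fd), // generated file name on disk
  path: file.fd                      // full path written by skipper-disk
}).exec(function (err, savedFile) {
  if (err) return next(err);
  results.push({ id: savedFile.id, url: '/files/' + savedFile.localName });
  next();
});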

How upload a file to Dropbox with dropbox.js?

ORIGINAL
I'm having problems uploading a file (image) to Dropbox from Node.js using the official dropbox.js.
I want to upload a picture that I have on another server, for example the Dropbox icon (www.dropbox.com/static/images/new_logo.png).
client.writeFile(file, 'www.dropbox.com/static/images/new_logo.png', function (error, stat) {
  if (error) {
    return res.send(error.status); // Something went wrong.
  }
  res.send("File saved as revision " + stat.revisionTag);
});
I know that this only creates a text file with the URL, but how can I upload the picture to Dropbox?
I also tried to download the file using http.get and then upload it to Dropbox, but it doesn't work.
Thanks.
UPDATE WITH MORE INFO
First I download the image from a remote url with this code:
var request = http.get(options, function (res) {
  var imagedata = ''
  res.setEncoding('binary')
  res.on('data', function (chunk) {
    imagedata += chunk
  })
  res.on('end', function () {
    console.log("Image downloaded!");
    fs.writeFile(local, imagedata, 'binary', function (err) {
      if (err) throw err
      console.log('File saved.')
    })
  })
})
The file is saved correctly.
Then I tried two things:
Sending the 'imagedata' to Dropbox:
console.log("Image downloaded!");
client.writeFile(file, imagedata, function (error, stat) {
  if (error) {
    return response.send(error.status); // Something went wrong.
  }
  response.send("File saved as revision " + stat.revisionTag);
});
And something is uploaded to Dropbox but it's nothing useful.
Then I also tried to read the file from disk and send it to Dropbox, but that doesn't work either:
fs.readFile(file, function(err, data) {
Use dropbox-js 0.9.1-beta1 or above to upload binary files from node.js. You need to pass it Buffer or ArrayBuffer instances. Try this code:
var req = http.get(options, function (res) {
  var chunks = [];
  res.on('data', function (chunk) {
    chunks.push(chunk);
  });
  res.on('end', function () {
    console.log("Image downloaded!");
    var imageData = Buffer.concat(chunks);
    client.writeFile(file, imageData, function (error, stat) {
      if (error) {
        return response.send(error.status);
      }
      response.send("File saved as revision " + stat.revisionTag);
    });
  });
});
Original answer: the dropbox-js README mentions that binary files don't work in node.js just yet.
I had this issue as well. I just copied and modified a bit of the old dropbox-node npm package (which is now deprecated), and added the following function to dropbox.js.
Client.prototype.writeFileNodejs = function (path, data, callback) {
  var self = this;
  fs.readFile(data.path, function (err, data) {
    if (err) return callback(err);
    var uri = "" + self.urls.putFile + "/" + (self.urlEncodePath(path));
    if (typeof data === 'function') callback = data, data = undefined;
    var oauth = {
      consumer_key: self.oauth.key,
      consumer_secret: self.oauth.secret,
      token: self.oauth.token,
      token_secret: self.oauth.tokenSecret
    };
    var requestOptions = { uri: uri, oauth: oauth };
    requestOptions.body = data;
    return request['put'](requestOptions, callback ?
      function (err, res, body) {
        if (err) return callback(err);
        var contentType = res.headers['content-type'];
        // check if the response body is in JSON format
        if (contentType === 'application/json' ||
            contentType === 'text/javascript') {
          body = JSON.parse(body);
          if (body.error) {
            var err = new Error(body.error);
            err.statusCode = res.statusCode;
            return callback(err);
          }
        } else if (errors[res.statusCode]) {
          var err = new Error(errors[res.statusCode]);
          err.statusCode = res.statusCode;
          return callback(err);
        }
        // check for metadata in headers
        if (res.headers['x-dropbox-metadata']) {
          var metadata = JSON.parse(res.headers['x-dropbox-metadata']);
        }
        callback(null, body, metadata);
      } : undefined);
  });
};
You will also need to require request and fs for this:
var request = require('request'),
    fs = require('fs');
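For completeness, a hypothetical call to this helper (the remote and local file names are purely illustrative) might look like:
// `client` is an authenticated dropbox.js Client with the method above attached;
// the second argument mirrors the data.path lookup used in writeFileNodejs.
client.writeFileNodejs('new_logo.png', { path: './new_logo.png' }, function (err, body, metadata) {
  if (err) return console.error('Upload failed:', err);
  console.log('Uploaded:', body, metadata);
});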
