I have a function in my helper JS file to upload files to S3:
module.exports.uploadToAWS = function uploadToAWS(folderName, fileName, fileData) {
  var s3 = new AWS.S3({ params: { Bucket: 'myBucket' } });
  var keyName = folderName + "/" + fileName;
  var buffer = new Buffer(fileData.replace(/^data:image\/\w+;base64,/, ""), 'base64');
  var data = {
    Key: keyName,
    Body: buffer,
    ContentEncoding: 'base64',
    ContentType: 'image/jpeg'
  };
  s3.putObject(data, function(err, data) {
    if (err) {
      console.log(err);
      console.log('Error uploading data: ', data);
    } else {
      console.log('successfully uploaded the image!');
    }
  });
}
My consumer function is like this:
if (images) {
  images.forEach(function(imageRecord) {
    awsHelper.uploadToAWS('assets', assetObject.id, imageRecord);
  });
}
I want to introduce a callback here, so that the caller can learn about success or failure from my helper function. How could it be implemented?
I need my consumer function to look like the following, but what would the helper function look like?
if (images) {
  images.forEach(function(imageRecord) {
    awsHelper.uploadToAWS(
      'assets',
      assetObject.id,
      imageRecord,
      function (success, failure) {
        // handle success or failure
      });
  });
}
Why don't you simply add a callback to your uploadToAWS, like:
module.exports.uploadToAWS = function uploadToAWS(folderName, fileName, fileData, callback){
Then do a simple validation and call it (passing the response) when your upload completes or fails:
s3.putObject(data, function(err, data) {
  if (err) {
    console.log(err);
    console.log('Error uploading data: ', data);
    if (typeof callback === 'function') callback(err, data);
  } else {
    console.log('successfully uploaded the image!');
    if (typeof callback === 'function') callback(err, data);
  }
});
After this you will use it exactly as you proposed:
if (images) {
  images.forEach(function(imageRecord) {
    awsHelper.uploadToAWS('assets', assetObject.id, imageRecord, function (err, data) {
      // handle success or failure
    });
  });
}
By rewriting in the Promise style, you can do this:
module.exports.uploadToAWSAsync =
  function uploadToAWSAsync(folderName, fileName, fileData) {
    return new Promise(function(resolve, reject) {
      var s3 = new AWS.S3({ params: { Bucket: 'myBucket' } });
      var keyName = folderName + "/" + fileName;
      var buffer = Buffer.from(fileData.replace(/^data:image\/\w+;base64,/, ""), 'base64');
      var data = {
        Key: keyName,
        Body: buffer,
        ContentEncoding: 'base64',
        ContentType: 'image/jpeg'
      };
      s3.putObject(data, function(err, data) {
        if (err) {
          console.log(err);
          console.log('Error uploading data: ', data);
          reject(err);
        } else {
          console.log('successfully uploaded the image!');
          resolve();
        }
      });
    });
  }
You could then rewrite your consumer code as follows:
// fire them all off in one go
var promises = images.map(function(imageRecord) {
  return awsHelper.uploadToAWSAsync('assets', assetObject.id, imageRecord);
});
Promise.all(promises).then(function() {
  // success, everything uploaded
}).catch(function(err) {
  // something went wrong
});
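If your Node version has async/await (8+), the same consumer reads a little more naturally; a minimal sketch, assuming the uploadToAWSAsync helper above:
async function uploadAll(images) {
  try {
    // fire them all off in one go, then wait for every upload to settle
    await Promise.all(images.map(function(imageRecord) {
      return awsHelper.uploadToAWSAsync('assets', assetObject.id, imageRecord);
    }));
    // success, everything uploaded
  } catch (err) {
    // at least one upload failed
    console.log('Error uploading data: ', err);
  }
}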
You need to pass the callback function as an argument to the helper function.
Try this:
module.exports.uploadToAWS = function uploadToAWS(folderName, fileName, fileData, callback) {
  var s3 = new AWS.S3({ params: { Bucket: 'myBucket' } });
  var keyName = folderName + "/" + fileName;
  var buffer = new Buffer(fileData.replace(/^data:image\/\w+;base64,/, ""), 'base64');
  var data = {
    Key: keyName,
    Body: buffer,
    ContentEncoding: 'base64',
    ContentType: 'image/jpeg'
  };
  s3.putObject(data, function(err, data) {
    if (err) {
      callback(err);
    } else {
      callback(null, data);
    }
  });
}
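The consumer then follows Node's usual error-first convention, for example:
if (images) {
  images.forEach(function(imageRecord) {
    awsHelper.uploadToAWS('assets', assetObject.id, imageRecord, function(err, data) {
      if (err) {
        // handle failure
        console.log('Error uploading data: ', err);
      } else {
        // handle success
        console.log('Uploaded: ', data);
      }
    });
  });
}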
I am still quite new to the JS world. Why is my JavaScript Promise not working as expected? I thought console.log("update configs"); wouldn't be executed until console.log("checking bucket"); had completed. Thanks in advance for any help.
Note: the 'checking bucket' step keeps checking every few seconds whether the S3 bucket exists, until the bucket is created.
process.env.AWS_PROFILE = 'aquarius';
process.env.AWS_SDK_LOAD_CONFIG = 'true';
var AWS = require('aws-sdk');
AWS.config.update({region: 'us-east-1'});
let bucketName = "mybucket.test.com";
// Create S3 service object
var s3 = new AWS.S3({
  apiVersion: '2006-03-01'
});
var promise = new Promise(function(resolve, reject) {
  setTimeout(function() {
    resolve('hello world');
  }, 1);
});
promise.then(function(data) {
  var params = {
    Bucket: bucketName,
    ACL: "public-read"
  };
  s3.createBucket(params, function(err, data) {
    console.log("creating bucket");
    if (err) {
      console.log(err, err.stack); // an error occurred
    } else {
      console.log(data); // successful
    }
  });
})
.then(function(data) {
  console.log("checking bucket");
  var params = {
    Bucket: bucketName
  };
  s3.waitFor('bucketExists', params, function(err, data) {
    if (err) console.log(err, err.stack); // an error occurred
    else console.log("bucket exists"); // successful response
  });
})
.then(function(data) {
  console.log("update configs");
  var params2 = {
    Bucket: bucketName,
    ContentMD5: "",
    WebsiteConfiguration: {
      ErrorDocument: {
        Key: "error.html"
      },
      IndexDocument: {
        Suffix: "index.html"
      }
    }
  };
  s3.putBucketWebsite(params2, function(err, data) {
    if (err) console.log(err, err.stack); // an error occurred
    else console.log(data); // successful
  });
});
Thanks everyone for the feedback and hints. I found I can retrieve a promise object directly from the AWS SDK call. Here is the revised code, which is working now.
// Create S3 service object
var s3 = new AWS.S3({
  apiVersion: '2006-03-01'
});
var params = {
  Bucket: bucketName,
  ACL: "public-read"
};
var promiseObject = s3.createBucket(params, function(err, data) {
  console.log("creating bucket");
  if (err) {
    console.log(err, err.stack); // an error occurred
  } else {
    console.log(data); // successful
  }
}).promise(); // <=== THIS LINE SAVED MY DAY !!
promiseObject.then(function(data) {
  console.log("checking bucket");
  var params = {
    Bucket: bucketName
  };
  s3.waitFor('bucketExists', params, function(err, data) {
    if (err) console.log(err, err.stack); // an error occurred
    else console.log("bucket exists"); // successful response
  });
})
.then(function(data) {
  console.log("update configs");
  var params2 = {
    Bucket: bucketName,
    ContentMD5: "",
    WebsiteConfiguration: {
      ErrorDocument: {
        Key: "error.html"
      },
      IndexDocument: {
        Suffix: "index.html"
      }
    }
  };
  s3.putBucketWebsite(params2, function(err, data) {
    if (err) console.log(err, err.stack); // an error occurred
    else console.log(data); // successful
  });
});
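Note that the later steps still won't wait for each other, because those .then callbacks don't return anything; for the chain to actually serialize, each step has to return a promise. A minimal sketch of a fully chained version, using .promise() on every SDK call (assuming the same s3 client and bucketName):
s3.createBucket({ Bucket: bucketName, ACL: "public-read" }).promise()
  .then(function() {
    console.log("checking bucket");
    return s3.waitFor('bucketExists', { Bucket: bucketName }).promise();
  })
  .then(function() {
    console.log("update configs");
    return s3.putBucketWebsite({
      Bucket: bucketName,
      WebsiteConfiguration: {
        ErrorDocument: { Key: "error.html" },
        IndexDocument: { Suffix: "index.html" }
      }
    }).promise();
  })
  .then(function(data) {
    console.log(data); // successful
  })
  .catch(function(err) {
    console.log(err, err.stack); // an error occurred in any step
  });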
I'm working on an implementation where I need to fork a git repository into a CodeCommit repository using Lambda and JavaScript. Creating or updating a file in the repository I can already do.
What do I need to know to fork a GitHub repository (or a CodeCommit repository) into another repository inside CodeCommit?
I am showing the full code because it is needed to understand what I have done so far.
'use strict';
var AWS = require("aws-sdk");
var codecommit = new AWS.CodeCommit({ apiVersion: '2015-04-13' });
exports.handler = (event, context, callback) => {
  event.Records.forEach((record) => {
    console.log('Stream record: ', JSON.stringify(record, null, 2));
    if (record.eventName === 'INSERT') {
      var name = JSON.stringify(record.dynamodb.NewImage.name.S).replace(/[^a-z0-9áéíóúñü \.,_-]/gim, "");
      var version = JSON.stringify(record.dynamodb.NewImage.version.S);
      var data = JSON.stringify(record.dynamodb.NewImage.data.S);
      var params = { repositoryName: name };
      codecommit.getRepository(params, function(err, response) {
        if (err) {
          console.log(err, err.stack);
          if (err.code === 'RepositoryDoesNotExistException') {
            params.repositoryDescription = 'Repository ' + name;
            codecommit.createRepository(params, function(err, response) {
              if (err) {
                console.log(err, err.stack);
              } else {
                console.log(response);
                prepareCommit(name, version, data);
              }
            });
          }
        } else {
          console.log('response repository: ', response);
          prepareCommit(name, version, data);
        }
      });
    }
  });
  callback(null, `Successfully processed ${event.Records.length} records.`);
};
function prepareCommit(name, version, data) {
  var params = {
    branchName: 'master',
    repositoryName: name
  };
  codecommit.getBranch(params, function(err, response) {
    if (err) {
      console.log(err);
      if (err.code === 'BranchDoesNotExistException') {
        doCommit(name, version, '' + data, '');
      }
    } else {
      console.log('response branch: ', response);
      doCommit(name, version, '' + data, '' + response.branch.commitId);
    }
  });
}
function doCommit(name, version, data, parentCommitId) {
  var params = {
    branchName: 'master',
    fileContent: Buffer.from(JSON.parse(data)) || '',
    filePath: 'data/structure.json',
    repositoryName: name,
    commitMessage: 'generated by lambda stream ' + version,
    email: '<email here>',
    fileMode: 'NORMAL',
    name: '<name here>'
  };
  if (parentCommitId !== '') {
    params.parentCommitId = parentCommitId;
  }
  codecommit.putFile(params, function(err, response) {
    if (err) {
      console.log(err, err.stack);
    } else {
      console.log(response);
    }
  });
}
This code receives data from a DynamoDB stream and checks whether the repository exists; if it doesn't, a new one is created and a structure.json file is put inside the data/ folder.
What I still need is to fork a GitHub or CodeCommit repository into the newly created repository.
I have a program where the user first creates a file; once the file is created, I continuously append data coming from the client to it. The code below is working as expected. I am new to Node.js, so I just want an expert opinion: when multiple users are creating and recording files on their machines at the same time, will it work asynchronously, or do I need to make some changes to the code?
io.js
socket.on('createlogfile', function() {
  logsRecording.userLogs(function(filename) {
    socket.emit('filename', filename);
  });
});
socket.on('startrecording', function(obj) {
  logsRecording.recordLogs(obj);
});
server.js
userLogs: function (callback) {
  var filename = uuid.v4() + '.log';
  var file = filePath + '/' + filename;
  fs.openSync(file, 'a', function () {
    console.log('file created');
  });
  console.log('userLogs');
  callback(filename);
},
recordLogs: function (obj) {
  var dir = './app/records/templogs';
  var fileAppend = dir + '/' + obj.file;
  console.log('data from recording', obj.data);
  fs.readdir(dir, function(err, items) {
    items.forEach(function(file) {
      if (obj.file === file) {
        fs.appendFile(fileAppend, obj.data + "\r\n", null, 'utf8', function (err) {
          if (err) throw err;
        });
        console.log('filename in records', obj.file);
      }
    });
  });
}
You are using fs.openSync, which is synchronous and as such can block the event loop (it also takes no callback, so the function you pass it is simply ignored).
You should be using fs.open and invoke your callback inside it:
userLogs: function (callback) {
  var filename = uuid.v4() + '.log';
  var file = filePath + '/' + filename;
  fs.open(file, 'a', function (err) {
    console.log('file created');
    console.log('userLogs');
    callback(err, filename);
  });
},
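As a side note, fs.open also hands you a file descriptor that should eventually be closed; since recordLogs appends by path rather than by descriptor, a variant (just a sketch, reusing the same filePath and uuid from the original) can close it right away:
userLogs: function (callback) {
  var filename = uuid.v4() + '.log';
  var file = filePath + '/' + filename;
  fs.open(file, 'a', function (err, fd) {
    if (err) return callback(err);
    // nothing is written through this descriptor, so close it immediately
    fs.close(fd, function (closeErr) {
      console.log('file created');
      callback(closeErr, filename);
    });
  });
},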
You can also flatten recordLogs using the async library (this assumes var async = require('async')).
Also, it is bad practice to throw an error inside an asynchronous callback; you should pass the error to the callback instead.
As a last tip, Array.forEach is synchronous and can block the process; you should be using async.each:
recordLogs: function (obj, callback) {
  var dir = './app/records/templogs';
  var fileAppend = dir + '/' + obj.file;
  console.log('data from recording', obj.data);
  async.waterfall([
    (callback) => {
      fs.readdir(dir, (err, items) => {
        callback(err, items);
      });
    },
    (items, callback) => {
      async.each(items, (file, callback) => {
        if (obj.file === file) {
          fs.appendFile(fileAppend, obj.data + "\r\n", null, 'utf8', function (err) {
            callback(err);
          });
          console.log('filename in records', obj.file);
        } else {
          callback();
        }
      }, (err) => {
        callback(err);
      });
    }
  ], (err) => {
    if (callback) {
      callback(err);
    }
  });
}
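Since recordLogs now takes a callback, the io.js handler can report failures back to the client; a sketch (the 'recordingerror' event name is made up here):
socket.on('startrecording', function(obj) {
  logsRecording.recordLogs(obj, function (err) {
    if (err) {
      socket.emit('recordingerror', err.message); // hypothetical event name
    }
  });
});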
For example:
knox.js:
knox.putFile("local.jpeg", "upload.jpeg", {
  "Content-Type": "image/jpeg"
}, function(err, result) {
  if (err != null) {
    return console.log(err);
  } else {
    return console.log("Uploaded to amazon S3");
  }
});
I have two images in the same directory as knox.js, local.jpeg and local2.jpeg. I am able to upload local.jpeg to S3, but not local2.jpeg; both files have the same permissions. Am I missing anything here? Thanks.
My implementation without storing the file locally, using express, knox, mime, and fs:
var knox = require('knox').createClient({
  key: S3_KEY,
  secret: S3_SECRET,
  bucket: S3_BUCKET
});

exports.upload = function uploadToAmazon(req, res, next) {
  var file = req.files.file;
  var stream = fs.createReadStream(file.path);
  var mimetype = mime.lookup(file.path);

  if (['image/jpeg', 'image/pjpeg', 'image/png', 'image/gif'].indexOf(mimetype) === -1) {
    return next(new HttpError(HTTPStatus.BAD_REQUEST));
  }

  // note: don't reuse the express `req`/`res` names for the S3 request/response
  var s3req = knox.putStream(stream, file.name,
    {
      'Content-Type': mimetype,
      'Cache-Control': 'max-age=604800',
      'x-amz-acl': 'public-read',
      'Content-Length': file.size
    },
    function(err, result) {
      console.log(result);
    }
  );

  s3req.on('response', function(s3res) {
    if (s3res.statusCode == HTTPStatus.OK) {
      res.json('url: ' + s3req.url);
    } else {
      next(new HttpError(s3res.statusCode));
    }
  });
};
That's because your code does not upload local2.jpeg!
Your code only pushes the file named local.jpeg. You should invoke knox.putFile() for every file. I also advise you to have a helper function that does some string formatting to rename the uploaded file on S3 (or just keep the name as it is :) ).
var files = ["local.jpeg", "local2.jpeg"];
files.forEach(function (file) {
  var upload_name = "upload_" + file; // or whatever you want it to be called
  knox.putFile(file, upload_name, {
    "Content-Type": "image/jpeg"
  }, function (err, result) {
    if (err != null) {
      return console.log(err);
    } else {
      return console.log("Uploaded to amazon S3");
    }
  });
});
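If you also want to know when every upload has finished, a simple counter is enough; a sketch, assuming the same files array as above:
var remaining = files.length;
files.forEach(function (file) {
  knox.putFile(file, "upload_" + file, {
    "Content-Type": "image/jpeg"
  }, function (err, result) {
    if (err) console.log(err);
    remaining -= 1;
    if (remaining === 0) {
      console.log("All uploads have settled");
    }
  });
});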
ORIGINAL
I'm having problems uploading a file (an image) to Dropbox from Node.js using the official dropbox.js.
I want to upload a picture that I have on another server, for example the Dropbox icon (www.dropbox.com/static/images/new_logo.png).
client.writeFile(file, 'www.dropbox.com/static/images/new_logo.png', function(error, stat) {
  if (error) {
    return res.send(error.status); // Something went wrong.
  }
  res.send("File saved as revision " + stat.revisionTag);
});
I know that this only creates a text file containing the URL, but how can I upload the actual picture to Dropbox?
I also tried to download the file using http.get and then upload that to Dropbox, but it doesn't work.
Thanks.
UPDATE WITH MORE INFO
First I download the image from a remote URL with this code:
var request = http.get(options, function(res) {
  var imagedata = '';
  res.setEncoding('binary');
  res.on('data', function(chunk) {
    imagedata += chunk;
  });
  res.on('end', function() {
    console.log("Image downloaded!");
    fs.writeFile(local, imagedata, 'binary', function(err) {
      if (err) throw err;
      console.log('File saved.');
    });
  });
});
The file is saved correctly.
Then I tried two things:
Sending the 'imagedata' to Dropbox:
console.log("Image downloaded!");
client.writeFile(file, imagedata, function(error, stat) {
  if (error) {
    return response.send(error.status); // Something went wrong.
  }
  response.send("File saved as revision " + stat.revisionTag);
});
And something is uploaded to Dropbox, but it's nothing useful.
Then I also tried to read the file from disk and send it to Dropbox, but that doesn't work either:
fs.readFile(file, function(err, data) {
  // ...
});
Use dropbox-js 0.9.1-beta1 or above to upload binary files from node.js. You need to pass it Buffer or ArrayBuffer instances. Try this code:
var req = http.get(options, function(res) {
  var chunks = [];
  res.on('data', function(chunk) {
    chunks.push(chunk);
  });
  res.on('end', function() {
    console.log("Image downloaded!");
    var imageData = Buffer.concat(chunks);
    client.writeFile(file, imageData, function(error, stat) {
      if (error) {
        return response.send(error.status);
      }
      response.send("File saved as revision " + stat.revisionTag);
    });
  });
});
Original answer: the dropbox-js README mentions that binary files don't work in node.js just yet.
I had this issue as well. I just copied and modified a bit of the old dropbox-node npm package (which is now deprecated), and added the following function to dropbox.js.
Client.prototype.writeFileNodejs = function(path, data, callback) {
  var self = this;
  // `data` is expected to be an object with a `path` property pointing at a local file
  fs.readFile(data.path, function(err, fileData) {
    if (err) return callback(err);
    var uri = "" + self.urls.putFile + "/" + (self.urlEncodePath(path));
    var oauth = {
      consumer_key: self.oauth.key,
      consumer_secret: self.oauth.secret,
      token: self.oauth.token,
      token_secret: self.oauth.tokenSecret
    };
    var requestOptions = { uri: uri, oauth: oauth };
    requestOptions.body = fileData;
    return request['put'](requestOptions, callback ?
      function(err, res, body) {
        if (err) return callback(err);
        var contentType = res.headers['content-type'];
        // check if the response body is in JSON format
        if (contentType === 'application/json' ||
            contentType === 'text/javascript') {
          body = JSON.parse(body);
          if (body.error) {
            var err = new Error(body.error);
            err.statusCode = res.statusCode;
            return callback(err);
          }
        } else if (errors[res.statusCode]) {
          // `errors` is a status-code-to-message map (as in dropbox-node)
          var err = new Error(errors[res.statusCode]);
          err.statusCode = res.statusCode;
          return callback(err);
        }
        // check for metadata in headers
        var metadata;
        if (res.headers['x-dropbox-metadata']) {
          metadata = JSON.parse(res.headers['x-dropbox-metadata']);
        }
        callback(null, body, metadata);
      } : undefined);
  });
};
You will also need to require request and fs for this:
var request = require('request'),
fs = require('fs');
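Usage would then look something like this (a sketch; the file names are just placeholders):
client.writeFileNodejs('upload.jpeg', { path: './local.jpeg' }, function(err, body, metadata) {
  if (err) return console.log(err);
  console.log('Uploaded, metadata: ', metadata);
});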