nodejs uploading to s3 using knox? - javascript

for example:
knox.js:
knox.putFile("local.jpeg", "upload.jpeg", {
"Content-Type": "image/jpeg"
}, function(err, result) {
if (err != null) {
return console.log(err);
} else {
return console.log("Uploaded to amazon S3");
I have two images in the same directory as knox.js, local.jpeg and local2.jpeg. I am able to upload local.jpeg to S3, but not local2.jpeg, even though both files have the same permissions. Am I missing anything here? Thanks.

My implementation without storing the file locally, using Express, knox, mime and fs.
// Shared knox S3 client, configured once from module-level credentials.
// All uploads below go through this client.
var knox = require('knox').createClient({
key: S3_KEY,
secret: S3_SECRET,
bucket: S3_BUCKET
});
exports.upload = function uploadToAmazon(req, res, next) {
var file = req.files.file;
var stream = fs.createReadStream(file.path)
var mimetype = mime.lookup(file.path);
var req;
if (mimetype.localeCompare('image/jpeg')
|| mimetype.localeCompare('image/pjpeg')
|| mimetype.localeCompare('image/png')
|| mimetype.localeCompare('image/gif')) {
req = knox.putStream(stream, file.name,
{
'Content-Type': mimetype,
'Cache-Control': 'max-age=604800',
'x-amz-acl': 'public-read',
'Content-Length': file.size
},
function(err, result) {
console.log(result);
}
);
} else {
next(new HttpError(HTTPStatus.BAD_REQUEST))
}
req.on('response', function(res){
if (res.statusCode == HTTPStatus.OK) {
res.json('url: ' + req.url)
} else {
next(new HttpError(res.statusCode))
}
});

That's because your code does not upload local2.jpeg!
Your code only pushes the file named local.jpeg. You should invoke the knox.putFile() method for every file. I also advise you to have a helper function that does some string formatting to rename the uploaded file on S3 (or just keep the name as it is :) )
// Upload every local file in `files` to S3 under an "upload_"-prefixed name.
// Fixes over the original: `for (file in files)` iterated the ARRAY INDICES
// ("0", "1"), so knox tried to upload files literally named "0" and "1",
// and `file` leaked as an implicit global. forEach iterates the values and
// gives each callback its own binding.
var files = ["local.jpeg", "local1.jpeg"];
files.forEach(function (file) {
  var upload_name = "upload_" + file; // or whatever you want it to be called
  knox.putFile(file, upload_name, {
    "Content-Type": "image/jpeg"
  }, function (err, result) {
    if (err != null) {
      return console.log(err);
    } else {
      return console.log("Uploaded to amazon S3");
    }
  });
});

Related

How to fix SSL error with NodeJS HTTPS server?

I'm new to Stack Overflow and this is my first question, so please bear with me. I never had to ask a question until I had a problem and couldn't find a solution. Anyways, my problem is my Node JS HTTPS server that I use to host my website has a problem where when the client requests a resource such as a video or an image, it's a 50% chance that the resource will properly load for the client. The times it doesn't load I get an SSL error in the console (for example, net::ERR_SSL_PROTOCOL_ERROR 200 (OK)). Sometimes it will load, sometimes it won't. I don't know if I have a misconfiguration with my server or not, but I need to get this fixed. I need the resource to load for the client 100% of the time. Any help would be great.
Here's how my server is configured.
// Shared request handler for both the HTTP and HTTPS servers.
// Buffers the request body, then routes by path:
//  - folder requests: run `index.js` server-side when its first line marks
//    it `// node-http`, otherwise fall back to index.html;
//  - extensionless paths: 301-redirect to the folder if one exists, else
//    probe the known extensions from getType('get-extensions');
//  - .js files marked `node-http`: execute with a fresh require();
//  - .mp4: serve a single byte-range chunk (HTTP 206);
//  - everything else: static file with Content-Type from getType().
const reqHandler = (req, res) => {
let body = '';
req.on('data', chunk => body += chunk.toString());
req.on('end', () => {
var path = req.url.split('?')[0];
var query = req.url.split('?')[1] || '';
res.statusCode = 200; // Assume a good request unless otherwise indicated
if (path.split('/').slice(-1)[0] === '') { // If client requests folder
if (existsSync(`./${root}${path}index.js`)) { // node-http has priority
let type = readFileSync(`./${root}${path}index.js`, 'utf-8').split('\n')[0];
type = type.replace('//', '').trim();
if (type === 'node-http') {
try {
// Drop the cached module so edits take effect without a server restart.
delete require.cache[require.resolve(`./${root}${path}index.js`)];
require(`./${root}${path}index.js`).callback(req, res, body, query);
} catch (e) {
res.write(`Node HTTP exception: ${e.message}`);
res.end();
}
return;
}
} else { // Otherwise, fallback to index.html
path = path + 'index.html';
}
} else { // If the client didn't request a folder
if (path.split('.').length === 1) { // No extension? No problem!
// See if it's a folder
if (existsSync(`./${root}${path}/`)) { // Redirect, if exists
res.statusCode = 301;
res.setHeader('Location', path + '/');
res.end();
return;
} else { // If not folder, assume try to find it
var extensions = getType('get-extensions');
for (var i of extensions) {
if (existsSync(`./${root}${path}.${i}`)) {
path = `${path}.${i}`;
break;
}
}
}
}
var extension = path.split('.').slice(-1)[0];
// node-http
if (extension === 'js') {
if (!existsSync(`./${root}${path}`)) {
res.statusCode = 404;
res.end();
return;
}
let type = readFileSync(`./${root}${path}`, 'utf-8').split('\n')[0];
type = type.replace('//', '').trim();
if (type === 'node-http') {
try {
delete require.cache[require.resolve(`./${root}${path}`)];
require(`./${root}${path}`).callback(req, res, body, query);
} catch (e) {
res.write(`Node HTTP exception: ${e.message}`);
res.end();
}
return;
}
}
if (extension === 'ws') {
// Websocket connection
return;
}
// videos
if (extension === 'mp4') {
var vidPath = `./${root}${path}`;
readFile(vidPath, (err, data) => {
if (err) {
if (err.code === 'ENOENT') {
res.statusCode = 404;
res.end('404 Not Found');
}
res.end();
return
} else {
var stats = statSync(vidPath);
let size = stats.size;
let chunkSize = 10 ** 6; // 1 megabyte (1,000,000 bytes)
// Strip non-digits from the Range header (e.g. "bytes=123-") to get the start byte.
let start = req.headers.range ? Number(req.headers.range.replace(/\D/g, "")) : 0;
let end = Math.min(start + chunkSize, size - 1);
// NOTE(review): a full chunk here is end - start + 1 = 1,000,001 bytes —
// confirm the off-by-one versus the intended 1 MB is deliberate.
let contentLength = end - start + 1;
// NOTE(review): Content-Range is set here AND again in writeHead below,
// and statusCode is assigned right before writeHead(206) — duplicated
// work worth consolidating.
res.setHeader('Content-Range', `bytes ${start}-${end}/${size}`);
res.statusCode = 206;
res.writeHead(206, {
'Content-Range': `bytes ${start}-${end}/${size}`,
'Content-Type': 'video/mp4',
'Content-Length': contentLength,
'Accept-Ranges': 'bytes',
'Date': new Date().toUTCString()
});
let stream = createReadStream(vidPath, {start, end});
stream.pipe(res);
}
});
return;
}
}
// Default static-file path: read the whole file and send it with a
// best-effort Content-Type from getType().
readFile(`./${root}${path}`, (err, data) => {
if (err) {
if (err.code === 'ENOENT') {
res.statusCode = 404;
res.end('404 Not Found');
}
} else {
let type = getType(path.split('.').slice(-1)[0]);
if (type !== null) {
res.setHeader('Content-Type', type);
}
res.end(data);
}
});
});
}
// Redirect HTTP traffic to HTTPS
// Plain-HTTP listener on port 80: permanently (301) redirects traffic for the
// canonical host to its HTTPS counterpart, and serves any other host directly
// through reqHandler.
http.createServer(function (req, res) {
  if (req.headers.host !== 'domain.com') {
    reqHandler(req, res);
    return;
  }
  res.statusCode = 301;
  res.setHeader('Location', `https://${req.headers.host}${req.url}`);
  res.end();
}).listen(80);
// HTTPS server
// HTTPS server on port 443: same reqHandler as the HTTP listener, with the
// TLS certificate, private key and CA chain read synchronously at startup.
const server = https.createServer({
cert: readFileSync('/path/to/cert.pem').toString(),
key: readFileSync('/path/to/key.pem').toString(),
ca: readFileSync('/path/to/chain.pem').toString()
}, reqHandler);
server.listen(443);

Implementing callbacks in javascript

I have a function in my helper js file to upload files on S3:
// Upload a base64 data-URI image to S3 under <folderName>/<fileName>.
// Logs success or failure; the caller gets no notification (see the
// callback/Promise variants below in this thread for that).
//
// Fixes over the original: the bucket name used typographic quotes
// (‘myBucket’), which is a syntax error, and deprecated `new Buffer(...)`
// is replaced with `Buffer.from(...)`.
module.exports.uploadToAWS = function uploadToAWS(folderName, fileName, fileData) {
  var s3 = new AWS.S3({ params: { Bucket: 'myBucket' } });
  var keyName = folderName + "/" + fileName;
  // Strip the "data:image/...;base64," prefix, then decode the payload.
  var buffer = Buffer.from(fileData.replace(/^data:image\/\w+;base64,/, ""), 'base64');
  var data = {
    Key: keyName,
    Body: buffer,
    ContentEncoding: 'base64',
    ContentType: 'image/jpeg'
  };
  s3.putObject(data, function (err, data) {
    if (err) {
      console.log(err);
      console.log('Error uploading data: ', data);
    } else {
      console.log('succesfully uploaded the image!');
    }
  });
};
My consumer function is like this:
if(images) {
images.forEach(function(imageRecord) {
awsHelper.uploadToAWS('assets', assetObject.id, imageRecord);
});
}
I want to introduce a callback method here, such that I might get the success and failure from my helper function to my caller function. How could it be implemented?
Need my consumer function to be like the following, but what would be the helper function be like?
if(images) {
images.forEach(function(imageRecord) {
awsHelper.uploadToAWS(
'assets',
assetObject.id,
imageRecord,
function (success, failure) {
//handle success or failure
});
});
}
Why don't you simply add a Callback in your "uploadToAWS" like :
module.exports.uploadToAWS = function uploadToAWS(folderName, fileName, fileData, callback){
Then make a simple validation and call it (Passing the Response) when your upload completes/fails
// Forward the S3 result to the caller-supplied callback in both branches.
// `callback` is optional, so each invocation is guarded by a typeof check.
s3.putObject(data, function(err, data){
if (err) {
console.log(err);
console.log('Error uploading data: ', data);
if(typeof callback === 'function') callback(err , data);
} else {
console.log('succesfully uploaded the image!');
if(typeof callback === 'function') callback(err , data);
}
});
After this you will use it exactly as you proposed
if(images){
images.forEach(function(imageRecord){
awsHelper.uploadToAWS('assets', assetObject.id, imageRecord, function (err , data) {
//handle success or failure
});
});
}
So by rewriting in the Promise style, you can:
// Promise-returning variant of uploadToAWS: resolves once the object is
// stored in S3, rejects with the AWS error otherwise.
//
// Fixes over the original: the typographic quotes around the bucket name
// were a syntax error, and deprecated `new Buffer(...)` is replaced with
// `Buffer.from(...)`.
module.exports.uploadToAWSAsync =
  function uploadToAWSAsync(folderName, fileName, fileData) {
    return new Promise(function (resolve, reject) {
      var s3 = new AWS.S3({ params: { Bucket: 'myBucket' } });
      var keyName = folderName + "/" + fileName;
      // Strip the data-URI prefix and decode the base64 payload.
      var buffer = Buffer.from(fileData.replace(/^data:image\/\w+;base64,/, ""),
        'base64');
      var data = {
        Key: keyName,
        Body: buffer,
        ContentEncoding: 'base64',
        ContentType: 'image/jpeg'
      };
      s3.putObject(data, function (err, data) {
        if (err) {
          console.log(err);
          console.log('Error uploading data: ', data);
          reject(err);
        } else {
          console.log('succesfully uploaded the image!');
          resolve();
        }
      });
    });
  };
You could rewrite you consumer code as follows:
//fire them all off in one go
var promises = images.map(function(imageRecord){
return awsHelper.uploadToAWSAsync('assets', assetObject.id, imageRecord);
});
// Wait for every upload. Promise.all is fail-fast: a single rejection lands
// in .catch() even while the remaining uploads may still be in flight.
Promise.all(promises).then(function(){
//success, everything uploaded
}).catch(function(err){
//something went wrong
});
You need to pass the callback function as an argument for the helper function.
Try this:
// Node-style-callback variant: invokes callback(err) on failure and
// callback(null, data) with the S3 response on success.
//
// Fixes over the original: the typographic quotes around the bucket name
// were a syntax error, and deprecated `new Buffer(...)` is replaced with
// `Buffer.from(...)`.
module.exports.uploadToAWS = function uploadToAWS(folderName, fileName, fileData, callback) {
  var s3 = new AWS.S3({ params: { Bucket: 'myBucket' } });
  var keyName = folderName + "/" + fileName;
  // Strip the data-URI prefix and decode the base64 payload.
  var buffer = Buffer.from(fileData.replace(/^data:image\/\w+;base64,/, ""), 'base64');
  var data = {
    Key: keyName,
    Body: buffer,
    ContentEncoding: 'base64',
    ContentType: 'image/jpeg'
  };
  s3.putObject(data, function (err, data) {
    if (err) {
      callback(err);
    } else {
      callback(null, data);
    }
  });
};

Uploading files using Skipper with Sails.js v0.10 - how to retrieve new file name

I am upgrading to Sails.js version 0.10 and now need to use Skipper to manage my file uploads.
When I upload a file I generate a new name for it using a UUID, and save it in the public/files/ folder (this will change when I've got this all working but it's good for testing right now)
I save the original name, and the uploaded name + path into a Mongo database.
This was all quite straightforward under Sails v0.9.x but using Skipper I can't figure out how to read the new file name and path. (Obviously if I could read the name I could construct the path though so it's really only the name I need)
My Controller looks like this
var uuid = require('node-uuid'),
path = require('path'),
blobAdapter = require('skipper-disk');
// Sails controller (the question's code): receives `docs` uploads via a
// skipper-disk receiver, renames each file to a UUID (keeping the original
// extension) and records name/size into Mongo via the Document model.
module.exports = {
upload: function(req, res) {
var receiver = blobAdapter().receive({
dirname: sails.config.appPath + "/public/files/",
// Called by skipper per file to choose the on-disk name.
saveAs: function(file) {
var filename = file.filename,
newName = uuid.v4() + path.extname(filename);
return newName;
}
}),
results = [];
req.file('docs').upload(receiver, function (err, files) {
if (err) return res.serverError(err);
async.forEach(files, function(file, next) {
Document.create({
name: file.filename,
size: file.size,
// FIXME(review): deliberate placeholders from the question — the
// saveAs() result and the path are not exposed on `file` here.
localName: // ***** how do I get the `saveAs()` value from the uploaded file *****,
path: // *** and likewise how do i get the path ******
}).exec(function(err, savedFile){
if (err) {
next(err);
} else {
results.push({
id: savedFile.id,
url: '/files/' + savedFile.localName
});
next();
}
});
}, function(err){
if (err) {
sails.log.error('caught error', err);
return res.serverError({error: err});
} else {
return res.json({ files: results });
}
});
});
},
_config: {}
};
How do I do this?
I've worked this out now and thought I'd share my solution for the benefit of others struggling with similar issues.
The solution was to not use skipper-disk at all but to write my own custom receiver. I've created this as a Sails Service object.
So in file api/services/Uploader.js
// Uploader utilities and helper methods
// designed to be relatively generic.
var fs = require('fs'),
Writable = require('stream').Writable;
exports.documentReceiverStream = function(options) {
var defaults = {
dirname: '/dev/null',
saveAs: function(file){
return file.filename;
},
completed: function(file, done){
done();
}
};
// I don't have access to jQuery here so this is the simplest way I
// could think of to merge the options.
opts = defaults;
if (options.dirname) opts.dirname = options.dirname;
if (options.saveAs) opts.saveAs = options.saveAs;
if (options.completed) opts.completed = options.completed;
var documentReceiver = Writable({objectMode: true});
// This `_write` method is invoked each time a new file is received
// from the Readable stream (Upstream) which is pumping filestreams
// into this receiver. (filename === `file.filename`).
documentReceiver._write = function onFile(file, encoding, done) {
var newFilename = opts.saveAs(file),
fileSavePath = opts.dirname + newFilename,
outputs = fs.createWriteStream(fileSavePath, encoding);
file.pipe(outputs);
// Garbage-collect the bytes that were already written for this file.
// (called when a read or write error occurs)
function gc(err) {
sails.log.debug("Garbage collecting file '" + file.filename + "' located at '" + fileSavePath + "'");
fs.unlink(fileSavePath, function (gcErr) {
if (gcErr) {
return done([err].concat([gcErr]));
} else {
return done(err);
}
});
};
file.on('error', function (err) {
sails.log.error('READ error on file ' + file.filename, '::', err);
});
outputs.on('error', function failedToWriteFile (err) {
sails.log.error('failed to write file', file.filename, 'with encoding', encoding, ': done =', done);
gc(err);
});
outputs.on('finish', function successfullyWroteFile () {
sails.log.debug("file uploaded")
opts.completed({
name: file.filename,
size: file.size,
localName: newFilename,
path: fileSavePath
}, done);
});
};
return documentReceiver;
}
and then my controller just became (in api/controllers/DocumentController.js)
var uuid = require('node-uuid'),
path = require('path');
// Slimmed-down controller: all receiver mechanics live in the Uploader
// service; this just supplies the naming and persistence hooks.
module.exports = {
upload: function(req, res) {
var results = [],
streamOptions = {
dirname: sails.config.appPath + "/public/files/",
// Name each stored file <uuid><original extension>.
saveAs: function(file) {
var filename = file.filename,
newName = uuid.v4() + path.extname(filename);
return newName;
},
// Persist the stored-file facts; `fileData` carries the
// name/size/localName/path object built by the Uploader service.
completed: function(fileData, next) {
Document.create(fileData).exec(function(err, savedFile){
if (err) {
next(err);
} else {
results.push({
id: savedFile.id,
url: '/files/' + savedFile.localName
});
next();
}
});
}
};
req.file('docs').upload(Uploader.documentReceiverStream(streamOptions),
function (err, files) {
if (err) return res.serverError(err);
res.json({
message: files.length + ' file(s) uploaded successfully!',
files: results
});
}
);
},
_config: {}
};
I'm sure it can be improved further but this works perfectly for me.
The uploaded file object contains all data you need:
req.file('fileTest').upload({
// You can apply a file upload limit (in bytes)
maxBytes: maxUpload,
adapter: require('skipper-disk')
}, function whenDone(err, uploadedFiles) {
if (err) {
var error = { "status": 500, "error" : err };
res.status(500);
return res.json(error);
} else {
for (var u in uploadedFiles) {
//"fd" contains the actual file path (and name) of your file on disk
fileOnDisk = uploadedFiles[u].fd;
// I suggest you stringify the object to see what it contains and might be useful to you
console.log(JSON.stringify(uploadedFiles[u]));
}
}
});

Upload images to twitter API from node.js

I'm attempting to post an image onto the twitter api, v1.1
I've tried just about all the example out there, and nothing seems to be able to post it.
include Posting images to twitter in Node.js using Oauth
I'm using the oauth library mentioned there, and I also had jsOauth, which I thought I'd give a shot according to https://gist.github.com/lukaszkorecki/1038408
Nothing has worked, and at this point I'm starting to lose hope on whether I can even do this.
// Post a tweet with an attached image via the statuses/update_with_media
// endpoint, using the jsOAuth client. Returns false after dispatching the
// request; reports a missing file through parseTwitterError.
//
// Fixes over the original: `options`, `callbacks` and `uploadData` were all
// assigned without `var`, leaking implicit globals (the `options =` inside
// the OAuth(...) call served no purpose at all).
function postStatusWithMedia(status, file) {
  var err = new Object();
  if (fs.existsSync(file) === false) {
    err.message = "File not found :(";
    parseTwitterError(err);
  } else {
    var oauth = OAuth({
      "consumerKey": consumer_key,
      "consumerSecret": consumer_secret,
      "accessTokenKey": access_token,
      "accessTokenSecret": access_token_secret
    });
    var callbacks = {
      onSuccess: function () {
        console.log('upload worked!');
      },
      onFailure: function () {
        console.log('upload failed!');
        console.dir(arguments);
      }
    };
    var uploadData = {
      'status': status,
      // The endpoint expects the raw image inline, base64-encoded.
      'media': Base64.encode(fs.readFileSync(file))
    };
    oauth.post('https://api.twitter.com/1.1/statuses/update_with_media.json', uploadData, callbacks.onSuccess, callbacks.onFailure);
    return false;
  }
}
If it can't be done, can you please explain why?
Otherwise, anything that could lead me to the right direction would be great.
var fs = require('fs');
var request = require('request');
var FormData = require('form-data');
var utf8 = require('utf8');
// Encode in UTF-8
status = utf8.encode(status);
// Build the multipart body by hand: the tweet text plus the image stream.
var form = new FormData();
form.append('status', status)
form.append('media[]', fs.createReadStream(file));
// Twitter OAuth
form.getLength(function(err, length){
if (err) {
return requestCallback(err);
}
var oauth = {
consumer_key: consumer_key,
consumer_secret: consumer_secret,
token: access_token,
token_secret: access_token_secret
};
var r = request.post({url:"https://api.twitter.com/1.1/statuses/update_with_media.json", oauth:oauth, host: "api.twitter.com", protocol: "https:"}, requestCallback);
// NOTE(review): `_form` is a private field of `request` — presumably set
// directly so our pre-built FormData (with its known length) is used as the
// body; confirm against the `request` version in use.
r._form = form;
r.setHeader('content-length', length);
});
// Final callback for the media-upload request: abort loudly on a transport
// error, otherwise report success.
function requestCallback(err, res, body) {
  if (!err) {
    console.log("Tweet and Image uploaded successfully!");
    return;
  }
  throw err;
}
I ended up using request and node-form-data to manually construct a multipart/form-data request and send it with the status request, utf8 was for encoding the status into UTF-8, not doing so caused issues with '<3' and other characters.
I have not tested this code myself; it's from my colleague, who is sure it works.
Perhaps this will help.
//twitter_update_with_media.js
// CoffeeScript-compiled wrapper around statuses/update_with_media: builds a
// multipart POST with `request`, streaming the image straight from a URL.
(function() {
var fs, path, request, twitter_update_with_media;
fs = require('fs');
path = require('path');
request = require('request');
twitter_update_with_media = (function() {
// auth_settings: OAuth credentials object handed straight to `request`.
function twitter_update_with_media(auth_settings) {
this.auth_settings = auth_settings;
this.api_url = 'https://api.twitter.com/1.1/statuses/update_with_media.json';
}
// Post `status` with the image fetched from imageUrl; `callback` receives
// the raw (err, response) pair from `request`.
twitter_update_with_media.prototype.post = function(status, imageUrl, callback) {
var form, r;
r = request.post(this.api_url, {
oauth: this.auth_settings
}, callback);
form = r.form();
form.append('status', status);
// Pipe the remote image into the multipart body without buffering it.
return form.append('media[]', request(imageUrl));
};
return twitter_update_with_media;
})();
module.exports = twitter_update_with_media;
}).call(this);
The next file:
//upload_to_twitter.js
var tuwm = new twitter_update_with_media({
consumer_key: TWITTER_OAUTH_KEY,
consumer_secret: TWITTER_OAUTH_SECRET,
token: access[0],
token_secret: access[1]
});
media_picture.picture = imageURL;
if (media_picture.picture) {
console.log('with media upload');
request.head(media_picture.picture,
function (error, response, body) {
if (!error && response.statusCode == 200) {
var image_size = response.headers['content-length'];
if (image_size > 2000000) { // 2mb max upload limit
console.log('greater than 2mb');
sendMessageWithoutImage(err, req, res, next, twit, wallpost, access);
} else {
console.log('less than 2mb');
console.log('twitter text', content);
tuwm.post(content, media_picture.picture, function(err, response) {
if (err) {
console.log('error', err);
return next(err);
}
error_parse = JSON.parse(response.body);
console.log('with media response', response.body);
if (error_parse.errors) {
console.log('have errors', error_parse);
res.json({
status: 500,
info: error_parse.errors[0].code + ' ' + error_parse.errors[0].message
});
} else {
res.json({
status: 200,
info: "OK",
id: response.id
});
}
});
}
}
});

How upload a file to Dropbox with dropbox.js?

ORIGINAL
I'm having problems to upload a file (image) to Dropbox from Node.js using the official dropbox.js.
I want to upload a picture that I have in another server. For example with the dropbpox icon (www.dropbox.com/static/images/new_logo.png).
// Save the remote logo URL string into the user's Dropbox as `file`.
// Fix over the original: the error branch called `es.send(...)` — a typo for
// `res.send(...)` that would itself throw a ReferenceError on failure.
client.writeFile(file, 'www.dropbox.com/static/images/new_logo.png', function (error, stat) {
  if (error) {
    return res.send(error.status); // Something went wrong.
  }
  res.send("File saved as revision " + stat.revisionTag);
});
I know that this only creates a text file with the url, but how I can upload the picture to Dropbox?
I also try to download the file using http.get and then upload this to dropbox but it doesn't work.
Thanks.
UPDATE WITH MORE INFO
First I download the image from a remote url with this code:
// Download the remote image and persist it to disk at `local`.
// NOTE(review): accumulating body chunks into a latin1 ('binary') string is a
// deprecated pattern — collecting Buffers and using Buffer.concat is safer.
var request = http.get(options, function(res){
var imagedata = ''
res.setEncoding('binary')
res.on('data', function(chunk){
imagedata += chunk
})
res.on('end', function(){
console.log("Image downloaded!");
fs.writeFile(local, imagedata, 'binary', function(err){
if (err) throw err
console.log('File saved.')
})
})
})
The file is saved correctly.
Then I tried two things:
Sending the 'imagedata' to Dropbox:
console.log("Image downloaded!");
// Attempt to write the in-memory image data straight to Dropbox.
// NOTE(review): `imagedata` is a latin1 string at this point, not a Buffer;
// dropbox-js needs Buffer/ArrayBuffer instances for binary uploads (see the
// accepted answer), which would explain the corrupt upload.
client.writeFile(file, imagedata, function(error, stat) {
if (error) {
return response.send(error.status); // Something went wrong.
}
response.send("File saved as revision " + stat.revisionTag);
});
And something is uploaded to Dropbox but it's nothing useful.
Then I also tried to read the file from disk and send it to Dropbox, but that doesn't work either:
fs.readFile(file, function(err, data) {
Use dropbox-js 0.9.1-beta1 or above to upload binary files from node.js. You need to pass it Buffer or ArrayBuffer instances. Try this code:
// Download the image into memory as raw bytes, then push the assembled
// Buffer to Dropbox once the response has finished streaming.
var req = http.get(options, function (res) {
  var parts = [];
  res.on('data', function (part) {
    parts.push(part);
  });
  res.on('end', function () {
    console.log("Image downloaded!");
    var imageData = Buffer.concat(parts);
    client.writeFile(file, imageData, function (error, stat) {
      if (error) {
        return response.send(error.status);
      }
      response.send("File saved as revision " + stat.revisionTag);
    });
  });
});
```
Original answer: the dropbox-js README mentions that binary files don't work in node.js just yet.
I had issue as well, I just copied and modified a bit on the old dropbox-node npm(which is now deprecated), but I added following function on dropbox.js.
// Binary-safe upload helper grafted onto dropbox-js's Client, ported from the
// deprecated dropbox-node package: reads a local file and PUTs it to the
// Dropbox putFile endpoint with a raw OAuth signature.
// path     - destination path inside the Dropbox.
// data     - object with a `path` property naming the local file to read.
// callback - receives (err, body, metadata?); metadata is parsed from the
//            x-dropbox-metadata response header when present.
Client.prototype.writeFileNodejs = function(path, data, callback) {
var self = this;
fs.readFile(data.path, function(err, data) {
if (err) return callback(err);
var uri = "" + self.urls.putFile + "/" + (self.urlEncodePath(path));
// FIXME(review): dead code — `data.path` was already dereferenced above and
// the parameter is shadowed here by the file contents, so this optional-
// argument shuffle can never take effect at this point.
if (typeof data === 'function') callback = data, data = undefined;
var oauth = {
consumer_key: self.oauth.key
, consumer_secret: self.oauth.secret
, token: self.oauth.token
, token_secret: self.oauth.tokenSecret
};
var requestOptions = { uri: uri, oauth: oauth };
requestOptions.body = data;
return request['put'](requestOptions, callback ?
function(err, res, body) {
if (err) return callback(err);
var contentType = res.headers['content-type'];
// check if the response body is in JSON format
if (contentType === 'application/json' ||
contentType === 'text/javascript') {
body = JSON.parse(body);
if (body.error) {
var err = new Error(body.error);
err.statusCode = res.statusCode;
return callback(err);
}
// NOTE(review): `errors` is a status-code->message map defined elsewhere
// in this module — confirm it is in scope here.
} else if (errors[res.statusCode]) {
var err = new Error(errors[res.statusCode]);
err.statusCode = res.statusCode;
return callback(err);
}
// check for metadata in headers
if (res.headers['x-dropbox-metadata']) {
var metadata = JSON.parse(res.headers['x-dropbox-metadata']);
}
callback(null, body, metadata);
} : undefined);
});
};
As well you would like to require request and fs to do this.
var request = require('request'),
fs = require('fs');

Categories

Resources