When I run the code below from the console, like myFunc.myfunc("SomeText"), it creates the bucket, but when I call the same function from a button it doesn't create anything. Sometimes it does, but then I have to wait around 20-30 minutes before it works again.
I am using Node.js-style modules on the client side, Browserify to make require() work in the browser, and cors-anywhere to get around the CORS policy.
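For reference, a bundle that exposes a global myFunc object (so myFunc.myfunc works in the console) would typically be built with Browserify's --standalone flag; the file names below are assumptions based on the comment in the code, not something stated in the question:
browserify uploadDocs.js --standalone myFunc -o uploadDocsBundle.js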
JS File:
// Load the SDK and UUID
function uploadToS3(dataForBody) {
    var AWS = require('aws-sdk'); // Browserify will bundle this into uploadDocsBundle
    var uuid = require('uuid');
    AWS.config.accessKeyId = 'my access key id';
    AWS.config.secretAccessKey = 'my secret access key';
    AWS.config.region = 'eu-west-1';
    // Create an S3 client
    var s3 = new AWS.S3();
    // Create a bucket and upload something into it
    var bucketName = 'node-sdk-sample-' + uuid.v4();
    var keyName = 'hello_world.txt';
    s3.createBucket({ Bucket: bucketName }, function (err) {
        if (err) {
            // Bail out if the bucket could not be created
            console.log(err);
            return;
        }
        var params = {
            Bucket: bucketName, Key: keyName, Body: dataForBody
        };
        s3.putObject(params, function (err, data) {
            if (err)
                console.log(err);
            else
                console.log("Successfully uploaded data to " + bucketName + "/" + keyName);
        });
    });
}
(function () {
    var cors_api_host = 'cors-anywhere.herokuapp.com';
    var cors_api_url = 'https://' + cors_api_host + '/';
    var slice = [].slice;
    var origin = window.location.protocol + '//' + window.location.host;
    var open = XMLHttpRequest.prototype.open;
    // Route any cross-origin request through the cors-anywhere proxy
    XMLHttpRequest.prototype.open = function () {
        var args = slice.call(arguments);
        var targetOrigin = /^https?:\/\/([^\/]+)/i.exec(args[1]);
        if (targetOrigin && targetOrigin[0].toLowerCase() !== origin &&
                targetOrigin[1] !== cors_api_host) {
            args[1] = cors_api_url + args[1];
        }
        return open.apply(this, args);
    };
})();
======================================================
HTML File:
<button type="submit" onClick="myFunc.myfunc('hello')" class="btn btn-primary">Submit</button>
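As an aside (and an assumption, since the surrounding markup isn't shown): if this button sits inside a <form>, type="submit" navigates the page as soon as it is clicked, which can abort in-flight asynchronous requests. A minimal non-submitting variant for comparison:
<button type="button" onClick="myFunc.myfunc('hello')" class="btn btn-primary">Submit</button>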
Edit 2:
=======
// Load the SDK and UUID
module.exports = { myfunc: uploadToS3 };
function uploadToS3(dataForBody) {
    var AWS = require('aws-sdk'); // Browserify will bundle this into uploadDocsBundle
    var uuid = require('uuid');
    AWS.config.accessKeyId = 'AccessKey';
    AWS.config.secretAccessKey = 'SecretAccessKey';
    AWS.config.region = 'eu-west-1';
    // Create a bucket and upload something into it
    var bucketName = 'node-sdk-sample-' + uuid.v4();
    var keyName = 'hello_world2.txt';
    var bucketPromise = new AWS.S3().createBucket({ Bucket: bucketName }).promise();
    // Handle promise fulfilled/rejected states
    bucketPromise.then(
        function (data) {
            // Create params for putObject call (note: dataForBody is unused here)
            var objectParams = { Bucket: bucketName, Key: keyName, Body: 'Hello World!' };
            // Create object upload promise; return it so the catch below also sees putObject failures
            var uploadPromise = new AWS.S3().putObject(objectParams).promise();
            return uploadPromise.then(
                function (data) {
                    console.log("Successfully uploaded data to " + bucketName + "/" + keyName);
                });
        }).catch(
        function (err) {
            console.error(err, err.stack);
        });
}
(function () {
    var cors_api_host = 'cors-anywhere.herokuapp.com';
    var cors_api_url = 'https://' + cors_api_host + '/';
    var slice = [].slice;
    var origin = window.location.protocol + '//' + window.location.host;
    var open = XMLHttpRequest.prototype.open;
    // Route any cross-origin request through the cors-anywhere proxy
    XMLHttpRequest.prototype.open = function () {
        var args = slice.call(arguments);
        var targetOrigin = /^https?:\/\/([^\/]+)/i.exec(args[1]);
        if (targetOrigin && targetOrigin[0].toLowerCase() !== origin &&
                targetOrigin[1] !== cors_api_host) {
            args[1] = cors_api_url + args[1];
        }
        return open.apply(this, args);
    };
})();
I made a YouTube API upload app. It works great with small video files, but with larger ones my app crashes. The exception happens when I try to get the video file with fetch().
Question: Is there a way I can fetch a large file in React Native and feed it into the YouTube API in smaller chunks?
Here is my fetch code:
const fetchResponse = await fetch(videoUri);
const blob = await fetchResponse.blob();
var file = new File([blob], "video.mp4", {type: "video/mp4"});
My YouTube upload code is taken from the following git repos, which supposedly support multipart upload as well:
https://github.com/youtube/api-samples/blob/master/javascript/cors_upload.js and
https://github.com/youtube/api-samples/blob/master/javascript/upload_video.js
Here is my full upload code:
uploadVideo = async function() {
    var match = this.state.match.value;
    var video = match.mergedVideo;
    var players = match.players;
    var scoreboard = this.state.match.value.scoreboard;
    var points = match.points;
    var title = players[0].name + " vs. " + players[1].name + " " + scoreboard;
    var description = this.descriptionBuilder(points, match.videos);
    /*const fetchResponse = await fetch(video);
    const blob = await fetchResponse.blob();
    var file = new File([blob], "video.mp4", {type: "video/mp4"});
    console.log(file);*/
    const file = await DocumentPicker.pick({
        type: [DocumentPicker.types.video],
    });
    var metadata = {
        snippet: {
            title: title,
            description: description,
            tags: ['youtube-cors-upload'],
            categoryId: 22
        },
        status: {
            privacyStatus: 'unlisted'
        }
    };
    var uploader = new MediaUploader({
        baseUrl: 'https://www.googleapis.com/upload/youtube/v3/videos',
        file: file,
        token: this.state.user.auth.accessToken,
        metadata: metadata,
        chunkSize: 1024 * 1024,
        params: {
            part: Object.keys(metadata).join(',')
        },
        onError: function(data) {
            console.log(data);
            var message = data;
            try {
                var errorResponse = JSON.parse(data);
                message = errorResponse.error.message;
            } finally {
                alert(message);
            }
        }.bind(this),
        onProgress: function(data) {
            var currentTime = Date.now();
            var bytesUploaded = data.loaded;
            var totalBytes = data.total;
            var bytesPerSecond = bytesUploaded / ((currentTime - window.uploadStartTime) / 1000);
            var estimatedSecondsRemaining = (totalBytes - bytesUploaded) / bytesPerSecond;
            var percentageComplete = (bytesUploaded * 100) / totalBytes;
            this.setState({ youtubeUploadProgress: percentageComplete / 100});
            console.log("Uploaded: " + bytesUploaded + " | Total: " + totalBytes + " | Percentage: " + percentageComplete + " | Estimated seconds remaining: " + estimatedSecondsRemaining);
        }.bind(this),
        onComplete: function(data) {
            console.log("Complete");
            alert("Upload successful!");
            this.setState({ youtubeUploadProgress: 0});
        }.bind(this)
    });
    window.uploadStartTime = Date.now();
    uploader.upload();
}
and this is my cors_upload.js converted into a React Native class module:
import React, { Component } from 'react';

// Default upload endpoint, kept at module scope so buildUrl_ can reach it
const DRIVE_UPLOAD_URL = 'https://www.googleapis.com/upload/drive/v2/files/';

export default class MediaUploader extends Component {
    constructor(props) {
        super(props);
        const obj = this;
        var options = props;
        var noop = function() {};
        this.file = options.file;
        this.contentType = options.contentType || this.file.type || 'application/octet-stream';
        this.metadata = options.metadata || {
            'title': this.file.name,
            'mimeType': this.contentType
        };
        this.token = options.token;
        this.onComplete = options.onComplete || noop;
        this.onProgress = options.onProgress || noop;
        this.onError = options.onError || noop;
        this.offset = options.offset || 0;
        this.chunkSize = options.chunkSize || 0;
        //this.retryHandler = new RetryHandler();
        //this.retryHandler = new obj.RetryHandler();
        this.interval = 1000; // Start at one second
        this.maxInterval = 60 * 1000;
        this.url = options.url;
        if (!this.url) {
            var params = options.params || {};
            params.uploadType = 'resumable';
            //this.url = this.buildUrl_(options.fileId, params, options.baseUrl);
            this.url = obj.buildUrl_(options.fileId, params, options.baseUrl);
        }
        this.httpMethod = options.fileId ? 'PUT' : 'POST';
    }
    retry = function(fn) {
        setTimeout(fn, this.interval);
        this.interval = this.nextInterval_();
    };
    reset = function() {
        this.interval = 1000;
    };
    nextInterval_ = function() {
        var interval = this.interval * 2 + this.getRandomInt_(0, 1000);
        return Math.min(interval, this.maxInterval);
    };
    getRandomInt_ = function(min, max) {
        return Math.floor(Math.random() * (max - min + 1) + min);
    };
    buildQuery_ = function(params) {
        params = params || {};
        return Object.keys(params).map(function(key) {
            return encodeURIComponent(key) + '=' + encodeURIComponent(params[key]);
        }).join('&');
    };
    buildUrl_ = function(id, params, baseUrl) {
        var url = baseUrl || DRIVE_UPLOAD_URL;
        if (id) {
            url += id;
        }
        var query = this.buildQuery_(params);
        if (query) {
            url += '?' + query;
        }
        return url;
    };
    upload = function() {
        //var self = this;
        console.log("UPLOAD called", this.file.size);
        var xhr = new XMLHttpRequest();
        xhr.open(this.httpMethod, this.url, true);
        xhr.setRequestHeader('Authorization', 'Bearer ' + this.token);
        xhr.setRequestHeader('Content-Type', 'application/json');
        xhr.setRequestHeader('X-Upload-Content-Length', this.file.size);
        xhr.setRequestHeader('X-Upload-Content-Type', this.contentType);
        xhr.onload = function(e) {
            console.log("ON LOAD CALLED");
            if (e.target.status < 400) {
                var location = e.target.getResponseHeader('Location');
                this.url = location;
                this.sendFile_();
            } else {
                this.onUploadError_(e);
            }
        }.bind(this);
        xhr.onerror = this.onUploadError_.bind(this);
        xhr.send(JSON.stringify(this.metadata));
    };
    sendFile_ = function() {
        console.log("SEND FILE CALLED");
        var content = this.file;
        var end = this.file.size;
        if (this.offset || this.chunkSize) {
            // Only bother to slice the file if we're either resuming or uploading in chunks
            if (this.chunkSize) {
                end = Math.min(this.offset + this.chunkSize, this.file.size);
            }
            content = content.slice(this.offset, end);
        }
        var xhr = new XMLHttpRequest();
        xhr.open('PUT', this.url, true);
        xhr.setRequestHeader('Content-Type', this.contentType);
        xhr.setRequestHeader('Content-Range', 'bytes ' + this.offset + '-' + (end - 1) + '/' + this.file.size);
        xhr.setRequestHeader('X-Upload-Content-Type', this.file.type);
        if (xhr.upload) {
            xhr.upload.addEventListener('progress', this.onProgress);
        }
        xhr.onload = this.onContentUploadSuccess_.bind(this);
        xhr.onerror = this.onContentUploadError_.bind(this);
        xhr.send(content);
    };
    resume_ = function() {
        var xhr = new XMLHttpRequest();
        xhr.open('PUT', this.url, true);
        xhr.setRequestHeader('Content-Range', 'bytes */' + this.file.size);
        xhr.setRequestHeader('X-Upload-Content-Type', this.file.type);
        if (xhr.upload) {
            xhr.upload.addEventListener('progress', this.onProgress);
        }
        xhr.onload = this.onContentUploadSuccess_.bind(this);
        xhr.onerror = this.onContentUploadError_.bind(this);
        xhr.send();
    };
    extractRange_ = function(xhr) {
        var range = xhr.getResponseHeader('Range');
        if (range) {
            this.offset = parseInt(range.match(/\d+/g).pop(), 10) + 1;
        }
    };
    onContentUploadSuccess_ = function(e) {
        if (e.target.status == 200 || e.target.status == 201) {
            this.onComplete(e.target.response);
        } else if (e.target.status == 308) {
            this.extractRange_(e.target);
            this.reset();
            this.sendFile_();
        }
    };
    onContentUploadError_ = function(e) {
        if (e.target.status && e.target.status < 500) {
            this.onError(e.target.response);
        } else {
            this.retry(this.resume_.bind(this));
        }
    };
    onUploadError_ = function(e) {
        this.onError(e.target.response); // TODO - Retries for initial upload
    };
}
UPDATE 1:
To avoid using fetch() I decided to use React Native Document Picker. Now I can select the video file and pass it to the MediaUploader following this guide: https://alishavineeth.medium.com/upload-a-video-from-a-mobile-device-to-youtube-using-react-native-eb2fa54a7445
Now, if I set the chunkSize option I get a .slice exception, because the DocumentPicker result is a plain metadata object rather than a Blob. If I pass the file without the chunkSize option, the metadata uploads to YouTube but the video status stays stuck on processing, without any other errors. The video upload itself never begins.
DocumentPicker responds with the following object after I select my file:
[{"fileCopyUri": "content://com.android.providers.media.documents/document/video%3A7853", "name": "video_1629795128339.mp4", "size": 192660773, "type": "video/mp4", "uri": "content://com.android.providers.media.documents/document/video%3A7853"}]
UPDATE 2:
Managed to fix my DocumentPicker file issue (from Update 1) by switching from React Native Document Picker to Expo Document Picker.
Now I am able to select large files and call the upload function - the metadata uploads and the video file begins to upload as well, but the app crashes during the upload. If I set the chunkSize option on the MediaUploader object I get [TypeError: content.slice is not a function. (In 'content.slice(this.offset, end)', 'content.slice' is undefined)]
Expo Document Picker responds with the following object after I select my video file:
{"name": "video_1629801588164.mp4", "size": 5799179, "type": "video/mp4", "uri": "file:///data/user/0/com.tennis.rec/cache/DocumentPicker/8b350fbf-1b66-4a78-a10f-b61eb2ed3032.mp4"}
UPDATE 3 - RESOLVED!!!
The chunk upload is working now! I modified my cors_upload.js where the chunkSize is evaluated and the file is sliced, using the following code:
// (sendFile_ is declared async so the await below is valid)
if (this.offset || this.chunkSize) {
    // Only bother to slice the file if we're either resuming or uploading in chunks
    if (this.chunkSize) {
        end = Math.min(this.offset + this.chunkSize, this.file.size);
    }
    console.log("CONTENT SLICE", this.offset, end, this.file.size);
    //content = content.slice(this.offset, end);
    // Read the chunk straight from disk instead of slicing a Blob
    var base64 = await RNFS.read(this.file.uri, this.chunkSize, this.offset, 'base64');
    content = Uint8Array.from(atob(base64), c => c.charCodeAt(0));
}
I added React Native File System (react-native-fs) and I am using its read() function to load each chunk as base64 and convert it back to a byte array.
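One caveat: atob is not guaranteed to exist as a global in every React Native runtime. If it comes up undefined, a small shim such as the decode function from the base-64 npm package can stand in (the package choice here is an assumption, not part of the original fix):
// only needed if atob is missing in your runtime
import { decode as atob } from 'base-64';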
I'm very new to JavaScript and I just want to log into a website from a Node.js request. This website needs information from the first visit in order to log in.
Here is my code.
var request = require('request');
var cheerio = require('cheerio');
var loginLink = 'link';
var loginJar = request.jar();
var ltValue = '';
request.get({url: loginLink, jar: loginJar}, function(err, httpResponse, html)
{
    var dat = cheerio.load(html);
    var arr = dat('input[name="lt"]');
    ltValue = arr.attr('value');
    arr = dat('input[name="execution"]');
    executionValue = arr.attr('value');
    /* Post body */
    var loginBody = 'username=' + usn + '&password=' + pwd + '&lt=' + ltValue + '&execution=' + executionValue;
    request.post({url: loginLink, jar: loginJar, method: 'post', json: true, body: loginBody}, function(err, res, b)
    {
        if (b.indexOf('errors') != -1)
            console.log("Success");
        else console.log("Fail");
    });
});
I have tried the same thing in C# and it works correctly, but my Node.js code always returns "Fail". I have tried many times but I couldn't get it to work. Please help me with this problem.
byte[] binData = Encoding.ASCII.GetBytes(loginBody);
string loginFile = "loginInfo.txt";
HttpWebRequest request = (HttpWebRequest)WebRequest.Create("link");
request.Method = "POST";
request.ContentType = "application/x-www-form-urlencoded";
request.ContentLength = binData.Length;
request.CookieContainer = cookieContainer;
using (Stream stream = request.GetRequestStream())
{
    stream.Write(binData, 0, binData.Length);
}
WebResponse response = request.GetResponse();
using (StreamReader reader = new StreamReader(response.GetResponseStream()))
{
    File.WriteAllText(loginFile, reader.ReadToEnd());
}
string loginData = userID + " " + password;
File.WriteAllText("login.txt", loginData);
I'm attempting to download a PDF document from any of the 'download' buttons on this website using Node's http module. How can I download the PDF document without getting the aspx file instead, which is what happens with my code? For some reason, my code downloads an aspx file that says 'Error Message - File does not exist or you do not have permission to view this file', even though I can easily download the file from my web browser. Here is my code:
var pdf_text = require("pdf-text");
var request = require("request");
var http = require("http");
var fs = require("fs");
var cheerio = require("cheerio");
var urllib = require("url");
var path = "final.pdf";
var url = "http://www2.nationalgrid.com/UK/Industry-information/System-charges/Electricity-transmission/Assistance-for-areas-with-high-distribution-costs/";
var links = [];
request(url, function(error, response, html) {
    if(!error && response.statusCode == 200) {
        var $ = cheerio.load(html);
        $(".txtLnk").each(function() {
            links.push("http://www2.nationalgrid.com" + $(this).attr("href"));
        });
        var file = fs.createWriteStream(urllib.parse(links[1]).pathname.split('/').pop());
        var options = {
            host: urllib.parse(links[1]).host,
            port: 80,
            path: urllib.parse(links[1]).pathname,
            headers: {
                "User-Agent": "Mozilla/5.0 (X11; Linux i686; rv:43.0) Gecko/201001101 Firefox/43.0"
            }
        };
        http.get(options, function(res) {
            res.on('data', function(data) {
                file.write(data);
            }).on('end', function() {
                file.end();
            });
        });
        console.log(links);
    }
});
function data_from_pdf(pdf) {
    pdf_text("pdf/" + pdf, function(err, chunks) {
        var data = chunks.join("").substring(chunks.join("").search("(p/kWh)") + 6, chunks.join("").search("(p/kWh)") + 21);
        var date = data.substring(0, data.indexOf("/") + 3);
        var rate = data.substring(data.indexOf("/") + 3);
        var json_data = "{" + "\n\tname: " + "final.pdf" + ",\n\tdate: " + date + ",\n\trate: " + rate + "\n}";
        return json_data; // note: this returns to the pdf_text callback, not to data_from_pdf's caller
    });
}
Turns out, if I just replace "options" with the base URL, it works. Strange. Problem solved. :)
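In other words, something like this (a sketch of what that change amounts to):
http.get(links[1], function(res) {
    res.on('data', function(data) {
        file.write(data);
    }).on('end', function() {
        file.end();
    });
});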
Try this:
var request = require("request");
var fs = require("fs");
var cheerio = require("cheerio");
var path = "./final.pdf";
var url = "http://www2.nationalgrid.com/UK/Industry-information/System-charges/Electricity-transmission/Assistance-for-areas-with-high-distribution-costs/";
var links = [];
request(url, function(error, response, html) {
    if(!error && response.statusCode == 200) {
        var $ = cheerio.load(html);
        $(".txtLnk").each(function() {
            links.push("http://www2.nationalgrid.com" + $(this).attr("href"));
        });
        var r = request(links[0]);
        r.on('response', function (res) {
            console.log(res.headers);
            res.pipe(fs.createWriteStream(path));
        });
    }
});
I am a newbie trying to create an application based on the poloniex.js API, and I'm getting an error: TypeError: curl.setopt is not a function. I tried node-curl (not working) and node-libcurl (partially works, but the function seems to be expressed incorrectly), and I'm slightly confused between the two curl packages. node-curl is outdated and maybe that's the problem - can you tell me what is wrong?
'use strict';
var autobahn = require('autobahn'),
    crypto = require('crypto'),
    async = require('async'),
    https = require('https'),
    nonce = require('nonce')(),
    querystring = require('querystring'),
    Curl = require('node-libcurl').Curl,
    microtime = require('microtime'),
    events = require('events'),
    util = require('util');
var Poloniex = function Poloniex() {};
Poloniex._query_tradeApi = function (req, callback) {
    var post_data,
        hash = crypto.createHmac('sha512', "key-key-key"),
        sign,
        received,
        headers;
    nonce = (new Date()).getTime() * 1000;
    post_data = querystring.stringify(req);
    hash.update(post_data);
    sign = hash.digest("hex");
    try {
        headers = [ 'Key: ' + "SECRET-SECRET-SECRET", 'Sign: ' + sign ];
        var curl = new Curl(),
            close = curl.close.bind( curl );
        curl.setopt('URL', 'https://poloniex.com/tradingApi/');
        curl.setopt('POST', 1);
        curl.setopt('POSTFIELDS', post_data);
        curl.setopt('HTTPHEADER', headers);
        received = '';
        curl.on('data', function (chunk) {
            received += chunk;
            return chunk.length;
        });
        curl.on('header', function (chunk) {
            return chunk.length;
        });
        curl.on('error', function (e) {
            console.error('exchanges/poloniex', '_query_tradeApi', e,
                req, e.stack);
            callback(e, undefined);
            curl.close();
        });
        curl.on('end', function () {
            try {
                var data = JSON.parse(received);
                callback(undefined, data);
            } catch (ex) {
                console.error('exchanges/poloniex', '_query_tradeApi',
                    ex, req, ex.stack);
                callback(ex, received);
            }
            curl.close();
        });
        curl.perform();
    } catch (ee) {
        console.error('exchanges/poloniex', '_query_tradeApi', ee,
            req, ee.stack);
        callback(ee, received);
    }
};
The syntax required is curl.setOpt, not curl.setopt.
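For illustration, a minimal sketch of the same calls with node-libcurl's casing (post_data and headers as in the code above):
var Curl = require('node-libcurl').Curl;
var curl = new Curl();
curl.setOpt('URL', 'https://poloniex.com/tradingApi/');
curl.setOpt('POST', 1);
curl.setOpt('POSTFIELDS', post_data);
curl.setOpt('HTTPHEADER', headers);
curl.on('end', function (statusCode, body) {
    // body holds the response payload
    curl.close();
});
curl.on('error', curl.close.bind(curl));
curl.perform();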
I am using Express and trying to POST an image to AWS S3 so it can be used throughout my app. I have been following this tutorial, and while I am able to successfully upload an image, the filename being given is default_name every single time, and I don't believe a file extension is being appended to give the file the proper image format. Looking at the s3upload.js script provided in the tutorial, I notice that default_name is the standard name it assigns to files, but I'm not sure why it accepts my file without using its own name.
events-create.ejs (where I have the upload):
<!DOCTYPE HTML>
<html>
<head>
    <% include ../partials/head %>
</head>
<body>
    <% include ../partials/navigation %>
    <div class="grid" id="create-event-container">
        <div class="col-1-1">
            <div id="create-event">
                <h1><i>Create Event</i></h1>
                <input type="file" id="files"/>
                <p id="status">Please select a file</p>
                <div id="preview"><img src="/images/event-placeholder.png"></div>
                <form action="/admin/events/create" method="POST">
                    <input type="hidden" id="speaker-image" name="speakerImage" value="/images/event-placeholder.png" />
                    Name: <input type="text" name="name"><br>
                    Title: <input type="text" name="title"><br>
                    Company: <input type="text" name="company"><br>
                    Website: <input type="text" name="url"><br>
                    <input type="submit" value="Submit"><br>
                </form>
            </div>
        </div>
    </div>
    <script type="text/javascript" src="/js/s3upload.js" async></script>
    <script>
        console.log("S3 Function Launched");
        function s3_upload(){
            var status_elem = document.getElementById("status");
            var url_elem = document.getElementById("speaker-image");
            var preview_elem = document.getElementById("preview");
            var s3upload = new S3Upload({
                file_dom_selector: 'files',
                s3_sign_put_url: '/sign_s3',
                onProgress: function(percent, message) {
                    status_elem.innerHTML = 'Upload progress: ' + percent + '% ' + message;
                },
                onFinishS3Put: function(public_url) {
                    status_elem.innerHTML = 'Upload completed. Uploaded to: '+ public_url;
                    url_elem.value = public_url;
                    console.log(public_url);
                    preview_elem.innerHTML = '<img src="'+public_url+'" style="width:300px;" />';
                },
                onError: function(status) {
                    status_elem.innerHTML = 'Upload error: ' + status;
                    console.log(status_elem.innerHTML);
                }
            });
        }
        /*
         * Listen for file selection:
         */
        (function() {
            var input_element = document.getElementById("files");
            input_element.onchange = s3_upload;
        })();
    </script>
</body>
</html>
routes.js:
var express = require('express');
var router = express.Router();
var Event = require('./models/eventsModel');
var http = require('http');
var path = require('path');
var aws = require('aws-sdk');
var AWS_ACCESS_KEY = process.env.AWS_ACCESS_KEY;
var AWS_SECRET_KEY = process.env.AWS_SECRET_KEY;
var S3_BUCKET = process.env.S3_BUCKET;
router.get('/sign_s3', function(req, res){
    aws.config.update({accessKeyId: AWS_ACCESS_KEY, secretAccessKey: AWS_SECRET_KEY });
    var s3 = new aws.S3();
    var s3_params = {
        Bucket: S3_BUCKET,
        Key: req.query.s3_object_name,
        Expires: 60,
        ContentType: req.query.s3_object_type,
        ACL: 'public-read'
    };
    s3.getSignedUrl('putObject', s3_params, function(err, data){
        if(err){
            console.log(err);
        }
        else{
            var return_data = {
                signed_request: data,
                url: 'https://'+S3_BUCKET+'.s3.amazonaws.com/'+req.query.s3_object_name
            };
            res.write(JSON.stringify(return_data));
            res.end();
        }
    });
});
router.route('/admin/events/create')
    .post(function(req, res){
        var events = new Event();
        events.name = req.body.name;
        events.title = req.body.title;
        events.company = req.body.company;
        events.url = req.body.url;
        events.speakerImage = req.body.speakerImage; // hidden field populated by the upload script
        events.save(function(err){
            if (err)
                return res.send(err);
            res.redirect(303, '/events');
        });
    })
    .get(function(req, res){
        Event.find(function(err, events){
            if (err)
                return res.send(err);
            res.render('pages/events-create.ejs');
        });
    });
s3upload.js:
(function() {
    window.S3Upload = (function() {
        S3Upload.prototype.s3_object_name = 'default_name';
        S3Upload.prototype.s3_sign_put_url = '/signS3put';
        S3Upload.prototype.file_dom_selector = 'file_upload';
        S3Upload.prototype.onFinishS3Put = function(public_url) {
            return console.log('base.onFinishS3Put()', public_url);
        };
        S3Upload.prototype.onProgress = function(percent, status) {
            return console.log('base.onProgress()', percent, status);
        };
        S3Upload.prototype.onError = function(status) {
            return console.log('base.onError()', status);
        };
        function S3Upload(options) {
            if (options == null) options = {};
            for (var option in options) {
                this[option] = options[option];
            }
            this.handleFileSelect(document.getElementById(this.file_dom_selector));
        }
        S3Upload.prototype.handleFileSelect = function(file_element) {
            var f, files, output, _i, _len, _results;
            this.onProgress(0, 'Upload started.');
            files = file_element.files;
            output = [];
            _results = [];
            for (_i = 0, _len = files.length; _i < _len; _i++) {
                f = files[_i];
                _results.push(this.uploadFile(f));
            }
            return _results;
        };
        S3Upload.prototype.createCORSRequest = function(method, url) {
            var xhr;
            xhr = new XMLHttpRequest();
            if (xhr.withCredentials != null) {
                xhr.open(method, url, true);
            } else if (typeof XDomainRequest !== "undefined") {
                xhr = new XDomainRequest();
                xhr.open(method, url);
            } else {
                xhr = null;
            }
            return xhr;
        };
        S3Upload.prototype.executeOnSignedUrl = function(file, callback) {
            var this_s3upload, xhr;
            this_s3upload = this;
            xhr = new XMLHttpRequest();
            xhr.open('GET', this.s3_sign_put_url + '?s3_object_type=' + file.type + '&s3_object_name=' + this.s3_object_name, true);
            xhr.overrideMimeType('text/plain; charset=x-user-defined');
            xhr.onreadystatechange = function(e) {
                var result;
                if (this.readyState === 4 && this.status === 200) {
                    try {
                        result = JSON.parse(this.responseText);
                    } catch (error) {
                        this_s3upload.onError('Signing server returned some ugly/empty JSON: "' + this.responseText + '"');
                        return false;
                    }
                    return callback(result.signed_request, result.url);
                } else if (this.readyState === 4 && this.status !== 200) {
                    return this_s3upload.onError('Could not contact request signing server. Status = ' + this.status);
                }
            };
            return xhr.send();
        };
        S3Upload.prototype.uploadToS3 = function(file, url, public_url) {
            var this_s3upload, xhr;
            this_s3upload = this;
            xhr = this.createCORSRequest('PUT', url);
            if (!xhr) {
                this.onError('CORS not supported');
            } else {
                xhr.onload = function() {
                    if (xhr.status === 200) {
                        this_s3upload.onProgress(100, 'Upload completed.');
                        return this_s3upload.onFinishS3Put(public_url);
                    } else {
                        return this_s3upload.onError('Upload error: ' + xhr.status);
                    }
                };
                xhr.onerror = function() {
                    return this_s3upload.onError('XHR error.');
                };
                xhr.upload.onprogress = function(e) {
                    var percentLoaded;
                    if (e.lengthComputable) {
                        percentLoaded = Math.round((e.loaded / e.total) * 100);
                        return this_s3upload.onProgress(percentLoaded, percentLoaded === 100 ? 'Finalizing.' : 'Uploading.');
                    }
                };
            }
            xhr.setRequestHeader('Content-Type', file.type);
            xhr.setRequestHeader('x-amz-acl', 'public-read');
            return xhr.send(file);
        };
        S3Upload.prototype.uploadFile = function(file) {
            var this_s3upload;
            this_s3upload = this;
            return this.executeOnSignedUrl(file, function(signedURL, publicURL) {
                return this_s3upload.uploadToS3(file, signedURL, publicURL);
            });
        };
        return S3Upload;
    })();
}).call(this);
You could either set the filename on the client side or the server side.
Client-side: In events-create.ejs, pass this parameter to S3Upload:
s3_object_name: $('input[type=file]').val().match(/[^\/\\]+$/)[0]
Server-side (preferred method): In routes.js, replace all instances of req.query.s3_object_name with a unique filename. You can use req.query.s3_object_type to determine the extension to put on the end of the filename. You want a unique filename here because everything is stored in the same bucket, and AWS automatically overwrites files that have the same name.
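A minimal sketch of that idea (the uuid package is an assumption here; any unique-id scheme works):
var uuid = require('uuid');
var ext = (req.query.s3_object_type || '').split('/')[1]; // e.g. 'image/png' -> 'png'
var s3_object_name = uuid.v4() + (ext ? '.' + ext : '');
// then use s3_object_name in s3_params.Key and in the returned url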
I came across the same issue; this is how I tackled it within my Node controller:
aws.config.update({accessKeyId: AWS_ACCESS_KEY, secretAccessKey: AWS_SECRET_KEY});
var s3 = new aws.S3();
// Set extension (declared up front so it stays defined for unexpected types)
var ext = '';
switch(req.query.s3_object_type) {
    case 'image/png':
        ext = '.png';
        break;
    case 'image/gif':
        ext = '.gif';
        break;
    case 'image/jpg':
    case 'image/jpeg':
        ext = '.jpg';
        break;
}
// Rename file using a timestamp
var name = Math.floor(new Date() / 1000);
// Set S3 params
var s3_params = {
    Bucket: S3_BUCKET,
    Key: 'blog/'+name+ext,
    Expires: 60,
    ContentType: req.query.s3_object_type,
    ACL: 'public-read'
};
// Sign and send
s3.getSignedUrl('putObject', s3_params, function(err, data){
    if(err){
        console.log(err);
    }
    else{
        var return_data = {
            signed_request: data,
            url: 'https://'+S3_BUCKET+'.s3.amazonaws.com/'+name+ext
        };
        res.write(JSON.stringify(return_data));
        res.end();
    }
});
So as you can see, it's a pretty simple solution to the problem: just check the extension and rename the file. Hope this helps.