Filename not being used as AWS S3 name - javascript

I am using expressjs and trying to POST an image to AWS S3 so that it can be used throughout my app. I have been following this tutorial, and while I am able to successfully upload an image, the filename it is given is default_name every single time, and I don't believe a file extension is being appended to give the file the proper image format. When I look at the s3upload.js script provided in the tutorial, I notice that default_name is the standard name it assigns to files, but I'm not sure why it is accepting my file without using its title.
events-create.ejs (Where I have the upload):
<!DOCTYPE HTML>
<html>
<head>
<% include ../partials/head %>
</head>
<body>
<% include ../partials/navigation %>
<div class="grid" id="create-event-container">
<div class="col-1-1">
<div id="create-event">
<h1><i>Create Event</i></h1>
<input type="file" id="files"/>
<p id="status">Please select a file</p>
<div id="preview"><img src="/images/event-placeholder.png"></div>
<form action="/admin/events/create" method="POST">
<input type="hidden" id="speaker-image" name="speakerImage" value="/images/event-placeholder.png" />
Name: <input type="text" name="name"><br>
Title: <input type="text" name="title"><br>
Company: <input type="text" name="company"><br>
Website: <input type="text" name="url"><br>
<input type="submit" value="Submit"><br>
</form>
</div>
</div>
</div>
<script type="text/javascript" src="/js/s3upload.js" async></script>
<script>
console.log("S3 Function Launched");
function s3_upload(){
var status_elem = document.getElementById("status");
var url_elem = document.getElementById("speaker-image");
var preview_elem = document.getElementById("preview");
var s3upload = new S3Upload({
file_dom_selector: 'files',
s3_sign_put_url: '/sign_s3',
onProgress: function(percent, message) {
status_elem.innerHTML = 'Upload progress: ' + percent + '% ' + message;
},
onFinishS3Put: function(public_url) {
status_elem.innerHTML = 'Upload completed. Uploaded to: '+ public_url;
url_elem.value = public_url;
console.log(public_url);
preview_elem.innerHTML = '<img src="'+public_url+'" style="width:300px;" />';
},
onError: function(status) {
status_elem.innerHTML = 'Upload error: ' + status;
console.log(status_elem.innerHTML);
}
});
}
/*
* Listen for file selection:
*/
(function() {
var input_element = document.getElementById("files");
input_element.onchange = s3_upload;
})();
</script>
</body>
</html>
routes.js:
var express = require('express');
var router = express.Router();
var Event = require('./models/eventsModel');
var http = require('http');
var path = require('path');
var aws = require('aws-sdk');
var AWS_ACCESS_KEY = process.env.AWS_ACCESS_KEY;
var AWS_SECRET_KEY = process.env.AWS_SECRET_KEY;
var S3_BUCKET = process.env.S3_BUCKET;
router.get('/sign_s3', function(req, res){
aws.config.update({accessKeyId: AWS_ACCESS_KEY, secretAccessKey: AWS_SECRET_KEY });
var s3 = new aws.S3();
var s3_params = {
Bucket: S3_BUCKET,
Key: req.query.s3_object_name,
Expires: 60,
ContentType: req.query.s3_object_type,
ACL: 'public-read'
};
s3.getSignedUrl('putObject', s3_params, function(err, data){
if(err){
console.log(err);
}
else{
var return_data = {
signed_request: data,
url: 'https://'+S3_BUCKET+'.s3.amazonaws.com/'+req.query.s3_object_name
};
res.write(JSON.stringify(return_data));
res.end();
}
});
});
router.route('/admin/events/create')
.post(function(req, res){
var events = new Event();
events.name = req.body.name;
events.title = req.body.title;
events.company = req.body.company;
events.url = req.body.url;
events.speakerImage = req.body.url;
events.save(function(err){
if (err)
res.send(err);
res.redirect(303, '/events');
});
})
.get(function(req, res){
Event.find(function(err, events){
if (err)
res.send(err);
res.render('pages/events-create.ejs');
});
});
s3upload.js:
(function() {
window.S3Upload = (function() {
S3Upload.prototype.s3_object_name = 'default_name';
S3Upload.prototype.s3_sign_put_url = '/signS3put';
S3Upload.prototype.file_dom_selector = 'file_upload';
S3Upload.prototype.onFinishS3Put = function(public_url) {
return console.log('base.onFinishS3Put()', public_url);
};
S3Upload.prototype.onProgress = function(percent, status) {
return console.log('base.onProgress()', percent, status);
};
S3Upload.prototype.onError = function(status) {
return console.log('base.onError()', status);
};
function S3Upload(options) {
if (options == null) options = {};
for (var option in options) {
this[option] = options[option];
}
this.handleFileSelect(document.getElementById(this.file_dom_selector));
}
S3Upload.prototype.handleFileSelect = function(file_element) {
var f, files, output, _i, _len, _results;
this.onProgress(0, 'Upload started.');
files = file_element.files;
output = [];
_results = [];
for (_i = 0, _len = files.length; _i < _len; _i++) {
f = files[_i];
_results.push(this.uploadFile(f));
}
return _results;
};
S3Upload.prototype.createCORSRequest = function(method, url) {
var xhr;
xhr = new XMLHttpRequest();
if (xhr.withCredentials != null) {
xhr.open(method, url, true);
} else if (typeof XDomainRequest !== "undefined") {
xhr = new XDomainRequest();
xhr.open(method, url);
} else {
xhr = null;
}
return xhr;
};
S3Upload.prototype.executeOnSignedUrl = function(file, callback) {
var this_s3upload, xhr;
this_s3upload = this;
xhr = new XMLHttpRequest();
xhr.open('GET', this.s3_sign_put_url + '?s3_object_type=' + file.type + '&s3_object_name=' + this.s3_object_name, true);
xhr.overrideMimeType('text/plain; charset=x-user-defined');
xhr.onreadystatechange = function(e) {
var result;
if (this.readyState === 4 && this.status === 200) {
try {
result = JSON.parse(this.responseText);
} catch (error) {
this_s3upload.onError('Signing server returned some ugly/empty JSON: "' + this.responseText + '"');
return false;
}
return callback(result.signed_request, result.url);
} else if (this.readyState === 4 && this.status !== 200) {
return this_s3upload.onError('Could not contact request signing server. Status = ' + this.status);
}
};
return xhr.send();
};
S3Upload.prototype.uploadToS3 = function(file, url, public_url) {
var this_s3upload, xhr;
this_s3upload = this;
xhr = this.createCORSRequest('PUT', url);
if (!xhr) {
this.onError('CORS not supported');
} else {
xhr.onload = function() {
if (xhr.status === 200) {
this_s3upload.onProgress(100, 'Upload completed.');
return this_s3upload.onFinishS3Put(public_url);
} else {
return this_s3upload.onError('Upload error: ' + xhr.status);
}
};
xhr.onerror = function() {
return this_s3upload.onError('XHR error.');
};
xhr.upload.onprogress = function(e) {
var percentLoaded;
if (e.lengthComputable) {
percentLoaded = Math.round((e.loaded / e.total) * 100);
return this_s3upload.onProgress(percentLoaded, percentLoaded === 100 ? 'Finalizing.' : 'Uploading.');
}
};
}
xhr.setRequestHeader('Content-Type', file.type);
xhr.setRequestHeader('x-amz-acl', 'public-read');
return xhr.send(file);
};
S3Upload.prototype.uploadFile = function(file) {
var this_s3upload;
this_s3upload = this;
return this.executeOnSignedUrl(file, function(signedURL, publicURL) {
return this_s3upload.uploadToS3(file, signedURL, publicURL);
});
};
return S3Upload;
})();
}).call(this);

You could either set the filename on the client side or the server side.
Client-side: In events-create.ejs, pass this parameter to S3Upload:
s3_object_name: $('input[type=file]').val().match(/[^\/\\]+$/)[0]
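For instance, the s3_upload() function in events-create.ejs would construct S3Upload roughly like this (a sketch; reading files[0].name avoids the C:\fakepath\ prefix that .val() returns in some browsers):
var fileInput = document.getElementById('files');
var s3upload = new S3Upload({
  file_dom_selector: 'files',
  s3_sign_put_url: '/sign_s3',
  // Override the 'default_name' fallback with the selected file's real name
  s3_object_name: fileInput.files[0].name,
  onProgress: function(percent, message) { /* as before */ },
  onFinishS3Put: function(public_url) { /* as before */ },
  onError: function(status) { /* as before */ }
});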
Server-side (Preferred method): In routes.js, replace all instances of req.query.s3_object_name with a unique filename. You can use req.query.s3_object_type to determine the extension to put on the end of the filename. You want a unique filename here because everything is stored in the same bucket, and S3 silently overwrites objects that share the same key.
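A minimal sketch of that server-side approach, using Node's built-in crypto module (the MIME-to-extension map here is my own assumption based on the image types involved):
var crypto = require('crypto');
// Ignore the client-supplied name entirely; derive the extension from the MIME type
var EXT_BY_TYPE = { 'image/png': '.png', 'image/jpeg': '.jpg', 'image/gif': '.gif' };
var ext = EXT_BY_TYPE[req.query.s3_object_type] || '';
var objectName = crypto.randomBytes(16).toString('hex') + ext;
// ...then use objectName as the Key in s3_params and in the returned url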

I came across the same issue; this is how I tackled it in my Node controller:
aws.config.update({accessKeyId: AWS_ACCESS_KEY, secretAccessKey: AWS_SECRET_KEY});
var s3 = new aws.S3();
// Set Extension (with a fallback so unknown types don't produce an undefined ext)
var ext = '';
switch(req.query.s3_object_type) {
case 'image/png':
ext = '.png';
break;
case 'image/gif':
ext = '.gif';
break;
case 'image/jpg':
case 'image/jpeg':
ext = '.jpg';
break;
}
// Rename File
var name = Math.floor(new Date() / 1000);
// Set S3
var s3_params = {
Bucket: S3_BUCKET,
Key: 'blog/'+name+ext,
Expires: 60,
ContentType: req.query.s3_object_type,
ACL: 'public-read'
};
// Send S3
s3.getSignedUrl('putObject', s3_params, function(err, data){
if(err){
console.log(err);
}
else{
var return_data = {
signed_request: data,
url: 'https://'+S3_BUCKET+'.s3.amazonaws.com/'+name+ext
};
res.write(JSON.stringify(return_data));
res.end();
}
});
So as you can see, it's a pretty simple solution to the problem: just check the extension and rename the file. Hope this helps.
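One design note on the snippet above: Math.floor(new Date() / 1000) has one-second resolution, so two uploads landing in the same second would still overwrite each other. Appending a few random bytes removes that risk, for example:
var crypto = require('crypto');
var name = Date.now() + '-' + crypto.randomBytes(4).toString('hex'); // e.g. "1700000000000-9f86d081"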

Related

React Native Fetch video file by chunk / YouTube Data API chunk(multipart) upload

I made a YouTube API upload app. It works great with small video files, but with larger sizes my app crashes. The exception happens when I try to get the video file with Fetch().
Question: Is there a way I can fetch a large file in React Native and feed it into the YouTube API in smaller chunks?
Here is my fetch code:
const fetchResponse = await fetch(videoUri);
const blob = await fetchResponse.blob();
var file = new File([blob], "video.mp4", {type: "video/mp4"});
My YouTube upload code is taken from the following git repos, which supposedly support multipart upload as well:
https://github.com/youtube/api-samples/blob/master/javascript/cors_upload.js and
https://github.com/youtube/api-samples/blob/master/javascript/upload_video.js
Here is my full upload code:
uploadVideo = async function() {
var match = this.state.match.value;
var video = match.mergedVideo;
var players = match.players;
var scoreboard = this.state.match.value.scoreboard;
var points = match.points;
var title = players[0].name + " vs. " + players[1].name + " " + scoreboard;
var description = this.descriptionBuilder(points, match.videos);
/*const fetchResponse = await fetch(video);
const blob = await fetchResponse.blob();
var file = new File([blob], "video.mp4", {type: "video/mp4"});
console.log(file);*/
const file = await DocumentPicker.pick({
type: [DocumentPicker.types.video],
});
var metadata = {
snippet: {
title: title,
description: description,
tags: ['youtube-cors-upload'],
categoryId: 22
},
status: {
privacyStatus: 'unlisted'
}
};
var uploader = new MediaUploader({
baseUrl: 'https://www.googleapis.com/upload/youtube/v3/videos',
file: file,
token: this.state.user.auth.accessToken,
metadata: metadata,
chunkSize: 1024 * 1024,
params: {
part: Object.keys(metadata).join(',')
},
onError: function(data) {
console.log(data);
var message = data;
try {
var errorResponse = JSON.parse(data);
message = errorResponse.error.message;
} finally {
alert(message);
}
}.bind(this),
onProgress: function(data) {
var currentTime = Date.now();
var bytesUploaded = data.loaded;
var totalBytes = data.total;
var bytesPerSecond = bytesUploaded / ((currentTime - window.uploadStartTime) / 1000);
var estimatedSecondsRemaining = (totalBytes - bytesUploaded) / bytesPerSecond;
var percentageComplete = (bytesUploaded * 100) / totalBytes;
this.setState({ youtubeUploadProgress: percentageComplete / 100});
console.log("Uploaded: " + bytesUploaded + " | Total: " + totalBytes + " | Percentage: " + percentageComplete + " | Esitmated seconds remaining: " + estimatedSecondsRemaining);
}.bind(this),
onComplete: function(data) {
console.log("Complete");
alert("Upload successful!");
this.setState({ youtubeUploadProgress: 0});
}.bind(this)
});
window.uploadStartTime = Date.now();
uploader.upload();
}
and this is my cors_upload.js in React Native class module:
import React, { Component } from 'react';
export default class MediaUploader extends Component {
constructor(props) {
super(props);
const obj = this;
const DRIVE_UPLOAD_URL = 'https://www.googleapis.com/upload/drive/v2/files/';
var options = props;
var noop = function() {};
this.file = options.file;
this.contentType = options.contentType || this.file.type || 'application/octet-stream';
this.metadata = options.metadata || {
'title': this.file.name,
'mimeType': this.contentType
};
this.token = options.token;
this.onComplete = options.onComplete || noop;
this.onProgress = options.onProgress || noop;
this.onError = options.onError || noop;
this.offset = options.offset || 0;
this.chunkSize = options.chunkSize || 0;
//this.retryHandler = new RetryHandler();
//this.retryHandler = new obj.RetryHandler();
this.interval = 1000; // Start at one second
this.maxInterval = 60 * 1000;
this.url = options.url;
if (!this.url) {
var params = options.params || {};
params.uploadType = 'resumable';
//this.url = this.buildUrl_(options.fileId, params, options.baseUrl);
this.url = obj.buildUrl_(options.fileId, params, options.baseUrl);
}
this.httpMethod = options.fileId ? 'PUT' : 'POST';
}
retry = function(fn) {
setTimeout(fn, this.interval);
this.interval = this.nextInterval_();
};
reset = function() {
this.interval = 1000;
};
nextInterval_ = function() {
var interval = this.interval * 2 + this.getRandomInt_(0, 1000);
return Math.min(interval, this.maxInterval);
};
getRandomInt_ = function(min, max) {
return Math.floor(Math.random() * (max - min + 1) + min);
};
buildQuery_ = function(params) {
params = params || {};
return Object.keys(params).map(function(key) {
return encodeURIComponent(key) + '=' + encodeURIComponent(params[key]);
}).join('&');
};
buildUrl_ = function(id, params, baseUrl) {
var url = baseUrl || DRIVE_UPLOAD_URL;
if (id) {
url += id;
}
var query = this.buildQuery_(params);
if (query) {
url += '?' + query;
}
return url;
};
upload = function() {
//var self = this;
console.log("UPLOAD called", this.file.size);
var xhr = new XMLHttpRequest();
xhr.open(this.httpMethod, this.url, true);
xhr.setRequestHeader('Authorization', 'Bearer ' + this.token);
xhr.setRequestHeader('Content-Type', 'application/json');
xhr.setRequestHeader('X-Upload-Content-Length', this.file.size);
xhr.setRequestHeader('X-Upload-Content-Type', this.contentType);
xhr.onload = function(e) {
console.log("ON LOAD CALLED");
if (e.target.status < 400) {
var location = e.target.getResponseHeader('Location');
this.url = location;
this.sendFile_();
} else {
this.onUploadError_(e);
}
}.bind(this);
xhr.onerror = this.onUploadError_.bind(this);
xhr.send(JSON.stringify(this.metadata));
};
sendFile_ = function() {
console.log("SEND FILE CALLED");
var content = this.file;
var end = this.file.size;
if (this.offset || this.chunkSize) {
// Only bother to slice the file if we're either resuming or uploading in chunks
if (this.chunkSize) {
end = Math.min(this.offset + this.chunkSize, this.file.size);
}
content = content.slice(this.offset, end);
}
var xhr = new XMLHttpRequest();
xhr.open('PUT', this.url, true);
xhr.setRequestHeader('Content-Type', this.contentType);
xhr.setRequestHeader('Content-Range', 'bytes ' + this.offset + '-' + (end - 1) + '/' + this.file.size);
xhr.setRequestHeader('X-Upload-Content-Type', this.file.type);
if (xhr.upload) {
xhr.upload.addEventListener('progress', this.onProgress);
}
xhr.onload = this.onContentUploadSuccess_.bind(this);
xhr.onerror = this.onContentUploadError_.bind(this);
xhr.send(content);
};
resume_ = function() {
var xhr = new XMLHttpRequest();
xhr.open('PUT', this.url, true);
xhr.setRequestHeader('Content-Range', 'bytes */' + this.file.size);
xhr.setRequestHeader('X-Upload-Content-Type', this.file.type);
if (xhr.upload) {
xhr.upload.addEventListener('progress', this.onProgress);
}
xhr.onload = this.onContentUploadSuccess_.bind(this);
xhr.onerror = this.onContentUploadError_.bind(this);
xhr.send();
};
extractRange_ = function(xhr) {
var range = xhr.getResponseHeader('Range');
if (range) {
this.offset = parseInt(range.match(/\d+/g).pop(), 10) + 1;
}
};
onContentUploadSuccess_ = function(e) {
if (e.target.status == 200 || e.target.status == 201) {
this.onComplete(e.target.response);
} else if (e.target.status == 308) {
this.extractRange_(e.target);
this.reset();
this.sendFile_();
}
};
onContentUploadError_ = function(e) {
if (e.target.status && e.target.status < 500) {
this.onError(e.target.response);
} else {
this.retry(this.resume_.bind(this));
}
};
onUploadError_ = function(e) {
this.onError(e.target.response); // TODO - Retries for initial upload
};
}
UPDATE 1:
To avoid using Fetch() I decided to use React Native Document Picker. Now I can select the video file and pass it to the MediaUploader, following this guide: https://alishavineeth.medium.com/upload-a-video-from-a-mobile-device-to-youtube-using-react-native-eb2fa54a7445
Now if I set the chunkSize option, I receive a .slice exception because the object structure doesn't match what the uploader expects. If I pass the file without the chunkSize option, the metadata uploads to YouTube but the video status stays stuck on processing with no other errors; the video upload itself never begins.
DocumentPicker responds with the following object after I select my file:
[{"fileCopyUri": "content://com.android.providers.media.documents/document/video%3A7853", "name": "video_1629795128339.mp4", "size": 192660773, "type": "video/mp4", "uri": "content://com.android.providers.media.documents/document/video%3A7853"}]
UPDATE 2:
Managed to fix my DocumentPicker file issue (from Update 1) by switching from React Native Document Picker to Expo Document Picker.
Now I am able to select large files and call the upload function: the metadata uploads and the video file begins to upload as well, but the app crashes during the upload. If I set the chunkSize option on the MediaUploader object I get [TypeError: content.slice is not a function. (In 'content.slice(this.offset, end)', 'content.slice' is undefined)]
Expo Document Picker responds with the following object after I select my video file:
{"name": "video_1629801588164.mp4", "size": 5799179, "type": "video/mp4", "uri": "file:///data/user/0/com.tennis.rec/cache/DocumentPicker/8b350fbf-1b66-4a78-a10f-b61eb2ed3032.mp4"}
UPDATE 3 - RESOLVED!!!
The chunk upload is working now! I modified the part of my cors_upload.js where the chunkSize is evaluated and sliced, using the following code:
if (this.offset || this.chunkSize) {
// Only bother to slice the file if we're either resuming or uploading in chunks
if (this.chunkSize) {
end = Math.min(this.offset + this.chunkSize, this.file.size);
}
console.log("CONTENT SLICE", this.offset, end, this.file.size);
//content = content.slice(this.offset, end);
var base64 = await RNFS.read(this.file.uri, this.chunkSize, this.offset, 'base64');
content = Uint8Array.from(atob(base64), c => c.charCodeAt(0));
}
I added React Native File System and I am using its read() function to load each chunk as base64 and convert it back to a byte array. (Note that sendFile_ must be declared as an async function for the await call to work.)
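For reference, the modified sendFile_ would then look roughly like this (a sketch, assuming react-native-fs is imported as RNFS and that the method is made async):
import RNFS from 'react-native-fs';

sendFile_ = async function() {
  var end = this.file.size;
  if (this.chunkSize) {
    end = Math.min(this.offset + this.chunkSize, this.file.size);
  }
  // Read the next chunk straight from storage instead of slicing a Blob
  var base64 = await RNFS.read(this.file.uri, end - this.offset, this.offset, 'base64');
  var content = Uint8Array.from(atob(base64), function(c) { return c.charCodeAt(0); });
  // ...then PUT `content` with the same Content-Range header as before
};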

DELETE method that integrates with a Lambda Function (AWS)

I will start this by saying that I know this is probably the worst JavaScript implementation you will see, but I am required to use it for academic purposes.
I am required to make a static website and deploy an API Gateway for my application with GET, POST, and DELETE methods that integrate with a Lambda Function.
My GET and POST functions are working well; the problem is with the DELETE.
<script>
var url = 'The API endpoint';
var submitBtn = document.getElementById('submitBtn');
submitBtn.addEventListener('click', getDetails);
function getDetails(){
var mail = document.getElementById('mail').value;
var firstName = document.getElementById('firstName').value;
if(mail == '' || firstName == ''){
alert("Please submit valid data!");
return;
}
var params = '{"Item": {"email": "' + mail + '", "firstname": "' + firstName + '"}}';
httpDeleteAsync(url, params, processResponse);
}
function httpDeleteAsync(url, params, callback){
var xmlHttp = new XMLHttpRequest();
xmlHttp.onreadystatechange = function(){
if(xmlHttp.readyState == 4 && xmlHttp.status == 200){
callback(xmlHttp.responseText);
}
}
console.log(params);
console.log(JSON.parse(params));
xmlHttp.open("DELETE", url);
xmlHttp.setRequestHeader('Content-type', 'application/json');
xmlHttp.send(params);
}
function processResponse(response){
document.getElementById('response').innerHTML = response;
}
</script>
The console doesn't display any errors, but I get a null response on my page when I try to delete.
Thanks in advance for any help.
UPDATE #1
I am starting to think that the problem is with the Lambda function, though I'm not sure.
var AWS = require('aws-sdk');
var docClient = new AWS.DynamoDB.DocumentClient();
exports.handler = (event, context, callback) => {
// TODO implement
//console.log(event['body-json']);
var tableName = 'Customers';
var params = {
TableName: tableName,
Item : event['body-json'].Item
}
// docClient.put(params).promise().then(res => res).catch(err => err);
docClient.delete(params, function(err, data) {
if (err) {
console.error("Unable to delete item. Error JSON:", JSON.stringify(err, null, 2));
} else {
console.log("DeleteItem succeeded:", JSON.stringify(data, null, 2));
}
});
};
I was treating the DELETE as the opposite of POST, hence their processes being similar in code, but I was mistaken. The DELETE actually has more in common with GET. Thinking with that mindset, I was able to solve the problem. Below are the adjusted code snippets.
JAVASCRIPT:
<script>
var url = 'YOUR API'
var submitBtn = document.getElementById('submitBtn');
submitBtn.addEventListener('click', getDetails);
function getDetails(){
var mail = document.getElementById('mail').value;
if(mail == ''){
alert("Please submit a valid email!");
return;
}
var params = 'email=' + mail;
httpDelAsync(url, params, processResponse);
}
function httpDelAsync(url, params, callback){
var xmlHttp = new XMLHttpRequest();
xmlHttp.onreadystatechange = function(){
if(xmlHttp.readyState == 4 && xmlHttp.status == 200){
callback(JSON.parse(xmlHttp.responseText));
}
}
console.log(params);
xmlHttp.open("DELETE", url + "/?" + params);
xmlHttp.send(null);
}
function isEmpty(obj) {
for(var key in obj) {
if(obj.hasOwnProperty(key))
return false;
}
return true;
}
function processResponse(response){
//console.log(response);
if(!isEmpty(response)){
document.getElementById('firstNameLabel').innerHTML = response.Item.firstname;
document.getElementById('mailLabel').innerHTML = response.Item.email;
document.getElementById('error').innerHTML = "";
}
else{
document.getElementById('firstNameLabel').innerHTML = '';
document.getElementById('mailLabel').innerHTML = '';
document.getElementById('error').innerHTML = "DELETED";
}
}
</script>
Lambda Function:
const AWS = require('aws-sdk');
var docClient = new AWS.DynamoDB.DocumentClient();
var tableName = "Customers" // Put your Table Name Here
exports.handler = async (event) => {
console.log(event.email)
var params = {
TableName: tableName,
Key: {
email: event.email
}
};
return docClient.delete(params).promise().then(res => res).catch(err => err);
};
There is a slight problem with the response of the DELETE but it works fine so I left it as it is.
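One small hardening worth adding to the client snippet above (my note, not part of the original fix): URL-encode the email before placing it in the query string, since characters such as + are significant there:
var params = 'email=' + encodeURIComponent(mail);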

Controlling file upload location in Google Drive (Apps Script, not form)

I'm implementing Kanshi Tanaike's Resumable Upload For Web Apps code, and it works, but I don't fully understand the AJAX and am trying to add a feature. Right now the code places the new file in the user's Drive root folder. I would like to either define a specific folder and upload there directly, or automatically move the file from root to the correct folder (I also need to collect the download link). I see the upload function references location in the response header, but I'm struggling to figure out how to define it, and since the doUpload() function does not seem to treat the upload as a File object, I can't figure out how to reference it after the upload to acquire the URL or move it. Any feedback would be enormously appreciated.
$('#uploadfile').on("change", function() {
var file = this.files[0];
if (file.name != "") {
var fr = new FileReader();
fr.fileName = file.name;
fr.fileSize = file.size;
fr.fileType = file.type;
fr.onload = init;
fr.readAsArrayBuffer(file);
}
});
function init() {
$("#progress").text("Initializing.");
var fileName = this.fileName;
var fileSize = this.fileSize;
var fileType = this.fileType;
console.log({fileName: fileName, fileSize: fileSize, fileType: fileType});
var buf = this.result;
var chunkpot = getChunkpot(chunkSize, fileSize);
var uint8Array = new Uint8Array(buf);
var chunks = chunkpot.chunks.map(function(e) {
return {
data: uint8Array.slice(e.startByte, e.endByte + 1),
length: e.numByte,
range: "bytes " + e.startByte + "-" + e.endByte + "/" + chunkpot.total,
};
});
google.script.run.withSuccessHandler(function(at) {
var xhr = new XMLHttpRequest();
xhr.open("POST", "https://www.googleapis.com/upload/drive/v3/files?uploadType=resumable");
xhr.setRequestHeader('Authorization', "Bearer " + at);
xhr.setRequestHeader('Content-Type', "application/json");
xhr.send(JSON.stringify({
mimeType: fileType,
name: fileName,
}));
xhr.onload = function() {
doUpload({
location: xhr.getResponseHeader("location"),
chunks: chunks,
});
};
xhr.onerror = function() {
console.log(xhr.response);
};
}).getAt();
}
function doUpload(e) {
var chunks = e.chunks;
var location = e.location;
var cnt = 0;
var end = chunks.length;
var temp = function callback(cnt) {
var e = chunks[cnt];
var xhr = new XMLHttpRequest();
xhr.open("PUT", location, true);
xhr.setRequestHeader('Content-Range', e.range);
xhr.send(e.data);
xhr.onloadend = function() {
var status = xhr.status;
cnt += 1;
console.log("Uploading: " + status + " (" + cnt + " / " + end + ")");
$("#progress").text("Uploading: " + Math.floor(100 * cnt / end) + "%");
if (status == 308) {
callback(cnt);
} else if (status == 200) {
$("#progress").text("Done.");
} else {
$("#progress").text("Error: " + xhr.response);
}
};
}(cnt);
}
I believe your goal and your current situation are as follows:
You want to upload a file to a specific folder.
You want to retrieve the webContentLink of the uploaded file.
You want to achieve the above using Resumable Upload for Web Apps using Google Apps Script.
You have already confirmed that the default script at the repository works.
Modification points:
In this case, it is required to check the resumable upload flow and the method of "Files: create" in the Drive API.
In order to upload the file to the specific folder, please add the folder ID to the request body of the initial request.
In order to return the value of webContentLink, please add the fields value to the initial request.
When the above points are reflected in the original script, it becomes as follows.
Modified script:
In this case, HTML is modified.
<!DOCTYPE html>
<html>
<head>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.js"></script>
<title>Resumable upload for Web Apps</title>
</head>
<body>
<form>
<input name="file" id="uploadfile" type="file">
</form>
<div id="progress"></div>
<script>
const chunkSize = 5242880;
$('#uploadfile').on("change", function() {
var file = this.files[0];
if (file.name != "") {
var fr = new FileReader();
fr.fileName = file.name;
fr.fileSize = file.size;
fr.fileType = file.type;
fr.onload = init;
fr.readAsArrayBuffer(file);
}
});
function init() {
var folderId = "###"; // Added: Please set the folder ID.
$("#progress").text("Initializing.");
var fileName = this.fileName;
var fileSize = this.fileSize;
var fileType = this.fileType;
console.log({fileName: fileName, fileSize: fileSize, fileType: fileType});
var buf = this.result;
var chunkpot = getChunkpot(chunkSize, fileSize);
var uint8Array = new Uint8Array(buf);
var chunks = chunkpot.chunks.map(function(e) {
return {
data: uint8Array.slice(e.startByte, e.endByte + 1),
length: e.numByte,
range: "bytes " + e.startByte + "-" + e.endByte + "/" + chunkpot.total,
};
});
google.script.run.withSuccessHandler(function(at) {
var xhr = new XMLHttpRequest();
xhr.open("POST", "https://www.googleapis.com/upload/drive/v3/files?uploadType=resumable&fields=*");
xhr.setRequestHeader('Authorization', "Bearer " + at);
xhr.setRequestHeader('Content-Type', "application/json");
xhr.send(JSON.stringify({
mimeType: fileType,
name: fileName,
parents: [folderId] // Added
}));
xhr.onload = function() {
doUpload({
location: xhr.getResponseHeader("location"),
chunks: chunks,
});
};
xhr.onerror = function() {
console.log(xhr.response);
};
}).getAt();
}
function doUpload(e) {
var chunks = e.chunks;
var location = e.location;
var cnt = 0;
var end = chunks.length;
var temp = function callback(cnt) {
var e = chunks[cnt];
var xhr = new XMLHttpRequest();
xhr.open("PUT", location, true);
xhr.setRequestHeader('Content-Range', e.range);
xhr.send(e.data);
xhr.onloadend = function() {
var status = xhr.status;
cnt += 1;
console.log("Uploading: " + status + " (" + cnt + " / " + end + ")");
$("#progress").text("Uploading: " + Math.floor(100 * cnt / end) + "%");
if (status == 308) {
callback(cnt);
} else if (status == 200) {
var metadata = JSON.parse(xhr.response); // Added
$("#progress").text("Done. Link: " + metadata.webContentLink); // Modified
} else {
$("#progress").text("Error: " + xhr.response);
}
};
}(cnt);
}
function getChunkpot(chunkSize, fileSize) {
var chunkPot = {};
chunkPot.total = fileSize;
chunkPot.chunks = [];
if (fileSize > chunkSize) {
var numE = chunkSize;
var endS = function(f, n) {
var c = f % n;
if (c == 0) {
return 0;
} else {
return c;
}
}(fileSize, numE);
var repeat = Math.floor(fileSize / numE);
for (var i = 0; i <= repeat; i++) {
var startAddress = i * numE;
var c = {};
c.startByte = startAddress;
if (i < repeat) {
c.endByte = startAddress + numE - 1;
c.numByte = numE;
chunkPot.chunks.push(c);
} else if (i == repeat && endS > 0) {
c.endByte = startAddress + endS - 1;
c.numByte = endS;
chunkPot.chunks.push(c);
}
}
} else {
var chunk = {
startByte: 0,
endByte: fileSize - 1,
numByte: fileSize,
};
chunkPot.chunks.push(chunk);
}
return chunkPot;
}
</script>
</body>
</html>
When the modified script above is run, the uploaded file is created in the specified folder and its webContentLink is displayed as the result.
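As a side note, fields=* asks the API to return every file property in the final response; if you only need the link, a narrower request such as the following also works:
xhr.open("POST", "https://www.googleapis.com/upload/drive/v3/files?uploadType=resumable&fields=id,webContentLink");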
References:
Perform a resumable upload
Files: create
Resumable Upload for Web Apps using Google Apps Script

Direct upload string to s3 from bootstrap modal not working

I observed some very strange behavior. If I set a button on the page to upload a string to S3, it works fine. But if I set a button to bring up a Bootstrap modal, and then from that modal set a button to upload the string, it doesn't work.
The frontend is the same in both cases, shown below. The difference is whether 'saveToAWS' is triggered by clicking directly on the page or from the modal as a two-step process; the latter returns an xhr.status of 0.
function saveToAWS() {
var file = new File([jsonStr], "file.json", { type: "application/json" });
var xhr = new XMLHttpRequest();
var fn=file.name;
var ft = file.type;
xhr.open("GET", "/sign_s3_save?file-name=" + fn + "&file-type=" + ft);
xhr.onreadystatechange = function () {
if (xhr.readyState === 4) {
if (xhr.status === 200) {
var response = JSON.parse(xhr.responseText);
console.log('signature returned')
save_file(file, response.signed_request, response.url);
}
else {
alert("Could not get signed URL.");
}
}
};
xhr.send();
}
function save_file(file, signed_request, url) {
var xhr = new XMLHttpRequest();
xhr.open('PUT', signed_request);
xhr.onreadystatechange = () => {
if (xhr.readyState === 4) {
if (xhr.status === 200) {
console.log('200');
}
else {
alert('cannot upload file');
}
}
};
xhr.send(file);
}
Backend as Node.js, in order to get a signed URL:
app.get('/sign_s3_save', isLoggedIn, function (req, res) {
var fileName = req.query['file-name'];
var fileType = req.query['file-type'];
aws.config.update({ accessKeyId: AWS_ACCESS_KEY, secretAccessKey: AWS_SECRET_KEY });
var s3 = new aws.S3();
var ac = req.user._id;
var timenow = Date.now().toString();
var fileRename = ac + '/json/' + timenow + '.json';
var s3_params = {
Bucket: S3_BUCKET,
Key: fileRename,
Expires: 60,
ContentType: fileType,
ACL: 'public-read'
};
s3.getSignedUrl('putObject', s3_params, function (err, data) {
if (err) {
console.log(err);
}
else {
var return_data = {
signed_request: data,
url: 'https://' + S3_BUCKET + '.s3.amazonaws.com/' + fileRename
};
res.write(JSON.stringify(return_data));
res.end();
}
});
});
Any suggestion, please?

How to set the argument "url" in xmlhttp.open() when I want to use Ajax to send/receive request with node.js

server.js can produce a random number. I want to get a random number from the server, using xmlhttp to send the request. But the value of string never changes when I load http://localhost:3000/index.html. What is happening?
index.js is shown as follows:
$(document).ready(function() {
getRandomNum();
});
function getRandomNum() {
$(".button").each(function() {
var that = $(this);
that.click(function() {
var span = $("<span></span>").addClass("unread");
that.append(span);
span.text("...");
server(span);
});
});
}
function server(span) {
var string;
var xmlhttp = new XMLHttpRequest();
if (xmlhttp.readyState == 4 && xmlhttp.status == 200) {
string = xmlhttp.responseText;
}
xmlhttp.open("GET","http://localhost:3000/", true);
xmlhttp.send();
span.text(string);
}
server.js is shown as follows:
var http = require('http');
var url = require('url');
var path = require('path');
var fs = require('fs');
var port = 3000;
http.createServer(function(req,res) {
var pathname = url.parse(req.url).pathname;
var mimeType = getMimeType(pathname);
if (!!mimeType) {
handlePage(req, res, pathname);
} else {
handleAjax(req, res);
}
}).listen(port, function(){
console.log('server listen on ', port);
});
function getMimeType(pathname) {
var validExtensions = {
".html" : "text/html",
".js": "application/javascript",
".css": "text/css",
".jpg": "image/jpeg",
".gif": "image/gif",
".png": "image/png"
};
var ext = path.extname(pathname);
var type = validExtensions[ext];
return type;
}
function handlePage(req, res, pathname) {
var filePath = __dirname + pathname;
var mimeType = getMimeType(pathname);
if (fs.existsSync(filePath)) {
fs.readFile(filePath, function(err, data){
if (err) {
res.writeHead(500);
res.end();
} else {
res.setHeader("Content-Length", data.length);
res.setHeader("Content-Type", mimeType);
res.statusCode = 200;
res.end(data);
}
});
} else {
res.writeHead(500);
res.end();
}
}
function handleAjax(req, res) {
var random_time = 1000 + getRandomNumber(2000);
var random_num = 1 + getRandomNumber(9);
setTimeout(function(){
res.writeHead(200, {'Content-Type': 'text/plain'});
res.end("" + random_num);
}, random_time);
}
function getRandomNumber(limit) {
return Math.round(Math.random() * limit);
}
In your server(span) method, you start the request and then immediately assign the still-undefined string to the span via span.text(string) right after xmlhttp.send(). Because the request is asynchronous, the response has not arrived yet at that point.
Also, your readyState check is not wrapped in an onreadystatechange handler, so string = xmlhttp.responseText never runs when the response actually arrives, and the span is never updated with it.
I have modified your code below. I created a test app and verified it; it works as you expected.
function server(span) {
var string;
var xmlhttp = new XMLHttpRequest();
xmlhttp.onreadystatechange= function(){
if (xmlhttp.readyState == 4 && xmlhttp.status == 200) {
string = xmlhttp.responseText;
span.text(string);
}
};
xmlhttp.open("GET","http://localhost:3000/", true);
xmlhttp.send();
}
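For comparison, here is a sketch of the same request using the fetch API, which makes the asynchronous flow explicit (my addition, not part of the original answer):
function server(span) {
  fetch("http://localhost:3000/")
    .then(function(res) { return res.text(); })
    .then(function(text) { span.text(text); })
    .catch(function(err) { console.error(err); });
}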
