Uploading Large Files with Ajax call - javascript

I am trying to use the Microsoft Graph API to upload large files using an Ajax call.
According to the documentation, you first have to create an upload session which I can do successfully with my code. The problem comes when I start my upload to the returned uploadUrl. I get the following error:
{
code: "invalidRequest",
message: "The Content-Range header length does not match the provided number of bytes."
}
So when I check the actual request in Fiddler, I can see that the Content-Length header is set to 0.
So I tried setting my Content-Length header to the size of the ArrayBuffer that I'm sending, but I get an error (Chrome) that says:
Refused to set unsafe header "Content-Length"
I've been struggling with this for 2 full days now and I'm at my wit's end. There is very little documentation on the Microsoft Graph API, and even fewer examples that seem to fit what I'm trying to do.
I can't imagine I'm the only one out there that's attempting to do this, I would think it would be a fairly common idea?
Below is the code I'm using. I'm getting my AccessToken and URL elsewhere, but they seem to be fine as I can query using them from the console.
// QUESTION CODE (broken): creates a Graph upload session, then streams the
// file to the returned uploadUrl via chunkedUpload(). The working version is
// in the answer below; reviewer notes here mark the known problems.
this.UploadLargeFileToFolderID = function (FolderID,
FileObject,
ShowLoadingMessage,
SuccessFunction,
ErrorFunction,
CompleteFunction) { //will upload a file up to 60Mb to a folder.
//shows the loading message
ShowLoadingMessage && ThisRepository.LoadingMessage.Show();
//cleans the file name to something acceptable to SharePoint
FileObject.name = CleanOneDriveFileName(FileObject.name);
// NOTE(review): this session URL omits the target file name. The working
// version below uses '/drive/items/{id}:/{name}:/createUploadSession' —
// Graph needs the new file name embedded in the request URL.
var UploadSessionURL = FolderID ?
ThisRepository.RepositoryRootURL + '/drive/items/' + FolderID + '/createUploadSession' :
ThisRepository.RepositoryRootURL + '/drive/root/createUploadSession';
//First, get the Upload Session.
$.ajax({
url: UploadSessionURL,
method: 'POST',
headers: {
authorization: "Bearer " + ThisRepository.AccessToken
},
success: function (data, textStatus, jqXHR) {
//successfully got the upload session.
console.log('Session:');
console.log(data);
//Create the ArrayBuffer and upload the file.
ReturnArrayBufferFromFile(FileObject, function (ArrayBuffer) {
console.log('Array Buffer:');
console.log(ArrayBuffer);
// 320 KiB — Graph requires chunk sizes in multiples of 327,680 bytes.
var MaxChunkSize = 327680;
var ChunkSize = ArrayBuffer.byteLength < MaxChunkSize ?
ArrayBuffer.byteLength :
MaxChunkSize;
// The nulls pad the chunkEnd/chunks/chunksDone/fileChunk parameters so the
// completion callback lands in the last (CompleteCallBack) slot.
chunkedUpload(data.uploadUrl, ArrayBuffer, ChunkSize, 0, null,
null, null, null,
function (response) {
console.log(response);
!SuccessFunction && console.log(response);
typeof SuccessFunction === 'function' && SuccessFunction(response);
});
});
},
error: function (jqXHR, textStatus, errorThrown) {
console.log(jqXHR);
typeof ErrorFunction === 'function' && ErrorFunction(jqXHR);
},
complete: function (jqXHR, textStatus) {
ThisRepository.LoadingMessage.Remove();
typeof CompleteFunction === 'function' && CompleteFunction(jqXHR);
},
});
};
Here is the function for returning the ArrayBuffer to send:
// QUESTION CODE (broken for images): reads InputFile and hands an ArrayBuffer
// to CallBackFunction; images are first downscaled to at most 2048x2048.
function ReturnArrayBufferFromFile(InputFile, CallBackFunction) {
console.log('Input File');
console.log(InputFile);
// NOTE(review): FileName is never used below.
var FileName = CleanOneDriveFileName(InputFile.name);
var FileUploadReader = new FileReader();
if (InputFile.type.match('image.*')) {
// if the file is an image, we want to make sure
// it's not too big before we return it.
FileUploadReader.onloadend = function (e) {
var img = new Image();
//will resize an image to a maximum of 2 megapixels.
img.onload = function () {
var MAX_HEIGHT = 2048; //max final height, in pixels
var MAX_WIDTH = 2048; //max final width, in pixels
var height = img.height;
var width = img.width;
//do the resizing, preserving aspect ratio
if (width > height) { //landscape image
if (width > MAX_WIDTH) {
height *= MAX_WIDTH / width;
width = MAX_WIDTH;
};
} else { //portrait image
if (height > MAX_HEIGHT) {
width *= MAX_HEIGHT / height;
height = MAX_HEIGHT;
};
};
//Create a new canvas element, correctly sized with the image
var canvas = document.createElement("canvas");
canvas.width = width;
canvas.height = height;
canvas.getContext('2d').drawImage(this, 0, 0, width, height);
//Create the new file reader for the upload function.
// NOTE(review): canvas.toBlob() returns undefined — ConvertedFile is always
// undefined; the re-encoded blob arrives via the callback instead.
var ConvertedFile = canvas.toBlob(function (blob) {
var ConvertedFileReader = new FileReader();
ConvertedFileReader.onloadend = function (loadendevent) {
//return loadendevent.target.result;
var result = loadendevent.target.result;
// BUG(review): result is an ArrayBuffer here (readAsArrayBuffer below), so
// .split() does not exist and this line would throw a TypeError.
var Rawresult = result.split(',')[1];
CallBackFunction(loadendevent.target.result);
};
ConvertedFileReader.readAsArrayBuffer(blob);
}, 'image/jpeg', 0.90);
};
// BUG(review): the outer reader used readAsArrayBuffer, so e.target.result is
// an ArrayBuffer — assigning it to img.src cannot load the image, so onload
// never fires. The fixed version below uses readAsDataURL for this branch.
img.src = e.target.result;
};
FileUploadReader.readAsArrayBuffer(InputFile);
} else {
//File is not an image. No pre-work is required. Just upload it.
FileUploadReader.onloadend = function (e) {
CallBackFunction(e.target.result);
};
FileUploadReader.readAsArrayBuffer(InputFile);
};
};
And finally, the chunkedUpload function:
// QUESTION CODE (broken): recursively PUTs the file to the Graph uploadUrl in
// Content-Range chunks. See the fixed version in the answer below.
function chunkedUpload(url, file, chunkSize, chunkStart,
chunkEnd, chunks, chunksDone, fileChunk, CompleteCallBack) {
var filesize = file.byteLength;
chunkSize = chunkSize ? chunkSize : 327680;
chunkStart = chunkStart ? chunkStart : 0;
chunkEnd = chunkEnd ? chunkEnd : chunkSize;
// NOTE(review): not rounded — fractional when filesize is not a multiple of
// chunkSize (the fix uses Math.ceil).
chunks = chunks ? chunks : filesize / chunkSize;
chunksDone = chunksDone ? chunksDone : 0;
fileChunk = fileChunk ? fileChunk : file.slice(chunkStart, chunkEnd);
var req = new XMLHttpRequest();
req.open("PUT", url, true);
// Content-Length is a forbidden header — browsers refuse to let scripts set it.
//req.setRequestHeader("Content-Length", file.size.toString());
req.setRequestHeader("Content-Range", "bytes " + chunkStart + "-" +
(chunkEnd - 1) + "/" + filesize);
req.onload = (e) => {
let response = JSON.parse(req.response);
console.log(response);
if (response.nextExpectedRanges) {
let range = response.nextExpectedRanges[0].split('-'),
chunkStart = Number(range[0]),
nextChunk = chunkStart + chunkSize,
chunkEnd = nextChunk > file.byteLength ? file.byteLength : nextChunk;
console.log(((chunksDone++/ chunks) * 100), '%' );
// BUG(review): only 8 arguments are passed, so CompleteCallBack lands in the
// fileChunk parameter and the real completion callback is lost on recursion.
chunkedUpload(url, file, chunkSize, chunkStart,
chunkEnd, chunks, chunksDone, CompleteCallBack);
}
else {
console.log("upload session complete");
typeof CompleteCallBack === 'function' &&
CompleteCallBack(response);
}
// BUG(review): sends the ENTIRE buffer every time instead of fileChunk — this
// is the Content-Range mismatch from the question. A bare ArrayBuffer also
// posts with Content-Length: 0; the fix sends a Uint8Array slice.
}; req.send(file);
}

I was able to figure out an answer to the problem, so I'll post the final code here as well for anyone else having an issue with this since there's very little examples I could find online to do this:
First of all, sending a bare ArrayBuffer in any browser (IE, Mozilla or Chrome) did not set the Content-Length to anything but 0, at least it wouldn't for me.
If I converted the ArrayBuffer to a new uInt8Array, however, the browsers did pick up the Content-Length and set it correctly.
Another issue I found was in the Microsoft Graph documentation: I was not aware that you had to put the new file name in the upload-session request URL — the documentation does not make that clear. See my code below; it is formatted correctly and works well.
Finally, my chunkedUpload function needed quite a few changes, most notably adjusting the xhr.send(file) to xhr.send(fileChunk) <--that was a big one I missed originally. I also included a Progress callback for the file upload that works very well with my bootstrap ProgressBar.
on to the working code:
/**
 * Uploads a file (up to ~60 MB) to OneDrive/SharePoint via the Microsoft Graph
 * resumable-upload flow: create an upload session (with the new file name
 * embedded in the URL), then PUT the content in 320 KiB-multiple chunks to the
 * session's uploadUrl.
 *
 * @param {string}   FolderID           target folder item id; falsy uploads to the drive root.
 * @param {File}     FileObject         file to upload (name is cleaned for SharePoint first).
 * @param {boolean}  ShowLoadingMessage whether to show the repository loading UI.
 * @param {function} SuccessFunction    called with the completion response on success.
 * @param {function} ErrorFunction      called with the failing response/jqXHR on error.
 * @param {function} CompleteFunction   always called last with the response/jqXHR.
 * @param {function} ProgressFunction   called with { PercentComplete } while uploading.
 */
this.UploadLargeFileToFolderID = function (FolderID, FileObject, ShowLoadingMessage, SuccessFunction, ErrorFunction, CompleteFunction, ProgressFunction) {
    ShowLoadingMessage && ThisRepository.LoadingMessage.Show(); //shows the loading message
    FileObject.name = CleanOneDriveFileName(FileObject.name); //cleans the file name to something acceptable to SharePoint
    var NewFileName = CleanOneDriveFileName(FileObject.name);
    // The new file name MUST be part of the createUploadSession URL
    // ('.../{parent}:/{filename}:/createUploadSession').
    var UploadSessionURL = FolderID ?
        ThisRepository.RepositoryRootURL + '/drive/items/' + FolderID + ':/' + NewFileName + ':/createUploadSession' :
        ThisRepository.RepositoryRootURL + '/drive/root:/' + NewFileName + ':/createUploadSession';
    // Used if we have a naming conflict and must rename the object.
    var PathToParent = FolderID ?
        ThisRepository.RepositoryRootURL + '/drive/items/' + FolderID + ':/' + NewFileName + ':/' :
        ThisRepository.RepositoryRootURL + '/drive/root:/' + NewFileName + ':/';
    var UploadSessionOptions = {
        item: {
            //"#microsoft.graph.conflictBehavior": "rename",
            "#microsoft.graph.conflictBehavior": "replace",
        }
    };
    //First, get the Upload Session.
    $.ajax({
        url: UploadSessionURL,
        method: 'POST',
        headers: { authorization: "Bearer " + ThisRepository.AccessToken, 'Content-Type': 'application/json', 'accept': 'application/json' },
        data: JSON.stringify(UploadSessionOptions),
        success: function (SessionData, textStatus, jqXHR) { //successfully got the upload session.
            //Create the ArrayBuffer and upload the file.
            ReturnArrayBufferFromFile(FileObject, function (ArrayBuffer) {
                // A bare ArrayBuffer is sent with Content-Length: 0; wrapping it in a
                // Uint8Array lets the browser compute the Content-Length correctly.
                var uInt8Array = new Uint8Array(ArrayBuffer);
                var FileSize = uInt8Array.length;
                var MaxChunkSize = 3276800; //approx 3.2Mb. Graph requires chunks in a multiple of 320KiB (327,680 bytes). 5Mb-10Mb is recommended for good connections.
                var ChunkSize = FileSize < MaxChunkSize ? FileSize : MaxChunkSize;
                var DataUploadURL = SessionData.uploadUrl;
                chunkedUpload(DataUploadURL, uInt8Array, ChunkSize, 0, null, null, null,
                    function (progress) { //progress handler
                        ProgressFunction(progress);
                    },
                    function (response) { //completion handler
                        if (response.StatusCode == 201 || response.StatusCode == 200) { //success. 201 is 'Created' and 200 is 'OK'
                            typeof SuccessFunction === 'function' && SuccessFunction(response);
                            ThisRepository.LoadingMessage.Remove();
                            typeof CompleteFunction === 'function' && CompleteFunction(response);
                        } else if (response.StatusCode == 409) { //naming conflict?
                            //if we had a renaming conflict error, per Graph Documentation we can make a simple PUT request to rename the file
                            //HAVE NOT SUCCESSFULLY TESTED THIS...
                            var NewDriveItemResolve = {
                                "name": NewFileName,
                                "#microsoft.graph.conflictBehavior": "rename",
                                "#microsoft.graph.sourceUrl": DataUploadURL
                            };
                            $.ajax({
                                url: PathToParent,
                                method: "PUT",
                                headers: { authorization: "Bearer " + ThisRepository.AccessToken, 'Content-Type': 'application/json', accept: 'application/json' },
                                data: JSON.stringify(NewDriveItemResolve),
                                success: function (RenameSuccess) {
                                    console.log(RenameSuccess);
                                    typeof SuccessFunction === 'function' && SuccessFunction(response);
                                    ThisRepository.LoadingMessage.Remove();
                                    typeof CompleteFunction === 'function' && CompleteFunction(response);
                                },
                                error: function (RenameError) {
                                    console.log(RenameError);
                                    var CompleteObject = { StatusCode: RenameError.status, ResponseObject: RenameError, Code: RenameError.error ? RenameError.error.code : 'Unknown Error Code', Message: RenameError.error ? RenameError.error.message : 'Unknown Error Message' };
                                    var Status = CompleteObject.StatusCode;
                                    var StatusText = CompleteObject.Code;
                                    var ErrorMessage = CompleteObject.Message;
                                    // FIX: the original referenced `data.uploadUrl` here — `data` is
                                    // undefined in this scope (the session param is SessionData), which
                                    // threw a ReferenceError; it also redeclared ErrorMessage, shadowing
                                    // the message text. Use DataUploadURL and a distinct Alert variable.
                                    var ErrorAlert = new Alert({ Location: ThisRepository.LoadingMessage.Location, Type: 'danger', Text: "Status: " + Status + ': ' + StatusText + "<br />Error: " + ErrorMessage + '<br />Rest Endpoint: ' + DataUploadURL });
                                    ErrorAlert.ShowWithoutTimeout();
                                    typeof ErrorFunction === 'function' && ErrorFunction(response);
                                    ThisRepository.LoadingMessage.Remove();
                                    typeof CompleteFunction === 'function' && CompleteFunction(response);
                                },
                                complete: function (RenameComplete) { /* Complete Function */ }
                            });
                        } else { //we had an error of some kind.
                            var Status = response.StatusCode;
                            var StatusText = response.Code;
                            var ErrorMessage = response.Message;
                            // FIX: same `data.uploadUrl` ReferenceError and ErrorMessage shadowing
                            // as above — report DataUploadURL instead.
                            var ErrorAlert = new Alert({ Location: ThisRepository.LoadingMessage.Location, Type: 'danger', Text: "Status: " + Status + ': ' + StatusText + "<br />Error: " + ErrorMessage + '<br />Rest Endpoint: ' + DataUploadURL });
                            ErrorAlert.ShowWithoutTimeout();
                            //CANCEL THE UPLOAD SESSION.
                            // FIX: per Graph docs a session is cancelled with DELETE {uploadUrl},
                            // not with a DELETE on the createUploadSession endpoint.
                            $.ajax({
                                url: DataUploadURL,
                                method: "DELETE",
                                headers: { authorization: "Bearer " + ThisRepository.AccessToken },
                                success: function (SessionCancelled) { console.log('Upload Session Cancelled'); },
                                error: function (SessionCancelError) { /* Error Goes Here*/ },
                            });
                            typeof ErrorFunction === 'function' && ErrorFunction(response);
                            ThisRepository.LoadingMessage.Remove();
                            typeof CompleteFunction === 'function' && CompleteFunction(response);
                        }
                    });
            });
        },
        error: function (jqXHR, textStatus, errorThrown) {
            console.log('Error Creating Session:');
            console.log(jqXHR);
            //WE MAY HAVE A CANCELLED UPLOAD...TRY TO DELETE THE OLD UPLOAD SESSION, TELL THE USER TO TRY AGAIN.
            //COULD OPTIONALLY RUN A "RENAME" ATTEMPT HERE AS WELL
            $.ajax({
                url: PathToParent,
                method: "DELETE",
                headers: { authorization: "Bearer " + ThisRepository.AccessToken },
                success: function (SessionCancelled) { console.log('Upload Session Cancelled'); },
                error: function (SessionCancelError) { console.log(SessionCancelError); },
            });
            typeof ErrorFunction === 'function' && ErrorFunction(jqXHR);
        },
        complete: function (jqXHR, textStatus) { /* COMPLETE CODE GOES HERE */ },
    });
};
/**
 * Reads InputFile and passes its contents to CallBackFunction as an ArrayBuffer.
 * Images are first downscaled to at most 2048x2048 (~2 megapixels, aspect ratio
 * preserved) and re-encoded as JPEG at quality 0.90; all other file types are
 * returned unchanged.
 *
 * @param {File} InputFile                      file selected by the user.
 * @param {function(ArrayBuffer)} CallBackFunction receives the (possibly re-encoded) bytes.
 */
function ReturnArrayBufferFromFile(InputFile, CallBackFunction) {
    var FileUploadReader = new FileReader();
    //Check the file type. If it's an image, make sure the user isn't uploading a very high quality image (2 megapixel max for our purposes).
    if (InputFile.type.match('image.*')) { // it's an image, so we will resize it before returning the array buffer...
        FileUploadReader.onloadend = function (e) {
            var img = new Image();
            img.onload = function () { //will resize an image to a maximum of 2 megapixels.
                var MAX_HEIGHT = 2048; //max final height, in pixels
                var MAX_WIDTH = 2048; //max final width, in pixels
                var height = img.height;
                var width = img.width;
                //do the resizing, preserving the aspect ratio
                if (width > height) { //landscape image
                    if (width > MAX_WIDTH) {
                        height *= MAX_WIDTH / width;
                        width = MAX_WIDTH;
                    }
                } else { //portrait image
                    if (height > MAX_HEIGHT) {
                        width *= MAX_HEIGHT / height;
                        height = MAX_HEIGHT;
                    }
                }
                //Create a new canvas element, correctly sized with the image
                var canvas = document.createElement("canvas");
                canvas.width = width;
                canvas.height = height;
                canvas.getContext('2d').drawImage(this, 0, 0, width, height);
                //Re-encode the canvas as JPEG and read it back as an ArrayBuffer.
                //(toBlob returns undefined and delivers the blob via its callback, so
                // the original's unused `var ConvertedFile = ...` assignment was dropped.)
                canvas.toBlob(function (blob) {
                    var ConvertedFileReader = new FileReader();
                    ConvertedFileReader.onloadend = function (loadendevent) { //return the ArrayBuffer
                        CallBackFunction(loadendevent.target.result);
                    };
                    ConvertedFileReader.readAsArrayBuffer(blob);
                }, 'image/jpeg', 0.90);
            };
            // readAsDataURL below yields a data: URL, which is what Image can load.
            img.src = e.target.result;
        };
        FileUploadReader.readAsDataURL(InputFile);
    }
    else {
        FileUploadReader.onloadend = function (e) { //File is not an image. No pre-work is required. Just return it as an ArrayBuffer.
            CallBackFunction(e.target.result);
        };
        FileUploadReader.readAsArrayBuffer(InputFile);
    }
}
/**
 * Recursively PUTs `file` (a Uint8Array) to the Graph upload-session `url` in
 * Content-Range chunks. On HTTP 202 the server's nextExpectedRanges tells us
 * where the next chunk starts; 200/201 means the upload is complete.
 *
 * @param {string}     url              the session uploadUrl returned by createUploadSession.
 * @param {Uint8Array} file             full file contents.
 * @param {number}     chunkSize        bytes per chunk (must be a multiple of 320 KiB).
 * @param {number}     chunkStart       offset of the first byte of this chunk.
 * @param {number}     chunkEnd         offset one past the last byte of this chunk.
 * @param {number}     chunks           total chunk count.
 * @param {number}     chunksDone       chunks already accepted by the server.
 * @param {function}   ProgressCallBack called with { PercentComplete } as bytes go out.
 * @param {function}   CompleteCallBack called once with { StatusCode, ResponseObject, Code, Message }.
 */
function chunkedUpload(url, file, chunkSize, chunkStart, chunkEnd, chunks,
    chunksDone, ProgressCallBack, CompleteCallBack) {
    var filesize = file.length;
    chunkSize = chunkSize ? chunkSize : 3276800; //note: Microsoft Graph requires all chunks to be a multiple of 320Kib (327,680 bytes).
    chunkStart = chunkStart ? chunkStart : 0;
    chunkEnd = chunkEnd ? chunkEnd : chunkSize;
    chunks = chunks ? chunks : Math.ceil(filesize / chunkSize);
    chunksDone = chunksDone ? chunksDone : 0;
    console.log('NOW CHUNKS DONE = ' + chunksDone);
    // FIX: declared with `var` — the original assignment leaked an implicit global.
    var fileChunk = file.slice(chunkStart, chunkEnd);
    var TotalLoaded = chunksDone * chunkSize;
    var req = new XMLHttpRequest();
    req.upload.addEventListener('progress', function (progressobject) {
        var ThisProgress = progressobject.loaded ? progressobject.loaded : 0;
        var OverallPctComplete = parseFloat((TotalLoaded + ThisProgress) / filesize);
        ProgressCallBack({ PercentComplete: OverallPctComplete });
    }, false);
    req.open("PUT", url, true);
    req.setRequestHeader("Content-Range", "bytes " + chunkStart + "-" + (chunkEnd - 1) + "/" + filesize);
    req.onload = function (e) {
        var response = JSON.parse(req.response);
        var Status = req.status;
        var CallBackObject = {
            StatusCode: Status,
            ResponseObject: req,
        };
        if (Status == 202) { //202 Accepted: the server is ready for another chunk.
            var range = response.nextExpectedRanges[0].split('-');
            var NextChunkStart = Number(range[0]);
            var NextChunkEnd = NextChunkStart + chunkSize > filesize ? filesize : NextChunkStart + chunkSize;
            chunksDone++;
            TotalLoaded = chunksDone * chunkSize;
            CallBackObject.Code = "Accepted";
            CallBackObject.Message = "Upload Another Chunk";
            // FIX: pass chunksDone as-is — the original passed `chunksDone++` here,
            // a redundant second post-increment whose local effect was discarded.
            chunkedUpload(url, file, chunkSize, NextChunkStart, NextChunkEnd, chunks, chunksDone, ProgressCallBack, CompleteCallBack);
        } else { //we are done, one way or another.
            if (Status == 201 || Status == 200) { //successfully created or uploaded
                CallBackObject.Code = 'Success';
                CallBackObject.Message = 'File was Uploaded Successfully.';
            } else { //we had an error.
                CallBackObject.Code = response.error ? response.error.code : 'Unknown Error Code';
                CallBackObject.Message = response.error ? response.error.message : 'Unknown Error Message';
            }
            CompleteCallBack(CallBackObject);
        }
    };
    // Send only this chunk (the question's version mistakenly sent the whole file).
    req.send(fileChunk);
}

Related

React Native Fetch video file by chunk / YouTube Data API chunk(multipart) upload

I made a YouTube API upload app. It works great with small video file sizes but with larger sizes my app crashes. The exception happens when I try to get the video file with Fetch().
Question: Is there a way I can fetch a large file in React Native and feed it into the YouTube API in smaller chunks?
Here is my fetch code:
const fetchResponse = await fetch(videoUri);
const blob = await fetchResponse.blob();
var file = new File([blob], "video.mp4", {type: "video/mp4"});
My upload YouTube code is taken from the following git repos - supposedly supports multipart upload as well:
https://github.com/youtube/api-samples/blob/master/javascript/cors_upload.js and
https://github.com/youtube/api-samples/blob/master/javascript/upload_video.js
Here is my full upload code:
// Picks a video with DocumentPicker and uploads it to YouTube through
// MediaUploader (resumable upload), wiring progress/error/complete handlers
// into component state. Runs inside a React Native component (`this.state`).
uploadVideo = async function() {
var match = this.state.match.value;
// NOTE(review): `video` is unused now that the Fetch() path below is commented
// out — DocumentPicker supplies the file instead.
var video = match.mergedVideo;
var players = match.players;
var scoreboard = this.state.match.value.scoreboard;
var points = match.points;
// Title like "Player A vs. Player B <scoreboard>".
var title = players[0].name + " vs. " + players[1].name + " " + scoreboard;
var description = this.descriptionBuilder(points, match.videos);
// Original Fetch() approach, kept for reference; it crashed on large files.
/*const fetchResponse = await fetch(video);
const blob = await fetchResponse.blob();
var file = new File([blob], "video.mp4", {type: "video/mp4"});
console.log(file);*/
const file = await DocumentPicker.pick({
type: [DocumentPicker.types.video],
});
var metadata = {
snippet: {
title: title,
description: description,
tags: ['youtube-cors-upload'],
categoryId: 22
},
status: {
privacyStatus: 'unlisted'
}
};
var uploader = new MediaUploader({
baseUrl: 'https://www.googleapis.com/upload/youtube/v3/videos',
file: file,
token: this.state.user.auth.accessToken,
metadata: metadata,
chunkSize: 1024 * 1024, // 1 MiB per chunk
params: {
part: Object.keys(metadata).join(',')
},
onError: function(data) {
console.log(data);
var message = data;
// The API may return a JSON error body; fall back to the raw payload if not.
try {
var errorResponse = JSON.parse(data);
message = errorResponse.error.message;
} finally {
alert(message);
}
}.bind(this),
onProgress: function(data) {
var currentTime = Date.now();
var bytesUploaded = data.loaded;
var totalBytes = data.total;
// Throughput since upload start, used to estimate the time remaining.
var bytesPerSecond = bytesUploaded / ((currentTime - window.uploadStartTime) / 1000);
var estimatedSecondsRemaining = (totalBytes - bytesUploaded) / bytesPerSecond;
var percentageComplete = (bytesUploaded * 100) / totalBytes;
this.setState({ youtubeUploadProgress: percentageComplete / 100});
console.log("Uploaded: " + bytesUploaded + " | Total: " + totalBytes + " | Percentage: " + percentageComplete + " | Esitmated seconds remaining: " + estimatedSecondsRemaining);
}.bind(this),
onComplete: function(data) {
console.log("Complete");
alert("Upload successful!");
this.setState({ youtubeUploadProgress: 0});
}.bind(this)
});
window.uploadStartTime = Date.now();
uploader.upload();
}
and this is my cors_upload.js in React Native class module:
import React, { Component } from 'react';
/**
 * Resumable media uploader for Google upload endpoints (YouTube Data API /
 * Google Drive), adapted from Google's cors_upload.js sample for React Native.
 *
 * Flow: upload() POSTs the JSON metadata to start a resumable session; the
 * server answers with the session URL in the Location header; sendFile_() then
 * PUTs the content (optionally in chunkSize slices, resuming on HTTP 308).
 * Transient (>=500/network) errors are retried with exponential backoff.
 */
export default class MediaUploader extends Component {
  // Default upload endpoint when neither `url` nor `baseUrl` is supplied.
  // FIX: this was a `const` local to the constructor, which is out of scope for
  // buildUrl_() — referencing it there threw a ReferenceError. A static class
  // property is visible to every method.
  static DRIVE_UPLOAD_URL = 'https://www.googleapis.com/upload/drive/v2/files/';

  constructor(props) {
    super(props);
    const obj = this;
    var options = props;
    var noop = function() {};
    this.file = options.file;
    this.contentType = options.contentType || this.file.type || 'application/octet-stream';
    this.metadata = options.metadata || {
      'title': this.file.name,
      'mimeType': this.contentType
    };
    this.token = options.token;
    this.onComplete = options.onComplete || noop;
    this.onProgress = options.onProgress || noop;
    this.onError = options.onError || noop;
    this.offset = options.offset || 0;
    this.chunkSize = options.chunkSize || 0; // 0 = send the whole file in one PUT
    //this.retryHandler = new RetryHandler();
    //this.retryHandler = new obj.RetryHandler();
    this.interval = 1000; // Start at one second
    this.maxInterval = 60 * 1000;
    this.url = options.url;
    if (!this.url) {
      var params = options.params || {};
      params.uploadType = 'resumable';
      this.url = obj.buildUrl_(options.fileId, params, options.baseUrl);
    }
    // An explicit fileId means we're updating an existing item.
    this.httpMethod = options.fileId ? 'PUT' : 'POST';
  }
  // Invokes fn after the current backoff interval, then grows the interval.
  retry = function(fn) {
    setTimeout(fn, this.interval);
    this.interval = this.nextInterval_();
  };
  // Resets the backoff interval after a successful request.
  reset = function() {
    this.interval = 1000;
  };
  // Exponential backoff with jitter, capped at maxInterval.
  nextInterval_ = function() {
    var interval = this.interval * 2 + this.getRandomInt_(0, 1000);
    return Math.min(interval, this.maxInterval);
  };
  // Random integer in [min, max] (jitter for the backoff).
  getRandomInt_ = function(min, max) {
    return Math.floor(Math.random() * (max - min + 1) + min);
  };
  // Serializes params into a URL-encoded query string.
  buildQuery_ = function(params) {
    params = params || {};
    return Object.keys(params).map(function(key) {
      return encodeURIComponent(key) + '=' + encodeURIComponent(params[key]);
    }).join('&');
  };
  // Builds the session-start URL from an optional file id, query params and base URL.
  buildUrl_ = function(id, params, baseUrl) {
    var url = baseUrl || MediaUploader.DRIVE_UPLOAD_URL;
    if (id) {
      url += id;
    }
    var query = this.buildQuery_(params);
    if (query) {
      url += '?' + query;
    }
    return url;
  };
  // Starts the resumable session: POST the metadata, then upload the content
  // to the session URL returned in the Location response header.
  upload = function() {
    console.log("UPLOAD called", this.file.size);
    var xhr = new XMLHttpRequest();
    xhr.open(this.httpMethod, this.url, true);
    xhr.setRequestHeader('Authorization', 'Bearer ' + this.token);
    xhr.setRequestHeader('Content-Type', 'application/json');
    xhr.setRequestHeader('X-Upload-Content-Length', this.file.size);
    xhr.setRequestHeader('X-Upload-Content-Type', this.contentType);
    xhr.onload = function(e) {
      console.log("ON LOAD CALLED");
      if (e.target.status < 400) {
        var location = e.target.getResponseHeader('Location');
        this.url = location; // all further requests go to the session URL
        this.sendFile_();
      } else {
        this.onUploadError_(e);
      }
    }.bind(this);
    xhr.onerror = this.onUploadError_.bind(this);
    xhr.send(JSON.stringify(this.metadata));
  };
  // PUTs the next slice of the file (or the whole file when chunkSize is 0).
  sendFile_ = function() {
    console.log("SEND FILE CALLED");
    var content = this.file;
    var end = this.file.size;
    if (this.offset || this.chunkSize) {
      // Only bother to slice the file if we're either resuming or uploading in chunks
      if (this.chunkSize) {
        end = Math.min(this.offset + this.chunkSize, this.file.size);
      }
      content = content.slice(this.offset, end);
    }
    var xhr = new XMLHttpRequest();
    xhr.open('PUT', this.url, true);
    xhr.setRequestHeader('Content-Type', this.contentType);
    xhr.setRequestHeader('Content-Range', 'bytes ' + this.offset + '-' + (end - 1) + '/' + this.file.size);
    xhr.setRequestHeader('X-Upload-Content-Type', this.file.type);
    if (xhr.upload) {
      xhr.upload.addEventListener('progress', this.onProgress);
    }
    xhr.onload = this.onContentUploadSuccess_.bind(this);
    xhr.onerror = this.onContentUploadError_.bind(this);
    xhr.send(content);
  };
  // Asks the server how much it already has (empty PUT with "bytes */total"),
  // then resumes from the reported offset via onContentUploadSuccess_.
  resume_ = function() {
    var xhr = new XMLHttpRequest();
    xhr.open('PUT', this.url, true);
    xhr.setRequestHeader('Content-Range', 'bytes */' + this.file.size);
    xhr.setRequestHeader('X-Upload-Content-Type', this.file.type);
    if (xhr.upload) {
      xhr.upload.addEventListener('progress', this.onProgress);
    }
    xhr.onload = this.onContentUploadSuccess_.bind(this);
    xhr.onerror = this.onContentUploadError_.bind(this);
    xhr.send();
  };
  // Reads the server's Range header ("bytes=0-N") and sets offset to N+1.
  extractRange_ = function(xhr) {
    var range = xhr.getResponseHeader('Range');
    if (range) {
      this.offset = parseInt(range.match(/\d+/g).pop(), 10) + 1;
    }
  };
  // 200/201: finished. 308 Resume Incomplete: advance the offset and send more.
  onContentUploadSuccess_ = function(e) {
    if (e.target.status == 200 || e.target.status == 201) {
      this.onComplete(e.target.response);
    } else if (e.target.status == 308) {
      this.extractRange_(e.target);
      this.reset();
      this.sendFile_();
    }
  };
  // 4xx: give up and surface the error; 5xx/network failure: retry with backoff.
  onContentUploadError_ = function(e) {
    if (e.target.status && e.target.status < 500) {
      this.onError(e.target.response);
    } else {
      this.retry(this.resume_.bind(this));
    }
  };
  onUploadError_ = function(e) {
    this.onError(e.target.response); // TODO - Retries for initial upload
  };
}
UPDATE 1:
To avoid using Fetch() I decided to use React Native Document Picker. Now I can select the video file and pass it to the MediaUploader following this guide: https://alishavineeth.medium.com/upload-a-video-from-a-mobile-device-to-youtube-using-react-native-eb2fa54a7445
Now if I set the chunkSize option I will receive a .slice array exception because the object structure doesn't match. If I pass the file without the chunkSize option the metadata uploads to YouTube but the video status will be stuck on processing without any other errors. The video upload process never begins.
DocumentPicker responds with the following object after I select my file:
[{"fileCopyUri": "content://com.android.providers.media.documents/document/video%3A7853", "name": "video_1629795128339.mp4", "size": 192660773, "type": "video/mp4", "uri": "content://com.android.providers.media.documents/document/video%3A7853"}]
UPDATE 2:
Managed to fix my DocumentPicker file issue(from my Update 1) with changing React Native Document Picker to Expo Document Picker.
Now I am able to select large files and call the upload function - the metadata uploads, the video file begins to upload as well but the app crashes during the upload. If I set the chunkSize option on the MediaUploader object I get [TypeError: content.slice is not a function. (In 'content.slice(this.offset, end)', 'content.slice' is undefined)]
Expo Document Picker responds with the following object after I select my video file:
{"name": "video_1629801588164.mp4", "size": 5799179, "type": "video/mp4", "uri": "file:///data/user/0/com.tennis.rec/cache/DocumentPicker/8b350fbf-1b66-4a78-a10f-b61eb2ed3032.mp4"}
UPDATE 3 - RESOLVED!!!
The chunk upload is working now!!! I modified my cors_upload.js file where the chunkSize is being evaluated and sliced with the following code:
if (this.offset || this.chunkSize) {
// Only bother to slice the file if we're either resuming or uploading in chunks
if (this.chunkSize) {
end = Math.min(this.offset + this.chunkSize, this.file.size);
}
console.log("CONTENT SLICE", this.offset, end, this.file.size);
//content = content.slice(this.offset, end);
var base64 = await RNFS.read(this.file.uri, this.chunkSize, this.offset, 'base64');
content = Uint8Array.from(atob(base64), c => c.charCodeAt(0));
}
I added React Native File System and I am using its read() function to load the chunk as base64 and convert it back to a byte array.

Upload multiple images to server in ajax request

I have a web application that create post with 15 images maximum and other data, I am using AJAX request to send all data with images in one request but I got this error:
An error occurred during the serialization or de-serialization process with JSON JavaScriptSerializer. The length of the string exceeds the value set in the MaxJsonLength property.
I tried compressing the images before uploading, but the same problem persists.
That's the JS code:
/**
 * Reads each selected image, downscales it to a max width of 960px on a canvas,
 * re-encodes it, and appends the preview to the .imgUpload list (15 images max,
 * with a removal button and main-image selection hints).
 *
 * @param {HTMLInputElement} input - file input whose .files are read.
 */
function readURL(input) {
    var files = input.files;
    for (var index = 0; index < files.length; index++) {
        var mainImage = "";
        if (files && files[index]) {
            var reader = new FileReader();
            reader.onload = function (e) {
                const imgElement = document.createElement("img");
                // FIX: use the handler's own `e` parameter — the original read the
                // non-standard global `event` (window.event), which is undefined in
                // some browsers (e.g. older Firefox).
                imgElement.src = e.target.result;
                if ($(".imgUpload li").length <= 15) {
                    imgElement.onload = function (e) {
                        const canvas = document.createElement("canvas");
                        const MAX_WIDTH = 960;
                        const scaleSize = MAX_WIDTH / e.target.width;
                        canvas.width = MAX_WIDTH;
                        canvas.height = e.target.height * scaleSize;
                        const ctx = canvas.getContext("2d");
                        ctx.drawImage(e.target, 0, 0, canvas.width, canvas.height);
                        // FIX: toDataURL(type, quality) — the original passed e.target
                        // (an element) as the type, so the browser silently fell back to
                        // PNG, defeating the intended JPEG compression.
                        const srcEncoded = ctx.canvas.toDataURL("image/jpeg");
                        // you can send srcEncoded to the server
                        $(".imgUpload").append('<li class="d-inline-block vTop position-relative mr-3 ' + mainImage + '"><a class="removeImg center w3-text-white fullRadius osRedBg position-absolute" data-ghost="removeImg" href="javascript:;"><svg viewBox="0 0 32 32" width="20" height="20" class="d-inline-block vMiddle" data-name="IconClose"><path fill="#ffffff" stroke="#fff" stroke-width="1" d="M25.333 8.547l-1.88-1.88-7.453 7.453-7.453-7.453-1.88 1.88 7.453 7.453-7.453 7.453 1.88 1.88 7.453-7.453 7.453 7.453 1.88-1.88-7.453-7.453z"></path></svg></a><img class="ad-img" alt="" src="' + srcEncoded + '"></li>');
                        if ($('.imgUpload li.mainImage').length == 0) {
                            $('.imgUpload li:nth-child(2)').addClass("mainImage");
                        }
                        if ($(".imgUpload li").length > 2) {
                            $(".imgValMsg").text("#Messages.ClickToDetectMainImage");
                        }
                    };
                }
                else {
                    $('.modalCountImgs').modal('show');
                }
                if ($('.imgUpload li.mainImage').length == 0) {
                    $('.imgUpload li:nth-child(2)').addClass("mainImage");
                }
                if ($(".imgUpload li").length > 2) {
                    $(".imgValMsg").text("#Messages.ClickToDetectMainImage");
                }
            }
            reader.readAsDataURL(files[index]); // convert to base64 string
        }
    }
}
// Validate the selected files' extensions, then hand them to readURL() for
// resizing/preview; toggles the extension error message accordingly.
$("#inputAddImage").change(function () {
    var allowedExtensions = ['gif', 'png', 'jpg', 'jpeg'];
    // Every file must carry an allowed image extension (case-insensitive).
    var allValid = Array.prototype.every.call(this.files, function (file) {
        var ext = file.name.split('.').pop().toLowerCase();
        return $.inArray(ext, allowedExtensions) !== -1;
    });
    if (allValid) {
        readURL(this);
        $("#Imgs-error2").removeClass("d-block").addClass("d-none");
    }
    else {
        $("#Imgs-error2").removeClass("d-none").addClass("d-block");
    }
    $("#Imgs-error").removeClass("d-block").addClass("d-none");
});
and AJAX request:
//Images: collect the preview sources rendered in .imgUpload as the payload.
var lstImgs = $(".imgUpload img").map(function () {
    return { ImageData: $(this).attr("src") };
}).get();
// Assemble the advertise model from the form fields.
var productModel = {
    CategoryThirdId: $("#CategoryThirdId").val(),
    ProductTitle: $("#ProductTitle").val(),
    ProductDescription: $("#ProductDescription").val(),
    Price: $("#Price").val(),
    Images: lstImgs
};
var data = {
    model: productModel
};
// Post the whole model (images included as base64 data URLs) as JSON.
$.ajax({
    method: "post",
    url: '#Url.Action("CreateAdvertise", "Advertise")',
    contentType: 'application/json',
    data: JSON.stringify(data),
    success: function (res) {
        if (res == "ErrorCatalogName") {
            $(".danger-product-name").show();
        } else if (res == null) {
            $(".danger-product-name").hide();
        }
        $('#overlay').fadeOut();
    },
    error: function (res) {
        $('#overlay').fadeOut();
    }
});
In the end, my goal is to upload up to 15 images along with other data from the client to the server, which uses ASP.NET MVC,
and to compress the images in case the user uploads an image as large as 20 MB.
You can't post files as a string. Use something like this:
// Files cannot travel inside a JSON string — post them as multipart/form-data
// through a FormData object instead.
var fd = new FormData();
fd.append('file', input.files[0]);
$.ajax({
    type: 'POST',
    url: '/Example/SendData',
    data: fd,
    // Leave the FormData alone: jQuery must neither serialize the body nor set
    // a Content-Type, so the browser can supply the multipart boundary itself.
    processData: false,
    contentType: false,
    success: function (data) {
        alert(data);
    }
});

Controlling file upload location in Google Drive (App script not form)

I'm implementing Kanshi Tanaike's Resumable Upload For Web Apps code and it works, but I don't fully understand the AJAX and am trying add a feature. Right now the code places the new file in the user's Drive root folder. I would either like to define a specific folder and upload there directly, or automatically move the file from root to the correct folder (I also need to collect the download link). I see the upload function references location in the response header, but I'm struggling to figure out how to define it, and since the doUpload() function does not seem to treat the upload as a File object I can't figure out how to reference it after the upload to acquire the URL or move it. Any feedback would be enormously appreciated.
// When the user picks a file, read it as an ArrayBuffer; init() (the onload
// handler) takes over from there. The file's name/size/type are stashed on the
// reader so init() can reach them via `this`.
$('#uploadfile').on("change", function () {
    var picked = this.files[0];
    if (picked.name == "") {
        return;
    }
    var fr = new FileReader();
    fr.fileName = picked.name;
    fr.fileSize = picked.size;
    fr.fileType = picked.type;
    fr.onload = init;
    fr.readAsArrayBuffer(picked);
});
// FileReader onload handler: `this` is the FileReader, decorated with
// fileName/fileSize/fileType by the change handler above. Splits the file
// into chunks and opens a Drive resumable-upload session.
function init() {
    $("#progress").text("Initializing.");
    var fileName = this.fileName;
    var fileSize = this.fileSize;
    var fileType = this.fileType;
    console.log({fileName: fileName, fileSize: fileSize, fileType: fileType});
    var buf = this.result; // ArrayBuffer holding the whole file
    // Precompute every chunk: its bytes, length, and Content-Range string.
    var chunkpot = getChunkpot(chunkSize, fileSize);
    var uint8Array = new Uint8Array(buf);
    var chunks = chunkpot.chunks.map(function(e) {
        return {
            data: uint8Array.slice(e.startByte, e.endByte + 1),
            length: e.numByte,
            range: "bytes " + e.startByte + "-" + e.endByte + "/" + chunkpot.total,
        };
    });
    // Ask the Apps Script backend for an OAuth access token, then start a
    // resumable-upload session with the Drive v3 API.
    google.script.run.withSuccessHandler(function(at) {
        var xhr = new XMLHttpRequest();
        xhr.open("POST", "https://www.googleapis.com/upload/drive/v3/files?uploadType=resumable");
        xhr.setRequestHeader('Authorization', "Bearer " + at);
        xhr.setRequestHeader('Content-Type', "application/json");
        xhr.send(JSON.stringify({
            mimeType: fileType,
            name: fileName,
        }));
        xhr.onload = function() {
            // The session URL for the byte upload comes back in the
            // Location response header.
            doUpload({
                location: xhr.getResponseHeader("location"),
                chunks: chunks,
            });
        };
        xhr.onerror = function() {
            console.log(xhr.response);
        };
    }).getAt();
}
// Upload the chunks sequentially to the resumable-session URL.
// Drive answers 308 while more bytes are expected and 200 on the final chunk.
function doUpload(e) {
    var chunks = e.chunks;
    var location = e.location;
    var cnt = 0;
    var end = chunks.length;
    // Named function expression invoked immediately; it re-invokes itself
    // (as `callback`) from onloadend until every chunk has been sent.
    var temp = function callback(cnt) {
        var e = chunks[cnt]; // shadows the outer `e` on purpose
        var xhr = new XMLHttpRequest();
        xhr.open("PUT", location, true);
        xhr.setRequestHeader('Content-Range', e.range);
        xhr.send(e.data);
        // NOTE(review): the handler is attached after send(); that works for
        // async XHR in practice, but attaching it before send() would be safer.
        xhr.onloadend = function() {
            var status = xhr.status;
            cnt += 1;
            console.log("Uploading: " + status + " (" + cnt + " / " + end + ")");
            $("#progress").text("Uploading: " + Math.floor(100 * cnt / end) + "%");
            if (status == 308) {
                callback(cnt); // 308 Resume Incomplete: send the next chunk
            } else if (status == 200) {
                $("#progress").text("Done.");
            } else {
                $("#progress").text("Error: " + xhr.response);
            }
        };
    }(cnt);
}
I believe your goal and your current situation as follows.
You want to upload a file to the specific folder.
You want to retrieve webContentLink of the uploaded file.
You want to achieve above using Resumable Upload for Web Apps using Google Apps Script
You have already confirmed that the default script at the repository worked.
Modification points:
In this case, it is required to check the resumable upload and the method of "Files: create" in Drive API.
In order to upload the file to the specific folder, please add the folder ID to the request body of the initial request.
In order to return the value of webContentLink, please use fields value to the initial request.
When the above points are reflected in the original script, it becomes as follows.
Modified script:
In this case, HTML is modified.
<!DOCTYPE html>
<html>
<head>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.js"></script>
<title>Resumable upload for Web Apps</title>
</head>
<body>
<form>
<input name="file" id="uploadfile" type="file">
</form>
<div id="progress"></div>
<script>
const chunkSize = 5242880;
// File-picker change handler: tag the FileReader with the file's metadata,
// then read the bytes; init() fires when the read completes.
$('#uploadfile').on("change", function () {
    var picked = this.files[0];
    if (picked.name !== "") {
        var reader = new FileReader();
        reader.fileName = picked.name;
        reader.fileSize = picked.size;
        reader.fileType = picked.type;
        reader.onload = init;
        reader.readAsArrayBuffer(picked);
    }
});
// FileReader onload handler for the modified flow: same as the original
// but targets a specific Drive folder and requests full file metadata.
function init() {
    var folderId = "###"; // Added: Please set the folder ID.
    $("#progress").text("Initializing.");
    var fileName = this.fileName;
    var fileSize = this.fileSize;
    var fileType = this.fileType;
    console.log({fileName: fileName, fileSize: fileSize, fileType: fileType});
    var buf = this.result; // ArrayBuffer holding the whole file
    var chunkpot = getChunkpot(chunkSize, fileSize);
    var uint8Array = new Uint8Array(buf);
    // Precompute each chunk's bytes and its Content-Range header value.
    var chunks = chunkpot.chunks.map(function(e) {
        return {
            data: uint8Array.slice(e.startByte, e.endByte + 1),
            length: e.numByte,
            range: "bytes " + e.startByte + "-" + e.endByte + "/" + chunkpot.total,
        };
    });
    google.script.run.withSuccessHandler(function(at) {
        var xhr = new XMLHttpRequest();
        // fields=* makes the final 200 response include webContentLink.
        xhr.open("POST", "https://www.googleapis.com/upload/drive/v3/files?uploadType=resumable&fields=*");
        xhr.setRequestHeader('Authorization', "Bearer " + at);
        xhr.setRequestHeader('Content-Type', "application/json");
        xhr.send(JSON.stringify({
            mimeType: fileType,
            name: fileName,
            parents: [folderId] // Added
        }));
        xhr.onload = function() {
            // The upload-session URL arrives in the Location header.
            doUpload({
                location: xhr.getResponseHeader("location"),
                chunks: chunks,
            });
        };
        xhr.onerror = function() {
            console.log(xhr.response);
        };
    }).getAt();
}
// Sequentially PUT the chunks to the session URL; on the final 200
// response, parse the returned file metadata and show its webContentLink.
function doUpload(e) {
    var chunks = e.chunks;
    var location = e.location;
    var cnt = 0;
    var end = chunks.length;
    // Named function expression run immediately; re-invokes itself as
    // `callback` from onloadend until all chunks are uploaded.
    var temp = function callback(cnt) {
        var e = chunks[cnt]; // shadows the outer `e` on purpose
        var xhr = new XMLHttpRequest();
        xhr.open("PUT", location, true);
        xhr.setRequestHeader('Content-Range', e.range);
        xhr.send(e.data);
        xhr.onloadend = function() {
            var status = xhr.status;
            cnt += 1;
            console.log("Uploading: " + status + " (" + cnt + " / " + end + ")");
            $("#progress").text("Uploading: " + Math.floor(100 * cnt / end) + "%");
            if (status == 308) {
                callback(cnt); // 308 Resume Incomplete: send the next chunk
            } else if (status == 200) {
                var metadata = JSON.parse(xhr.response); // Added
                $("#progress").text("Done. Link: " + metadata.webContentLink); // Modified
            } else {
                $("#progress").text("Error: " + xhr.response);
            }
        };
    }(cnt);
}
/**
 * Split a file of fileSize bytes into chunk descriptors of at most
 * chunkSize bytes each.
 * @param {number} chunkSize - maximum bytes per chunk.
 * @param {number} fileSize - total bytes in the file.
 * @returns {{total: number, chunks: Array<{startByte: number, endByte: number, numByte: number}>}}
 */
function getChunkpot(chunkSize, fileSize) {
    var result = { total: fileSize, chunks: [] };
    if (fileSize <= chunkSize) {
        // The whole file fits in a single chunk.
        result.chunks.push({
            startByte: 0,
            endByte: fileSize - 1,
            numByte: fileSize,
        });
        return result;
    }
    var fullChunks = Math.floor(fileSize / chunkSize);
    var remainder = fileSize % chunkSize;
    // Full-sized chunks first...
    for (var i = 0; i < fullChunks; i++) {
        result.chunks.push({
            startByte: i * chunkSize,
            endByte: (i + 1) * chunkSize - 1,
            numByte: chunkSize,
        });
    }
    // ...then the short tail chunk, if the file size is not an exact multiple.
    if (remainder > 0) {
        result.chunks.push({
            startByte: fullChunks * chunkSize,
            endByte: fullChunks * chunkSize + remainder - 1,
            numByte: remainder,
        });
    }
    return result;
}
</script>
</body>
</html>
When the above modified script is run, the uploaded file is created to the specific folder and webContentLink is displayed as the result.
References:
Perform a resumable upload
Files: create
Resumable Upload for Web Apps using Google Apps Script

Merging multiple parts of a file in Cordova

Within my Cordova app, I am downloading arbitrary files like images or video files. This is done with the Cordova file-transfer plugin and the "Range" Header, because I need to download the files in parts.
My problem is that I want to merge the several small "byte" files back together into the original file they once were, so I can use that file. Every time I try to read the resulting parts as a binary string via the FileReader and write them together into a new file, that file ends up a lot larger than all the parts of the original file altogether, and the resulting file is unusable.
Any help is appreciated.
Here is my code until now (long and ugly):
document.addEventListener('deviceready', deviceready, false);
var App;
var finishedFileUrl = "";
// Minimal sequential-async helper built on jQuery Deferreds: runs
// callback(item, deferred) for each item, one after another; each step
// starts when the previous step's deferred settles. Returns a promise
// that resolves after the last item's deferred settles.
var async = {
    sequence: function(items, callback) {
        var def = $.Deferred(),
        deferrers = [$.Deferred()];
        // Chain one deferred per item: when deferred n settles, step n runs
        // and is handed deferred n+1 to settle when it finishes.
        for(var i = 0; i < items.length; i++) {
            (function (n) {
                deferrers[n + 1] = $.Deferred();
                deferrers[n].always(function() {
                    callback(items[n], deferrers[n + 1]);
                });
            })(i);
        }
        // The final deferred resolves the outer promise.
        deferrers[items.length].always(function() {
            def.resolve();
        });
        deferrers[0].resolve(); // kick off the first step
        return def.promise();
    }
}
// Source URL(s) for the download; the single entry must point at a JPG
// served with Range-header support.
var aSmallImageArray = [
    '' // Put URL to JPG accessible with Range Header Request here
];
// Cumulative byte offsets (1000, 2000, ...) marking the end of each part.
var aByteSizeImageArray = [];
// Fill aByteSizeImageArray with 1000-byte boundaries up to fileSize, plus
// one final boundary past the end for the last (short) part.
function formatDownloadArray(fileSize) {
    var boundary = 1000;
    while (boundary <= fileSize) {
        aByteSizeImageArray.push(boundary);
        boundary += 1000;
    }
    aByteSizeImageArray.push(boundary);
}
// Cordova entry point: wire up the UI once the device APIs are ready.
function deviceready() {
    console.log('dv ready');
    function registerHandlers() {
        App = new DownloadApp();
        formatDownloadArray(XXXXX); // XXXXX should be size of JPG in bytes
        // Start button: download the file in 1000-byte parts, then merge the
        // parts back into a single file and display it.
        document.getElementById("startDl").onclick = function() {
            var that = this;
            console.log("load button clicked");
            var folderName = "testimagefolder";
            // sequence call
            // Download each byte range one after another.
            async.sequence(aByteSizeImageArray, function(currentBytes, iter) {
                var filePath = aSmallImageArray[0];
                // File name = URL fragment + the part's end offset.
                var fileName = aSmallImageArray[0].substr(52,99) + currentBytes;
                console.log(filePath);
                console.log(fileName);
                console.log("Starting with: " + fileName);
                var uri = encodeURI(filePath);
                var folderName = "testimagefolder";
                document.getElementById("statusPlace").innerHTML = "<br/>Loading: " + uri;
                App.load(currentBytes, uri, folderName, fileName,
                    function progress (percentage) {
                        document.getElementById("statusPlace").innerHTML = "<br/>" + percentage + "%";
                    },
                    function success (entry) {
                        console.log("Entry: " + entry);
                        document.getElementById("statusPlace").innerHTML = "<br/>Image saved to: " + App.filedir;
                        console.log("DownloadApp.filedir: " + App.filedir);
                        iter.resolve(); // let async.sequence start the next part
                    },
                    function error () {
                        document.getElementById("statusPlace").innerHTML = "<br/>Failed load image: " + uri;
                        iter.resolve(); // continue with the next part even on failure
                    }
                );
            }).then(function afterAsync () {
                console.log("ASYNC DONE");
                var ohNoItFailed = function ohNoItFailed (exeperro) {
                    console.log(exeperro);
                }
                // now we merge the fileparts into one file to show it
                window.requestFileSystem(LocalFileSystem.PERSISTENT, 0, function (FileSystem) {
                    FileSystem.root.getDirectory(folderName, {create: true, exclusive: false}, function itSuccessed (Directory) {
                        Directory.getFile(aSmallImageArray[0].substr(52,99), {create: true, exclusive: false}, function itSuccessedAgain (fileEntry) {
                            finishedFileUrl = fileEntry.toURL();
                            var directoryReader = Directory.createReader();
                            var allFiles = directoryReader.readEntries(function succesReadDir (fileEntries) {
                                // Append every part file to the merged target file, in order.
                                async.sequence(fileEntries, function(currentFile, iterThis) {
                                    currentFile.file(function (theActualFile) {
                                        var myFileReader = new FileReader();
                                        myFileReader.onload = function (content) {
                                            console.log('FileReader onload event fired!');
                                            console.log('File Content should be: ' + content.target.result);
                                            fileEntry.createWriter(
                                                function mergeImage (writer) {
                                                    writer.onwrite = function (evnt) {
                                                        console.log("Writing successful!");
                                                        iterThis.resolve();
                                                    }
                                                    writer.seek(writer.length); // append at the current end
                                                    writer.write(content.target.result);
                                                }, ohNoItFailed);
                                        };
                                        // NOTE(review): readAsBinaryString inflates binary data when written
                                        // back out; the accepted fix in this thread is readAsArrayBuffer.
                                        myFileReader.readAsBinaryString(theActualFile);
                                    }, ohNoItFailed);
                                }).then(function afterAsyncTwo () {
                                    console.log("NOW THE IMAGE SHOULD BE TAKEN FROM THIS PATH: " + finishedFileUrl);
                                    //window.requestFileSystem(LocalFileSystem.PERSISTENT, 0, function (FileSystem) {
                                    //FileSystem.root.getDirectory(folderName, {create: true, exclusive: false}, function itSuccessed (Directory) {
                                    //Directory.getFile(aSmallImageArray[0].substr(52,99), {create: true, exclusive: false}, function itSuccessedAgain (fileEntry) {
                                    //fileEntry.createWriter(
                                    document.getElementById("image_here").src = finishedFileUrl;
                                });
                            }, ohNoItFailed);
                        }, ohNoItFailed);
                    }, ohNoItFailed);
                }, ohNoItFailed);
            });
        };
    }
    registerHandlers();
}
// Thin wrapper around the Cordova File and FileTransfer plugins that
// downloads one byte range of a URL into a folder on the device.
var DownloadApp = function() {}
DownloadApp.prototype = {
    filedir: "",
    // Resolve the filesystem and target folder, then start the transfer.
    // progress/success/fail are optional callbacks.
    load: function(currentBytes, uri, folderName, fileName, progress, success, fail) {
        var that = this;
        that.progress = progress;
        that.success = success;
        that.fail = fail;
        filePath = ""; // NOTE(review): implicit global, shared across calls — confirm intent
        that.getFilesystem(
            function(fileSystem) {
                console.log("GotFS");
                that.getFolder(fileSystem, folderName, function(folder) {
                    filePath = folder.toURL() + fileName;
                    console.log("FILEPATH: " + filePath);
                    console.log("URI: " + uri);
                    that.transferFile(currentBytes, uri, filePath, progress, success, fail);
                }, function(error) {
                    console.log("Failed to get folder: " + error.code);
                    typeof that.fail === 'function' && that.fail(error);
                });
            },
            function(error) {
                console.log("Failed to get filesystem: " + error.code);
                typeof that.fail === 'function' && that.fail(error);
            }
        );
    },
    // Request the persistent filesystem (Cordova File plugin).
    getFilesystem: function (success, fail) {
        window.requestFileSystem = window.requestFileSystem || window.webkitRequestFileSystem;
        window.requestFileSystem(LocalFileSystem.PERSISTENT, 0, success, fail);
    },
    // Get (or create) the destination directory under the filesystem root.
    getFolder: function (fileSystem, folderName, success, fail) {
        fileSystem.root.getDirectory(folderName, {create: true, exclusive: false}, success, fail)
    },
    // Download bytes [currentBytes-1000, currentBytes] of `uri` into
    // `filePath` using an HTTP Range request via FileTransfer.
    transferFile: function (currentBytes, uri, filePath, progress, success, fail) {
        var that = this;
        that.progress = progress;
        that.success = success;
        that.fail = fail;
        console.log("here we go");
        console.log("filePath before Request: " + filePath);
        var previousBytes = currentBytes - 1000;
        var transfer = new FileTransfer();
        transfer.onprogress = function(progressEvent) {
            if (progressEvent.lengthComputable) {
                var perc = Math.floor(progressEvent.loaded / progressEvent.total * 100);
                typeof that.progress === 'function' && that.progress(perc); // progression on scale 0..100 (percentage) as number
            } else {
            }
        };
        transfer.download(
            uri,
            filePath,
            function success (entry) {
                console.log("File saved to: " + entry.toURL());
                typeof that.success === 'function' && that.success(entry);
            },
            function errorProblem(error) {
                console.log("An error has occurred: Code = " + error.code);
                console.log("download error source " + error.source);
                console.log("download error target " + error.target);
                console.log("download error code " + error.code);
                typeof that.fail === 'function' && that.fail(error);
            },
            true,
            {
                headers: {
                    "Range": "bytes=" + previousBytes + "-" + currentBytes
                }
            }
        );
    }
}
async code by stackoverflow-user: Paul Facklam
-> Thanks a lot!
you can build a blob from other blobs, like the ones you use FileReader on now. (File()s are Blobs)
// put three blobs into a fourth:
var b=new Blob([new Blob(["hello"]), new Blob([" "]), new Blob(["world"])]);
// verify the blob has the data we expect:
var fr=new FileReader();
fr.onload=function(){alert(this.result);};
fr.readAsBinaryString(b); // shows: "hello world"
the binaryString flavor is used here to show how these low-order strings stack up, but the actual new blob instance should have all the orig (arbitrary) bytes from the original blobs, even if they aren't composed of simple strings...
Using readAsArrayBuffer() instead of readAsBinaryString() did the trick!
So instead of:
myFileReader.readAsBinaryString(theActualFile);
I did:
myFileReader.readAsArrayBuffer(theActualFile);
And the resulting image file is usable.

upload image binary - using imageshack api

Moving a topic from google groups to here so it can help someone who is asking.
imageshack api: http://api.imageshack.us/
the final HTTP request is returning JSON:
{"success":true,"process_time":325,"result":{"max_filesize":5242880,"space_limit":52428800,"space_used":0,"space_left":52428800,"passed":0,"failed":0,"total":0,"images":[]}}
which is not good, as it didn't upload :(
it should return an image object. http://api.imageshack.us/#h.ws82a1l6pp9g
as this is what the upload image section on the imageshack api says
please help :(
my extension code
var blobUrl;
// Render the canvas to a PNG blob, read it back as a binary string,
// and kick off the upload once the string is ready.
var makeBlob = function () {
    bigcanvas.toBlob(function (blob) {
        var reader = new window.FileReader();
        reader.readAsBinaryString(blob);
        reader.onloadend = function () {
            var blobBinaryString = reader.result; // was an implicit global
            blobUrl = blobBinaryString;
            Cu.reportError(blobUrl);
            uploadBlob();
        }
    });
};
var uploadedImageUrl;
// POST the image to imageshack; auth_token comes from the earlier login call.
var uploadBlob = function () {
    HTTP('POST', 'https://api.imageshack.us/v1/images', {
        contentType: 'application/x-www-form-urlencoded',
        //'album=' + urlencode('Stock History') + '&
        // BUG FIX: the original body was missing the '&' separator between the
        // auth_token and file# parameters, so the server saw one mangled
        // parameter and reported "passed":0 with an empty images[] array.
        // NOTE(review): blobUrl is a raw binary string; it should likely also
        // be urlencode()d (or the request switched to FormData) so '&', '='
        // and '%' bytes inside the image data don't corrupt the form body —
        // confirm against the imageshack API.
        body: 'auth_token=' + urlencode(auth_token) + '&file#=' + blobUrl,
        onSuccess: function (status, responseXML, responseText, headers, statusText) {
            Cu.reportError('XMLHttpRequest SUCCESS - imageshack uploadBlob\n' + statusText + '\n' + responseText);
            // JSON.parse instead of eval: same result for valid JSON responses,
            // without executing arbitrary response text.
            var json = JSON.parse(responseText);
            uploadedImageUrl = json.direct_link;
            submitBamdex();
        },
        onFailure: function (status, responseXML, responseText, headers, statusText) {
            Cu.reportError('XMLHttpRequest FAILLLLLLLL - imageshack uploadBlob\n' + statusText + '\n' + responseText);
        }
    });
};
makeBlob(); //callllll the func
What worked for me was reading about
the differences between URI's, files, blobs and Base64's in this article: https://yaz.in/p/blobs-files-and-data-uris/ .
fetching a new blob: https://masteringjs.io/tutorials/axios/form-data
and much more closed tabs along the way
so in my react component onChange handler I use new FileReader to read event.target.files[0], readAsDataURL(file), and set the Base64 encoded string to state.
I conditionally render an img src={base64stringfromState} to offer confirmation of correct image, then onSubmit, I convert this "Data URI" (the Base64 string), to a blob with one of these two codes I found somewhere (didn't end up using the first one but this is useful af and took forever to find):
const dataURItoBlob = (dataURI) => {
// convert base64/URLEncoded data component to raw binary data held in a string
var byteString;
if (dataURI.split(',')[0].indexOf('base64') >= 0)
byteString = atob(dataURI.split(',')[1]);
else
byteString = unescape(dataURI.split(',')[1]);
// separate out the mime component
var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];
// write the bytes of the string to a typed array
var ia = new Uint8Array(byteString.length);
for (var i = 0; i < byteString.length; i++) {
ia[i] = byteString.charCodeAt(i);
}
return new Blob([ia], {type:mimeString});
}
And
const blob = await fetch(base64imageString).then(res => res.blob())
Instead of all that shit we can just do this fetch a new version of the image or whatever and blob it right there, in the middle of constructing our photo upload/request :
event.preventDefault()
const blob = await fetch(base64stringfromState).then(res => res.blob())
const formData = new FormData()
formData.append('file#', blob)
formData.append('api_key', 'XXXXX')
formData.append('auth_token', 'XXXXXXXXXX')
formData.append('album', 'youralbumname')
const res = await axios.post('https://api.imageshack.com/v2/images', formData, { headers: { 'Content-Type': 'multipart/form-data' } })
then all we have to do to store the uploaded image is to append https:// to and record the returned direct link for storage alongside its id so you can delete it if you need to later. Per the code earlier they spit out at
res.data.result.images[0].direct_link
res.data.result.images[0].id
This was a bitch to solve so hopefully this helps someone else with uploading photos to imageshack api cuz it's potentially a great value considering the limits of the competitors.
this code uploads a drawing on a canvas to imageshack
Can copy paste but have to update some things:
update username
update password
uploads drawing from canvas with id "bigcanvas"
update your API key
...
//this code uploads a drawing on a canvas to imageshack
var auth_token;
// Log in to imageshack with username/password; on success store the
// auth_token and continue with the file upload.
var loginImageshack = function() {
    HTTP('POST','https://api.imageshack.us/v1/user/login',{
        contentType: 'application/x-www-form-urlencoded',
        body: 'user=USERNAME_TO_IMAGESHACK_HERE&password=' + urlencode('PASSWORD_TO_USERNAME_FOR_IMAGESHACK_HERE'),
        onSuccess: function(status, responseXML, responseText, headers, statusText) {
            Cu.reportError('XMLHttpRequest SUCCESS - imageshack login\n' + statusText + '\n' + responseText);
            // NOTE(review): eval on the response works but JSON.parse is the safe
            // choice (and required for AMO review, per the note after this code).
            eval('var json = ' + responseText);
            auth_token = json.result.auth_token;
            makeImageshackFile();
        },
        onFailure: function(status, responseXML, responseText, headers, statusText) {
            Cu.reportError('XMLHttpRequest FAILLLLLLLL - imageshack login\n' + statusText + '\n' + responseText);
        }
    });
};
var uploadedImageUrl;
// Build a multipart form with the API key, token, album, title and the
// canvas contents, then POST it to the imageshack images endpoint.
var makeImageshackFile = function() {
    var fd = new window.FormData();
    fd.append("api_key", 'A835WS6Bww584g3568efa2z9823uua5ceh0h6325'); //USE YOUR API KEY HERE
    fd.append("auth_token", auth_token);
    fd.append('album', 'Stock History');
    fd.append('title', 'THE-title-you-want-showing-on-imageshack')
    fd.append("file#", bigcanvas.mozGetAsFile("foo.png")); //bigcanvas is a canvas with the image drawn on it: var bigcanvas = document.querySelector('#bigcanvas');
    var xhr = new XMLHttpRequest();
    xhr.onreadystatechange = function() {
        switch (xhr.readyState) {
            case 4:
                if (xhr.status==0 || (xhr.status>=200 && xhr.status<300)) {
                    Cu.reportError('XHR SUCCESS - \n' + xhr.responseText);
                    eval('var json = ' + xhr.responseText);
                    //ensure it passed else redo it I didnt program in the redo thing yet
                    // A successful response carries result.images[0] with the
                    // uploaded file's direct_link, id, filesize, etc.
                    uploadedImageUrl = json.result.images[0].direct_link;
                    Cu.reportError('succesfully uploaded image');
                } else {
                    Cu.reportError('XHR FAIL - \n' + xhr.responseText);
                }
                break;
            default:
                //blah
        }
    }
    xhr.open("POST", "https://api.imageshack.us/v1/images");
    xhr.send(fd);
}
loginImageshack();
important note for code above
should use JSON.parse instead of eval if you want to submit the addon to AMO
should also probably change from using window to Services.appShel.hiddenDOMWindow so like new window.FormData(); would become new Services.appShel.hiddenDOMWindow.FormData(); OR var formData = Components.classes["#mozilla.org/files/formdata;1"].createInstance(Components.interfaces.nsIDOMFormData); OR Cu.import('resource://gre/modules/FormData.jsm')
helper functions required for the code above:
const {classes: Cc, interfaces: Ci, utils: Cu, Components: components} = Components
Cu.import('resource://gre/modules/Services.jsm');
...
// Legacy escape()-based form encoder: '+' -> %2B, space (%20) -> '+',
// '*' -> %2A, '/' -> %2F, '#' -> %40, applied in that order.
function urlencode(str) {
    var encoded = escape(str);
    encoded = encoded.replace(/\+/g, '%2B');
    encoded = encoded.replace(/%20/g, '+');
    encoded = encoded.replace(/\*/g, '%2A');
    encoded = encoded.replace(/\//g, '%2F');
    encoded = encoded.replace(/#/g, '%40');
    return encoded;
};
...
// XPCOM constructor for XMLHttpRequest in a non-window (add-on) scope.
// NOTE(review): the contract ID normally starts with "@mozilla.org/..." —
// the leading '#' here looks like a transcription artifact; confirm.
const XMLHttpRequest = Cc["#mozilla.org/xmlextras/xmlhttprequest;1"];
/**
* The following keys can be sent:
* onSuccess (required) a function called when the response is 2xx
* onFailure a function called when the response is not 2xx
* username The username for basic auth
* password The password for basic auth
* overrideMimeType The mime type to use for non-XML response mime types
* timeout A timeout value in milliseconds for the response
* onTimeout A function to call if the request times out.
* body A string containing the entity body of the request
* contentType The content type of the entity body of the request
* headers A hash of optional headers
*/
// Small XHR helper: performs an HTTP request and dispatches the readyState
// and completion callbacks described in the JSDoc block above. Returns an
// object exposing abort().
function HTTP(method,url,options)
{
    var requester = new XMLHttpRequest();
    var timeout = null;
    if (!options.synchronizedRequest) {
        // Async mode: surface each readyState transition, then route the
        // final result to onSuccess/onFailure.
        requester.onreadystatechange = function() {
            switch (requester.readyState) {
                case 0:
                    if (options.onUnsent) {
                        options.onUnsent(requester);
                    }
                    break;
                case 1:
                    if (options.onOpened) {
                        options.onOpened(requester);
                    }
                    break;
                case 2:
                    if (options.onHeaders) {
                        options.onHeaders(requester);
                    }
                    break;
                case 3:
                    if (options.onLoading) {
                        options.onLoading(requester);
                    }
                    break;
                case 4:
                    if (timeout) {
                        clearTimeout(timeout); // response arrived; cancel the watchdog
                    }
                    // status 0 is treated as success (used here for non-HTTP
                    // schemes / privileged scopes that report 0 on completion).
                    if (requester.status==0 || (requester.status>=200 && requester.status<300)) {
                        options.onSuccess(
                            requester.status,
                            requester.responseXML,
                            requester.responseText,
                            options.returnHeaders ? _HTTP_parseHeaders(requester.getAllResponseHeaders()) : null,
                            requester.statusText
                        );
                    } else {
                        if (options.onFailure) {
                            options.onFailure(
                                requester.status,
                                requester.responseXML,
                                requester.responseText,
                                options.returnHeaders ? _HTTP_parseHeaders(requester.getAllResponseHeaders()) : null,
                                requester.statusText
                            );
                        }
                    }
                    break;
            }
        }
    }
    if (options.overrideMimeType) {
        requester.overrideMimeType(options.overrideMimeType);
    }
    // Basic-auth variant when a username is supplied; the third argument
    // makes the request async unless synchronizedRequest is set.
    if (options.username) {
        requester.open(method,url,!options.synchronizedRequest,options.username,options.password);
    } else {
        requester.open(method,url,!options.synchronizedRequest);
    }
    // Optional timeout watchdog (async only): invoke onTimeout (or
    // onFailure) after options.timeout milliseconds.
    if (options.timeout && !options.synchronizedRequest) {
        timeout = setTimeout(
            function() {
                var callback = options.onTimeout ? options.onTimeout : options.onFailure;
                callback(0,"Operation timeout.");
            },
            options.timeout
        );
    }
    if (options.headers) {
        for (var name in options.headers) {
            requester.setRequestHeader(name,options.headers[name]);
        }
    }
    // Three send paths: raw binary body, text body with Content-Type,
    // or no body at all.
    if (options.sendAsBinary) {
        Cu.reportError('sending as binary');
        requester.sendAsBinary(options.body);
    } else if (options.body) {
        requester.setRequestHeader("Content-Type",options.contentType);
        requester.send(options.body);
    } else {
        requester.send(null);
    }
    if (options.synchronizedRequest) {
        // Sync mode: the response is already complete; dispatch immediately.
        if (requester.status==0 || (requester.status>=200 && requester.status<300)) {
            options.onSuccess(
                requester.status,
                requester.responseXML,
                requester.responseText,
                options.returnHeaders ? _HTTP_parseHeaders(requester.getAllResponseHeaders()) : null,
                requester.statusText
            );
        } else {
            if (options.onFailure) {
                options.onFailure(
                    requester.status,
                    requester.responseXML,
                    requester.responseText,
                    options.returnHeaders ? _HTTP_parseHeaders(requester.getAllResponseHeaders()) : null,
                    requester.statusText
                );
            }
        }
        return {
            abort: function() {
            }
        };
    } else {
        return {
            abort: function() {
                clearTimeout(timeout);
                requester.abort();
            }
        };
    }
}
/**
 * Parse a raw response-header blob into a name -> value map.
 * Lines that do not match _HTTP_HEADER_NAME are treated as continuations
 * and folded into the previous header, joined with a single space.
 * Values keep any whitespace that follows the ':' separator.
 */
function _HTTP_parseHeaders(headerText)
{
    var headers = {};
    if (!headerText) {
        return headers;
    }
    // Split one "Name: value" line and record it.
    var storeLine = function (line) {
        var colon = line.indexOf(':');
        headers[line.substring(0, colon)] = line.substring(colon + 1);
    };
    var eol = headerText.indexOf("\n");
    while (eol >= 0) {
        var line = headerText.substring(0, eol);
        headerText = headerText.substring(eol + 1);
        // Fold continuation lines into the current logical header line.
        while (headerText.length > 0 && !headerText.match(_HTTP_HEADER_NAME)) {
            eol = headerText.indexOf("\n");
            var continuation = eol < 0 ? headerText : headerText.substring(0, eol);
            line = line + ' ' + continuation;
            headerText = eol < 0 ? "" : headerText.substring(eol + 1);
        }
        storeLine(line);
        eol = headerText.indexOf("\n");
    }
    // Trailing header without a final newline.
    if (headerText.length > 0) {
        storeLine(headerText);
    }
    return headers;
}

Categories

Resources