upload image binary - using imageshack api - javascript

Moving a topic from Google Groups to here so it can help anyone else who is asking.
imageshack api: http://api.imageshack.us/
The final HTTP request is returning this JSON:
{"success":true,"process_time":325,"result":{"max_filesize":5242880,"space_limit":52428800,"space_used":0,"space_left":52428800,"passed":0,"failed":0,"total":0,"images":[]}}
which is not good, as nothing was uploaded (passed: 0, total: 0 and an empty images array) :(
It should return an image object: http://api.imageshack.us/#h.ws82a1l6pp9g
as that is what the upload-image section of the ImageShack API docs says.
Please help :(
My extension code:
var blobUrl;
var makeBlob = function () {
    bigcanvas.toBlob(function (blob) {
        var reader = new window.FileReader();
        reader.readAsBinaryString(blob);
        reader.onloadend = function () {
            blobBinaryString = reader.result;
            blobUrl = blobBinaryString;
            Cu.reportError(blobUrl);
            uploadBlob();
        }
    });
};
var uploadedImageUrl;
var uploadBlob = function () {
    HTTP('POST', 'https://api.imageshack.us/v1/images', {
        contentType: 'application/x-www-form-urlencoded',
        //'album=' + urlencode('Stock History') + '&
        body: 'auth_token=' + urlencode(auth_token) + 'file#=' + blobUrl,
        onSuccess: function (status, responseXML, responseText, headers, statusText) {
            Cu.reportError('XMLHttpRequest SUCCESS - imageshack uploadBlob\n' + statusText + '\n' + responseText);
            eval('var json = ' + responseText);
            uploadedImageUrl = json.direct_link;
            submitBamdex();
        },
        onFailure: function (status, responseXML, responseText, headers, statusText) {
            Cu.reportError('XMLHttpRequest FAILLLLLLLL - imageshack uploadBlob\n' + statusText + '\n' + responseText);
        }
    });
};
makeBlob(); //callllll the func

What worked for me was reading about
the differences between URIs, files, blobs and Base64 strings in this article: https://yaz.in/p/blobs-files-and-data-uris/ ,
fetching a new blob: https://masteringjs.io/tutorials/axios/form-data ,
and many more since-closed tabs along the way.
So in my React component's onChange handler I use a new FileReader to read event.target.files[0] with readAsDataURL(file), and set the Base64-encoded string to state.
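For reference, here is a minimal sketch of that onChange handler (the component shape and state names are illustrative, not taken from my actual code):
// sketch: assumes a React function component with a base64stringfromState state hook
const [base64stringfromState, setBase64stringfromState] = React.useState(null);

const onChange = (event) => {
    const file = event.target.files[0];
    if (!file) return;
    const reader = new FileReader();
    reader.onloadend = () => setBase64stringfromState(reader.result); // reader.result is the Base64 data URI
    reader.readAsDataURL(file);
};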
I conditionally render an img with src={base64stringfromState} to confirm the correct image was chosen. Then, onSubmit, I convert this "data URI" (the Base64 string) to a blob with one of these two snippets I found somewhere (I didn't end up using the first one, but it's useful and took forever to find):
const dataURItoBlob = (dataURI) => {
    // convert base64/URL-encoded data component to raw binary data held in a string
    var byteString;
    if (dataURI.split(',')[0].indexOf('base64') >= 0)
        byteString = atob(dataURI.split(',')[1]);
    else
        byteString = unescape(dataURI.split(',')[1]);
    // separate out the mime component
    var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];
    // write the bytes of the string to a typed array
    var ia = new Uint8Array(byteString.length);
    for (var i = 0; i < byteString.length; i++) {
        ia[i] = byteString.charCodeAt(i);
    }
    return new Blob([ia], {type: mimeString});
}
And
const blob = await fetch(base64imageString).then(res => res.blob())
Instead of all that, we can just fetch the data URI and turn it into a blob right there, in the middle of constructing our photo upload request:
event.preventDefault()
const blob = await fetch(base64stringfromState).then(res => res.blob())
const formData = new FormData()
formData.append('file#', blob)
formData.append('api_key', 'XXXXX')
formData.append('auth_token', 'XXXXXXXXXX')
formData.append('album', 'youralbumname')
const res = await axios.post('https://api.imageshack.com/v2/images', formData, { headers: { 'Content-Type': 'multipart/form-data' } })
Then all we have to do to store the uploaded image is prepend https:// to the returned direct link and record it alongside its id, so you can delete it later if you need to. Per the code earlier, the response exposes these at:
res.data.result.images[0].direct_link
res.data.result.images[0].id
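In other words, something along these lines (a sketch; variable names are just for illustration):
const uploaded = res.data.result.images[0];
const imageUrl = 'https://' + uploaded.direct_link; // direct_link comes back without a scheme
const imageId = uploaded.id;                        // keep the id so you can delete the image later
// persist imageUrl and imageId together wherever you store your records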
This was a pain to solve, so hopefully this helps someone else uploading photos to the ImageShack API, because it's potentially a great value considering the limits of the competitors.

This code uploads a drawing on a canvas to ImageShack.
You can copy-paste it, but you have to update a few things:
update the username
update the password
it uploads the drawing from the canvas with id "bigcanvas"
update your API key
...
//this code uploads a drawing on a canvas to imageshack
var auth_token;
var loginImageshack = function() {
HTTP('POST','https://api.imageshack.us/v1/user/login',{
contentType: 'application/x-www-form-urlencoded',
body: 'user=USERNAME_TO_IMAGESHACK_HERE&password=' + urlencode('PASSWORD_TO_USERNAME_FOR_IMAGESHACK_HERE'),
onSuccess: function(status, responseXML, responseText, headers, statusText) {
Cu.reportError('XMLHttpRequest SUCCESS - imageshack login\n' + statusText + '\n' + responseText);
eval('var json = ' + responseText);
auth_token = json.result.auth_token;
makeImageshackFile();
},
onFailure: function(status, responseXML, responseText, headers, statusText) {
Cu.reportError('XMLHttpRequest FAILLLLLLLL - imageshack login\n' + statusText + '\n' + responseText);
}
});
};
var uploadedImageUrl;
var makeImageshackFile = function() {
var fd = new window.FormData();
fd.append("api_key", 'A835WS6Bww584g3568efa2z9823uua5ceh0h6325'); //USE YOUR API KEY HERE
fd.append("auth_token", auth_token);
fd.append('album', 'Stock History');
fd.append('title', 'THE-title-you-want-showing-on-imageshack')
fd.append("file#", bigcanvas.mozGetAsFile("foo.png")); //bigcanvas is a canvas with the image drawn on it: var bigcanvas = document.querySelector('#bigcanvas');
var xhr = new XMLHttpRequest();
xhr.onreadystatechange = function() {
switch (xhr.readyState) {
case 4:
if (xhr.status==0 || (xhr.status>=200 && xhr.status<300)) {
Cu.reportError('XHR SUCCESS - \n' + xhr.responseText);
eval('var json = ' + xhr.responseText);
//ensure it passed else redo it I didnt program in the redo thing yet
//succesful json == {"success":true,"process_time":1274,"result":{"max_filesize":5242880,"space_limit":52428800,"space_used":270802,"space_left":52157998,"passed":1,"failed":0,"total":1,"images":[{"id":1067955963,"server":703,"bucket":2397,"lp_hash":"jj9g5p","filename":"9g5.png","original_filename":"foo.png","direct_link":"imageshack.us\/a\/img703\/2397\/9g5.png","title":"082813 200AM PST","description":null,"tags":[""],"likes":0,"liked":false,"views":0,"comments_count":0,"comments_disabled":false,"filter":0,"filesize":1029,"creation_date":1377681549,"width":760,"height":1110,"public":true,"is_owner":true,"owner":{"username":"bamdex","avatar":{"server":0,"filename":null}},"next_images":[],"prev_images":[{"server":59,"filename":"06mm.png"},{"server":706,"filename":"a1fg.png"}],"related_images":[{"server":59,"filename":"06mm.png"},{"server":41,"filename":"xn9q.png"},{"server":22,"filename":"t20a.png"},{"server":547,"filename":"fipx.png"},{"server":10,"filename":"dg6b.png"},{"se
uploadedImageUrl = json.result.images[0].direct_link;
Cu.reportError('succesfully uploaded image');
} else {
Cu.reportError('XHR FAIL - \n' + xhr.responseText);
}
break;
default:
//blah
}
}
xhr.open("POST", "https://api.imageshack.us/v1/images");
xhr.send(fd);
}
loginImageshack();
Important note for the code above:
You should use JSON.parse instead of eval if you want to submit the add-on to AMO.
You should also probably change from using window to Services.appShell.hiddenDOMWindow, so new window.FormData(); would become new Services.appShell.hiddenDOMWindow.FormData(); OR var formData = Components.classes["@mozilla.org/files/formdata;1"].createInstance(Components.interfaces.nsIDOMFormData); OR Cu.import('resource://gre/modules/FormData.jsm').
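For example, the eval lines above would become something like this (a small sketch of the suggested change):
// instead of: eval('var json = ' + responseText);
var json = JSON.parse(responseText);
auth_token = json.result.auth_token;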
helper functions required for the code above:
const {classes: Cc, interfaces: Ci, utils: Cu, Components: components} = Components
Cu.import('resource://gre/modules/Services.jsm');
...
function urlencode(str) {
    return escape(str).replace(/\+/g, '%2B').replace(/%20/g, '+').replace(/\*/g, '%2A').replace(/\//g, '%2F').replace(/@/g, '%40');
};
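If you don't need the legacy escape() behaviour, a roughly equivalent helper built on encodeURIComponent would look like this (a sketch, not part of the original code):
function urlencode(str) {
    // encodeURIComponent already percent-encodes @, / and +;
    // spaces become '+' per the application/x-www-form-urlencoded convention,
    // and '*' is encoded manually to match the helper above.
    return encodeURIComponent(str).replace(/%20/g, '+').replace(/\*/g, '%2A');
}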
...
//http request
const XMLHttpRequest = Cc["@mozilla.org/xmlextras/xmlhttprequest;1"];
/**
* The following keys can be sent:
* onSuccess (required) a function called when the response is 2xx
* onFailure a function called when the response is not 2xx
* username The username for basic auth
* password The password for basic auth
* overrideMimeType The mime type to use for non-XML response mime types
* timeout A timeout value in milliseconds for the response
* onTimeout A function to call if the request times out.
* body A string containing the entity body of the request
* contentType The content type of the entity body of the request
* headers A hash of optional headers
*/
function HTTP(method,url,options)
{
var requester = new XMLHttpRequest();
var timeout = null;
if (!options.synchronizedRequest) {
requester.onreadystatechange = function() {
switch (requester.readyState) {
case 0:
if (options.onUnsent) {
options.onUnsent(requester);
}
break;
case 1:
if (options.onOpened) {
options.onOpened(requester);
}
break;
case 2:
if (options.onHeaders) {
options.onHeaders(requester);
}
break;
case 3:
if (options.onLoading) {
options.onLoading(requester);
}
break;
case 4:
if (timeout) {
clearTimeout(timeout);
}
if (requester.status==0 || (requester.status>=200 && requester.status<300)) {
options.onSuccess(
requester.status,
requester.responseXML,
requester.responseText,
options.returnHeaders ? _HTTP_parseHeaders(requester.getAllResponseHeaders()) : null,
requester.statusText
);
} else {
if (options.onFailure) {
options.onFailure(
requester.status,
requester.responseXML,
requester.responseText,
options.returnHeaders ? _HTTP_parseHeaders(requester.getAllResponseHeaders()) : null,
requester.statusText
);
}
}
break;
}
}
}
if (options.overrideMimeType) {
requester.overrideMimeType(options.overrideMimeType);
}
if (options.username) {
requester.open(method,url,!options.synchronizedRequest,options.username,options.password);
} else {
requester.open(method,url,!options.synchronizedRequest);
}
if (options.timeout && !options.synchronizedRequest) {
timeout = setTimeout(
function() {
var callback = options.onTimeout ? options.onTimeout : options.onFailure;
callback(0,"Operation timeout.");
},
options.timeout
);
}
if (options.headers) {
for (var name in options.headers) {
requester.setRequestHeader(name,options.headers[name]);
}
}
if (options.sendAsBinary) {
Cu.reportError('sending as binary');
requester.sendAsBinary(options.body);
} else if (options.body) {
requester.setRequestHeader("Content-Type",options.contentType);
requester.send(options.body);
} else {
requester.send(null);
}
if (options.synchronizedRequest) {
if (requester.status==0 || (requester.status>=200 && requester.status<300)) {
options.onSuccess(
requester.status,
requester.responseXML,
requester.responseText,
options.returnHeaders ? _HTTP_parseHeaders(requester.getAllResponseHeaders()) : null,
requester.statusText
);
} else {
if (options.onFailure) {
options.onFailure(
requester.status,
requester.responseXML,
requester.responseText,
options.returnHeaders ? _HTTP_parseHeaders(requester.getAllResponseHeaders()) : null,
requester.statusText
);
}
}
return {
abort: function() {
}
};
} else {
return {
abort: function() {
clearTimeout(timeout);
requester.abort();
}
};
}
}
// Header-name pattern used by _HTTP_parseHeaders below (referenced but not defined in the original snippet; assumed definition)
const _HTTP_HEADER_NAME = /^[A-Za-z0-9_-]+:/;
function _HTTP_parseHeaders(headerText)
{
var headers = {};
if (headerText) {
var eol = headerText.indexOf("\n");
while (eol>=0) {
var line = headerText.substring(0,eol);
headerText = headerText.substring(eol+1);
while (headerText.length>0 && !headerText.match(_HTTP_HEADER_NAME)) {
eol = headerText.indexOf("\n");
var nextLine = eol<0 ? headerText : headerText.substring(0,eol);
line = line+' '+nextLine;
headerText = eol<0 ? "" : headerText.substring(eol+1);
}
// Parse the name value pair
var colon = line.indexOf(':');
var name = line.substring(0,colon);
var value = line.substring(colon+1);
headers[name] = value;
eol = headerText.indexOf("\n");
}
if (headerText.length>0) {
var colon = headerText.indexOf(':');
var name = headerText.substring(0,colon);
var value = headerText.substring(colon+1);
headers[name] = value;
}
}
return headers;
}

Related

React Native Fetch video file by chunk / YouTube Data API chunk(multipart) upload

I made a YouTube API upload app. It works great with small video file sizes but with larger sizes my app crashes. The exception happens when I try to get the video file with Fetch().
Question: Is there a way I can fetch a large file in React Native and feed it into the YouTube API in smaller chunks?
Here is my fetch code:
const fetchResponse = await fetch(videoUri);
const blob = await fetchResponse.blob();
var file = new File([blob], "video.mp4", {type: "video/mp4"});
My YouTube upload code is taken from the following git repos, which supposedly support multipart upload as well:
https://github.com/youtube/api-samples/blob/master/javascript/cors_upload.js and
https://github.com/youtube/api-samples/blob/master/javascript/upload_video.js
Here is my full upload code:
uploadVideo = async function() {
var match = this.state.match.value;
var video = match.mergedVideo;
var players = match.players;
var scoreboard = this.state.match.value.scoreboard;
var points = match.points;
var title = players[0].name + " vs. " + players[1].name + " " + scoreboard;
var description = this.descriptionBuilder(points, match.videos);
/*const fetchResponse = await fetch(video);
const blob = await fetchResponse.blob();
var file = new File([blob], "video.mp4", {type: "video/mp4"});
console.log(file);*/
const file = await DocumentPicker.pick({
type: [DocumentPicker.types.video],
});
var metadata = {
snippet: {
title: title,
description: description,
tags: ['youtube-cors-upload'],
categoryId: 22
},
status: {
privacyStatus: 'unlisted'
}
};
var uploader = new MediaUploader({
baseUrl: 'https://www.googleapis.com/upload/youtube/v3/videos',
file: file,
token: this.state.user.auth.accessToken,
metadata: metadata,
chunkSize: 1024 * 1024,
params: {
part: Object.keys(metadata).join(',')
},
onError: function(data) {
console.log(data);
var message = data;
try {
var errorResponse = JSON.parse(data);
message = errorResponse.error.message;
} finally {
alert(message);
}
}.bind(this),
onProgress: function(data) {
var currentTime = Date.now();
var bytesUploaded = data.loaded;
var totalBytes = data.total;
var bytesPerSecond = bytesUploaded / ((currentTime - window.uploadStartTime) / 1000);
var estimatedSecondsRemaining = (totalBytes - bytesUploaded) / bytesPerSecond;
var percentageComplete = (bytesUploaded * 100) / totalBytes;
this.setState({ youtubeUploadProgress: percentageComplete / 100});
console.log("Uploaded: " + bytesUploaded + " | Total: " + totalBytes + " | Percentage: " + percentageComplete + " | Esitmated seconds remaining: " + estimatedSecondsRemaining);
}.bind(this),
onComplete: function(data) {
console.log("Complete");
alert("Upload successful!");
this.setState({ youtubeUploadProgress: 0});
}.bind(this)
});
window.uploadStartTime = Date.now();
uploader.upload();
}
and this is my cors_upload.js in React Native class module:
import React, { Component } from 'react';
export default class MediaUploader extends Component {
constructor(props) {
super(props);
const obj = this;
const DRIVE_UPLOAD_URL = 'https://www.googleapis.com/upload/drive/v2/files/';
var options = props;
var noop = function() {};
this.file = options.file;
this.contentType = options.contentType || this.file.type || 'application/octet-stream';
this.metadata = options.metadata || {
'title': this.file.name,
'mimeType': this.contentType
};
this.token = options.token;
this.onComplete = options.onComplete || noop;
this.onProgress = options.onProgress || noop;
this.onError = options.onError || noop;
this.offset = options.offset || 0;
this.chunkSize = options.chunkSize || 0;
//this.retryHandler = new RetryHandler();
//this.retryHandler = new obj.RetryHandler();
this.interval = 1000; // Start at one second
this.maxInterval = 60 * 1000;
this.url = options.url;
if (!this.url) {
var params = options.params || {};
params.uploadType = 'resumable';
//this.url = this.buildUrl_(options.fileId, params, options.baseUrl);
this.url = obj.buildUrl_(options.fileId, params, options.baseUrl);
}
this.httpMethod = options.fileId ? 'PUT' : 'POST';
}
retry = function(fn) {
setTimeout(fn, this.interval);
this.interval = this.nextInterval_();
};
reset = function() {
this.interval = 1000;
};
nextInterval_ = function() {
var interval = this.interval * 2 + this.getRandomInt_(0, 1000);
return Math.min(interval, this.maxInterval);
};
getRandomInt_ = function(min, max) {
return Math.floor(Math.random() * (max - min + 1) + min);
};
buildQuery_ = function(params) {
params = params || {};
return Object.keys(params).map(function(key) {
return encodeURIComponent(key) + '=' + encodeURIComponent(params[key]);
}).join('&');
};
buildUrl_ = function(id, params, baseUrl) {
var url = baseUrl || DRIVE_UPLOAD_URL;
if (id) {
url += id;
}
var query = this.buildQuery_(params);
if (query) {
url += '?' + query;
}
return url;
};
upload = function() {
//var self = this;
console.log("UPLOAD called", this.file.size);
var xhr = new XMLHttpRequest();
xhr.open(this.httpMethod, this.url, true);
xhr.setRequestHeader('Authorization', 'Bearer ' + this.token);
xhr.setRequestHeader('Content-Type', 'application/json');
xhr.setRequestHeader('X-Upload-Content-Length', this.file.size);
xhr.setRequestHeader('X-Upload-Content-Type', this.contentType);
xhr.onload = function(e) {
console.log("ON LOAD CALLED");
if (e.target.status < 400) {
var location = e.target.getResponseHeader('Location');
this.url = location;
this.sendFile_();
} else {
this.onUploadError_(e);
}
}.bind(this);
xhr.onerror = this.onUploadError_.bind(this);
xhr.send(JSON.stringify(this.metadata));
};
sendFile_ = function() {
console.log("SEND FILE CALLED");
var content = this.file;
var end = this.file.size;
if (this.offset || this.chunkSize) {
// Only bother to slice the file if we're either resuming or uploading in chunks
if (this.chunkSize) {
end = Math.min(this.offset + this.chunkSize, this.file.size);
}
content = content.slice(this.offset, end);
}
var xhr = new XMLHttpRequest();
xhr.open('PUT', this.url, true);
xhr.setRequestHeader('Content-Type', this.contentType);
xhr.setRequestHeader('Content-Range', 'bytes ' + this.offset + '-' + (end - 1) + '/' + this.file.size);
xhr.setRequestHeader('X-Upload-Content-Type', this.file.type);
if (xhr.upload) {
xhr.upload.addEventListener('progress', this.onProgress);
}
xhr.onload = this.onContentUploadSuccess_.bind(this);
xhr.onerror = this.onContentUploadError_.bind(this);
xhr.send(content);
};
resume_ = function() {
var xhr = new XMLHttpRequest();
xhr.open('PUT', this.url, true);
xhr.setRequestHeader('Content-Range', 'bytes */' + this.file.size);
xhr.setRequestHeader('X-Upload-Content-Type', this.file.type);
if (xhr.upload) {
xhr.upload.addEventListener('progress', this.onProgress);
}
xhr.onload = this.onContentUploadSuccess_.bind(this);
xhr.onerror = this.onContentUploadError_.bind(this);
xhr.send();
};
extractRange_ = function(xhr) {
var range = xhr.getResponseHeader('Range');
if (range) {
this.offset = parseInt(range.match(/\d+/g).pop(), 10) + 1;
}
};
onContentUploadSuccess_ = function(e) {
if (e.target.status == 200 || e.target.status == 201) {
this.onComplete(e.target.response);
} else if (e.target.status == 308) {
this.extractRange_(e.target);
this.reset();
this.sendFile_();
}
};
onContentUploadError_ = function(e) {
if (e.target.status && e.target.status < 500) {
this.onError(e.target.response);
} else {
this.retry(this.resume_.bind(this));
}
};
onUploadError_ = function(e) {
this.onError(e.target.response); // TODO - Retries for initial upload
};
}
UPDATE 1:
To avoid using Fetch() I decided to use React Native Document Picker. Now I can select the video file and pass it to the MediaUploader following this guide: https://alishavineeth.medium.com/upload-a-video-from-a-mobile-device-to-youtube-using-react-native-eb2fa54a7445
Now if I set the chunkSize option I will receive a .slice array exception because the object structure doesn't match. If I pass the file without the chunkSize option the metadata uploads to YouTube but the video status will be stuck on processing without any other errors. The video upload process never begins.
DocumentPicker responds with the following object after I select my file:
[{"fileCopyUri": "content://com.android.providers.media.documents/document/video%3A7853", "name": "video_1629795128339.mp4", "size": 192660773, "type": "video/mp4", "uri": "content://com.android.providers.media.documents/document/video%3A7853"}]
UPDATE 2:
Managed to fix my DocumentPicker file issue (from Update 1) by switching from React Native Document Picker to Expo Document Picker.
Now I am able to select large files and call the upload function: the metadata uploads and the video file begins to upload as well, but the app crashes during the upload. If I set the chunkSize option on the MediaUploader object I get [TypeError: content.slice is not a function. (In 'content.slice(this.offset, end)', 'content.slice' is undefined)]
Expo Document Picker responds with the following object after I select my video file:
{"name": "video_1629801588164.mp4", "size": 5799179, "type": "video/mp4", "uri": "file:///data/user/0/com.tennis.rec/cache/DocumentPicker/8b350fbf-1b66-4a78-a10f-b61eb2ed3032.mp4"}
UPDATE 3 - RESOLVED!!!
The chunk upload is working now!!! I modified my cors_upload.js file where the chunkSize is being evaluated and sliced with the following code:
if (this.offset || this.chunkSize) {
    // Only bother to slice the file if we're either resuming or uploading in chunks
    if (this.chunkSize) {
        end = Math.min(this.offset + this.chunkSize, this.file.size);
    }
    console.log("CONTENT SLICE", this.offset, end, this.file.size);
    //content = content.slice(this.offset, end);
    var base64 = await RNFS.read(this.file.uri, this.chunkSize, this.offset, 'base64');
    content = Uint8Array.from(atob(base64), c => c.charCodeAt(0));
}
I added React Native File System and I am using its read() function to load the chunk as base64 and convert it back to a byte array.
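For completeness, here is roughly how that slots into sendFile_, as a sketch under my assumptions (react-native-fs imported as RNFS, and sendFile_ made async so it can await the read):
import RNFS from 'react-native-fs'; // added dependency for reading file chunks from disk

sendFile_ = async function() {
    var content = this.file;
    var end = this.file.size;
    if (this.offset || this.chunkSize) {
        if (this.chunkSize) {
            end = Math.min(this.offset + this.chunkSize, this.file.size);
        }
        // read only this chunk from disk as base64, then convert it to raw bytes
        var base64 = await RNFS.read(this.file.uri, this.chunkSize, this.offset, 'base64');
        content = Uint8Array.from(atob(base64), c => c.charCodeAt(0));
    }
    // ...the rest of sendFile_ (the XHR PUT with the Content-Range header) stays unchanged
};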

Image from JavaScript to PHP as Post Variable

I've seen lots of variations on this question, but none of them applies to this specific situation. I'm a bit confused because of all the data and object types.
Consider the following code in JavaScript:
function postRequest(url, params, success, error, keepactive = 1)
{
let req = false;
try
{
// most browsers
req = new XMLHttpRequest();
} catch (e)
{
// IE
try
{
req = new ActiveXObject("Msxml2.XMLHTTP");
} catch (e)
{
// try an older version
try
{
req = new ActiveXObject("Microsoft.XMLHTTP");
} catch (e)
{
return false;
}
}
}
if (keepactive === 0)
{
ajaxcalls.push(req);
console.log(ajaxcalls.length + ' calls active');
} else if (keepactive === 2)
{
console.log('Filter call: ' + ajaxcalls.length + ' calls active');
filtercall = req;
} else if (keepactive === 3)
{
console.log('Jump call: ' + ajaxcalls.length + ' calls active');
jumpcall = req;
}
if (!req) return false;
if (typeof success != 'function') success = function ()
{
};
if (typeof error != 'function') error = function ()
{
};
req.onreadystatechange = function ()
{
if (req.readyState === 4)
{
// Success! Remove req from active calls.
if (keepactive === 0)
{
for (let i = ajaxcalls.length - 1; i >= 0; i--)
{
if (ajaxcalls[i] === req)
{
ajaxcalls.splice(i, 1);
console.log(ajaxcalls.length + ' calls active');
}
}
}
return req.status === 200 ?
success(req.responseText) : error(req.status);
// so effectively displayUpdateWorksheet(req.responseText)
// so effectively displayUpdateWorksheetError(req.status)
}
}
req.open("POST", url, true);
req.setRequestHeader("Content-type", "application/x-www-form-urlencoded");
req.send(params);
return req;
}
function uploadPicture(myindex, stringid)
{
let file = document.getElementById("file").files[0];
let contents = new FileReader(); // no arguments
contents.readAsDataURL(file);
contents.onload = function ()
{
// console.log(contents.result);
let filename;
let client;
let field = myindex;
let filecontainer = document.getElementById('file' + myindex);
if (filecontainer != null)
{
filename = filecontainer.innerHTML.replace(/\+/g, '+').replace(/&/g, '&').replace(/#/g, '#').replace(/%/g, '%');
}
let clientcontainer = document.getElementById('myclient');
if (clientcontainer != null)
{
client = clientcontainer.innerHTML.replace(/\+/g, '+').replace(/&/g, '&').replace(/#/g, '#').replace(/%/g, '%');
}
alert('Picture uploaded!');
// let post_array = { client: client, stringid: stringid, filename: filename, field: field, contents: contents.result }
postRequest(
'ajaxcalls/ajaxUploadPicture.php', // url
'&client=' + client +
'&stringid=' + stringid +
'&filename=' + filename +
'&field=' + myindex +
'&contents=' + contents.result +
'&type=' + file.type,
function (responseText)
{
return drawOutputUploadPicture(myindex, responseText);
}, // success
drawErrorUploadPicture // error
);
}
contents.onerror = function ()
{
console.log(contents.error);
};
};
And the following PHP:
$data = $_POST['contents'];
$contents = preg_replace('#data:image/[^;]+;base64,#', '', $data);
$contents = base64_decode($contents);
...
file_put_contents($file_full, $contents);
($file_full is correct here: the file is saved at the right spot. I've just cut some irrelevant code there: nothing in $contents is changed after this.)
When I upload a 52K image, I lose about 1K and the resulting image cannot be opened. What's going on?
The complicating factors are that 1) I'm using POST with 2) multiple variables and 3) vanilla JavaScript; I've seen no working examples of that combination.
When I open the original and copy as text, the beginning is the same:
Original: ���� JFIF ` ` ���Exif
Copy: ���� JFIF ` ` ���Exif
Then a series of NUL's in both files. Then the files start deviating:
Original:
2021:07:22 16:21:52 2021:07:22 16:21:52 L o e k v a n K o o t e n ��"http://ns.adobe.com/xap/1.0/ <?xpacket begin='' id='W5M0MpCehiHzreSzNTczkc9d'?>
<x:xmpmeta xmlns:x="adobe:ns:meta/"><rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"><rdf:Description rdf:about="uuid:faf5bdd5-ba3d-11da-ad31-d33d75182f1b" xmlns:dc="http://purl.org/dc/elements/1.1/"/><rdf:Description rdf:about="uuid:faf5bdd5-ba3d-11da-ad31-d33d75182f1b" xmlns:xmp="http://ns.adobe.com/xap/1.0/"><xmp:CreateDate>2021-07-22T16:21:52.056</xmp:CreateDate></rdf:Description><rdf:Description rdf:about="uuid:faf5bdd5-ba3d-11da-ad31-d33d75182f1b" xmlns:dc="http://purl.org/dc/elements/1.1/"><dc:creator><rdf:Seq xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"><rdf:li>Loek van Kooten</rdf:li></rdf:Seq>
</dc:creator></rdf:Description></rdf:RDF></x:xmpmeta>
Copy:
2021:07:22 16:21:52 2021:07:22 16:21:52 L o e k v a n K o o t
e n
�BȚ��ۜ˘YؙK���K�\�K���X��]�Y�[�I�����YI��SL\�ZR��Tޓ�ޚ��Y �σB��\Y]H[�ΞH�YؙN��ΛY]Kȏ������[�Μ��H������˝�˛ܙ��NNNK��̌�\��\�[�^[��ȃ�&Fc�FW67&�F���&Fc�&�WC�'WV�C�fcV&FCR�&6B�F�C3�C36CsS�&c""����3�F3�&�GG���W&���&r�F2�V�V�V�G2���"���&Fc�FW67&�F���&Fc�&�WC�'WV�C�fcV&FCR�&6B�F�C3�C36CsS�&c""����3����&�GG����2�F�&R�6�������
���� ɕ�ѕ�є����Ĵ�ܴ��P�������ȸ�������
ɕ�ѕ�є��ɑ���͍ɥ�ѥ����ɑ���͍ɥ�ѥ���ɑ�酉�����ե�际�Չ��Ե��͐��ő�����ĵ��͐����ɘň��ᵱ��鑌����輽��ɰ��ɜ�����������̼ĸļ�<dc:creator����\H[�Μ��H������˝�˛ܙ��NNNK��̌�\��\�[�^[��ȏ����O��Z��[����[�ܙ��O�ܙ���\O�B�BBO�ΘܙX]܏�ܙ��\�ܚ[ۏ�ܙ�����������WF4
Anyone recognizes what's going on here?
For some reason, in this case $_POST['contents'] contains spaces where the original Base64 data had + characters. If I replace those spaces with + again, the image comes out nicely. What actually happens: the body is sent as application/x-www-form-urlencoded, and in that encoding a literal + means a space, so PHP turns every un-encoded + in the data URI back into a space when it decodes the request.
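A cleaner fix than patching the spaces back in PHP is to percent-encode each value on the JavaScript side before concatenating the body, so the + characters in the data URI survive form-urlencoded decoding. A sketch of the changed postRequest call (everything else stays the same):
postRequest(
    'ajaxcalls/ajaxUploadPicture.php',
    '&client=' + encodeURIComponent(client) +
    '&stringid=' + encodeURIComponent(stringid) +
    '&filename=' + encodeURIComponent(filename) +
    '&field=' + encodeURIComponent(myindex) +
    '&contents=' + encodeURIComponent(contents.result) + // '+' now arrives intact in $_POST['contents']
    '&type=' + encodeURIComponent(file.type),
    function (responseText)
    {
        return drawOutputUploadPicture(myindex, responseText);
    },
    drawErrorUploadPicture
);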

form data going as null to HttpContext.Current.Request [duplicate]

This question already has an answer here:
How to POST binary files with AngularJS (with upload DEMO)
(1 answer)
Closed 4 years ago.
I have a file upload module. It works well in Postman with no content type set, but in my code the file count in the backend API is always 0. If anyone knows what I am doing wrong, please help me. Thanks.
Here is my backend API:
public async Task<HttpResponseMessage> PostUserImage()
{
Dictionary<string, object> dict = new Dictionary<string, object>();
try
{
var httpRequest = HttpContext.Current.Request;
foreach (string file in httpRequest.Files)
{
HttpResponseMessage response = Request.CreateResponse(HttpStatusCode.Created);
var postedFile = httpRequest.Files[file];
if (postedFile != null && postedFile.ContentLength > 0)
{
int MaxContentLength = 1024 * 1024 * 1; //Size = 1 MB
IList<string> AllowedFileExtensions = new List<string> { ".jpg", ".gif", ".png" };
var ext = postedFile.FileName.Substring(postedFile.FileName.LastIndexOf('.'));
var extension = ext.ToLower();
if (!AllowedFileExtensions.Contains(extension))
{
var message = string.Format("Please Upload image of type .jpg,.gif,.png.");
dict.Add("error", message);
return Request.CreateResponse(HttpStatusCode.BadRequest, dict);
}
else if (postedFile.ContentLength > MaxContentLength)
{
var message = string.Format("Please Upload a file upto 1 mb.");
dict.Add("error", message);
return Request.CreateResponse(HttpStatusCode.BadRequest, dict);
}
else
{
var filePath = HttpContext.Current.Server.MapPath("~/Image/" + postedFile.FileName + extension);
postedFile.SaveAs(filePath);
}
}
var message1 = string.Format("Image Updated Successfully.");
return Request.CreateErrorResponse(HttpStatusCode.Created, message1); ;
}
var res = string.Format("Please Upload a image.");
dict.Add("error", res);
return Request.CreateResponse(HttpStatusCode.NotFound, dict);
}
catch (Exception ex)
{
var res = string.Format("some Message");
dict.Add("error", res);
return Request.CreateResponse(HttpStatusCode.NotFound, dict);
}
}
This is what I am getting after posting through Postman,
and this is what I am getting in my developer console (screenshots omitted).
My Angular service for uploading:
uploadimage:function(file,operation){
var deferred = $q.defer();
var httpReq = {
method: operation,
url: '/API/Customers/PostUserImage',
data:file,
transformRequest: angular.identity,
headers: {
'content-type': 'multipart/form-data'
},
onSuccess: function (response, status) {
deferred.resolve(response);
},
onError: function (response) {
deferred.reject(response);
}
};
httpService.call(httpReq);
return deferred.promise;
}
This is the controller code for appending to the form data:
function readURL(input) {
debugger;
if (input.files && input.files[0]) {
var reader = new FileReader();
reader.onload = function(e) {
$('#imagePreview').css('background-image', 'url('+e.target.result +')');
$('#imagePreview').hide();
$('#imagePreview').fadeIn(650);
}
reader.readAsDataURL(input.files[0]);
var filesformdata = new FormData();
angular.forEach(input.files, function (value, key) {
filesformdata.append(key, value);
});
for (var pair of filesformdata.entries()) {
console.log(pair[0] + ', ' + pair[1]);
console.log(pair[1]);
}
profileService.uploadimage(filesformdata,"POST").then(function(response){
toastr.success("profilepicture changed");
});
}
}
And here is the HTTP request (screenshot omitted).
Use the API like this:
public async Task<HttpResponseMessage> MethodName()
{
if (HttpContext.Current.Request.ContentType == "application/x-www-form-urlencoded")
{
var ParameterName = int.Parse(HttpContext.Current.Request.Form.GetValues("ParameterName")[0].ToString());
}
else
{
if (!Request.Content.IsMimeMultipartContent())
{
throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
}
}
var response = Request.CreateResponse(objreturn);
return response;
}
When sending a non-alphanumeric file or a big payload, you should use a form enctype attribute value of "multipart/form-data".
<form enctype="multipart/form-data" ...
Example: HTML Form Data in ASP.NET Web API: File Upload and Multipart MIME
public async Task<HttpResponseMessage> PostFormData()
{
// Check if the request contains multipart/form-data.
if (!Request.Content.IsMimeMultipartContent())
{
throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
}
string root = HttpContext.Current.Server.MapPath("~/App_Data");
var provider = new MultipartFormDataStreamProvider(root);
try
{
// Read the form data.
await Request.Content.ReadAsMultipartAsync(provider);
// This illustrates how to get the file names.
foreach (MultipartFileData file in provider.FileData)
{
Trace.WriteLine(file.Headers.ContentDisposition.FileName);
Trace.WriteLine("Server file path: " + file.LocalFileName);
}
return Request.CreateResponse(HttpStatusCode.OK);
}
catch (System.Exception e)
{
return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, e);
}
}
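On the client side, the usual companion to this is letting the browser generate the multipart boundary itself rather than hard-coding a Content-Type. A minimal sketch using AngularJS's $http directly (the question's code goes through a custom httpService wrapper, so the names here are illustrative rather than a drop-in replacement):
var fd = new FormData();
fd.append('file', input.files[0]);

$http.post('/API/Customers/PostUserImage', fd, {
    transformRequest: angular.identity,
    // setting Content-Type to undefined makes Angular drop its default header,
    // so the browser adds 'multipart/form-data; boundary=...' on its own
    headers: { 'Content-Type': undefined }
}).then(function (response) {
    console.log('uploaded', response.data);
}, function (error) {
    console.log('upload failed', error);
});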

Uploading Large Files with Ajax call

I am trying to use the Microsoft Graph API to upload large files using an Ajax call.
According to the documentation, you first have to create an upload session which I can do successfully with my code. The problem comes when I start my upload to the returned uploadUrl. I get the following error:
{
code: "invalidRequest",
message: "The Content-Range header length does not match the provided number of bytes."
}
So when I check the actual request in Fiddler, I can see that the Content-Length header is set to 0.
So I tried setting my Content-Length header to the size of the ArrayBuffer that I'm sending, but I get an error (Chrome) that says:
Refused to set unsafe header "Content-Length"
I've been struggling with this for 2 full days now and I'm at my wit's end. There is very little documentation on the Microsoft Graph API, and even fewer examples that seem to fit what I'm trying to do.
I can't imagine I'm the only one out there attempting to do this; I would think it's a fairly common need.
Below is the code I'm using. I'm getting my AccessToken and URL elsewhere, but they seem to be fine as I can query using them from the console.
this.UploadLargeFileToFolderID = function (FolderID,
FileObject,
ShowLoadingMessage,
SuccessFunction,
ErrorFunction,
CompleteFunction) { //will upload a file up to 60Mb to folder.
//shows the loading message
ShowLoadingMessage && ThisRepository.LoadingMessage.Show();
//cleans the file name to something acceptable to SharePoint
FileObject.name = CleanOneDriveFileName(FileObject.name);
var UploadSessionURL = FolderID ?
ThisRepository.RepositoryRootURL + '/drive/items/' + FolderID + '/createUploadSession' :
ThisRepository.RepositoryRootURL + '/drive/root/createUploadSession';
//First, get the Upload Sesion.
$.ajax({
url: UploadSessionURL,
method: 'POST',
headers: {
authorization: "Bearer " + ThisRepository.AccessToken
},
success: function (data, textStatus, jqXHR) {
//successfully got the upload session.
console.log('Session:');
console.log(data);
//Create the ArrayBuffer and upload the file.
ReturnArrayBufferFromFile(FileObject, function (ArrayBuffer) {
console.log('Array Buffer:');
console.log(ArrayBuffer);
var MaxChunkSize = 327680;
var ChunkSize = ArrayBuffer.byteLength < MaxChunkSize ?
ArrayBuffer.byteLength :
MaxChunkSize;
chunkedUpload(data.uploadUrl, ArrayBuffer, ChunkSize, 0, null,
null, null, null,
function (response) {
console.log(response);
!SuccessFunction && console.log(response);
typeof SuccessFunction === 'function' && SuccessFunction(response);
});
});
},
error: function (jqXHR, textStatus, errorThrown) {
console.log(jqXHR);
typeof ErrorFunction === 'function' && ErrorFunction(jqXHR);
},
complete: function (jqXHR, textStatus) {
ThisRepository.LoadingMessage.Remove();
typeof CompleteFunction === 'function' && CompleteFunction(jqXHR);
},
});
};
The function for returning the ArrayBuffer to send:
function ReturnArrayBufferFromFile(InputFile, CallBackFunction) {
console.log('Input File');
console.log(InputFile);
var FileName = CleanOneDriveFileName(InputFile.name);
var FileUploadReader = new FileReader();
if (InputFile.type.match('image.*')) {
// if the file is an image, we want to make sure
// it's not too big before we return it.
FileUploadReader.onloadend = function (e) {
var img = new Image();
//will resize an image to a maximum of 2 megapixels.
img.onload = function () {
var MAX_HEIGHT = 2048; //max final height, in pixels
var MAX_WIDTH = 2048; //max final width, in pixels
var height = img.height;
var width = img.width;
//do the resizing
if (width > height) { //landscape image
if (width > MAX_WIDTH) {
height *= MAX_WIDTH / width;
width = MAX_WIDTH;
};
} else { //portrait image
if (height > MAX_HEIGHT) {
width *= MAX_HEIGHT / height;
height = MAX_HEIGHT;
};
};
//Create a new canvas element, correctly sized with the image
var canvas = document.createElement("canvas");
canvas.width = width;
canvas.height = height;
canvas.getContext('2d').drawImage(this, 0, 0, width, height);
//Create the new file reader for the upload function.
var ConvertedFile = canvas.toBlob(function (blob) {
var ConvertedFileReader = new FileReader();
ConvertedFileReader.onloadend = function (loadendevent) {
//return loadendevent.target.result;
var result = loadendevent.target.result;
var Rawresult = result.split(',')[1];
CallBackFunction(loadendevent.target.result);
};
ConvertedFileReader.readAsArrayBuffer(blob);
}, 'image/jpeg', 0.90);
};
img.src = e.target.result;
};
FileUploadReader.readAsArrayBuffer(InputFile);
} else {
//File is not an image. No pre-work is required. Just upload it.
FileUploadReader.onloadend = function (e) {
CallBackFunction(e.target.result);
};
FileUploadReader.readAsArrayBuffer(InputFile);
};
};
And finally, the chunkedUpload function:
function chunkedUpload(url, file, chunkSize, chunkStart,
chunkEnd, chunks, chunksDone, fileChunk, CompleteCallBack) {
var filesize = file.byteLength;
chunkSize = chunkSize ? chunkSize : 327680;
chunkStart = chunkStart ? chunkStart : 0;
chunkEnd = chunkEnd ? chunkEnd : chunkSize;
chunks = chunks ? chunks : filesize / chunkSize;
chunksDone = chunksDone ? chunksDone : 0;
fileChunk = fileChunk ? fileChunk : file.slice(chunkStart, chunkEnd);
var req = new XMLHttpRequest();
req.open("PUT", url, true);
//req.setRequestHeader("Content-Length", file.size.toString());
req.setRequestHeader("Content-Range", "bytes " + chunkStart + "-" +
(chunkEnd - 1) + "/" + filesize);
req.onload = (e) => {
let response = JSON.parse(req.response);
console.log(response);
if (response.nextExpectedRanges) {
let range = response.nextExpectedRanges[0].split('-'),
chunkStart = Number(range[0]),
nextChunk = chunkStart + chunkSize,
chunkEnd = nextChunk > file.byteLength ? file.byteLength : nextChunk;
console.log(((chunksDone++/ chunks) * 100), '%' );
chunkedUpload(url, file, chunkSize, chunkStart,
chunkEnd, chunks, chunksDone, CompleteCallBack);
}
else {
console.log("upload session complete");
typeof CompleteCallBack === 'function' &&
CompleteCallBack(response);
}
}; req.send(file);
}
I was able to figure out an answer to the problem, so I'll post the final code here for anyone else having this issue, since I could find very few examples online of how to do this.
First of all, sending a bare ArrayBuffer in any browser (IE, Firefox or Chrome) did not set the Content-Length to anything but 0, at least not for me.
If I converted the ArrayBuffer to a new Uint8Array, however, the browsers did pick up the Content-Length and set it correctly.
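Put concretely, the change described above is just this (sketch, variable names illustrative):
// sending the raw ArrayBuffer left Content-Length at 0 for me:
// req.send(arrayBuffer);
// wrapping it in a typed array lets the browser compute Content-Length itself:
req.send(new Uint8Array(arrayBuffer));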
Another issue I found was with the Microsoft Graph documentation: I was not aware that you have to put the new file name in the upload session request URL, and the documentation doesn't make that clear. See my code below; it's formatted correctly and works well.
Finally, my chunkedUpload function needed quite a few changes, most notably changing xhr.send(file) to xhr.send(fileChunk), which was a big one I missed originally. I also included a progress callback for the file upload that works very well with my Bootstrap progress bar.
On to the working code:
this.UploadLargeFileToFolderID = function (FolderID, FileObject, ShowLoadingMessage, SuccessFunction, ErrorFunction, CompleteFunction, ProgressFunction) {//will upload a file up to 60Mb to folder.
ShowLoadingMessage && ThisRepository.LoadingMessage.Show(); //shows the loading message
FileObject.name = CleanOneDriveFileName(FileObject.name); //cleans the file name to something acceptable to SharePoint
var NewFileName = CleanOneDriveFileName(FileObject.name);
var UploadSessionURL = FolderID ? ThisRepository.RepositoryRootURL + '/drive/items/' + FolderID + ':/' + NewFileName + ':/createUploadSession' : ThisRepository.RepositoryRootURL + '/drive/root:/' + NewFileName + ':/createUploadSession';
var PathToParent = FolderID ? ThisRepository.RepositoryRootURL + '/drive/items/' + FolderID + ':/' + NewFileName + ':/' : ThisRepository.RepositoryRootURL + '/drive/root:/' + NewFileName + ':/'; //used if we have a naming conflict and must rename the object.
var UploadSessionOptions = {
item: {
//"#microsoft.graph.conflictBehavior": "rename",
"#microsoft.graph.conflictBehavior": "replace",
}
};
//First, get the Upload Sesion.
$.ajax({
url: UploadSessionURL,
method: 'POST',
headers: { authorization: "Bearer " + ThisRepository.AccessToken, 'Content-Type': 'application/json', 'accept': 'application/json'},
data: JSON.stringify(UploadSessionOptions),
success: function (SessionData, textStatus, jqXHR) { //successfully got the upload session.
//Create the ArrayBuffer and upload the file.
ReturnArrayBufferFromFile(FileObject,
function (ArrayBuffer) {
var uInt8Array = new Uint8Array(ArrayBuffer);
var FileSize = uInt8Array.length;
var MaxChunkSize = 3276800; //approx 3.2Mb. Microsoft Graph OneDrive API says that all chunks MUST be in a multiple of 320Kib (327,680 bytes). Recommended is 5Mb-10Mb for good internet connections.
var ChunkSize = FileSize < MaxChunkSize ? FileSize : MaxChunkSize;
var DataUploadURL = SessionData.uploadUrl;
chunkedUpload(DataUploadURL, uInt8Array, ChunkSize, 0, null, null, null,
function (progress) { //progress handler
ProgressFunction(progress);
},
function (response) { //completion handler
if (response.StatusCode == 201 || response.StatusCode == 200) {//success. 201 is 'Created' and 200 is 'OK'
typeof SuccessFunction === 'function' && SuccessFunction(response);
ThisRepository.LoadingMessage.Remove();
typeof CompleteFunction === 'function' && CompleteFunction(response);
} else if (response.StatusCode == 409) { //naming conflict?
//if we had a renaming conflict error, per Graph Documentation we can make a simple PUT request to rename the file
//HAVE NOT SUCCESSFULLY TESTED THIS...
var NewDriveItemResolve = {
"name": NewFileName,
"#microsoft.graph.conflictBehavior": "rename",
"#microsoft.graph.sourceUrl": DataUploadURL
};
$.ajax({
url: PathToParent,
method: "PUT",
headers: { authorization: "Bearer " + ThisRepository.AccessToken, 'Content-Type': 'application/json', accept: 'application/json' },
data: JSON.stringify(NewDriveItemResolve),
success: function (RenameSuccess) {
console.log(RenameSuccess);
typeof SuccessFunction === 'function' && SuccessFunction(response);
ThisRepository.LoadingMessage.Remove();
typeof CompleteFunction === 'function' && CompleteFunction(response);
},
error: function (RenameError) {
console.log(RenameError);
var CompleteObject = { StatusCode: RenameError.status, ResponseObject: RenameError, Code: RenameError.error ? RenameError.error.code : 'Unknown Error Code', Message: RenameError.error ? RenameError.error.message : 'Unknown Error Message' };
var Status = CompleteObject.StatusCode;
var StatusText = CompleteObject.Code;
var ErrorMessage = CompleteObject.Message;
var ErrorMessage = new Alert({ Location: ThisRepository.LoadingMessage.Location, Type: 'danger', Text: "Status: " + Status + ': ' + StatusText + "<br />Error: " + ErrorMessage + '<br />Rest Endpoint: ' + data.uploadUrl });
ErrorMessage.ShowWithoutTimeout();
typeof ErrorFunction == 'function' && ErrorFunction(response);
ThisRepository.LoadingMessage.Remove();
typeof CompleteFunction === 'function' && CompleteFunction(response);
},
complete: function (RenameComplete) { /* Complete Function */ }
});
} else { //we had an error of some kind.
var Status = response.StatusCode;
var StatusText = response.Code;
var ErrorMessage = response.Message;
var ErrorMessage = new Alert({ Location: ThisRepository.LoadingMessage.Location, Type: 'danger', Text: "Status: " + Status + ': ' + StatusText + "<br />Error: " + ErrorMessage + '<br />Rest Endpoint: ' + data.uploadUrl });
ErrorMessage.ShowWithoutTimeout();
//CANCEL THE UPLOAD SESSION.
$.ajax({
url: UploadSessionURL,
method: "DELETE",
headers: { authorization: "Bearer " + ThisRepository.AccessToken },
success: function (SessionCancelled) { console.log('Upload Session Cancelled');},
error: function (SessionCancelError) { /* Error Goes Here*/},
});
typeof ErrorFunction == 'function' && ErrorFunction(response);
ThisRepository.LoadingMessage.Remove();
typeof CompleteFunction === 'function' && CompleteFunction(response);
};
});
}
);
},
error: function (jqXHR, textStatus, errorThrown) {
console.log('Error Creating Session:');
console.log(jqXHR);
//WE MAY HAVE A CANCELLED UPLOAD...TRY TO DELETE THE OLD UPLOAD SESSION, TELL THE USER TO TRY AGAIN.
//COULD OPTIONALLY RUN A "RENAME" ATTEMPT HERE AS WELL
$.ajax({
url: PathToParent,
method: "DELETE",
headers: { authorization: "Bearer " + ThisRepository.AccessToken },
success: function (SessionCancelled) { console.log('Upload Session Cancelled'); },
error: function (SessionCancelError) { console.log(SessionCancelError); },
});
typeof ErrorFunction === 'function' && ErrorFunction(jqXHR);
},
complete: function (jqXHR, textStatus) { /* COMPLETE CODE GOES HERE */},
});
};
function ReturnArrayBufferFromFile(InputFile, CallBackFunction) {
var FileName = InputFile.name;
var FileUploadReader = new FileReader();
//Check the file type. If it's an image, we want to make sure the user isn't uploading a very high quality image (2 megapixel max for our purposes).
if (InputFile.type.match('image.*')) { // it's an image, so we will resize it before returning the array buffer...
FileUploadReader.onloadend = function (e) {
var img = new Image();
img.onload = function () { //will resize an image to a maximum of 2 megapixels.
var MAX_HEIGHT = 2048;//max final height, in pixels
var MAX_WIDTH = 2048; //max final width, in pixels
var height = img.height;
var width = img.width;
//do the resizing
if (width > height) {//landscape image
if (width > MAX_WIDTH) {
height *= MAX_WIDTH / width;
width = MAX_WIDTH;
};
}
else { //portrait image
if (height > MAX_HEIGHT) {
width *= MAX_HEIGHT / height;
height = MAX_HEIGHT;
};
};
//Create a new canvas element, correctly sized with the image
var canvas = document.createElement("canvas");
canvas.width = width;
canvas.height = height;
canvas.getContext('2d').drawImage(this, 0, 0, width, height);
//Create the new file reader for the upload function.
var ConvertedFile = canvas.toBlob(function (blob) {
var ConvertedFileReader = new FileReader();
ConvertedFileReader.onloadend = function (loadendevent) { //return the ArrayBuffer
CallBackFunction(loadendevent.target.result);
};
ConvertedFileReader.readAsArrayBuffer(blob);
//ConvertedFileReader.readAsDataURL(blob);
}, 'image/jpeg', 0.90);
};
img.src = e.target.result;
};
FileUploadReader.readAsDataURL(InputFile);
}
else {
FileUploadReader.onloadend = function (e) {//File is not an image. No pre-work is required. Just return as an array buffer.
CallBackFunction(e.target.result);
};
FileUploadReader.readAsArrayBuffer(InputFile);
};
};
function chunkedUpload(url, file, chunkSize, chunkStart, chunkEnd, chunks,
chunksDone, ProgressCallBack, CompleteCallBack) {
var filesize = file.length;
chunkSize = chunkSize ? chunkSize : 3276800; //note: Microsoft Graph indicates all chunks MUST be in a multiple of 320Kib (327,680 bytes).
chunkStart = chunkStart ? chunkStart : 0;
chunkEnd = chunkEnd ? chunkEnd : chunkSize;
chunks = chunks ? chunks : Math.ceil(filesize / chunkSize);
chunksDone = chunksDone ? chunksDone : 0;
console.log('NOW CHUNKS DONE = ' + chunksDone);
fileChunk = file.slice(chunkStart, chunkEnd);
var TotalLoaded = chunksDone * chunkSize;
var req = new XMLHttpRequest();
req.upload.addEventListener('progress', function (progressobject) {
var ThisProgress = progressobject.loaded ? progressobject.loaded : 0;
var OverallPctComplete = parseFloat((TotalLoaded + ThisProgress) / filesize);
ProgressCallBack({ PercentComplete: OverallPctComplete });
}, false);
req.open("PUT", url, true);
req.setRequestHeader("Content-Range", "bytes " + chunkStart + "-" + (chunkEnd - 1) + "/" + filesize);
req.onload = function (e) {
var response = JSON.parse(req.response);
var Status = req.status;
var CallBackObject = {
StatusCode: Status,
ResponseObject: req,
};
if (Status == 202) { //response ready for another chunk.
var range = response.nextExpectedRanges[0].split('-'),
chunkStart = Number(range[0]),
nextChunk = chunkStart + chunkSize,
chunkEnd = nextChunk > filesize ? filesize : nextChunk;
chunksDone++;
TotalLoaded = chunksDone * chunkSize;
CallBackObject.Code = "Accepted",
CallBackObject.Message = "Upload Another Chunk";
chunkedUpload(url, file, chunkSize, chunkStart, chunkEnd, chunks, chunksDone++, ProgressCallBack, CompleteCallBack);
} else {//we are done
if (Status == 201 || Status == 200) {//successfully created or uploaded
CallBackObject.Code = 'Success';
CallBackObject.Message = 'File was Uploaded Successfully.';
} else { //we had an error.
var ErrorCode = response.error ? response.error.code : 'Unknown Error Code';
var ErrorMessage = response.error ? response.error.message : 'Unknown Error Message';
CallBackObject.Code = ErrorCode;
CallBackObject.Message = ErrorMessage;
};
CompleteCallBack(CallBackObject);
};
};
req.send(fileChunk);
}

consuming API JSon calls through TVJS-tvOS

I am trying to play with tvOS, and I have a small question regarding handling a JSON call. I have to get some data through an API; let's say for the sake of this test that I am calling this link:
http://query.yahooapis.com/v1/public/yql?q=select%20item%20from%20weather.forecast%20where%20location%3D%223015%22&format=json
I tried to use this function with some modifications:
function getDocument(url) {
var templateXHR = new XMLHttpRequest();
templateXHR.responseType = "json";
templateXHR.open("GET", url, true);
templateXHR.send();
return templateXHR;
}
but it didn't work out. Any hints or help?
If I need to use Node.js, how can I do that?
This is one that I got working. It's not ideal in many respects, but shows you something to get started with.
function jsonRequest(options) {
var url = options.url;
var method = options.method || 'GET';
var headers = options.headers || {} ;
var body = options.body || '';
var callback = options.callback || function(err, data) {
console.error("options.callback was missing for this request");
};
if (!url) {
throw 'loadURL requires a url argument';
}
var xhr = new XMLHttpRequest();
xhr.responseType = 'json';
xhr.onreadystatechange = function() {
try {
if (xhr.readyState === 4) {
if (xhr.status === 200) {
callback(null, JSON.parse(xhr.responseText));
} else {
callback(new Error("Error [" + xhr.status + "] making http request: " + url));
}
}
} catch (err) {
console.error('Aborting request ' + url + '. Error: ' + err);
xhr.abort();
callback(new Error("Error making request to: " + url + " error: " + err));
}
};
xhr.open(method, url, true);
Object.keys(headers).forEach(function(key) {
xhr.setRequestHeader(key, headers[key]);
});
xhr.send();
return xhr;
}
And you can call it with the following example:
jsonRequest({
url: 'https://api.github.com/users/staxmanade/repos',
callback: function(err, data) {
console.log(JSON.stringify(data[0], null, ' '));
}
});
Hope this helps.
I tested this one out on tvOS; it works like a charm with jQuery's syntax (basic tests pass):
var $ = {};
$.ajax = function(options) {
var url = options.url;
var type = options.type || 'GET';
var headers = options.headers || {} ;
var body = options.data || null;
var timeout = options.timeout || null;
var success = options.success || function(err, data) {
console.log("options.success was missing for this request");
};
var contentType = options.contentType || 'application/json';
var error = options.error || function(err, data) {
console.log("options.error was missing for this request");
};
if (!url) {
throw 'loadURL requires a url argument';
}
var xhr = new XMLHttpRequest();
xhr.responseType = 'json';
xhr.timeout = timeout;
xhr.onreadystatechange = function() {
try {
if (xhr.readyState === 4) {
if (xhr.status === 200) {
if (xhr.responseType === 'json') {
success(null, xhr.response);
} else {
success(null, JSON.parse(xhr.responseText));
}
} else {
success(new Error("Error [" + xhr.status + "] making http request: " + url));
}
}
} catch (err) {
console.error('Aborting request ' + url + '. Error: ' + err);
xhr.abort();
error(new Error("Error making request to: " + url + " error: " + err));
}
};
xhr.open(type, url, true);
xhr.setRequestHeader("Content-Type", contentType);
xhr.setRequestHeader("Accept", 'application/json, text/javascript, */*');
Object.keys(headers).forEach(function(key) {
xhr.setRequestHeader(key, headers[key]);
});
if(!body) {
xhr.send();
} else {
xhr.send(body);
}
return xhr;
}
Example queries working on Apple TV:
var testPut = function(){
$.ajax({
type: 'PUT',
url: url,
success: successFunc,
error: errFunc,
dataType: 'json',
contentType: 'application/json',
data: data2
});
}
var testGet = function(){
$.ajax({
dataType: 'json',
url: url,
success: successFunc,
error: errFunc,
timeout: 2000
});
}
var getLarge = function(){
$.ajax({
dataType: 'json',
url: url,
success: successFunc,
error: errFunc,
timeout: 2000
});
}
Did you call your function in 'App.onLaunch'?
App.onLaunch = function(options) {
var url = 'http://query.yahooapis.com/v1/public/yql?q=select%20item%20from%20weather.forecast%20where%20location%3D%223015%22&format=json';
var doc = getDocument(url);
console.log(doc);
}
Might be worth looking at https://mathiasbynens.be/notes/xhr-responsetype-json
I came across this question looking to accomplish the same thing, and was inspired by @JasonJerrett's answer, but found it a bit lacking because in my instance I am using an XML template built in JavaScript like this:
// Index.xml.js
var Template = function() {
return `very long xml string`;
};
The issue is that you can't perform the XHR request inside the template itself, because the template string will be returned before the XHR request actually completes (there's no way to return data from inside an asynchronous callback). My solution was to modify the resource loader and perform the XHR request there, prior to calling the template and passing the data into the template function:
ResourceLoader.prototype.loadResource = function(resource, dataEndpoint, callback) {
var self = this;
evaluateScripts([resource], function(success) {
if (success) {
// Here's the magic. Perform the API call and once it's complete,
// call template constructor and pass in API data
self.getJSON(dataEndpoint, function(data) {
var resource = Template.call(self, data);
callback.call(self, resource);
});
} else {
var title = "Failed to load resources",
description = `There was an error attempting to load the resource. \n\n Please try again later.`,
alert = createAlert(title, description);
Presenter.removeLoadingIndicator();
navigationDocument.presentModal(alert);
}
});
}
// From: https://mathiasbynens.be/notes/xhr-responsetype-json
ResourceLoader.prototype.getJSON = function(url, successHandler, errorHandler) {
var xhr = new XMLHttpRequest();
xhr.open('get', url, true);
xhr.onreadystatechange = function() {
var status;
var data;
if (xhr.readyState == 4) {
status = xhr.status;
if (status == 200) {
data = JSON.parse(xhr.responseText);
successHandler && successHandler(data);
} else {
errorHandler && errorHandler(status);
}
}
};
xhr.send();
};
Then the template function needs to be modified to accept the incoming API data as a parameter:
// Index.xml.js
var Template = function(data) {
return `really long xml string with injected ${data}`;
};
You need to implement the onreadystatechange event on the XHR object to handle the response:
templateXHR.onreadystatechange = function() {
var status;
var data;
if (templateXHR.readyState == 4) { //request finished and response is ready
status = templateXHR.status;
if (status == 200) {
data = JSON.parse(templateXHR.responseText);
// pass the data to a handler
} else {
// handle the error
}
}
};
If you want to call the request on app launch, just add in application.js:
App.onLaunch = function(options) {
var javascriptFiles = [
`${options.BASEURL}js/resourceLoader.js`,
`${options.BASEURL}js/presenter.js`
];
evaluateScripts(javascriptFiles, function(success) {
if(success) {
resourceLoader = new ResourceLoader(options.BASEURL);
var index = resourceLoader.loadResource(`${options.BASEURL}templates/weatherTemplate.xml.js`, function(resource) {
var doc = Presenter.makeDocument(resource);
doc.addEventListener("select", Presenter.load.bind(Presenter));
doc.addEventListener('load', Presenter.request);
navigationDocument.pushDocument(doc);
});
} else {
var errorDoc = createAlert("Evaluate Scripts Error", "Error attempting to evaluate external JavaScript files.");
navigationDocument.presentModal(errorDoc);
}
});
}
In presenter.js add a method:
request: function() {
var xmlhttp = new XMLHttpRequest() , method = 'GET' , url = 'your Api url';
xmlhttp.open( method , url , true );
xmlhttp.onreadystatechange = function () {
var status;
var data;
if (xmlhttp.readyState == 4) {
status = xmlhttp.status;
if (status == 200) {
data = JSON.parse(xmlhttp.responseText);
console.log(data);
} else {
var errorDoc = createAlert("Evaluate Scripts Error", "Error attempting to evaluate external JavaScript files.");
navigationDocument.presentModal(errorDoc);
}
}
};
xmlhttp.send();
},
