Controlling file upload location in Google Drive (Apps Script, not Forms) - javascript

I'm implementing Kanshi Tanaike's "Resumable Upload for Web Apps" code and it works, but I don't fully understand the AJAX part and am trying to add a feature. Right now the code places the new file in the user's Drive root folder. I would like either to define a specific folder and upload there directly, or to automatically move the file from root to the correct folder afterwards (I also need to collect the download link). I see the upload function references a location from the response header, but I'm struggling to figure out how to define it, and since the doUpload() function does not seem to treat the upload as a File object, I can't figure out how to reference it after the upload to get the URL or move it. Any feedback would be enormously appreciated.
$('#uploadfile').on("change", function() {
var file = this.files[0];
if (file.name != "") {
var fr = new FileReader();
fr.fileName = file.name;
fr.fileSize = file.size;
fr.fileType = file.type;
fr.onload = init;
fr.readAsArrayBuffer(file);
}
});
function init() {
$("#progress").text("Initializing.");
var fileName = this.fileName;
var fileSize = this.fileSize;
var fileType = this.fileType;
console.log({fileName: fileName, fileSize: fileSize, fileType: fileType});
var buf = this.result;
var chunkpot = getChunkpot(chunkSize, fileSize);
var uint8Array = new Uint8Array(buf);
var chunks = chunkpot.chunks.map(function(e) {
return {
data: uint8Array.slice(e.startByte, e.endByte + 1),
length: e.numByte,
range: "bytes " + e.startByte + "-" + e.endByte + "/" + chunkpot.total,
};
});
google.script.run.withSuccessHandler(function(at) {
var xhr = new XMLHttpRequest();
xhr.open("POST", "https://www.googleapis.com/upload/drive/v3/files?uploadType=resumable");
xhr.setRequestHeader('Authorization', "Bearer " + at);
xhr.setRequestHeader('Content-Type', "application/json");
xhr.send(JSON.stringify({
mimeType: fileType,
name: fileName,
}));
xhr.onload = function() {
doUpload({
location: xhr.getResponseHeader("location"),
chunks: chunks,
});
};
xhr.onerror = function() {
console.log(xhr.response);
};
}).getAt();
}
function doUpload(e) {
var chunks = e.chunks;
var location = e.location;
var cnt = 0;
var end = chunks.length;
var temp = function callback(cnt) {
var e = chunks[cnt];
var xhr = new XMLHttpRequest();
xhr.open("PUT", location, true);
xhr.setRequestHeader('Content-Range', e.range);
xhr.send(e.data);
xhr.onloadend = function() {
var status = xhr.status;
cnt += 1;
console.log("Uploading: " + status + " (" + cnt + " / " + end + ")");
$("#progress").text("Uploading: " + Math.floor(100 * cnt / end) + "%");
if (status == 308) {
callback(cnt);
} else if (status == 200) {
$("#progress").text("Done.");
} else {
$("#progress").text("Error: " + xhr.response);
}
};
}(cnt);
}
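For reference, the google.script.run.withSuccessHandler(...).getAt() call above expects a small Apps Script backend. A minimal sketch of that side, assuming (as in the referenced project) it only serves the page and hands back the script's OAuth token; the HTML file name "index" is an assumption:
function doGet() {
  // Serve the upload page shown above.
  return HtmlService.createHtmlOutputFromFile("index");
}
function getAt() {
  // Access token used by the browser-side XHR calls to the Drive API.
  // Requires the Drive scope (e.g. https://www.googleapis.com/auth/drive) to be authorized.
  return ScriptApp.getOAuthToken();
}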

I believe your goal and current situation are as follows.
You want to upload a file to a specific folder.
You want to retrieve the webContentLink of the uploaded file.
You want to achieve this with "Resumable Upload for Web Apps using Google Apps Script".
You have already confirmed that the default script from the repository works.
Modification points:
In this case, you need to check the resumable upload flow and the "Files: create" method of the Drive API.
In order to upload the file to a specific folder, add the folder ID to the parents property in the request body of the initial request.
In order to have webContentLink returned, add a fields value to the initial request.
When the above points are reflected in the original script, it becomes as follows.
Modified script:
In this case, only the HTML file is modified.
<!DOCTYPE html>
<html>
<head>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.js"></script>
<title>Resumable upload for Web Apps</title>
</head>
<body>
<form>
<input name="file" id="uploadfile" type="file">
</form>
<div id="progress"></div>
<script>
const chunkSize = 5242880;
$('#uploadfile').on("change", function() {
var file = this.files[0];
if (file.name != "") {
var fr = new FileReader();
fr.fileName = file.name;
fr.fileSize = file.size;
fr.fileType = file.type;
fr.onload = init;
fr.readAsArrayBuffer(file);
}
});
function init() {
var folderId = "###"; // Added: Please set the folder ID.
$("#progress").text("Initializing.");
var fileName = this.fileName;
var fileSize = this.fileSize;
var fileType = this.fileType;
console.log({fileName: fileName, fileSize: fileSize, fileType: fileType});
var buf = this.result;
var chunkpot = getChunkpot(chunkSize, fileSize);
var uint8Array = new Uint8Array(buf);
var chunks = chunkpot.chunks.map(function(e) {
return {
data: uint8Array.slice(e.startByte, e.endByte + 1),
length: e.numByte,
range: "bytes " + e.startByte + "-" + e.endByte + "/" + chunkpot.total,
};
});
google.script.run.withSuccessHandler(function(at) {
var xhr = new XMLHttpRequest();
xhr.open("POST", "https://www.googleapis.com/upload/drive/v3/files?uploadType=resumable&fields=*");
xhr.setRequestHeader('Authorization', "Bearer " + at);
xhr.setRequestHeader('Content-Type', "application/json");
xhr.send(JSON.stringify({
mimeType: fileType,
name: fileName,
parents: [folderId] // Added
}));
xhr.onload = function() {
doUpload({
location: xhr.getResponseHeader("location"),
chunks: chunks,
});
};
xhr.onerror = function() {
console.log(xhr.response);
};
}).getAt();
}
function doUpload(e) {
var chunks = e.chunks;
var location = e.location;
var cnt = 0;
var end = chunks.length;
var temp = function callback(cnt) {
var e = chunks[cnt];
var xhr = new XMLHttpRequest();
xhr.open("PUT", location, true);
xhr.setRequestHeader('Content-Range', e.range);
xhr.send(e.data);
xhr.onloadend = function() {
var status = xhr.status;
cnt += 1;
console.log("Uploading: " + status + " (" + cnt + " / " + end + ")");
$("#progress").text("Uploading: " + Math.floor(100 * cnt / end) + "%");
if (status == 308) {
callback(cnt);
} else if (status == 200) {
var metadata = JSON.parse(xhr.response); // Added
$("#progress").text("Done. Link: " + metadata.webContentLink); // Modified
} else {
$("#progress").text("Error: " + xhr.response);
}
};
}(cnt);
}
function getChunkpot(chunkSize, fileSize) {
var chunkPot = {};
chunkPot.total = fileSize;
chunkPot.chunks = [];
if (fileSize > chunkSize) {
var numE = chunkSize;
var endS = function(f, n) {
var c = f % n;
if (c == 0) {
return 0;
} else {
return c;
}
}(fileSize, numE);
var repeat = Math.floor(fileSize / numE);
for (var i = 0; i <= repeat; i++) {
var startAddress = i * numE;
var c = {};
c.startByte = startAddress;
if (i < repeat) {
c.endByte = startAddress + numE - 1;
c.numByte = numE;
chunkPot.chunks.push(c);
} else if (i == repeat && endS > 0) {
c.endByte = startAddress + endS - 1;
c.numByte = endS;
chunkPot.chunks.push(c);
}
}
} else {
var chunk = {
startByte: 0,
endByte: fileSize - 1,
numByte: fileSize,
};
chunkPot.chunks.push(chunk);
}
return chunkPot;
}
</script>
</body>
</html>
When the above modified script is run, the uploaded file is created in the specified folder and its webContentLink is displayed as the result.
References:
Perform a resumable upload
Files: create
Resumable Upload for Web Apps using Google Apps Script
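As for the other approach mentioned in the question (let the file land in root, then move it and collect the link afterwards), that can also be done server side with Apps Script once the final 200 response returns the file metadata (it contains the id because of fields=*). A minimal sketch; the function name moveAndGetLink is hypothetical and would be called via google.script.run with the returned id:
function moveAndGetLink(fileId, folderId) {
  var file = DriveApp.getFileById(fileId);
  // Move the uploaded file out of My Drive root into the target folder.
  file.moveTo(DriveApp.getFolderById(folderId));
  // Direct download URL of the file (serves the same purpose as webContentLink).
  return file.getDownloadUrl();
}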

Related

React Native Fetch video file by chunk / YouTube Data API chunk(multipart) upload

I made a YouTube API upload app. It works great with small video files, but with larger files my app crashes. The exception happens when I try to get the video file with fetch().
Question: Is there a way I can fetch a large file in React Native and feed it into the YouTube API in smaller chunks?
Here is my fetch code:
const fetchResponse = await fetch(videoUri);
const blob = await fetchResponse.blob();
var file = new File([blob], "video.mp4", {type: "video/mp4"});
My YouTube upload code is taken from the following Git repos, which supposedly support multipart upload as well:
https://github.com/youtube/api-samples/blob/master/javascript/cors_upload.js and
https://github.com/youtube/api-samples/blob/master/javascript/upload_video.js
Here is my full upload code:
uploadVideo = async function() {
var match = this.state.match.value;
var video = match.mergedVideo;
var players = match.players;
var scoreboard = this.state.match.value.scoreboard;
var points = match.points;
var title = players[0].name + " vs. " + players[1].name + " " + scoreboard;
var description = this.descriptionBuilder(points, match.videos);
/*const fetchResponse = await fetch(video);
const blob = await fetchResponse.blob();
var file = new File([blob], "video.mp4", {type: "video/mp4"});
console.log(file);*/
const file = await DocumentPicker.pick({
type: [DocumentPicker.types.video],
});
var metadata = {
snippet: {
title: title,
description: description,
tags: ['youtube-cors-upload'],
categoryId: 22
},
status: {
privacyStatus: 'unlisted'
}
};
var uploader = new MediaUploader({
baseUrl: 'https://www.googleapis.com/upload/youtube/v3/videos',
file: file,
token: this.state.user.auth.accessToken,
metadata: metadata,
chunkSize: 1024 * 1024,
params: {
part: Object.keys(metadata).join(',')
},
onError: function(data) {
console.log(data);
var message = data;
try {
var errorResponse = JSON.parse(data);
message = errorResponse.error.message;
} finally {
alert(message);
}
}.bind(this),
onProgress: function(data) {
var currentTime = Date.now();
var bytesUploaded = data.loaded;
var totalBytes = data.total;
var bytesPerSecond = bytesUploaded / ((currentTime - window.uploadStartTime) / 1000);
var estimatedSecondsRemaining = (totalBytes - bytesUploaded) / bytesPerSecond;
var percentageComplete = (bytesUploaded * 100) / totalBytes;
this.setState({ youtubeUploadProgress: percentageComplete / 100});
console.log("Uploaded: " + bytesUploaded + " | Total: " + totalBytes + " | Percentage: " + percentageComplete + " | Esitmated seconds remaining: " + estimatedSecondsRemaining);
}.bind(this),
onComplete: function(data) {
console.log("Complete");
alert("Upload successful!");
this.setState({ youtubeUploadProgress: 0});
}.bind(this)
});
window.uploadStartTime = Date.now();
uploader.upload();
}
And this is my cors_upload.js as a React Native class module:
import React, { Component } from 'react';
export default class MediaUploader extends Component {
constructor(props) {
super(props);
const obj = this;
const DRIVE_UPLOAD_URL = 'https://www.googleapis.com/upload/drive/v2/files/';
var options = props;
var noop = function() {};
this.file = options.file;
this.contentType = options.contentType || this.file.type || 'application/octet-stream';
this.metadata = options.metadata || {
'title': this.file.name,
'mimeType': this.contentType
};
this.token = options.token;
this.onComplete = options.onComplete || noop;
this.onProgress = options.onProgress || noop;
this.onError = options.onError || noop;
this.offset = options.offset || 0;
this.chunkSize = options.chunkSize || 0;
//this.retryHandler = new RetryHandler();
//this.retryHandler = new obj.RetryHandler();
this.interval = 1000; // Start at one second
this.maxInterval = 60 * 1000;
this.url = options.url;
if (!this.url) {
var params = options.params || {};
params.uploadType = 'resumable';
//this.url = this.buildUrl_(options.fileId, params, options.baseUrl);
this.url = obj.buildUrl_(options.fileId, params, options.baseUrl);
}
this.httpMethod = options.fileId ? 'PUT' : 'POST';
}
retry = function(fn) {
setTimeout(fn, this.interval);
this.interval = this.nextInterval_();
};
reset = function() {
this.interval = 1000;
};
nextInterval_ = function() {
var interval = this.interval * 2 + this.getRandomInt_(0, 1000);
return Math.min(interval, this.maxInterval);
};
getRandomInt_ = function(min, max) {
return Math.floor(Math.random() * (max - min + 1) + min);
};
buildQuery_ = function(params) {
params = params || {};
return Object.keys(params).map(function(key) {
return encodeURIComponent(key) + '=' + encodeURIComponent(params[key]);
}).join('&');
};
buildUrl_ = function(id, params, baseUrl) {
var url = baseUrl || DRIVE_UPLOAD_URL;
if (id) {
url += id;
}
var query = this.buildQuery_(params);
if (query) {
url += '?' + query;
}
return url;
};
upload = function() {
//var self = this;
console.log("UPLOAD called", this.file.size);
var xhr = new XMLHttpRequest();
xhr.open(this.httpMethod, this.url, true);
xhr.setRequestHeader('Authorization', 'Bearer ' + this.token);
xhr.setRequestHeader('Content-Type', 'application/json');
xhr.setRequestHeader('X-Upload-Content-Length', this.file.size);
xhr.setRequestHeader('X-Upload-Content-Type', this.contentType);
xhr.onload = function(e) {
console.log("ON LOAD CALLED");
if (e.target.status < 400) {
var location = e.target.getResponseHeader('Location');
this.url = location;
this.sendFile_();
} else {
this.onUploadError_(e);
}
}.bind(this);
xhr.onerror = this.onUploadError_.bind(this);
xhr.send(JSON.stringify(this.metadata));
};
sendFile_ = function() {
console.log("SEND FILE CALLED");
var content = this.file;
var end = this.file.size;
if (this.offset || this.chunkSize) {
// Only bother to slice the file if we're either resuming or uploading in chunks
if (this.chunkSize) {
end = Math.min(this.offset + this.chunkSize, this.file.size);
}
content = content.slice(this.offset, end);
}
var xhr = new XMLHttpRequest();
xhr.open('PUT', this.url, true);
xhr.setRequestHeader('Content-Type', this.contentType);
xhr.setRequestHeader('Content-Range', 'bytes ' + this.offset + '-' + (end - 1) + '/' + this.file.size);
xhr.setRequestHeader('X-Upload-Content-Type', this.file.type);
if (xhr.upload) {
xhr.upload.addEventListener('progress', this.onProgress);
}
xhr.onload = this.onContentUploadSuccess_.bind(this);
xhr.onerror = this.onContentUploadError_.bind(this);
xhr.send(content);
};
resume_ = function() {
var xhr = new XMLHttpRequest();
xhr.open('PUT', this.url, true);
xhr.setRequestHeader('Content-Range', 'bytes */' + this.file.size);
xhr.setRequestHeader('X-Upload-Content-Type', this.file.type);
if (xhr.upload) {
xhr.upload.addEventListener('progress', this.onProgress);
}
xhr.onload = this.onContentUploadSuccess_.bind(this);
xhr.onerror = this.onContentUploadError_.bind(this);
xhr.send();
};
extractRange_ = function(xhr) {
var range = xhr.getResponseHeader('Range');
if (range) {
this.offset = parseInt(range.match(/\d+/g).pop(), 10) + 1;
}
};
onContentUploadSuccess_ = function(e) {
if (e.target.status == 200 || e.target.status == 201) {
this.onComplete(e.target.response);
} else if (e.target.status == 308) {
this.extractRange_(e.target);
this.reset();
this.sendFile_();
}
};
onContentUploadError_ = function(e) {
if (e.target.status && e.target.status < 500) {
this.onError(e.target.response);
} else {
this.retry(this.resume_.bind(this));
}
};
onUploadError_ = function(e) {
this.onError(e.target.response); // TODO - Retries for initial upload
};
}
UPDATE 1:
To avoid using fetch() I decided to use React Native Document Picker. Now I can select the video file and pass it to the MediaUploader, following this guide: https://alishavineeth.medium.com/upload-a-video-from-a-mobile-device-to-youtube-using-react-native-eb2fa54a7445
Now if I set the chunkSize option I receive a .slice exception, because the object structure doesn't match. If I pass the file without the chunkSize option, the metadata uploads to YouTube but the video status gets stuck on processing without any other errors; the video upload itself never begins.
DocumentPicker responds with the following object after I select my file:
[{"fileCopyUri": "content://com.android.providers.media.documents/document/video%3A7853", "name": "video_1629795128339.mp4", "size": 192660773, "type": "video/mp4", "uri": "content://com.android.providers.media.documents/document/video%3A7853"}]
UPDATE 2:
Managed to fix my DocumentPicker file issue (from Update 1) by switching from React Native Document Picker to Expo Document Picker.
Now I am able to select large files and call the upload function - the metadata uploads and the video file begins to upload as well, but the app crashes during the upload. If I set the chunkSize option on the MediaUploader object I get [TypeError: content.slice is not a function. (In 'content.slice(this.offset, end)', 'content.slice' is undefined)]
Expo Document Picker responds with the following object after I select my video file:
{"name": "video_1629801588164.mp4", "size": 5799179, "type": "video/mp4", "uri": "file:///data/user/0/com.tennis.rec/cache/DocumentPicker/8b350fbf-1b66-4a78-a10f-b61eb2ed3032.mp4"}
UPDATE 3 - RESOLVED!!!
The chunk upload is working now! I modified the part of my cors_upload.js file where the chunkSize is evaluated and the content is sliced, with the following code:
if (this.offset || this.chunkSize) {
// Only bother to slice the file if we're either resuming or uploading in chunks
if (this.chunkSize) {
end = Math.min(this.offset + this.chunkSize, this.file.size);
}
console.log("CONTENT SLICE", this.offset, end, this.file.size);
//content = content.slice(this.offset, end);
var base64 = await RNFS.read(this.file.uri, this.chunkSize, this.offset, 'base64');
content = Uint8Array.from(atob(base64), c => c.charCodeAt(0));
}
I added React Native FS (react-native-fs) and use its read() function to load each chunk as base64 and convert it back to a byte array.
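Since the snippet above uses await, the surrounding sendFile_ method has to become async for this to run. A minimal sketch of the adjusted method, assuming react-native-fs is imported as RNFS at the top of cors_upload.js, that atob is available (React Native may need a base-64 polyfill for it), and with the read length computed as end - this.offset so the final chunk is not over-read (a small deviation from the snippet above); progress handling is omitted for brevity:
import RNFS from 'react-native-fs'; // at the top of cors_upload.js
sendFile_ = async function() {
  var end = this.file.size;
  var content = this.file;
  if (this.offset || this.chunkSize) {
    if (this.chunkSize) {
      end = Math.min(this.offset + this.chunkSize, this.file.size);
    }
    // read(path, length, position, encoding) resolves with the chunk as a base64 string.
    var base64 = await RNFS.read(this.file.uri, end - this.offset, this.offset, 'base64');
    // Convert back to raw bytes so the Content-Range math matches what is actually sent.
    content = Uint8Array.from(atob(base64), c => c.charCodeAt(0));
  }
  var xhr = new XMLHttpRequest();
  xhr.open('PUT', this.url, true);
  xhr.setRequestHeader('Content-Type', this.contentType);
  xhr.setRequestHeader('Content-Range', 'bytes ' + this.offset + '-' + (end - 1) + '/' + this.file.size);
  xhr.onload = this.onContentUploadSuccess_.bind(this);
  xhr.onerror = this.onContentUploadError_.bind(this);
  xhr.send(content);
};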

"_formdataPolyfill2.default is not a constructor" is thrown when I try to use the formdata-polyfill npm package

I am using web workers for uploading a file, and I send the file as a formData object. Since I learned that a web worker doesn't have access to the DOM, I used formdata-polyfill in place of the default FormData, but it throws this error and I don't know how to use this polyfill properly.
Here is my code:
//trying to send the formdata-polyfill object to worker
require('formdata-polyfill');
let data = new FormData();
data.append('server-method', 'upload');
data.append('file', event.target.files[0]);
// let data = new FormData(event.target.files[0]);
if (this.state.headerActiveTabUid === '1')
this.props.dispatch(handleFileUpload({upload: 'assets', data}));
//worker.js
var file = [], p = true, url,token;
function upload(blobOrFile) {
var xhr = new XMLHttpRequest();
xhr.open('POST', url, true);//add url to upload
xhr.setRequestHeader('Authorization', token);
xhr.onload = function(e) {
};
xhr.send(blobOrFile);
}
function process() {
for (var j = 0; j <file.length; j++) {
var blob = file[j];
const BYTES_PER_CHUNK = 1024 * 1024;
// 1MB chunk sizes.
const SIZE = blob.size;
var start = 0;
var end = BYTES_PER_CHUNK;
while (start < SIZE) {
if ('mozSlice' in blob) {
var chunk = blob.mozSlice(start, end);
} else {
var chunk = blob.slice(start, end);
}
upload(chunk);
start = end;
end = start + BYTES_PER_CHUNK;
}
p = ( j === file.length - 1);
self.postMessage(blob.name + " Uploaded Successfully");
}
}
self.addEventListener('message', function(e) {
url = e.data.url;
token = e.data.id;
file.push(e.data.files);
if (p) {
process();
}
});
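As a point of comparison, FormData is generally available inside dedicated workers in modern browsers, so the worker can usually build the multipart body itself; the polyfill, if still needed, is meant to be loaded for its side effect rather than default-imported (a "not a constructor" error is what a default import of a package without a default export typically produces). A minimal sketch of the worker's upload() wrapping each chunk, with hypothetical field names mirroring the page-side code:
function upload(blobOrFile) {
  // Build a multipart body inside the worker; FormData exists in worker scope in modern browsers.
  var data = new FormData();
  data.append('server-method', 'upload');   // hypothetical field, mirroring the page side
  data.append('file', blobOrFile);
  var xhr = new XMLHttpRequest();
  xhr.open('POST', url, true);
  xhr.setRequestHeader('Authorization', token);
  // No explicit Content-Type: XHR adds multipart/form-data with the correct boundary itself.
  xhr.send(data);
}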

Blob data is not being sent to PHP through XHR

I am trying to chunk a file and send it to the server as follows:
var fileselect = document.getElementById("file");
fileselect.addEventListener("change", FileSelectHandler, false);
function FileDragHover(e) {
e.stopPropagation();
e.preventDefault();
}
function FileSelectHandler(e) {
FileDragHover(e);
var blob = e.target.files[0] || e.dataTransfer.files[0];
worker = new Worker("fileupload.js");
worker.postMessage(blob);
worker.onmessage = function(e) {
console.log(e);
};
}
fileupload.js
self.onmessage = function(e) {
const BYTES_PER_CHUNK = 1024 * 1024 * 32;
var blob = new Blob([e.data]),
start = 0,
index = 0,
slices = Math.ceil(blob.size / BYTES_PER_CHUNK),
slices2 = slices;
while (start < blob.size) {
end = start + BYTES_PER_CHUNK;
if (end > blob.size) end = blob.size;
uploadChunk(blob, index, start, end, slices, slices2);
start = end;
index++;
}
};
function uploadChunk(blob, index, start, end, slices, slices2) {
var xhr = new XMLHttpRequest();
xhr.onload = function() {
slices--;
if (slices == 0) {
var xhrMerge = new XMLHttpRequest();
xhrMerge.open("POST", "uploadlargefile/?a=merge&name=" + blob.name + "&slices=" + slices2);
xhrMerge.onload = function() {
self.close();
};
xhrMerge.send();
}
};
xhr.upload.onprogress = function(e) {
if (e.lengthComputable) self.postMessage(Math.round(100 / e.total * e.loaded)); //this doesn't work o.O
};
var chunk = blob.slice(start, end);
xhr.open("POST", "uploadlargefile/?a=chunk&name=" + blob.name + "&index=" + index);
xhr.setRequestHeader("Content-Type", "multipart\/form-data; boundary=--------------------");
xhr.send(chunk);
}
PHP
print_r($_GET['name']); print_r($_FILES);die;
I am able to get the name of the file but not the file itself. Any suggestion as to what could be wrong?
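One likely culprit: the chunk is sent as a raw Blob while the Content-Type header claims multipart/form-data with a made-up boundary, so PHP has no multipart body to parse and $_FILES stays empty. A minimal sketch of one common fix, wrapping each chunk in a FormData object and letting the browser set the boundary; the field name "chunk" is hypothetical:
var chunk = blob.slice(start, end);
var fd = new FormData();
// A Blob rebuilt in the worker has no .name, so give the part an explicit filename instead.
fd.append('chunk', chunk, 'part-' + index);
var xhr = new XMLHttpRequest();
xhr.open("POST", "uploadlargefile/?a=chunk&index=" + index);
// Do not set Content-Type manually; XHR adds multipart/form-data with a real boundary.
xhr.send(fd);
On the PHP side the data then appears in $_FILES['chunk']; alternatively, keep sending the raw Blob, drop the manual Content-Type header, and read the body from php://input.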

JavaScript - progress bar with multiple images

I want to preload a bunch of images and show a progress bar at the same time.
At the moment the code works with only one image; here it is:
$progress = document.querySelector('#progress');
var url = 'https://placekitten.com/g/2000/2000';
var request = new XMLHttpRequest();
request.onprogress = onProgress;
request.onload = onComplete;
request.onerror = onError;
function onProgress(event) {
if (!event.lengthComputable) {
return;
}
var loaded = event.loaded;
var total = event.total;
var progress = (loaded / total).toFixed(2);
$progress.textContent = 'Loading... ' + parseInt(progress * 100) + ' %';
console.log(progress);
}
function onComplete(event) {
var $img = document.createElement('img');
$img.setAttribute('src', url);
$progress.appendChild($img);
console.log('complete', url);
}
function onError(event) {
console.log('error');
}
$progress.addEventListener('click', function() {
request.open('GET', url, true);
request.overrideMimeType('text/plain; charset=x-user-defined');
request.send(null);
});
<div id="progress">Click me to load</div>
And here is the jsfiddle to test the code out: https://jsfiddle.net/q5d0osLr/
How can I make it work with more than one image?
Use an array to store the progress of each request, and create a function that loads an image and records the necessary information for you.
var $progress = document.querySelector('#progress');
var url = 'https://placekitten.com/g/';
var urls =
['https://i.imgur.com/7raUYR2.jpg', 'https://i.imgur.com/i8GSA1b.jpg', 'https://i.imgur.com/jGkaxEZ.jpg',
'http://i.imgur.com/cuS5DDv.jpg', 'https://i.imgur.com/bl5DOR0.jpg', 'http://i.imgur.com/TuFu2lK.jpg',
'https://i.imgur.com/qbwarWi.jpg', 'https://i.imgur.com/hJ9Ylph.gif', 'https://i.imgur.com/8KLbDxe.jpg',
'https://upload.wikimedia.org/wikipedia/commons/thumb/4/4e/Pleiades_large.jpg/1024px-Pleiades_large.jpg',
];
// Store all progress.
var allProgress = [];
var loadImage = function(url) {
// Get current index.
var idx = allProgress.length;
// Push a progress in array.
allProgress.push(0.0);
var onProgress = function(evt) {
if (!evt.lengthComputable) {
return;
}
var loaded = evt.loaded;
var total = evt.total;
var progress = (loaded / total);
allProgress[idx] = progress;
// Calculate the progress by sum then divide.
var sum = allProgress.reduce(function(sum, val) {
return sum + val;
});
sum /= allProgress.length;
sum = sum.toFixed(2);
$progress.textContent = 'Loading... ' + parseInt(sum * 100) + ' %';
console.log(progress, idx);
};
var onComplete = function(evt) {
var $img = document.createElement('img');
$img.setAttribute('src', url);
document.body.appendChild($img);
console.log('complete', url);
};
var onError = function(evt) {
// You may have to do something with progress.
console.log('error');
};
// Move the request in function. So each of them is independent.
var request = new XMLHttpRequest();
request.onprogress = onProgress;
request.onload = onComplete;
request.onerror = onError;
request.open('GET', url, true);
request.overrideMimeType('text/plain; charset=x-user-defined');
request.send(null);
};
$progress.addEventListener('click', function() {
urls.forEach(function(url) {
loadImage(url);
});
});
img {
width: 100px;
height: 100px;
}
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.0/jquery.min.js"></script>
<div id="progress">Click me to load</div>

JavaScript upload progress bar with xhr.status error 500, but one file loads

I've got a problem with a script that is used to upload one or more multipart/form-data files.
On my client's server, when I try to upload more than one file, I receive:
xhr.status error 500;
The first file uploads okay, but I can't upload more than one file. On my own server everything works fine - there is no error 500.
I have searched the web for help, but I could only find information about server problems, such as: cannot accept POST, GET, OPTIONS...
But it works with one file - and I'm stuck.
I tried to upload the files one by one, but then my progress bar keeps flashing (a sketch of one way to smooth that out follows the code below).
Here is my code:
$(document).ready( function() {
$(".errorNotice").hide();
});
var form = document.getElementById('upload-form');
var ident = document.getElementById('ident');
var fileSelect = document.getElementById('file-select');
var uploadButton = document.getElementById('submit');
var max = document.getElementById('max');
var formUrl = form.action;
function sleep(milliseconds) {
setTimeout(function(){ return true; }, milliseconds);
}
form.onsubmit = function(event) {
event.preventDefault();
uploadButton.innerHTML = 'Trwa ładowanie...';
$("#upload-form").fadeOut();
setTimeout(function() {
$("#progressBar").fadeIn();
}, 500);
var files = fileSelect.files;
var formData = new FormData();
if( files.length > max.value) {
if( max.value == 1) {
var text = max.value + ' zdjęcie';
}
else if( max.value >1 && max.value <= 4) {
var text = max.value + ' zdjęcia';
}
else {
var text = max.value + ' zdjęć';
}
$("#progressBar").find("p").html('Możesz dodać maksymalnie: ' + text);
setTimeout(function() {
$("#progressBar").hide();
$("#upload-form").fadeIn();
}, 5000);
return false;
}
if( files.length == 0 )
{
$("#progressBar").hide();
$("#upload-form").show();
}
formData.append('ident', ident.value);
formData.append('modules', 'true');
for (var i = 0; i < files.length; i++) {
var file = files[i];
if (!file.type.match('image.*')) {
continue;
}
formData.append('objectPhoto[]', file, file.name);
formData.append(name, file, file.name);
}
var xhr = new XMLHttpRequest();
xhr.open('POST', formUrl , true);
xhr.upload.addEventListener("progress", function(e) {
if(e.lengthComputable) {
var percentage = Math.round((e.loaded * 100) / e.total);
document.getElementById("bar").style.width = percentage + '%';
document.getElementById("percent").innerHTML = percentage + '%';
}
}, false);
xhr.onload = function () {
console.log(this.responseText);
if(this.responseText == "ok") {
document.getElementById("percent").innerHTML = "Zakończono";
document.getElementById("progressBar").style.display = "none";
document.getElementById("upload-form").style.display = "block";
} else {
$(".errorNotice").show();
$(".errorNotice .error-text").html(this.responseText);
}
if (xhr.status === 200) {
uploadButton.innerHTML = 'Wgraj';
} else {
$(".errorNotice").show();
$(".errorNotice .error-text").html("Wystąpił nieoczekiwany błąd o kodzie: " + xhr.status);
uploadButton.innerHTML = 'Wyślij';
}
};
xhr.send(formData);
return false;
};
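For the one-file-at-a-time fallback mentioned above, the bar stops flashing if progress is accumulated across all files instead of being reset per request. A minimal sketch under that assumption, reusing the same #bar/#percent elements and formUrl from the script above; the helper name uploadSequentially is hypothetical:
function uploadSequentially(files, formUrl) {
  var totalBytes = Array.prototype.reduce.call(files, function(sum, f) { return sum + f.size; }, 0);
  var doneBytes = 0;
  function sendOne(i) {
    if (i >= files.length) {
      document.getElementById("percent").innerHTML = "Zakończono"; // "Finished", as in the original UI
      return;
    }
    var fd = new FormData();
    fd.append('ident', ident.value); // plus any other fields the server expects (e.g. 'modules')
    fd.append('objectPhoto[]', files[i], files[i].name);
    var xhr = new XMLHttpRequest();
    xhr.open('POST', formUrl, true);
    xhr.upload.addEventListener('progress', function(e) {
      if (!e.lengthComputable) return;
      // Report progress over the whole batch so the bar never jumps back to 0%.
      var pct = Math.round(((doneBytes + e.loaded) * 100) / totalBytes);
      document.getElementById("bar").style.width = pct + '%';
      document.getElementById("percent").innerHTML = pct + '%';
    });
    xhr.onload = function() {
      doneBytes += files[i].size;
      sendOne(i + 1); // start the next file only after the previous one finished
    };
    xhr.send(fd);
  }
  sendOne(0);
}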
