Sending videos from the client to the server - javascript

I recently read about the File API which allows the client to use files from the file system in the website.
I did all the stuff of adding the file input tag and reading the file and everything.
I read the file as an ArrayBuffer using a FileReader, put a Uint8Array view on it, and then sent the view to the server.
I tried it with txt, pdf and image files and it worked fine. But when I used it with video files, the computer lagged so badly that I didn't even wait and just closed the browser!
Here's the code
Why did the lag happen with video files and is there a way to avoid it?!

I have gone through the code; you are appending the read data into an array, which may be why it lags.
You need to upload the video in chunks. To make a chunk of the file you chose, you can take a look at this HTML5 FileReader API example:
function readBlob(opt_startByte, opt_stopByte) {
    var files = document.getElementById('files').files;
    if (!files.length) {
        alert('Please select a file!');
        return;
    }

    var file = files[0];
    var start = parseInt(opt_startByte) || 0;
    var stop = parseInt(opt_stopByte) || file.size - 1;

    var reader = new FileReader();

    // If we use onloadend, we need to check the readyState.
    reader.onloadend = function(evt) {
        if (evt.target.readyState == FileReader.DONE) { // DONE == 2
            document.getElementById('byte_content').textContent = evt.target.result;
            document.getElementById('byte_range').textContent =
                ['Read bytes: ', start + 1, ' - ', stop + 1,
                 ' of ', file.size, ' byte file'].join('');
        }
    };

    var blob = file.slice(start, stop + 1);
    reader.readAsBinaryString(blob);
}

document.querySelector('.readBytesButtons')
    .addEventListener('click', function(evt) {
        if (evt.target.tagName.toLowerCase() == 'button') {
            var startByte = evt.target.getAttribute('data-startbyte');
            var endByte = evt.target.getAttribute('data-endbyte');
            readBlob(startByte, endByte);
        }
    }, false);
You will need to modify this to suit your requirements: each time you get a chunk, make a request to upload it (or play it). A rough sketch of the upload part is shown below. You can also test this with your own video file in the linked demo.
Hope this solves your problem. To get this working you also need to handle the chunks properly on the backend.
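Building on that idea, here is a minimal sketch of uploading the selected video slice by slice instead of holding the whole file in memory. The /upload endpoint, its query parameters, and the 1 MB chunk size are assumptions for illustration, not part of the original question:
// Hedged sketch: POST the file in fixed-size slices to a hypothetical endpoint.
function uploadInChunks(file, chunkSize) {
    chunkSize = chunkSize || 1024 * 1024;          // 1 MB per request
    var offset = 0;
    function sendNext() {
        if (offset >= file.size) {
            console.log('Upload complete');
            return;
        }
        var chunk = file.slice(offset, offset + chunkSize);
        var xhr = new XMLHttpRequest();
        xhr.open('POST', '/upload?offset=' + offset + '&name=' + encodeURIComponent(file.name), true);
        xhr.onload = function () {
            offset += chunkSize;
            sendNext();                            // upload the next slice
        };
        xhr.send(chunk);                           // a Blob body is sent as raw bytes
    }
    sendNext();
}
Because only one slice is ever read at a time, the browser never has to buffer the whole video, which is what made the original approach lag.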

Related

Not able to create seekable video blobs from mediarecorder using EBML.js - MediaRecorder API - Chrome

Using MediaRecorder, I am able to upload and append the video blobs on Azure, but the combined video is not seekable on download with the following code:
var chunks = [];
var mediaRecorder = new MediaRecorder(stream, 'video/x-matroska;codecs=vp8,opus');
mediaRecorder.ondataavailable = function(event) {
    if (event.data && event.data.size > 0) {
        chunks.push(event.data);
        appendBlockToAzure(chunks);
    }
};
mediaRecorder.start(10000);
I tried using EBML.js; if I use the following code, I get a seekable video file. This approach needs the file to be processed at the end, so the final file could be 1GB in size, which would take a very long time to upload.
var chunks = [];
var mediaRecorder = new MediaRecorder(stream, 'video/x-matroska;codecs=vp8,opus');
mediaRecorder.ondataavailable = function(event) {
    if (event.data && event.data.size > 0) {
        chunks.push(event.data);
        if (mediaRecorder.state == "inactive") { // if media recorder is stopped
            var combined = new Blob(chunks, { type: event.data.type });
            getSeekableBlob(combined, function (seekableBlob) {
                saveCombinedVideoToAzure(seekableBlob);
            });
        }
    }
};
mediaRecorder.start(10000);
That's the reason I want to upload simultaneously to Azure. If I use the following code, it logs unknown-tag warnings and then a length error, and the video file is not playable.
var seekablechunks = [];
var mediaRecorder = new MediaRecorder(stream, 'video/x-matroska;codecs=vp8,opus');
mediaRecorder.ondataavailable = function(event) {
    if (event.data && event.data.size > 0) {
        getSeekableBlob(event.data, function (seekableBlob) {
            seekablechunks.push(seekableBlob);
            saveCombinedVideoToAzure(seekablechunks);
        });
    }
};
mediaRecorder.start(10000);
Function 'getSeekableBlob':
function getSeekableBlob(inputBlob, callback) {
    // EBML.js copyrights goes to: https://github.com/legokichi/ts-ebml
    if (typeof EBML === 'undefined') {
        throw new Error('Please link: https://www.webrtc-experiment.com/EBML.js');
    }
    var reader = new EBML.Reader();
    var decoder = new EBML.Decoder();
    var tools = EBML.tools;

    var fileReader = new FileReader();
    fileReader.onload = function (e) {
        var ebmlElms = decoder.decode(this.result);
        ebmlElms.forEach(function (element) {
            reader.read(element);
        });
        reader.stop();
        var refinedMetadataBuf = tools.makeMetadataSeekable(reader.metadatas, reader.duration, reader.cues);
        var body = this.result.slice(reader.metadataSize);
        var newBlob = new Blob([refinedMetadataBuf, body], {
            type: 'video/webm'
        });
        callback(newBlob);
    };
    fileReader.readAsArrayBuffer(inputBlob);
}
Is there a way to get seekable blobs and upload them to Azure?
It's a challenge for an open-ended streaming source for media (for example MediaRecorder) to create a file with SeekHead elements in it. The Seek elements in a SeekHead element contain byte offsets to elements in the file.
MediaRecorder doesn't create segments or SeekHead elements as you have discovered. To do so it would need to be able to see the future to know how big future compressed video and audio elements will be in the file.
A good way for you to handle this problem might be to post-process your uploaded files on a server. You can use ts-ebml to do this in a streaming fashion on a server when a file is completely uploaded.
It's possible, I suppose, to create JavaScript software in your browser that can transform the stream of data emitted by MediaRecorder so it's seekable, on the fly. To make your stream seekable you'd need to insert SeekHead elements every so often. You'd buffer up multiple seconds of the stream, then locate the Cluster elements in each buffer, then write a SeekHead element pointing to some of them. (Chrome's MediaRecorder outputs Clusters beginning with video key frames.) If you succeed in doing this you'll know a lot about Matroska / webm.
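For the server-side post-processing route, a rough Node.js sketch might look like the following. It assumes ts-ebml's Node build exposes the same Decoder, Reader and tools used in getSeekableBlob above, that the whole file fits in memory, and the file paths are placeholders:
// Hedged sketch: make an uploaded webm seekable once the upload is complete.
const fs = require('fs');
const { Decoder, Reader, tools } = require('ts-ebml');

function makeSeekable(inputPath, outputPath) {
    const buf = fs.readFileSync(inputPath);
    // ts-ebml works on ArrayBuffers, so expose the Buffer's underlying bytes.
    const arrayBuffer = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);

    const decoder = new Decoder();
    const reader = new Reader();
    decoder.decode(arrayBuffer).forEach((element) => reader.read(element));
    reader.stop();

    // Rewrite the metadata so it contains SeekHead/Cues, then reattach the body.
    const refinedMetadata = tools.makeMetadataSeekable(
        reader.metadatas, reader.duration, reader.cues);
    const body = arrayBuffer.slice(reader.metadataSize);

    fs.writeFileSync(outputPath, Buffer.concat([
        Buffer.from(refinedMetadata),
        Buffer.from(body)
    ]));
}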
Suddenly, our face-on-camera webcam recorder component stopped saving the webm blob.
In the console there were warnings about {EBML_ID: "55b0", type: "unknown", ...} during reader.read(element), and then "Uncaught (in promise) Error: No schema entry found for unknown" from EBMLEncoder.js at the tools.makeMetadataSeekable(...) call.
Ignoring unknown elements from the decoder worked around the issue:
...
var ebmlElms = decoder.decode(this.result);
ebmlElms.forEach(function (element) {
    if (element.type !== 'unknown') {
        reader.read(element);
    }
});
reader.stop();
...
There is a related issue on the ts-ebml npm package, https://github.com/legokichi/ts-ebml/issues/33, with a similar workaround.

Migrate FileReader ReadAsBinaryString() to ReadAsArrayBuffer() or ReadAsText()

I realized that the new Mozilla Firefox returns an allocation size overflow (on FileReader.readAsBinaryString()) when the file is bigger than about 200MB.
Here is some of my test code for the client web browser:
function upload(fileInputId, fileIndex)
{
    var file = document.getElementById(fileInputId).files[fileIndex];
    var blob;
    var reader = new FileReader();
    reader.readAsBinaryString(file);
    reader.onloadend = function(evt)
    {
        xhr = new XMLHttpRequest();
        xhr.open("POST", "upload.php", true);

        XMLHttpRequest.prototype.mySendAsBinary = function(text){
            var data = new ArrayBuffer(text.length);
            var ui8a = new Uint8Array(data, 0);
            for (var i = 0; i < text.length; i++){
                ui8a[i] = (text.charCodeAt(i) & 0xff);
            }
            if(typeof window.Blob == "function")
            {
                blob = new Blob([data]);
            }else{
                var bb = new (window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder)();
                bb.append(data);
                blob = bb.getBlob();
            }
            this.send(blob);
        }

        var eventSource = xhr.upload || xhr;
        eventSource.addEventListener("progress", function(e) {
            var position = e.position || e.loaded;
            var total = e.totalSize || e.total;
            var percentage = Math.round((position/total)*100);
        });

        xhr.onreadystatechange = function()
        {
            if(xhr.readyState == 4)
            {
                if(xhr.status == 200)
                {
                    console.log("Done");
                }else{
                    console.log("Fail");
                }
            }
        };
        xhr.mySendAsBinary(evt.target.result);
    };
}
So I tried changing it to FileReader.readAsArrayBuffer(); the error no longer shows up, but the data are not the same (since they are not read as a binary string).
Does anyone have a solution to this problem? Is there any way to upload a bigger file from JS to the web server in raw/string form other than through the FileReader implementation?
I read in the Mozilla JS documentation:
This feature is non-standard and is not on a standards track. Do not
use it on production sites facing the Web: it will not work for every
user. There may also be large incompatibilities between
implementations and the behavior may change in the future. - Mozilla
If not readAsBinaryString, then how do I implement readAsArrayBuffer or readAsText?
To send Files to a web server, you simply don't need JS. HTML alone is well able to do this with the <form> element.
Now, if you want to go through JS, e.g. to catch the different ProgressEvents, then you can send your File directly; there is no need to read it on your side.
To do this, you've got two (or three) solutions.
If your server is able to handle PUT requests, you can simply xhr.send(file); (a short PUT sketch follows the snippet below).
Otherwise, you'd have to go through a FormData.
// if you really want to go the XHR way
document.forms[0].onsubmit = function handleSubmit(evt) {
    if (!window.FormData) { // old browser, use the <form>
        return;
    }
    // now we handle the submit through js
    evt.preventDefault();
    var fD = new FormData(this);
    var xhr = new XMLHttpRequest();
    xhr.onprogress = function handleProgress(evt) {};
    xhr.onload = function handleLoad(evt) {};
    xhr.onerror = function handleError(evt) {};
    xhr.open(this.method, this.action);
    // xhr.send(fD); // won't work in StackSnippet
    log(fD, this.method, this.action); // so we just log its content
};

function log(formData, method, action) {
    console.log('would have sent');
    for (let [key, val] of formData.entries())
        console.log(key, val);
    console.log('through', method);
    console.log('to', action);
}
<!-- this in itself is enough -->
<form method="POST" action="your_server.page">
    <input type="file" name="file_upload">
    <input type="submit">
</form>
Now, you sent a comment saying that you can't upload Files bigger than 1GB to your server.
This limitation is only due to your server's config, so the best if you want to accept such big files is to configure it correctly.
But if you really want to send your File in chunks, even then, don't move away from the Blob interface.
Indeed Blobs have a slice() method, so use it.
document.forms[0].onsubmit = function handleSubmit(evt) {
    evt.preventDefault();
    var file = this.elements[0].files[0];
    var processed = 0;
    if (file) {
        // var MAX_CHUNK_SIZE = Math.min(file.size, server_max_size);
        // for demo we just split in 10 chunks
        var MAX_CHUNK_SIZE = file.size > 10 ? (file.size / 10) | 0 : 1;
        loadChunk(0);
    }
    function loadChunk(start) {
        var fD = new FormData();
        var sliced = file.slice(start, start + MAX_CHUNK_SIZE);
        processed += sliced.size; // only for demo
        fD.append('file_upload', sliced, file.name);
        fD.append('starting_index', start);
        if (start + MAX_CHUNK_SIZE >= file.size) {
            fD.append('last_chunk', true);
        }
        var xhr = new XMLHttpRequest();
        xhr.open('POST', 'your_server.page');
        xhr.onload = function onchunkposted(evt) {
            if (start + MAX_CHUNK_SIZE >= file.size) {
                console.log('All done. Original file size: %s, total of chunks sizes %s', file.size, processed);
                return;
            }
            loadChunk(start + MAX_CHUNK_SIZE);
        };
        // xhr.send(fD);
        log(fD);
        setTimeout(xhr.onload, 200); // fake XHR onload
    }
};

function log(formData, method, action) {
    console.log('would have sent');
    for (let [key, val] of formData.entries())
        console.log(key, val);
}
<form method="POST" action="your_server.page">
    <input type="file" name="file_upload">
    <input type="submit">
</form>
But you absolutely don't need to go through a FileReader for this operation.
Actually, the only case where it could make sense to use a FileReader here would be for some Android browsers that don't support passing a Blob into a FormData, even though they don't give a single clue about it.
So in that case, you'd have to set up your server to let you know the request was empty, and only then read the File as a dataURI that you would send in place of the original File.
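A minimal sketch of that dataURI fallback might look like this; the form-field names are placeholders, not an established convention:
// Hedged sketch of the fallback: read the File as a data URI and send the
// resulting string instead of the Blob. Field names are assumptions.
function uploadAsDataURI(file, form) {
    var reader = new FileReader();
    reader.onload = function () {
        var fD = new FormData();
        fD.append('file_as_data_uri', reader.result); // a string, so every browser accepts it
        fD.append('file_name', file.name);
        var xhr = new XMLHttpRequest();
        xhr.open(form.method, form.action);
        xhr.send(fD);
    };
    reader.readAsDataURL(file);
}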
After a long week of research and sleepless nights: you can't upload binary strings without breaking them, and base64 doesn't work for all files, only images; the journey from the client side to the server breaks the bytes being sent.
Kaiido's statement is correct:
To send Files to a web-server, you simply don't need js
But that doesn't answer my question. A plain XMLHttpRequest() can upload the file and track its progress as well, but that's still not it. A direct upload, whether from the <form> or via XMLHttpRequest(), requires increasing the upload limit in the PHP settings. That is not convenient for me. What if a client uploads a 4GB file? Then I need to raise the limit to 4GB. The next time a client uploads a 6GB file, I have to raise it to 6GB.
Using the slice() method makes sense for bigger files, as we can send them to the server part by part. But I am not using it yet this time.
Here is some of my test code that works the way I want. I hope some expert can correct me if I am wrong.
My Upload.js
function upload(fileInputId, fileIndex)
{
    var file = document.getElementById(fileInputId).files[fileIndex];
    var blob;
    var reader = new FileReader();
    reader.readAsArrayBuffer(file);
    reader.onloadend = function(evt)
    {
        xhr = new XMLHttpRequest();
        xhr.open("POST", "upload.php?name=" + base64_encode(file.name), true);

        XMLHttpRequest.prototype.mySendAsBinary = function(text){
            var ui8a = new Uint8Array(new Int8Array(text));
            if(typeof window.Blob == "function")
            {
                blob = new Blob([ui8a]);
            }else{
                var bb = new (window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder)();
                bb.append(ui8a);
                blob = bb.getBlob();
            }
            this.send(blob);
        }

        var eventSource = xhr.upload || xhr;
        eventSource.addEventListener("progress", function(e) {
            var position = e.position || e.loaded;
            var total = e.totalSize || e.total;
            var percentage = Math.round((position/total)*100);
            console.log(percentage);
        });

        xhr.onreadystatechange = function()
        {
            if(xhr.readyState == 4)
            {
                if(xhr.status == 200)
                {
                    console.log("Done");
                }else{
                    console.log("Fail");
                }
            }
        };
        xhr.mySendAsBinary(evt.target.result);
    };
}
Below is how the PHP server listens for the ArrayBuffer sent from JS:
if (isset($_GET["name"])) {
    $name = base64_decode($_GET["name"]);
    $loc = $name;
    $inputHandler = fopen('php://input', "r");
    $fileHandler = fopen($loc, "w+");

    while (true) {
        //$buffer = fgets($inputHandler, 1024);
        $buffer = fread($inputHandler, 1000000);
        if (strlen($buffer) == 0) {
            fclose($inputHandler);
            fclose($fileHandler);
            return true;
        }
        //$b = base64_encode($buffer);
        fwrite($fileHandler, $buffer);
    }
}
The above method works well: the FileReader reads the file as an ArrayBuffer, which is then uploaded to the server. For me, migrating from readAsBinaryString() to readAsArrayBuffer() is important, and readAsArrayBuffer() performs better than readAsBinaryString().
Here are some reasons why a developer might rely on the FileReader API:
Streaming. Using this method, the file is streamed, so we can avoid changing the PHP settings over and over.
Easy encryption. Since the file is sent as an ArrayBuffer, it is easy for the developer to encrypt the file while the upload is in progress (see the sketch after this list).
This method also supports uploading any type of file. I have done some tests and found that readAsArrayBuffer() is faster than readAsBinaryString() and the direct form upload. You may try it.
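To illustrate the encryption point, here is a hedged sketch using the Web Crypto API. The key handling (a throwaway generated key) and the upload endpoint are assumptions for illustration, not part of the code above; in practice the server would need access to the same key to decrypt.
// Hedged sketch: encrypt the ArrayBuffer with AES-GCM before sending it.
async function encryptAndUpload(arrayBuffer, name) {
    const key = await crypto.subtle.generateKey(
        { name: 'AES-GCM', length: 256 }, true, ['encrypt', 'decrypt']);
    const iv = crypto.getRandomValues(new Uint8Array(12));
    const ciphertext = await crypto.subtle.encrypt({ name: 'AES-GCM', iv: iv }, key, arrayBuffer);

    // The IV is prepended so the receiver (which must know the key) can decrypt.
    const body = new Blob([iv, ciphertext]);
    const xhr = new XMLHttpRequest();
    xhr.open('POST', 'upload.php?name=' + encodeURIComponent(name), true);
    xhr.send(body);
}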
Security Notice
The above is test code only; to use it in production, you have to consider sending the data (whether by GET or POST) over HTTPS.

Convert local image blob to base64, in PHP

I'm working on an (HTML) form for an internal tool. Users can fill data out about an issue and attach screenshots. This form is then submitted via ajax to PHPMailer to be sent. The issue is with the screenshots. Due to system limitations I'm unable to have the users actually upload the files to the server.
Currently, I'm using HTML5 filereader to select the files. I then convert the image blobs to base64 and send them to PHPMailer, to be converted to attachments. This is actually working great. However, I'm running into file size issues. Specifically a 1000px x 1000px (402KB) test image. The resulting base64 string is over a million characters long and the request is returning 413 (Request Entity Too Large).
I understand that base64 is not an efficient method for transferring large images and I've seen various posts about retrieving / converting image blobs from a database. What I haven't been able to find is info on retrieving a local image blob and converting it to base64.
My image blob URLs look like this:
blob:http://example.com/18960927-e220-4417-93a4-edb608e5b8b3
Is it possible to grab this local image data in PHP and then convert it to base64?
I cannot post much of the source, but the following will give you an idea of how I am using FileReader:
window.onload = function() {
    window.URL = window.URL || window.webkitURL;
    var fileSelect = document.getElementById("fileSelect"),
        fileElem = document.getElementById("fileElem"),
        fileList = document.getElementById("fileList");

    fileSelect.addEventListener("click", function (e) {
        if (fileElem) {
            fileElem.click();
        }
        e.preventDefault(); // prevent navigation to "#"
    }, false);
}

function handleFiles(files) {
    if (!files.length) {
        fileList.innerHTML = "<p>No files selected!</p>";
    } else {
        fileList.innerHTML = "";
        var list = document.createElement("ul");
        fileList.appendChild(list);
        for (var i = 0; i < files.length; i++) {
            if (files[i].size > 1000000) {
                alert(files[i].name + ' is too big. Please resize it and try again.');
            } else {
                var li = document.createElement("li");
                list.appendChild(li);
                var img = document.createElement("img");
                img.src = window.URL.createObjectURL(files[i]);
                img.height = 60;
                img.setAttribute("class", "shotzPrev");
                img.onload = function() {
                    window.URL.revokeObjectURL(this.src);
                }
                li.appendChild(img);
                var info = document.createElement("span");
                info.innerHTML = files[i].name + "<br>" + files[i].size + " bytes";
                li.appendChild(info);
            }
        }
    }
}
You can POST the File object to PHP directly:
fetch("/path/to/server", {
    method: "POST",
    body: files[i]
})
    .then(response => console.log(response.ok))
    .catch(err => console.error(err));
I think it's an nginx error; please change the client_max_body_size value in the nginx.conf file.
For example:
# set client body size to 2M #
client_max_body_size 2M;
PHP configuration (optional)
Your PHP installation also puts limits on the upload file size. Edit php.ini and set the following directives:
;This sets the maximum amount of memory in bytes that a script is allowed to allocate
memory_limit = 32M
;The maximum size of an uploaded file.
upload_max_filesize = 2M
;Sets max size of post data allowed. This setting also affects file upload. To upload large files, this value must be larger than upload_max_filesize
post_max_size = 3M

How to create and upload a video to parse server?

I am able to upload an image file to S3 using Parse Server (by creating a Parse.File from base64 image data and calling save() on it).
How can I do the same thing for a video file? I am using the parse-server JS library in an Ionic 2 app with TypeScript. The code below worked for images:
let file = new Parse.File("thumbnail", { base64: imageData });
file.save().then(() => {
    // The file has been saved to Parse.
    console.log("File uploaded....");
}, (error) => {
    // The file either could not be read, or could not be saved to Parse.
    console.log("File upload failed.");
});
In the case of a video file, I have the file location received from the Cordova media capture callback. Please help me upload the video file.
Thank you
Here is my solution after days of research.
It works for iPhone.
The important statement is this:
data=data.replace("quicktime","mov");
var options = { limit: 1, duration: 30 };

navigator.device.capture.captureVideo(function(files) {
    // Success! Video data is here
    console.log("video file ready");
    var vFile = files[0];
    console.log(vFile.fullPath);
    ///private/var/mobile/Containers/Data/Application/7A0069EB-F864-438F-A685-A0DAE97F8B2D/tmp/capture-T0x144510b50.tmp.GfXOow/capturedvideo.MOV
    self.auctionvideo = vFile.fullPath; //localURL;
    console.log(self.auctionvideo);

    var fileReader = new FileReader();
    var file;
    fileReader.onload = function (readerEvt) {
        var data = fileReader.result;
        data = data.replace("quicktime", "mov");
        console.log(data);
        //data:video/quicktime;base64,AAAAFGZ0
        console.log(data.length);
        self.auctionvideo = data;
        self.videofile = { base64: data };
    };

    //fileReader.readAsDataURL(audioFile); //This will result in your problem.
    file = new window.File(vFile.name, vFile.localURL,
        vFile.type, vFile.lastModifiedDate, vFile.size);
    fileReader.readAsDataURL(file); //This will result in the solution.
    // fileReader.readAsBinaryString(file); //This will result in the solution.
},
function(error) {
},
options);
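From there, a hedged sketch of handing the resulting base64 data URL to Parse could look like the following. Depending on the Parse JS SDK version, you may need to pass only the raw base64 payload rather than the full data URL, so the prefix stripping below is an assumption:
// Hedged sketch: wrap the captured video's base64 data in a Parse.File and save it.
function uploadVideoToParse(dataUrl) {
    var base64 = dataUrl.split(',')[1]; // drop the "data:video/mov;base64," prefix
    var file = new Parse.File("video.mov", { base64: base64 });
    return file.save().then(function () {
        console.log("Video uploaded:", file.url());
    }, function (error) {
        console.log("Video upload failed:", error.message);
    });
}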

javascript to read file selects a file but doesn't read it

My script selects a file... but doesn't read it. I've been banging my head on it but can't make it work. It's part of my studies, I'm a greenhorn and I'm lost.
function readBlob() {
    var files = document.getElementById('files').files;
    if (!files.length) {
        alert('Please select a file!');
        return;

        var file = files[0];
        var start = 0;
        var stop = file.size;
        var reader = new FileReader();

        if (file.webkitSlice) {
            var blob = file.webkitSlice(start, stop);
            //Creates new blob if using google chrome
        } else if (file.mozSlice) {
            var blob = file.mozSlice(start, stop);
            //Creates new blob if using mozilla firefox
        }

        //read the contents of the file in as text into the blob
        reader.readAsText(blob);
        reader.onloadend = function(evt) {
            if (evt.target.readyState == FileReader.DONE) {
                document.getElementById('byte_content').textContent =
                    evt.target.result;
            }
        };
    }
}
This looks like a simple misplaced brace to me (or maybe just an error from pasting the code into Stack Overflow). The entire thing, the slicing of the file, the insertion into the document, everything, is inside the if (!files.length) block, right after the return. Therefore, that code only runs when there is no file selected (catching on to the problem yet?), which is the opposite of what is intended, and even then it is unreachable because of the return. All of the important stuff is supposed to be outside of that if statement.
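For reference, a minimal corrected sketch that closes the early-return if block where it belongs, and uses the standard Blob.slice() that current browsers support instead of the prefixed variants:
// Corrected sketch: the if block only guards the "no file selected" case.
function readBlob() {
    var files = document.getElementById('files').files;
    if (!files.length) {
        alert('Please select a file!');
        return; // early exit only when nothing is selected
    }
    var file = files[0];
    var reader = new FileReader();
    reader.onloadend = function (evt) {
        if (evt.target.readyState == FileReader.DONE) {
            document.getElementById('byte_content').textContent = evt.target.result;
        }
    };
    // File inherits slice() from Blob, so no webkitSlice/mozSlice checks are needed.
    reader.readAsText(file.slice(0, file.size));
}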
