I can play an MP4 video by requesting chunks of data using GET requests with a Range header.
var FILE = 'Momokuri_Ep_09-10_SUB_ITA_dashinit.mp4';
var NUM_CHUNKS = 10;
var chunk_size = 256 * 1024; // 256 KB
var current_chunk = 0;
var file_size = 1;
window.MediaSource = window.MediaSource || window.WebKitMediaSource;
if (!window.MediaSource) {
alert('MediaSource API is not available');
}
var mediaSource = new MediaSource();
var sourceBuffer;
video.src = window.URL.createObjectURL(mediaSource);
function callback(e) {
sourceBuffer = mediaSource.addSourceBuffer('video/mp4; codecs="avc1.640029, mp4a.40.5"');
console.log('mediaSource readyState: ' + this.readyState);
var readChunk = function() {
GET(FILE, current_chunk, function(uInt8Array) {
sourceBuffer.appendBuffer(uInt8Array);
});
};
sourceBuffer.addEventListener('update', function(e) {
if (!sourceBuffer.updating) {
if (current_chunk == Math.ceil(file_size/chunk_size)-1) {
if ( mediaSource.readyState!='ended' )
mediaSource.endOfStream();
} else {
current_chunk++;
readChunk();
if (video.paused) {
video.play();
}
}
}
});
readChunk();
}
mediaSource.addEventListener('sourceopen', callback, false);
mediaSource.addEventListener('webkitsourceopen', callback, false);
mediaSource.addEventListener('webkitsourceended', function(e) {
console.log('mediaSource readyState: ' + this.readyState);
}, false);
function GET(url, chunk_index, callback) {
var xhr = new XMLHttpRequest();
xhr.open('GET', url, true);
xhr.setRequestHeader('Range', 'bytes=' + (chunk_index * chunk_size) + '-' + ((chunk_index + 1) * chunk_size - 1));
xhr.responseType = 'arraybuffer';
xhr.send();
xhr.onload = function(e) {
if (xhr.status != 200 && xhr.status != 206) {
alert("Unexpected status code " + xhr.status + " for " + url);
return false;
}
file_size = parseInt(this.getResponseHeader('content-range').split("/").pop());
callback(new Uint8Array(xhr.response));
};
}
But I can't seek the video. Can anyone tell me how to solve these problems?
When I seek the video I can get video.currentTime (say 2.5); how do I convert it into a byte-range request (i.e. how do I get the byte offset)?
Once I have the correct offset and have loaded the data with a Range GET request, how do I append it to the sourceBuffer at the right offset?
Thanks
I've been looking for the solution to this myself, and I think I found it.
Have a look at this example.
Whenever a seeking event is emitted from the video element, indicating the user has requested a seek, the current SourceBuffer is reset using sourceBuffer.abort().
The mediasource then emits a new sourceopen event which allows you to create a new sourcebuffer the same way you did the first time, but this time instead of appending data from the start of the file, you append data from an offset corresponding to the videoElem.currentTime.
How you turn a time offset into a byte offset seems to be left up to you, as it depends on the format of the media you're playing.
In a constant bitrate file, you might be able to get away with basically dividing the file length in bytes by the video length in seconds (and adding a little safety margin). For anything else, you will probably need to parse the file and get timestamps and byte offsets of keyframes.
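A minimal sketch of that seek handling, reusing the question's variables (file_size, chunk_size, current_chunk) and its readChunk() helper, and assuming a constant-bitrate file:
video.addEventListener('seeking', function () {
    if (mediaSource.readyState === 'open') {
        sourceBuffer.abort(); // reset the SourceBuffer's parser state before appending from a new position
    }
    // Rough constant-bitrate estimate: average bytes per second = total bytes / total duration.
    // For VBR content you would instead look up the nearest keyframe offset from an index (e.g. a sidx box).
    var bytesPerSecond = file_size / video.duration;
    var byteOffset = Math.floor(video.currentTime * bytesPerSecond);
    current_chunk = Math.floor(byteOffset / chunk_size);
    readChunk(); // issues a Range request starting at the estimated offset
});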
I managed to get a video from PHP using this code:
var some_video_element = document.querySelector('video')
var req = new XMLHttpRequest();
req.onload = function () {
var blob_uri = URL.createObjectURL(this.response);
some_video_element.src = blob_uri;
some_video_element.addEventListener('canplaythrough', (e) => {
URL.revokeObjectURL(blob_uri);
});
};
req.open("get", "vid.php", true);
req.responseType = 'blob';
req.send(null);
However, the loading is long, so I want to show the data as soon as I receive it. Mozilla's documentation indicates that we can use text/plain (or "") as the MIME type to read the response text while the download is in progress. However, I can't manage to convert the text/plain response into video/mp4 using a Blob. Below is the code that doesn't work; I am trying to show the video while part of it is available and the rest is still downloading.
var some_video_element = document.querySelector('video')
var req = new XMLHttpRequest();
req.onprogress = function () {
var text = b64toBlob(Base64.encode(this.response), "video/mp4");
var blob_uri = URL.createObjectURL(text);
some_video_element.src = blob_uri;
some_video_element.addEventListener('canplaythrough', (e) => {
URL.revokeObjectURL(blob_uri);
});
};
req.onload = function () {
var text = b64toBlob(this.response, "video/mp4");
var blob_uri = URL.createObjectURL(text);
some_video_element.src = blob_uri;
some_video_element.addEventListener('canplaythrough', (e) => {
URL.revokeObjectURL(blob_uri);
});
};
req.open("get", "vid.php", true);
req.overrideMimeType('text\/plain');
req.send(null);
Thanks.
NB: This JavaScript fetches from this PHP code: https://codesamplez.com/programming/php-html5-video-streaming-tutorial
but echo data; has been changed to echo base64_encode(data);
If you use the Fetch API instead of XMLHttpRequest you can consume the response as a ReadableStream which can be fed into a SourceBuffer. This will allow the video to be playable as soon as it starts to load instead of waiting for the full file to download. This does not require any special video container formats, back-end processing or third-party libraries.
const vid = document.getElementById('vid');
const format = 'video/webm; codecs="vp8,vorbis"';
const mediaSource = new MediaSource();
let sourceBuffer = null;
mediaSource.addEventListener('sourceopen', event => {
sourceBuffer = mediaSource.addSourceBuffer(format);
fetch('https://bes.works/dev/samples/test.webm')
.then(response => process(response.body.getReader()))
.catch(err => console.error(err));
});
vid.src = URL.createObjectURL(mediaSource);
function process(stream) {
return new Response(
new ReadableStream({
start(controller) {
async function read() {
let { done, value } = await stream.read();
if (done) { controller.close(); return; }
sourceBuffer.appendBuffer(value);
sourceBuffer.addEventListener(
'updateend', event => read(),
{ once: true }
);
}
read();
}
})
);
}
video { width: 300px; }
<video id="vid" controls></video>
As indicated in the comments, you are missing some important pieces.
You can implement what you are asking for, but you need to make some changes. Following the HTML5 streaming (Media Source Extensions) API, you can create a stream that builds the video from segments you fetch from the server.
Something to keep in mind is that the existing HLS and DASH protocols can already help here; HLS in particular is worth a look, since it is simple to use and built around the idea of segments, so your player can reach out to your server for each segment and just decode your base64'd feed.
https://videojs.github.io/videojs-contrib-hls/
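For example, if you expose your segments through an HLS playlist on the server, a client library such as hls.js (an alternative to the videojs-contrib-hls plugin linked above) will do the segment fetching and MediaSource appending for you. A minimal sketch, where the playlist URL is a placeholder:
// assumes hls.js has been loaded, e.g. via <script src="https://cdn.jsdelivr.net/npm/hls.js@latest"></script>
var video = document.querySelector('video');
var playlistUrl = '/streams/lesson/playlist.m3u8'; // placeholder: your server's HLS playlist
if (Hls.isSupported()) {
    var hls = new Hls();
    hls.loadSource(playlistUrl); // hls.js fetches the playlist and segments
    hls.attachMedia(video);      // and drives a MediaSource internally
} else if (video.canPlayType('application/vnd.apple.mpegurl')) {
    video.src = playlistUrl;     // Safari plays HLS natively
}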
I am learning how to use the Web Audio API and I am experiencing a problem that I am not sure how to resolve. I am not sure if it is because I am a beginner at this, or if there is something going on that I don't understand.
Basically I am creating a function that plays a short MP3. The first time a given audio clip is played it should issue an HTTP GET to load the MP3 into an audio buffer. (Subsequent requests for the same audio don't need to re-fetch the MP3.)
The code below works completely correctly in Chrome. However, in Safari playAudio('filename') will not play audio the very first time it is invoked; every time after that it works fine. Any expert advice from someone more experienced than me would be very much appreciated.
<script>
var context;
function init() {
try {
window.AudioContext = window.AudioContext || window.webkitAudioContext;
context = new AudioContext();
}
catch(e) {
alert("Your browser doesn't support Web Audio API");
}
}
window.addEventListener('load', init);
function loadAudio(w) {
var audioURL='https://biblicaltext.com/audio/' + w + '.mp3';
var request = new XMLHttpRequest();
request.open("GET", audioURL, true);
request.responseType = 'arraybuffer';
request.onload = function() {
//console.log("update", request.readyState, request.status)
if (request.readyState === 4) {
if (request.status === 200) {
//take the audio from http request and decode it in an audio buffer
context.decodeAudioData(request.response, function(buffer) {
if(buffer) {
console.log(w, buffer);
playAudioBuffer(w, buffer);
} else {
console.log("audio buffer decoding failed")
}
}, function(ec) {
console.log("http request buffer decoding failed")
});
} else {
console.log("get", audioURL, "failed", request.status)
}
}
}
request.send();
}
var audioBuffer;
var currentWord;
function playAudio(w) {
if (typeof audioBuffer === 'undefined' || audioBuffer == null || currentWord != w) {
audioBuffer = null
currentWord = ""
console.log("reqest load of different word", w)
loadAudio(w)
} else {
playAudioBuffer(w, audioBuffer)
}
}
function playAudioBuffer(w, buffer) {
audioBuffer = buffer
currentWord = w
var source = context.createBufferSource();
source.buffer = audioBuffer;
source.connect(context.destination);
source.start();
console.log('playing', currentWord);
}
</script>
(The page body then contains a row of links, "Play ἦν", "Play ὥστε", "Play οὐρανός", "Play υἱός" and "Play εἷς", each of which triggers playAudio for the corresponding word.)
Am I hitting some kind of weird situation where Safari is blocking sound because the clicking of the link is disconnected from the loading/playing of the audio by a fraction of a second?
It turns out the way to 'unlock' audio is to play a silent/hidden sound when the user first interacts with the page to make sure future requests for audio will function correctly.
https://paulbakaus.com/tutorials/html5/web-audio-on-ios/
Here is what I used:
window.addEventListener('touchstart', function() {
// create empty buffer
var buffer = myContext.createBuffer(1, 1, 22050);
var source = myContext.createBufferSource();
source.buffer = buffer;
// connect to output (your speakers)
source.connect(myContext.destination);
// play the silent buffer immediately (noteOn is the legacy name; use start(0) where available)
source.start ? source.start(0) : source.noteOn(0);
}, false);
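On current browsers the same unlock can usually be achieved without playing a dummy buffer, by resuming the context from within a user gesture. A sketch, assuming context is the AudioContext created in init() above:
window.addEventListener('touchstart', function unlock() {
    if (context.state === 'suspended') {
        context.resume(); // resuming inside a user gesture lifts the autoplay restriction
    }
    window.removeEventListener('touchstart', unlock);
}, false);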
I realize I am probably not the first person to have asked this on SO; however, the search keywords (safari play first time vs second time) don't turn up anything here, so I am inclined to answer it myself and leave this up. If the community thinks deleting my noob question would be better, then I'm happy to do that.
I'm making a video streaming service and I'm having trouble recreating the source stream on the client.
I have made a camera page that sends video data chunks to the server along with a video index; the server then stores each data chunk on the hard disk for the client to download. The client can retrieve a video data chunk by calling the URL:
/Lessen/LesStreamPart/{streamid}?Index={index}
Explanation:
hub.server.join(current_lesid);
When a client joins the stream the page will start to receive updates about the stream by SignalR:
hub.client.updateLesStream = function (lesid, lesstreamid, contenttype, index, duration)
When an update is received, the page checks whether it has already set up the MediaSource control for that stream; if not, because it is the first time, the page will start the stream:
function startStream()
When the stream is started, the page sets up the MediaSource object for the video element and then waits until the MediaSource object has opened.
function openStream()
After the MediaSource object has opened, the page configures it with the MIME type information, then loads the first part of the video stream and appends it to the MediaSource object.
function loadChunks()
Once the MediaSource update has finished, the page will start loading the remaining video parts.
hub.client.updateLesStream = function (lesid, lesstreamid, contenttype, index, duration)
When the camera adds a new chunk, the page is signalled again via SignalR. Because the stream id matches, the page then continues loading the newer chunks by calling:
function loadChunks()
JS:
// Declare variables
var hub = $.connection.lesHub; // The SignalR hub
var buffering = false; // Semaphore for buffering
var video; // Pointer to video element
var mediaSource; // Pointer to mediasource object
var sourceBuffer; // Pointer to the MediaSource's SourceBuffer object
var current_lesid = document.querySelector('#LesId').value; // Current les id
var current_lesstreamid; // Current stream id (set in update)
var current_contenttype; // Current video content type (mimetype)
var current_index; // Current loaded index
var current_indexlength; // Current loaded index length
// Will be called once SignalR sends a video chunk update event
function startStream() {
// Open MediaSource
mediaSource = new MediaSource();
// Add listeners
mediaSource.addEventListener('webkitsourceopen', openStream, false);
//mediaSource.addEventListener('webkitsourceclose', closed, false);
mediaSource.addEventListener('sourceopen', openStream, false);
//mediaSource.addEventListener('sourceclose', closed, false);
// Set MediaSource as video element source
video = document.querySelector('video#VideoPlayerElement');
video.src = URL.createObjectURL(mediaSource);
}
function openStream() {
// Set the buffering semaphore
buffering = true;
// Start the stream with contenttype
sourceBuffer = mediaSource.addSourceBuffer(current_contenttype);
// If there are any video chunks
if (current_indexlength > 0) {
// Load the first video chunk
var url = "/Lessen/LesStreamPart/" + current_lesstreamid +"?Index=0";
var req = new XMLHttpRequest();
req.responseType = "arraybuffer";
req.open("GET", url, true);
req.onload = function () {
// Append response to the sourcebuffer
var resp = req.response;
var array = new Uint8Array(resp);
sourceBuffer.appendBuffer(array);
// Set the current index to 0
current_index = 0;
// Wait for the sourcebuffer to be ready to load all other chunks
sourceBuffer.addEventListener("updateend", loadChunks);
}
req.send();
}
else {
// Release the buffering semaphore
buffering = false;
}
}
function loadChunks() {
// Set the buffering semaphore
buffering = true;
// Calculate the newindex
var newindex = current_index + 1;
// Check if the newindex is in use?
if (newindex < current_indexlength)
{
// Load new video chunk
var url = "/Lessen/LesStreamPart/" + current_lesstreamid + "?Index=" + newindex;
var req = new XMLHttpRequest();
req.responseType = "arraybuffer";
req.open("GET", url, true);
req.onload = function () {
// Append response to the sourcebuffer
var resp = req.response;
var array = new Uint8Array(resp);
sourceBuffer.appendBuffer(array);
// Set the current index to newindex
current_index = newindex;
// Recursive call to add remaining chunks
loadChunks();
}
req.send();
}
else {
// New index is not in use, release the buffering semaphore
buffering = false;
}
}
// Start recording callbacks
hub.client.startLesStream = function (lesid, lesstreamid, contenttype) {
// This is called while there are no video data chunks, so we can ignore it.
};
// Update recording callbacks
hub.client.updateLesStream = function (lesid, lesstreamid, contenttype, index, duration) {
// Check if update is for our lesid (not actually needed)
if (current_lesid == lesid) {
// Check if buffering
if (buffering) {
// The webpage is currently busy; retry this message after 100 ms
setTimeout(function () {
hub.client.updateLesStream(lesid, lesstreamid, contenttype, index, duration);
}, 100);
}
else {
// Not buffering, so we can begin processing
// When the streamid is different reload the stream, when the page starts
// the "current_lesstreamid" is undefined, so we will reload the video
if (current_lesstreamid == lesstreamid) {
// Update to current stream
current_indexlength = index + 1;
loadChunks();
}
else {
// Different stream started
current_lesstreamid = lesstreamid;
current_contenttype = contenttype;
current_indexlength = index + 1;
startStream();
}
}
}
};
// Stop recording callbacks
hub.client.stopLesStream = function (lesid, lesstreamid, contenttype) {
// Check if update is for our lesid (not actually needed)
if (current_lesid == lesid) {
// Check if stream is currently shown
if (current_lesstreamid == lesstreamid) {
// Stop the stream
mediaSource.endOfStream();
}
}
};
// Start SignalR
$.connection.hub.start().done(function () {
// And join the room
hub.server.join(current_lesid);
});
HTML:
<input type="hidden" id="LesId" value="#(Model.Id)" />
<video autoplay controls id="VideoPlayerElement"></video>
OUTPUT:
The page doesn't show any errors, but I do get a broken video icon in the video element. Does anyone know what might cause this?
I read in a different Stack Overflow question that the VP8 codec might need to be used; I changed it, but it still doesn't work.
EDIT:
I changed the JavaScript code a bit. It turned out I was calling the loadChunks function directly even though it was already being called by the sourceBuffer's updateend event; I then got a lot more errors.
I changed the way I communicate with the server to $.get(). That got rid of the errors, but I still get no picture.
// Declare variables
var hub = $.connection.lesHub; // The SignalR hub
var buffering = false; // Semaphore for buffering
var video; // Pointer to video element
var mediaSource; // Pointer to mediasource object
var sourceBuffer; // Pointer to the MediaSource's SourceBuffer object
var current_lesid = document.querySelector('#LesId').value; // Current les id
var current_lesstreamid; // Current stream id (set in update)
var current_contenttype; // Current video content type (mimetype)
var current_index; // Current loaded index
var current_indexlength; // Current loaded index length
// Will be called once SignalR sends a video chunk update event
function startStream() {
// Open MediaSource
mediaSource = new MediaSource();
// Add listeners
mediaSource.addEventListener('webkitsourceopen', openStream, false);
//mediaSource.addEventListener('webkitsourceclose', closed, false);
mediaSource.addEventListener('sourceopen', openStream, false);
//mediaSource.addEventListener('sourceclose', closed, false);
// Set MediaSource as video element source
video = document.querySelector('video#VideoPlayerElement');
video.src = URL.createObjectURL(mediaSource);
}
function openStream() {
// Set the buffering semaphore
buffering = true;
// Start the stream with contenttype
sourceBuffer = mediaSource.addSourceBuffer(current_contenttype);
// Wait for the sourcebuffer to be ready to load all other chunks
sourceBuffer.addEventListener("updateend", loadChunks);
// If there are any video chunks
if (current_indexlength > 0) {
// Load the first video chunk
var url = "/Lessen/LesStreamPart/" + current_lesstreamid + "?Index=0";
//$("body").append("<video><source src='" + url + "'/></video>");
$.get(url, function (resp) {
//var req = new XMLHttpRequest();
//req.responseType = "arraybuffer";
//req.open("GET", url, true);
//req.onload = function () {
// Append response to the sourcebuffer
//var resp = req.response;
var array = new Uint8Array(resp);
sourceBuffer.appendBuffer(array);
// Set the current index to 0
current_index = 0;
//}
//req.send();
});
}
else {
// Release the buffering semaphore
buffering = false;
}
}
function loadChunks() {
//video.play();
// Set the buffering semaphore
buffering = true;
// Calculate the newindex
var newindex = current_index + 1;
// Check if the newindex is in use?
if (newindex < current_indexlength) {
// Load new video chunk
var url = "/Lessen/LesStreamPart/" + current_lesstreamid + "?Index=" + newindex;
$.get(url, function (resp) {
//var req = new XMLHttpRequest();
//req.responseType = "arraybuffer";
//req.open("GET", url, true);
//req.onload = function () {
// Append response to the sourcebuffer
//var resp = req.response;
var array = new Uint8Array(resp);
sourceBuffer.appendBuffer(array);
// Set the current index to newindex
current_index = newindex;
//}
//req.send();
});
}
else {
// New index is not in use, release the buffering semaphore
buffering = false;
}
}
// Start recording callbacks
hub.client.startLesStream = function (lesid, lesstreamid, contenttype) {
// This is called while there are no video data chunks, so we can ignore it.
};
// Update recording callbacks
hub.client.updateLesStream = function (lesid, lesstreamid, contenttype, index, duration) {
// Check if update is for our lesid (not actually needed)
if (current_lesid == lesid) {
// Check if buffering
if (buffering) {
// The webpage is currently busy; retry this message after 100 ms
setTimeout(function () {
hub.client.updateLesStream(lesid, lesstreamid, contenttype, index, duration);
}, 100);
}
else {
// Not buffering, so we can begin processing
// When the streamid is different reload the stream, when the page starts
// the "current_lesstreamid" is undefined, so we will reload the video
if (current_lesstreamid == lesstreamid) {
// Update to current stream
current_indexlength = index + 1;
loadChunks();
}
else {
// Different stream started
current_lesstreamid = lesstreamid;
current_contenttype = contenttype;
current_indexlength = index + 1;
startStream();
}
}
}
};
// Stop recording callbacks
hub.client.stopLesStream = function (lesid, lesstreamid, contenttype) {
// Check if update is for our lesid (not actually needed)
if (current_lesid == lesid) {
// Check if stream is currently shown
if (current_lesstreamid == lesstreamid) {
// Stop the stream
mediaSource.endOfStream();
}
}
};
// Start SignalR
$.connection.hub.start().done(function () {
// And join the room
hub.server.join(current_lesid);
});
I found a perfect example that solves this problem in a simple way.
I am using three static files, but you can also append data received from sockets or from any API.
<!DOCTYPE html>
<html>
<head>
</head>
<body>
<br>
<video controls="true" autoplay="true"></video>
<script>
(async() => {
const mediaSource = new MediaSource();
const video = document.querySelector("video");
// video.oncanplay = e => video.play();
const urls = ["https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4", "https://raw.githubusercontent.com/w3c/web-platform-tests/master/media-source/mp4/test.mp4","https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4"];
const request = url => fetch(url).then(response => response.arrayBuffer());
// `urls.reverse()` stops at `.currentTime` : `9`
const files = await Promise.all(urls.map(request));
/*
`.webm` files
Uncaught DOMException: Failed to execute 'appendBuffer' on 'SourceBuffer': This SourceBuffer has been removed from the parent media source.
Uncaught DOMException: Failed to set the 'timestampOffset' property on 'SourceBuffer': This SourceBuffer has been removed from the parent media source.
*/
// const mimeCodec = "video/webm; codecs=opus";
// https://stackoverflow.com/questions/14108536/how-do-i-append-two-video-files-data-to-a-source-buffer-using-media-source-api/
const mimeCodec = "video/mp4; codecs=avc1.42E01E, mp4a.40.2";
const media = await Promise.all(files.map(file => {
return new Promise(resolve => {
let media = document.createElement("video");
let blobURL = URL.createObjectURL(new Blob([file]));
media.onloadedmetadata = async e => {
resolve({
mediaDuration: media.duration,
mediaBuffer: file
})
}
media.src = blobURL;
})
}));
console.log(media);
mediaSource.addEventListener("sourceopen", sourceOpen);
video.src = URL.createObjectURL(mediaSource);
async function sourceOpen(event) {
if (MediaSource.isTypeSupported(mimeCodec)) {
const sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
for (let chunk of media) {
await new Promise(resolve => {
sourceBuffer.appendBuffer(chunk.mediaBuffer);
sourceBuffer.onupdateend = e => {
sourceBuffer.onupdateend = null;
sourceBuffer.timestampOffset += chunk.mediaDuration;
console.log(mediaSource.duration);
resolve()
}
})
}
mediaSource.endOfStream();
}
else {
console.warn(mimeCodec + " not supported");
}
};
})()
</script>
</body>
</html>
It seems to be a codec issue combined with the way the data is read. When you receive a video blob, you need to convert/store it using a FileReader; this worked for me. For best codec support I needed to use the VP8 codec (please let me know if you know a better one).
This is my working example where I use a MediaRecorder to record the webcam and then feed the video blobs into a MediaSource.
const video1 = document.getElementById('video1');
const video2 = document.getElementById('video2');
const mediaSource = new MediaSource();
video2.src = URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', sourceOpen);
function sourceOpen(openargs) {
navigator.mediaDevices
.getUserMedia({ audio: false, video: true })
.then(function (stream) {
video1.srcObject = stream;
var options = { mimeType: 'video/webm; codecs=vp8' };
var mediaRecorder = new MediaRecorder(stream, options);
var sourceBuffer = null;
mediaRecorder.ondataavailable = function (e) {
if (sourceBuffer == null) {
sourceBuffer = mediaSource.addSourceBuffer(mediaRecorder.mimeType);
window.sourceBuffer = sourceBuffer;
}
var reader = new FileReader();
reader.addEventListener("loadend", function () {
var arr = new Uint8Array(reader.result);
sourceBuffer.appendBuffer(arr);
});
reader.readAsArrayBuffer(e.data);
};
mediaRecorder.start(5000);
});
}
video {
width: 320px;
height: 180px;
}
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title></title>
</head>
<body>
<video id="video1" controls autoplay muted></video><br />
<video id="video2" controls autoplay muted></video>
</body>
</html>
I noticed that recent versions of Mozilla Firefox throw an allocation size overflow (in FileReader.readAsBinaryString()) when the file is bigger than roughly 200 MB.
Here is some of my test code for the client web browser:
function upload(fileInputId, fileIndex)
{
var file = document.getElementById(fileInputId).files[fileIndex];
var blob;
var reader = new FileReader();
reader.readAsBinaryString(file);
reader.onloadend = function(evt)
{
xhr = new XMLHttpRequest();
xhr.open("POST", "upload.php", true);
XMLHttpRequest.prototype.mySendAsBinary = function(text){
var data = new ArrayBuffer(text.length);
var ui8a = new Uint8Array(data, 0);
for (var i = 0; i < text.length; i++){
ui8a[i] = (text.charCodeAt(i) & 0xff);
}
if(typeof window.Blob == "function")
{
blob = new Blob([data]);
}else{
var bb = new (window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder)();
bb.append(data);
blob = bb.getBlob();
}
this.send(blob);
}
var eventSource = xhr.upload || xhr;
eventSource.addEventListener("progress", function(e) {
var position = e.position || e.loaded;
var total = e.totalSize || e.total;
var percentage = Math.round((position/total)*100);
});
xhr.onreadystatechange = function()
{
if(xhr.readyState == 4)
{
if(xhr.status == 200)
{
console.log("Done");
}else{
console.log("Fail");
}
}
};
xhr.mySendAsBinary(evt.target.result);
};
}
So I tried changing it to FileReader.readAsArrayBuffer(); the error no longer shows up, but the data is not the same (since it is not read as a binary string).
Does anyone have a solution to this problem? Is there any way to upload a bigger file from JS to a web server as raw data or a string, other than through the FileReader implementation?
I read in the Mozilla JS documentation:
This feature is non-standard and is not on a standards track. Do not
use it on production sites facing the Web: it will not work for every
user. There may also be large incompatibilities between
implementations and the behavior may change in the future. - Mozilla
If not readAsBinaryString, then how do I implement readAsArrayBuffer or readAsText?
To send Files to a web server, you simply don't need JS. HTML alone is well able to do this with the <form> element.
Now, if you want to go through JS, e.g. to catch the different ProgressEvents, then you can send your File directly; there is no need to read it at all on your side.
To do this, you've got two (or three) solutions.
If your server is able to handle PUT requests, you can simply xhr.send(file);.
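For the PUT case, a minimal sketch might look like this (the endpoint path is a placeholder, and the server has to accept the raw request body):
var file = document.querySelector('input[type="file"]').files[0];
var xhr = new XMLHttpRequest();
xhr.open('PUT', '/upload/' + encodeURIComponent(file.name)); // placeholder endpoint
xhr.upload.onprogress = function (e) { console.log(e.loaded + ' / ' + e.total); };
xhr.onload = function () { console.log('upload finished with status ' + xhr.status); };
xhr.send(file); // the browser streams the File itself; no FileReader involved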
Otherwise, you'd have to go through a FormData.
// if you really want to go the XHR way
document.forms[0].onsubmit = function handleSubmit(evt) {
if(!window.FormData) { // old browser use the <form>
return;
}
// now we handle the submit through js
evt.preventDefault();
var fD = new FormData(this);
var xhr = new XMLHttpRequest();
xhr.onprogress = function handleProgress(evt){};
xhr.onload = function handleLoad(evt){};
xhr.onerror = function handleError(evt){};
xhr.open(this.method, this.action);
// xhr.send(fD); // won't work in StackSnippet
log(fD, this.method, this.action); // so we just log its content
};
function log(formData, method, action) {
console.log('would have sent');
for(let [key, val] of formData.entries())
console.log(key, val);
console.log('through', method);
console.log('to', action);
}
<!-- this in itself is enough -->
<form method="POST" action="your_server.page">
<input type="file" name="file_upload">
<input type="submit">
</form>
Now, you sent a comment saying that you can't upload Files bigger than 1GB to your server.
This limitation is only due to your server's configuration, so if you want to accept such big files, the best option is to configure it correctly.
But if you really want to send your File in chunks, even then you don't need to leave the Blob interface.
Indeed Blobs have a slice() method, so use it.
document.forms[0].onsubmit = function handleSubmit(evt) {
evt.preventDefault();
var file = this.elements[0].files[0];
var processed = 0;
if(file) {
// var MAX_CHUNK_SIZE = Math.min(file.size, server_max_size);
// for demo we just split in 10 chunks
var MAX_CHUNK_SIZE = file.size > 10 ? (file.size / 10) | 0 : 1;
loadChunk(0);
}
function loadChunk(start) {
var fD = new FormData();
var sliced = file.slice(start, start+MAX_CHUNK_SIZE);
processed += sliced.size; // only for demo
fD.append('file_upload', sliced, file.name);
fD.append('starting_index', start);
if(start + MAX_CHUNK_SIZE >= file.size) {
fD.append('last_chunk', true);
}
var xhr = new XMLHttpRequest();
xhr.open('POST', 'your_server.page');
xhr.onload = function onchunkposted(evt) {
if(start + MAX_CHUNK_SIZE >= file.size) {
console.log('All done. Original file size: %s, total of chunks sizes %s', file.size, processed);
return;
}
loadChunk(start + MAX_CHUNK_SIZE);
};
// xhr.send(fD);
log(fD);
setTimeout(xhr.onload, 200); // fake XHR onload
}
};
function log(formData, method, action) {
console.log('would have sent');
for(let [key, val] of formData.entries())
console.log(key, val);
}
<form method="POST" action="your_server.page">
<input type="file" name="file_upload">
<input type="submit">
</form>
But you absolutely don't need to go through a FileReader for this operation.
Actually the only case where it could make sense to use a FileReader here would be for some Android browsers that don't support passing Blob into a FormData, even though they don't give a single clue about it.
So in this case, you'd have to set up your server to let you know the request was empty, and then only read the File as a dataURI that you would send in-place of the original File.
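A sketch of that fallback, assuming the same form endpoint as above; the field names are illustrative only:
function sendAsDataURI(file) {
    var reader = new FileReader();
    reader.onload = function () {
        var fD = new FormData();
        fD.append('file_as_data_uri', reader.result); // a data: URL string instead of the Blob
        fD.append('file_name', file.name);
        var xhr = new XMLHttpRequest();
        xhr.open('POST', 'your_server.page');
        xhr.send(fD);
    };
    reader.readAsDataURL(file);
}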
After a long week of research and sleepless nights: you can't upload binary strings without breaking them, and base64 doesn't work for all files, only images; the journey from the client side to the server breaks the bytes being sent.
Kaiido's statement is correct:
To send Files to a web-server, you simply don't need js
But that doesn't answer my question. Using a simple XMLHttpRequest() can upload the file and track its progress as well, but still, that's not it. Direct upload, whether from the <form> or via XMLHttpRequest(), requires increasing the upload limits in the PHP settings. That is not convenient for me. What if the client uploads a 4 GB file? Then I need to raise the limit to 4 GB. The next time a client uploads a 6 GB file, I have to raise it to 6 GB.
Using the slice() method makes sense for bigger files, since we can send them to the server part by part. But I am not using it yet this time.
Here is my test that works the way I want. I hope some expert can correct me if I am wrong.
My Upload.js
function upload(fileInputId, fileIndex)
{
var file = document.getElementById(fileInputId).files[fileIndex];
var blob;
var reader = new FileReader();
reader.readAsArrayBuffer(file);
reader.onloadend = function(evt)
{
xhr = new XMLHttpRequest();
xhr.open("POST", "upload.php?name=" + base64_encode(file.name), true);
XMLHttpRequest.prototype.mySendAsBinary = function(text){
var ui8a = new Uint8Array(new Int8Array(text));
if(typeof window.Blob == "function")
{
blob = new Blob([ui8a]);
}else{
var bb = new (window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder)();
bb.append(ui8a);
blob = bb.getBlob();
}
this.send(blob);
}
var eventSource = xhr.upload || xhr;
eventSource.addEventListener("progress", function(e) {
var position = e.position || e.loaded;
var total = e.totalSize || e.total;
var percentage = Math.round((position/total)*100);
console.log(percentage);
});
xhr.onreadystatechange = function()
{
if(xhr.readyState == 4)
{
if(xhr.status == 200)
{
console.log("Done");
}else{
console.log("Fail");
}
}
};
xhr.mySendAsBinary(evt.target.result);
};
}
Below is how the PHP server listens to the ArrayBuffer from JS:
if(isset($_GET["name"])){
$name = base64_decode($_GET["name"]);
$loc = $name;
$inputHandler = fopen('php://input', "r");
$fileHandler = fopen($loc, "w+");
while(true) {
//$buffer = fgets($inputHandler, 1024);
$buffer = fread($inputHandler, 1000000);
if (strlen($buffer) == 0) {
fclose($inputHandler);
fclose($fileHandler);
return true;
}
//$b = base64_encode($buffer);
fwrite($fileHandler, $buffer);
}
}
The above method works well. The FileReader reads the file as an ArrayBuffer, which is then uploaded to the server. For me, migrating from readAsBinaryString() to readAsArrayBuffer() is important, and readAsArrayBuffer() also performs better than readAsBinaryString().
Here are some reasons why some developers rely on the FileReader API:
Streaming. Using this method the file is streamed, so we can avoid changing the PHP settings over and over.
Easy encryption. Since the file is sent as an ArrayBuffer, it is easy for the developer to encrypt the file while the upload is in progress (see the sketch after this list).
This method also supports uploading any type of file. In my tests the readAsArrayBuffer() method was also faster than readAsBinaryString() and direct form upload. You may try it.
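As a sketch of the encryption point above, assuming a modern browser with the Web Crypto API; the endpoint mirrors the upload.php used earlier and the key handling is purely illustrative:
async function encryptAndUpload(file) {
    var data = await file.arrayBuffer(); // read the file's bytes
    var key = await crypto.subtle.generateKey(
        { name: 'AES-GCM', length: 256 }, true, ['encrypt', 'decrypt']);
    var iv = crypto.getRandomValues(new Uint8Array(12)); // random nonce for AES-GCM
    var ciphertext = await crypto.subtle.encrypt({ name: 'AES-GCM', iv: iv }, key, data);
    await fetch('upload.php?name=' + encodeURIComponent(btoa(file.name)), { // btoa assumes an ASCII file name
        method: 'POST',
        body: ciphertext // upload the encrypted bytes
    });
    // in a real application the key (and iv) must be stored or exchanged securely
}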
Security Notice
The above is test code only; to use it in production, you have to consider sending the data in GET or POST over HTTPS.
I recently downloaded Armsglobe (Google's globe API for drawing lines between countries by their country names), but its original code does not re-fetch the JSON data every x minutes.
I was hoping you could help me achieve that without breaking the overall appearance, which is exactly what happened when I tried a simple setTimeout() in dataloading.js as follows:
function loadContentData(callback){
function request_xhr() {
var filePath = "categories/All3.json";
filePath = encodeURI( filePath );
xhr = new XMLHttpRequest();
xhr.open( 'GET', filePath, true );
xhr.onreadystatechange = function() {
if ( xhr.readyState === 4 && xhr.status === 200 ) {
timeBins = JSON.parse( xhr.responseText ).timeBins;
// console.log(timeBins);
maxValue = 0;
// console.log(timeBins);
startTime = timeBins[0].t;
endTime = timeBins[timeBins.length-1].t;
timeLength = endTime - startTime;
if(callback)
callback();
// console.log("finished read data file");
}
};
xhr.send( null );
}
request_xhr();
setTimeout(request_xhr, 20000);
}
This works and actually fetches the JSON file at the given interval, but it doesn't draw or show it in any way. In fact it conflicts with the 3D globe, which becomes immovable and breaks immediately. I don't know exactly what is causing this, but I think the browser is caching the file and showing the cached data instead of the new data.
I know my approach is a bit odd; I just couldn't see another way. Any new idea or approach is welcome. Please help, thanks!
Here is one way to ensure you draw the data only after you receive it. By scheduling the next request inside the callback, you ensure that the next call is made only after the previous one has been rendered:
function loadContentData(callback){
function request_xhr() {
var filePath = "categories/All3.json";
filePath = encodeURI( filePath );
xhr = new XMLHttpRequest();
xhr.open( 'GET', filePath, true );
xhr.onreadystatechange = function() {
if ( xhr.readyState === 4 && xhr.status === 200 ) {
timeBins = JSON.parse( xhr.responseText ).timeBins;
// console.log(timeBins);
maxValue = 0;
// console.log(timeBins);
startTime = timeBins[0].t;
endTime = timeBins[timeBins.length-1].t;
timeLength = endTime - startTime;
if(callback)
callback();
// console.log("finished read data file");
// Schedule the next refresh only after this response has been processed
setTimeout(request_xhr, 20000);
}
};
xhr.send( null );
}
request_xhr();
}
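If the stale data is coming from the HTTP cache, as you suspect, one common workaround is to make each request URL unique (or to configure the server to send no-cache headers). A minimal sketch against the same request_xhr() function:
var filePath = "categories/All3.json?_=" + Date.now(); // a changing query string defeats the browser cache
filePath = encodeURI( filePath );
// ...the rest of request_xhr() stays unchanged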