Receiving video parts and appending them to a MediaSource using JavaScript

I'm making a video streaming service and I'm having trouble recreating the source stream on the client.
I have made a camera page that sends video data chunks to the server along with a video index; the server then stores each chunk on the hard disk for the client to download. The client can retrieve a video data chunk by calling the URL:
/Lessen/LesStreamPart/{streamid}?Index={index}
Explanation:
hub.server.join(current_lesid);
When a client joins the stream, the page will start to receive updates about the stream via SignalR:
hub.client.updateLesStream = function (lesid, lesstreamid, contenttype, index, duration)
When an update is received, the page checks whether it has already set up the MediaSource for that stream; if not, because it is the first time, the page will start the stream:
function startStream()
When the stream is started, the page will set up the MediaSource object for the video element, then wait until the MediaSource object is instantiated.
function openStream()
After the MediaSource object has been instantiated, the page will configure it with the MIME type information; after that it will load the first part of the video stream and append it to the MediaSource object.
function loadChunks()
Once the MediaSource update has finished, the page will start loading the remaining video parts.
hub.client.updateLesStream = function (lesid, lesstreamid, contenttype, index, duration)
When the camera adds a new chunk, the page will be signalled again via SignalR. Because the stream id matches, the page will then continue loading the newer chunks by calling:
function loadChunks()
JS:
// Declare variables
var hub = $.connection.lesHub; // The SignalR hub
var buffering = false; // Semaphore for buffering
var video; // Pointer to video element
var mediaSource; // Pointer to mediasource object
var sourceBuffer; // Pointer to the MediaSource's SourceBuffer object
var current_lesid = document.querySelector('#LesId').value; // Current les id
var current_lesstreamid; // Current stream id (set in update)
var current_contenttype; // Current video content type (mimetype)
var current_index; // Current loaded index
var current_indexlength; // Current loaded index length
// Will be called once SignalR sends a video chunk update event
function startStream() {
// Open MediaSource
mediaSource = new MediaSource();
// Add listeners
mediaSource.addEventListener('webkitsourceopen', openStream, false);
//mediaSource.addEventListener('webkitsourceclose', closed, false);
mediaSource.addEventListener('sourceopen', openStream, false);
//mediaSource.addEventListener('sourceclose', closed, false);
// Set MediaSource as video element source
video = document.querySelector('video#VideoPlayerElement');
video.src = URL.createObjectURL(mediaSource);
}
function openStream() {
// Set the buffering semaphore
buffering = true;
// Start the stream with contenttype
sourceBuffer = mediaSource.addSourceBuffer(current_contenttype);
// If there are any video chunks
if (current_indexlength > 0) {
// Load the first video chunk
var url = "/Lessen/LesStreamPart/" + current_lesstreamid +"?Index=0";
var req = new XMLHttpRequest();
req.responseType = "arraybuffer";
req.open("GET", url, true);
req.onload = function () {
// Append response to the sourcebuffer
var resp = req.response;
var array = new Uint8Array(resp);
sourceBuffer.appendBuffer(array);
// Set the current index to 0
current_index = 0;
// Wait for the sourcebuffer to be ready to load all other chunks
sourceBuffer.addEventListener("updateend", loadChunks);
}
req.send();
}
else {
// Release the buffering semaphore
buffering = false;
}
}
function loadChunks() {
// Set the buffering semaphore
buffering = true;
// Calculate the newindex
var newindex = current_index + 1;
// Check if the newindex is in use?
if (newindex < current_indexlength)
{
// Load new video chunk
var url = "/Lessen/LesStreamPart/" + current_lesstreamid + "?Index=" + newindex;
var req = new XMLHttpRequest();
req.responseType = "arraybuffer";
req.open("GET", url, true);
req.onload = function () {
// Append response to the sourcebuffer
var resp = req.response;
var array = new Uint8Array(resp);
sourceBuffer.appendBuffer(array);
// Set the current index to newindex
current_index = newindex;
// Recursive call to add remaining chunks
loadChunks();
}
req.send();
}
else {
// Newindex is not in use, release the buffering semaphore
buffering = false;
}
}
// Start recording callbacks
hub.client.startLesStream = function (lesid, lesstreamid, contenttype) {
// This is called while there are no video data chunks, so we can ignore it.
};
// Update recording callbacks
hub.client.updateLesStream = function (lesid, lesstreamid, contenttype, index, duration) {
// Check if update is for our lesid (not actually needed)
if (current_lesid == lesid) {
// Check if buffering
if (buffering) {
// The webpage is currently busy; retry this message after 100 ms
setTimeout(function () {
hub.client.updateLesStream(lesid, lesstreamid, contenttype, index, duration);
}, 100);
}
else {
// Not buffering, so we can begin processing
// When the stream id is different, reload the stream; when the page starts,
// "current_lesstreamid" is undefined, so the video will be reloaded
if (current_lesstreamid == lesstreamid) {
// Update to current stream
current_indexlength = index + 1;
loadChunks();
}
else {
// Different stream started
current_lesstreamid = lesstreamid;
current_contenttype = contenttype;
current_indexlength = index + 1;
startStream();
}
}
}
};
// Stop recording callbacks
hub.client.stopLesStream = function (lesid, lesstreamid, contenttype) {
// Check if update is for our lesid (not actually needed)
if (current_lesid == lesid) {
// Check if stream is currently shown
if (current_lesstreamid == lesstreamid) {
// Stop the stream
mediaSource.endOfStream();
}
}
};
// Start SignalR
$.connection.hub.start().done(function () {
// And join the room
hub.server.join(current_lesid);
});
HTML:
<input type="hidden" id="LesId" value="#(Model.Id)" />
<video autoplay controls id="VideoPlayerElement"></video>
OUTPUT:
The page doesn't show any errors, but I do get a broken video icon in the video element. Does anyone know what might cause this?
I read in a different Stack Overflow post that the VP8 codec might need to be used; I changed it, but it still doesn't work.
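One quick way to rule out a codec/container mismatch is to probe MediaSource.isTypeSupported with the exact MIME string before calling addSourceBuffer. A minimal sketch (the codec strings below are common examples, not necessarily what the camera produces):
// Probe which MIME/codec strings this browser's MSE implementation accepts.
// Substitute the contenttype value the camera actually sends.
[
    'video/webm; codecs="vp8"',
    'video/webm; codecs="vp8, vorbis"',
    'video/mp4; codecs="avc1.42E01E, mp4a.40.2"'
].forEach(function (type) {
    console.log(type, '=>', MediaSource.isTypeSupported(type));
});
Note that MSE also requires the appended bytes to be properly segmented media (an initialization segment followed by media segments, e.g. fragmented MP4 or WebM clusters); arbitrary byte slices of a regular MP4 file will not decode even when the codec string is accepted.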
EDIT:
I changed the JavaScript code a bit. It turned out I was calling the "loadChunks" function myself while it was already being called by the "updateend" event of the "sourceBuffer". I then got a lot more errors.
I changed the way I communicate with the server to "$.get();". That solved the errors, but I still get no image.
// Declare variables
var hub = $.connection.lesHub; // The SignalR hub
var buffering = false; // Semaphore for buffering
var video; // Pointer to video element
var mediaSource; // Pointer to mediasource object
var sourceBuffer; // Pointer to the MediaSource's SourceBuffer object
var current_lesid = document.querySelector('#LesId').value; // Current les id
var current_lesstreamid; // Current stream id (set in update)
var current_contenttype; // Current video content type (mimetype)
var current_index; // Current loaded index
var current_indexlength; // Current loaded index length
// Will be called once SignalR sends a video chunk update event
function startStream() {
// Open MediaSource
mediaSource = new MediaSource();
// Add listeners
mediaSource.addEventListener('webkitsourceopen', openStream, false);
//mediaSource.addEventListener('webkitsourceclose', closed, false);
mediaSource.addEventListener('sourceopen', openStream, false);
//mediaSource.addEventListener('sourceclose', closed, false);
// Set MediaSource as video element source
video = document.querySelector('video#VideoPlayerElement');
video.src = URL.createObjectURL(mediaSource);
}
function openStream() {
// Set the buffering semaphore
buffering = true;
// Start the stream with contenttype
sourceBuffer = mediaSource.addSourceBuffer(current_contenttype);
// Wait for the sourcebuffer to be ready to load all other chunks
sourceBuffer.addEventListener("updateend", loadChunks);
// If there are any video chunks
if (current_indexlength > 0) {
// Load the first video chunk
var url = "/Lessen/LesStreamPart/" + current_lesstreamid + "?Index=0";
//$("body").append("<video><source src='" + url + "'/></video>");
$.get(url, function (resp) {
//var req = new XMLHttpRequest();
//req.responseType = "arraybuffer";
//req.open("GET", url, true);
//req.onload = function () {
// Append response to the sourcebuffer
//var resp = req.response;
var array = new Uint8Array(resp);
sourceBuffer.appendBuffer(array);
// Set the current index to 0
current_index = 0;
//}
//req.send();
});
}
else {
// Release the buffering semaphore
buffering = false;
}
}
function loadChunks() {
//video.play();
// Set the buffering semaphore
buffering = true;
// Calculate the newindex
var newindex = current_index + 1;
// Check if the newindex is in use?
if (newindex < current_indexlength) {
// Load new video chunk
var url = "/Lessen/LesStreamPart/" + current_lesstreamid + "?Index=" + newindex;
$.get(url, function (resp) {
//var req = new XMLHttpRequest();
//req.responseType = "arraybuffer";
//req.open("GET", url, true);
//req.onload = function () {
// Append response to the sourcebuffer
//var resp = req.response;
var array = new Uint8Array(resp);
sourceBuffer.appendBuffer(array);
// Set the current index to newindex
current_index = newindex;
//}
//req.send();
});
}
else {
// Newindex is not in use, release the buffering semaphore
buffering = false;
}
}
// Start recording callbacks
hub.client.startLesStream = function (lesid, lesstreamid, contenttype) {
// This is called while there are no video data chunks, so we can ignore it.
};
// Update recording callbacks
hub.client.updateLesStream = function (lesid, lesstreamid, contenttype, index, duration) {
// Check if update is for our lesid (not actually needed)
if (current_lesid == lesid) {
// Check if buffering
if (buffering) {
// The webpage is currently busy; retry this message after 100 ms
setTimeout(function () {
hub.client.updateLesStream(lesid, lesstreamid, contenttype, index, duration);
}, 100);
}
else {
// Not buffering, so we can begin processing
// When the stream id is different, reload the stream; when the page starts,
// "current_lesstreamid" is undefined, so the video will be reloaded
if (current_lesstreamid == lesstreamid) {
// Update to current stream
current_indexlength = index + 1;
loadChunks();
}
else {
// Different stream started
current_lesstreamid = lesstreamid;
current_contenttype = contenttype;
current_indexlength = index + 1;
startStream();
}
}
}
};
// Stop recording callbacks
hub.client.stopLesStream = function (lesid, lesstreamid, contenttype) {
// Check if update is for our lesid (not actually needed)
if (current_lesid == lesid) {
// Check if stream is currently shown
if (current_lesstreamid == lesstreamid) {
// Stop the stream
mediaSource.endOfStream();
}
}
};
// Start SignalR
$.connection.hub.start().done(function () {
// And join the room
hub.server.join(current_lesid);
});
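For what it's worth, jQuery's $.get delivers the response body as text, which corrupts binary data. Below is a minimal loader sketch, reusing the question's variable names, that requests an ArrayBuffer directly and drives every append from the single "updateend" listener:
// Sketch: binary XHR loader driven solely by the "updateend" event.
// Assumes sourceBuffer, current_lesstreamid, current_index,
// current_indexlength and buffering from the code above.
function fetchChunk(index, done) {
    var req = new XMLHttpRequest();
    req.open("GET", "/Lessen/LesStreamPart/" + current_lesstreamid + "?Index=" + index, true);
    req.responseType = "arraybuffer"; // raw bytes, not text
    req.onload = function () { done(new Uint8Array(req.response)); };
    req.send();
}
function pump() {
    var next = (current_index === undefined) ? 0 : current_index + 1;
    if (next >= current_indexlength || sourceBuffer.updating) {
        buffering = false; // nothing (more) to append right now
        return;
    }
    buffering = true;
    fetchChunk(next, function (bytes) {
        current_index = next;
        sourceBuffer.appendBuffer(bytes); // "updateend" will call pump() again
    });
}
// Register once, right after addSourceBuffer():
// sourceBuffer.addEventListener("updateend", pump);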

I found a good example that solves this problem in a simple way.
I am using three static files here, but you can also append data from sockets or any API.
<!DOCTYPE html>
<html>
<head>
</head>
<body>
<br>
<video controls="true" autoplay="true"></video>
<script>
(async() => {
const mediaSource = new MediaSource();
const video = document.querySelector("video");
// video.oncanplay = e => video.play();
const urls = ["https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4", "https://raw.githubusercontent.com/w3c/web-platform-tests/master/media-source/mp4/test.mp4","https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4"];
const request = url => fetch(url).then(response => response.arrayBuffer());
// `urls.reverse()` stops at `.currentTime` : `9`
const files = await Promise.all(urls.map(request));
/*
`.webm` files
Uncaught DOMException: Failed to execute 'appendBuffer' on 'SourceBuffer': This SourceBuffer has been removed from the parent media source.
Uncaught DOMException: Failed to set the 'timestampOffset' property on 'SourceBuffer': This SourceBuffer has been removed from the parent media source.
*/
// const mimeCodec = "video/webm; codecs=opus";
// https://stackoverflow.com/questions/14108536/how-do-i-append-two-video-files-data-to-a-source-buffer-using-media-source-api/
const mimeCodec = "video/mp4; codecs=avc1.42E01E, mp4a.40.2";
const media = await Promise.all(files.map(file => {
return new Promise(resolve => {
let media = document.createElement("video");
let blobURL = URL.createObjectURL(new Blob([file]));
media.onloadedmetadata = async e => {
resolve({
mediaDuration: media.duration,
mediaBuffer: file
})
}
media.src = blobURL;
})
}));
console.log(media);
mediaSource.addEventListener("sourceopen", sourceOpen);
video.src = URL.createObjectURL(mediaSource);
async function sourceOpen(event) {
if (MediaSource.isTypeSupported(mimeCodec)) {
const sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
for (let chunk of media) {
await new Promise(resolve => {
sourceBuffer.appendBuffer(chunk.mediaBuffer);
sourceBuffer.onupdateend = e => {
sourceBuffer.onupdateend = null;
sourceBuffer.timestampOffset += chunk.mediaDuration;
console.log(mediaSource.duration);
resolve()
}
})
}
mediaSource.endOfStream();
}
else {
console.warn(mimeCodec + " not supported");
}
};
})()
</script>
</body>
</html>

It seems to be a codec issue combined with the method used for reading the data. When you receive a video blob, you need to convert/store it using a FileReader; this worked for me. For the best codec support I needed to use the VP8 codec (please let me know if there is a better one).
This is my working example, where I use a MediaRecorder to record the webcam and then feed the video blobs into a MediaSource.
const video1 = document.getElementById('video1');
const video2 = document.getElementById('video2');
const mediaSource = new MediaSource();
video2.src = URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', sourceOpen);
function sourceOpen(openargs) {
navigator.mediaDevices
.getUserMedia({ audio: false, video: true })
.then(function (stream) {
video1.srcObject = stream;
var options = { mimeType: 'video/webm; codecs=vp8' };
var mediaRecorder = new MediaRecorder(stream, options);
var sourceBuffer = null;
mediaRecorder.ondataavailable = function (e) {
if (sourceBuffer == null) {
sourceBuffer = mediaSource.addSourceBuffer(mediaRecorder.mimeType);
window.sourceBuffer = sourceBuffer;
}
var reader = new FileReader();
reader.addEventListener("loadend", function () {
var arr = new Uint8Array(reader.result);
sourceBuffer.appendBuffer(arr);
});
reader.readAsArrayBuffer(e.data);
};
mediaRecorder.start(5000);
});
}
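On newer browsers you can skip the FileReader, since Blob exposes an arrayBuffer() method; a small sketch of the same ondataavailable handler:
mediaRecorder.ondataavailable = function (e) {
    // Blob.arrayBuffer() resolves with the blob's bytes as an ArrayBuffer
    e.data.arrayBuffer().then(function (buf) {
        sourceBuffer.appendBuffer(new Uint8Array(buf));
    });
};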
video {
width: 320px;
height: 180px;
}
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title></title>
</head>
<body>
<video id="video1" controls autoplay muted></video><br />
<video id="video2" controls autoplay muted></video>
</body>
</html>

Related

XMLHttpRequest plain text to blob (video)

I managed to get a video from PHP using this code:
var some_video_element = document.querySelector('video')
var req = new XMLHttpRequest();
req.onload = function () {
var blob_uri = URL.createObjectURL(this.response);
some_video_element.src = blob_uri;
some_video_element.addEventListener('canplaythrough', (e) => {
URL.revokeObjectURL(blob_uri);
});
};
req.open("get", "vid.php", true);
req.responseType = 'blob';
req.send(null);
However, loading takes a long time, so I want to show the data as soon as I get it. The Mozilla documentation indicates we can use 'text/plain' (or leave responseType as "") to read the response text while the request is still in progress. However, I can't manage to convert text/plain to video/mp4 using a blob. Below is the code that doesn't work; it tries to show the video when some part is available while the rest is still downloading.
var some_video_element = document.querySelector('video')
var req = new XMLHttpRequest();
req.onprogress = function () {
var text = b64toBlob(Base64.encode(this.response), "video/mp4");
var blob_uri = URL.createObjectURL(text);
some_video_element.src = blob_uri;
some_video_element.addEventListener('canplaythrough', (e) => {
URL.revokeObjectURL(blob_uri);
});
};
req.onload = function () {
var text = b64toBlob(this.response, "video/mp4");
var blob_uri = URL.createObjectURL(text);
some_video_element.src = blob_uri;
some_video_element.addEventListener('canplaythrough', (e) => {
URL.revokeObjectURL(blob_uri);
});
};
req.open("get", "vid.php", true);
req.overrideMimeType('text\/plain');
req.send(null);
Thanks.
NB: This JavaScript fetches from this PHP code: https://codesamplez.com/programming/php-html5-video-streaming-tutorial
But echo data has been changed to echo base64_encode(data);
If you use the Fetch API instead of XMLHttpRequest you can consume the response as a ReadableStream which can be fed into a SourceBuffer. This will allow the video to be playable as soon as it starts to load instead of waiting for the full file to download. This does not require any special video container formats, back-end processing or third-party libraries.
const vid = document.getElementById('vid');
const format = 'video/webm; codecs="vp8,vorbis"';
const mediaSource = new MediaSource();
let sourceBuffer = null;
mediaSource.addEventListener('sourceopen', event => {
sourceBuffer = mediaSource.addSourceBuffer(format);
fetch('https://bes.works/dev/samples/test.webm')
.then(response => process(response.body.getReader()))
.catch(err => console.error(err));
});
vid.src = URL.createObjectURL(mediaSource);
function process(stream) {
return new Response(
new ReadableStream({
start(controller) {
async function read() {
let { done, value } = await stream.read();
if (done) { controller.close(); return; }
sourceBuffer.appendBuffer(value);
sourceBuffer.addEventListener(
'updateend', event => read(),
{ once: true }
);
}
read();
}
})
);
}
video { width: 300px; }
<video id="vid" controls></video>
As indicated in the comments, you are missing some essential components.
You can implement what you are asking for, but you need to make some changes. Building on the HTML5 Media Source Extensions API, you can create a stream that assembles the video from segments you fetch from the server.
Something to keep in mind is that the HLS and DASH protocols already exist for this; HLS in particular is worth a look, as it is simple to use and built around the same idea of segments, so it can reach out to your server and decode your base64'd feed.
https://videojs.github.io/videojs-contrib-hls/
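For example, with video.js plus the HLS plugin, the player takes care of fetching and buffering the segments for you. A rough sketch (the playlist URL is a placeholder for whatever your server exposes):
<video id="player" class="video-js" controls></video>
<script src="video.js"></script>
<script src="videojs-contrib-hls.min.js"></script>
<script>
    var player = videojs('player');
    player.src({
        src: 'https://example.com/lesson/stream.m3u8', // placeholder playlist URL
        type: 'application/x-mpegURL'
    });
</script>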

JavaScript MediaSource Example

I'm trying to create a video player using MediaSource, but I can't make it play while buffering new data. I have this code that downloads the full data and then plays it.
var vidElement = document.querySelector('video');
if (window.MediaSource) {
var mediaSource = new MediaSource();
vidElement.src = URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', sourceOpen);
} else {
console.log("The Media Source Extensions API is not supported.")
}
function sourceOpen(e) {
URL.revokeObjectURL(vidElement.src);
var mime = 'video/webm; codecs="opus, vp09.00.10.08"';
var mediaSource = e.target;
var sourceBuffer = mediaSource.addSourceBuffer(mime);
var videoUrl = 'droid.webm';
fetch(videoUrl)
.then(function(response) {
return response.arrayBuffer();
})
.then(function(arrayBuffer) {
sourceBuffer.addEventListener('updateend', function(e) {
if (!sourceBuffer.updating && mediaSource.readyState === 'open') {
mediaSource.endOfStream();
}
});
sourceBuffer.appendBuffer(arrayBuffer);
});
}
This code is not working on localhost. The MediaSource ends up closed, and this line never gets called:
mediaSource.endOfStream();
Can anyone tell me why the state is closed, please?
Any help with creating a player like YouTube, or any open-source example, would be appreciated.
By the way, I have tried a lot of code and sources for two days now, and the MediaSource always gives me errors such as the source being removed or not linked.
For starters, note that document.querySelector('video') on your first line returns a single element (or null), not a collection; it is document.querySelectorAll('video') that returns a NodeList you would index with [0]. So that line is fine as written; just make sure the element actually exists before you start operating on it.
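For reference, a short illustration of the difference:
var single = document.querySelector('video');   // first matching element, or null
var list = document.querySelectorAll('video');  // NodeList of all matches
var first = list[0];                            // the same element as `single`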

AngularJS - Decode Byte Array and Play Audio File (Wav/MP3)

Using the service below to play an audio file (wav/mp3) that comes in byte array format.
myAudioService.getAudioTone(userid).then(function (data) {
var context; // Audio context
var buf; // Audio buffer
$window.AudioContext = $window.webkitAudioContext;
context = new AudioContext();
$timeout(function () {
$scope.playByteArray = function(){
var arrayBuffer = new ArrayBuffer(data.length);
var bufferView = new Uint8Array(arrayBuffer);
for (i = 0; i < data.length; i++) {
bufferView[i] = data[i];
}
context.decodeAudioData(arrayBuffer, function(buffer) {
buf = buffer;
play();
});
}
$scope.play = function(audioBuffer){
// Create a source node from the buffer
var source = context.createBufferSource();
source.buffer = buf;
// Connect to the final output node (the speakers)
source.connect(context.destination);
// Play immediately
source.start(0);
}
if(data.length !== '' || data !== ''){
$scope.playByteArray();
}
}, 3000);
});
The functions are called, but it throws the exception below.
Uncaught (in promise) DOMException: Unable to decode audio data
How do I make it run in Chrome, FF and IE?
P.S. $window and $timeout are already defined in controller.
Based on the error message, arrayBuffer doesn't contain what you think it contains. You should verify that the bytes in the array are the same as those of the encoded wav/mp3 file.
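One quick sanity check is to inspect the leading bytes, since a WAV file starts with the ASCII tag "RIFF" and an MP3 with an "ID3" tag or an 0xFF frame-sync byte. A sketch, assuming data is the byte array returned by the service:
// Sniff the first bytes to confirm the payload really is encoded audio
// and not, say, base64 text or JSON.
function sniffAudio(bytes) {
    var head = String.fromCharCode(bytes[0], bytes[1], bytes[2], bytes[3]);
    if (head.slice(0, 4) === "RIFF") return "wav";
    if (head.slice(0, 3) === "ID3") return "mp3 (ID3 tag)";
    if (bytes[0] === 0xFF && (bytes[1] & 0xE0) === 0xE0) return "mp3 (frame sync)";
    return "unknown, first bytes: " + head;
}
console.log(sniffAudio(data));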

Can't seek video when playing from MediaSource

I can play an mp4 video by requesting chunks of data using GET requests with a Range header.
var FILE = 'Momokuri_Ep_09-10_SUB_ITA_dashinit.mp4';
var NUM_CHUNKS = 10;
var chunk_size = 256 * 1024; // 256 KB
var current_chunk = 0;
var file_size = 1;
window.MediaSource = window.MediaSource || window.WebKitMediaSource;
if (!!!window.MediaSource) {
alert('MediaSource API is not available');
}
var mediaSource = new MediaSource();
var sourceBuffer;
video.src = window.URL.createObjectURL(mediaSource);
function callback(e) {
sourceBuffer = mediaSource.addSourceBuffer('video/mp4; codecs="avc1.640029, mp4a.40.5"');
console.log('mediaSource readyState: ' + this.readyState);
var readChunk = function() {
GET(FILE, current_chunk, function(uInt8Array) {
sourceBuffer.appendBuffer(uInt8Array);
});
};
sourceBuffer.addEventListener('update', function(e) {
if (!sourceBuffer.updating) {
if (current_chunk == Math.ceil(file_size/chunk_size)-1) {
if ( mediaSource.readyState!='ended' )
mediaSource.endOfStream();
} else {
current_chunk++;
readChunk();
if (video.paused) {
video.play();
}
}
}
});
readChunk();
}
mediaSource.addEventListener('sourceopen', callback, false);
mediaSource.addEventListener('webkitsourceopen', callback, false);
mediaSource.addEventListener('webkitsourceended', function(e) {
console.log('mediaSource readyState: ' + this.readyState);
}, false);
function GET(url, chunk_index, callback) {
var xhr = new XMLHttpRequest();
xhr.open('GET', url, true);
xhr.setRequestHeader('Range', 'bytes='+(chunk_index*chunk_size)+'-'+(++chunk_index*chunk_size-1));
xhr.responseType = 'arraybuffer';
xhr.send();
xhr.onload = function(e) {
if (xhr.status != 200 && xhr.status != 206) {
alert("Unexpected status code " + xhr.status + " for " + url);
return false;
}
file_size = parseInt(this.getResponseHeader('content-range').split("/").pop());
callback(new Uint8Array(xhr.response));
};
}
But I can't seek the video. Can anyone tell me how to solve these problems:
When I seek the video, I can get video.currentTime (say 2.5); how do I convert it to a byte-range request (i.e., how do I get the byte offset)?
Once I have the correct offset and have loaded the data from the Range GET request, how can I append it to the sourceBuffer at the right offset?
Thanks
I've been looking for the solution to this myself, and I think I found it.
Have a look at this example.
Whenever a seeking event is emitted from the video element, indicating the user has requested a seek, the pending append on the old SourceBuffer is aborted using sourceBuffer.abort();.
The mediasource then emits a new sourceopen event which allows you to create a new sourcebuffer the same way you did the first time, but this time instead of appending data from the start of the file, you append data from an offset corresponding to the videoElem.currentTime.
How you turn a time offset into a byte offset seems to be left up to you, as it depends on the format of the media you're playing.
In a constant bitrate file, you might be able to get away with basically dividing the file length in bytes by the video length in seconds (and adding a little safety margin). For anything else, you will probably need to parse the file and get timestamps and byte offsets of keyframes.
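Putting that together, here is a rough sketch for the constant-bitrate case, reusing the variables from the question (the CBR mapping is an approximation; for anything else you would parse the file's index for keyframe byte offsets):
// Rough CBR seek: map currentTime to a byte offset, reset the
// SourceBuffer's parser state, then resume fetching from that chunk.
video.addEventListener('seeking', function () {
    if (mediaSource.readyState !== 'open' || !video.duration) return;
    sourceBuffer.abort();                            // drop any pending append
    var bytesPerSecond = file_size / video.duration; // CBR assumption
    var byteOffset = Math.floor(video.currentTime * bytesPerSecond);
    current_chunk = Math.floor(byteOffset / chunk_size);
    sourceBuffer.timestampOffset = video.currentTime; // align appended data
    readChunk();
});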

Web Audio Api - Download edited MP3

I'm currently editing my mp3 file with multiple effects, like so:
var mainVerse = document.getElementById('audio1');
var s = source;
source.disconnect(audioCtx.destination);
for (var i in filters1) {
s.connect(filters1[i]);
s = filters1[i];
}
s.connect(audioCtx.destination);
The mp3 plays on the web with the filters applied. Is it possible to create and download a new mp3 file with these effects, using the Web Audio API or any JavaScript library that can write to an mp3 container? If not, what's the best way to solve this on the web?
UPDATE - Using OfflineAudioContext
Using the sample code from https://developer.mozilla.org/en-US/docs/Web/API/OfflineAudioContext/oncomplete
I've tried using the offline context like so:
var audioCtx = new AudioContext();
var offlineCtx = new OfflineAudioContext(2,44100*40,44100);
var osource = offlineCtx.createBufferSource();
function getData() {
request = new XMLHttpRequest();
request.open('GET', 'Song1.mp3', true);
request.responseType = 'arraybuffer';
request.onload = function() {
var audioData = request.response;
audioCtx.decodeAudioData(audioData, function(buffer) {
myBuffer = buffer;
osource.buffer = myBuffer;
osource.connect(offlineCtx.destination);
osource.start();
//source.loop = true;
offlineCtx.startRendering().then(function(renderedBuffer) {
console.log('Rendering completed successfully');
var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
var song = audioCtx.createBufferSource();
song.buffer = renderedBuffer;
song.connect(audioCtx.destination);
song.start();
rec = new Recorder(song, {
workerPath: 'Recorderjs/recorderWorker.js'
});
rec.exportWAV(function(e){
rec.clear();
Recorder.forceDownload(e, "filename.wav");
});
}).catch(function(err) {
console.log('Rendering failed: ' + err);
// Note: The promise should reject when startRendering is called a second time on an OfflineAudioContext
});
});
}
request.send();
}
// Run getData to start the process off
getData();
The recorder is still downloading an empty file. I'm using the song source as the source for the recorder; the song plays fine with this code, but the recorder doesn't capture it.
Use https://github.com/mattdiamond/Recorderjs to record a .wav file. Then use https://github.com/akrennmair/libmp3lame-js to encode it to .mp3.
There's a nifty guide here, if you need a hand: http://audior.ec/blog/recording-mp3-using-only-html5-and-javascript-recordmp3-js/
UPDATE
Try moving
rec = new Recorder(song, {
workerPath: 'Recorderjs/recorderWorker.js'
});
so that it is located above the call to start rendering, and connect it to osource instead, like so:
rec = new Recorder(osource, {
workerPath: 'Recorderjs/recorderWorker.js'
});
osource.connect(offlineCtx.destination);
osource.start();
offlineCtx.startRendering().then(function(renderedBuffer) {
.....
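Alternatively, if you want to avoid Recorder.js altogether, you can serialize the renderedBuffer you get from startRendering() into a WAV blob yourself and offer that for download (WAV rather than mp3; you could still pass the result through an mp3 encoder such as libmp3lame-js afterwards). A minimal 16-bit PCM sketch:
// Serialize an AudioBuffer to a 16-bit PCM WAV Blob.
function audioBufferToWav(buffer) {
    var numCh = buffer.numberOfChannels,
        rate = buffer.sampleRate,
        frames = buffer.length,
        dataSize = frames * numCh * 2,
        view = new DataView(new ArrayBuffer(44 + dataSize)),
        offset = 0;
    function writeStr(s) {
        for (var i = 0; i < s.length; i++) view.setUint8(offset++, s.charCodeAt(i));
    }
    writeStr('RIFF'); view.setUint32(offset, 36 + dataSize, true); offset += 4;
    writeStr('WAVE'); writeStr('fmt ');
    view.setUint32(offset, 16, true); offset += 4;                // fmt chunk size
    view.setUint16(offset, 1, true); offset += 2;                 // PCM format
    view.setUint16(offset, numCh, true); offset += 2;             // channel count
    view.setUint32(offset, rate, true); offset += 4;              // sample rate
    view.setUint32(offset, rate * numCh * 2, true); offset += 4;  // byte rate
    view.setUint16(offset, numCh * 2, true); offset += 2;         // block align
    view.setUint16(offset, 16, true); offset += 2;                // bits per sample
    writeStr('data'); view.setUint32(offset, dataSize, true); offset += 4;
    for (var i = 0; i < frames; i++) {
        for (var ch = 0; ch < numCh; ch++) {
            var s = Math.max(-1, Math.min(1, buffer.getChannelData(ch)[i]));
            view.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
            offset += 2;
        }
    }
    return new Blob([view], { type: 'audio/wav' });
}
// Usage inside the startRendering().then(...) callback:
// var a = document.createElement('a');
// a.href = URL.createObjectURL(audioBufferToWav(renderedBuffer));
// a.download = 'filename.wav';
// a.click();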
