Why does this JavaScript event never fire? - javascript

I'm trying to follow a tutorial on the RTCPeerConnection API, which enables sharing audio, video, and data between browsers.
Here is the code that works:
function call() {
  callButton.disabled = true;
  hangupButton.disabled = false;
  trace("Starting call");
  if (localStream.getVideoTracks().length > 0) {
    trace('Using video device: ' + localStream.getVideoTracks()[0].label);
  }
  if (localStream.getAudioTracks().length > 0) {
    trace('Using audio device: ' + localStream.getAudioTracks()[0].label);
  }
  var servers = null;
  localPeerConnection = new mozRTCPeerConnection(servers);
  console.log(localPeerConnection);
  trace("Created local peer connection object localPeerConnection");
  localPeerConnection.onicecandidate = gotLocalIceCandidate;
  remotePeerConnection = new mozRTCPeerConnection(servers);
  trace("Created remote peer connection object remotePeerConnection");
  remotePeerConnection.onicecandidate = gotRemoteIceCandidate;
  remotePeerConnection.onaddstream = gotRemoteStream;
  localPeerConnection.addStream(localStream);
  trace("Added localStream to localPeerConnection");
  localPeerConnection.createOffer(gotLocalDescription);
}
Here is my code that does not work:
Visio.prototype.call = function(visio){
  callButton.disabled = true;
  hangupButton.disabled = false;
  console.log("Starting call...");
  if (visio.localStream.getVideoTracks().length > 0) {
    visio.trace('Using video device: ' + visio.localStream.getVideoTracks()[0].label);
  }
  if (visio.localStream.getAudioTracks().length > 0) {
    visio.trace('Using audio device: ' + visio.localStream.getAudioTracks()[0].label);
  }
  var servers = null;
  visio.localPeerConnection = new RTCPeerConnection(servers);
  console.log(visio.localPeerConnection);
  console.log("Created local peer connection object localPeerConnection");
  visio.localPeerConnection.onicecandidate = visio.gotLocalIceCandidate;
  visio.remotePeerConnection = new RTCPeerConnection(servers);
  console.log("Created remote peer connection object remotePeerConnection");
  visio.remotePeerConnection.onicecandidate = visio.gotRemoteIceCandidate;
  visio.remotePeerConnection.onaddstream = visio.gotRemoteStream;
  visio.localPeerConnection.addStream(visio.localStream);
  console.log("Added localStream to localPeerConnection");
  visio.localPeerConnection.createOffer(visio.gotLocalDescription);
};
And here is my call to the call method:
callButton.onclick = function(){that.call(that);};
As the title says, the onicecandidate and onaddstream events never fire, and I do not know why.
Any idea?
OK, I modified my function, but it still doesn't work :(
This is the full code:
Visio.js
if (window.mozRTCPeerConnection) {
  RTCPeerConnection = mozRTCPeerConnection;
} else if (window.webkitRTCPeerConnection) {
  RTCPeerConnection = webkitRTCPeerConnection;
} else if (window.oRTCPeerConnection) {
  RTCPeerConnection = oRTCPeerConnection;
} else {
  alert("Your browser does not support the RTCPeerConnection API");
}
$(document).ready(function(){
  new Visio();
});
function Visio() {
  this.localStream = null;
  this.localPeerConnection = null;
  this.remotePeerConnection = null;
  this.cam = new Cam();
  //var localVideo = document.getElementById("localVideo");
  var remoteVideo = document.getElementById("remoteVideo");
  var startButton = document.getElementById("startButton");
  var callButton = document.getElementById("callButton");
  var hangupButton = document.getElementById("hangupButton");
  startButton.disabled = false;
  callButton.disabled = true;
  hangupButton.disabled = true;
  var that = this;
  startButton.onclick = function(){that.start();};
  callButton.onclick = function () { that.startCall(); };
  hangupButton.onclick = function(){that.hangup();};
};
Visio.prototype.trace = function(text) {
  console.log((performance.now() / 1000).toFixed(3) + ": " + text);
};
Visio.prototype.start = function(){
  startButton.disabled = true;
  this.cam.start();
  //Waiting for stream.
  var that = this;
  var id = setInterval(function(){
    console.log("Getting Stream...");
    that.localStream = that.cam.stream;
    if(that.localStream !== null){
      console.log("Stream Ok!");
      callButton.disabled = false;
      clearInterval(id);
    }
  },500);
};
Visio.prototype.startCall = function () {
  callButton.disabled = true;
  hangupButton.disabled = false;
  console.log("Starting call...");
  if (this.localStream.getVideoTracks().length > 0) {
    this.trace('Using video device: ' + this.localStream.getVideoTracks()[0].label);
  }
  if (this.localStream.getAudioTracks().length > 0) {
    this.trace('Using audio device: ' + this.localStream.getAudioTracks()[0].label);
  }
  var servers = null;
  this.localPeerConnection = new RTCPeerConnection(servers);
  console.log("Created local peer connection object localPeerConnection");
  this.localPeerConnection.onicecandidate = this.gotLocalIceCandidate;
  this.remotePeerConnection = new RTCPeerConnection(servers);
  console.log("Created remote peer connection object remotePeerConnection");
  this.remotePeerConnection.onicecandidate = this.gotRemoteIceCandidate;
  this.remotePeerConnection.onaddstream = this.gotRemoteStream;
  this.localPeerConnection.addStream(this.localStream);
  console.log("Added localStream to localPeerConnection");
  this.localPeerConnection.createOffer(this.gotLocalDescription);
};
Visio.prototype.gotLocalDescription = function(description){
  this.localPeerConnection.setLocalDescription(description);
  console.log("Offer from localPeerConnection: \n" + description.sdp);
  this.remotePeerConnection.setRemoteDescription(description);
  this.remotePeerConnection.createAnswer(this.gotRemoteDescription);
};
Visio.prototype.gotRemoteDescription = function(description){
  this.remotePeerConnection.setLocalDescription(description);
  console.log("Answer from remotePeerConnection: \n" + description.sdp);
  this.localPeerConnection.setRemoteDescription(description);
};
Visio.prototype.gotRemoteStream = function(event){
  remoteVideo.src = URL.createObjectURL(event.stream);
  trace("Received remote stream");
};
Visio.prototype.gotLocalIceCandidate = function(event){
  if (event.candidate) {
    remotePeerConnection.addIceCandidate(new RTCIceCandidate(event.candidate));
    trace("Local ICE candidate: \n" + event.candidate.candidate);
  }
};
Visio.prototype.gotRemoteIceCandidate = function(event){
  if (event.candidate) {
    localPeerConnection.addIceCandidate(new RTCIceCandidate(event.candidate));
    trace("Remote ICE candidate: \n " + event.candidate.candidate);
  }
};
Visio.prototype.hangup = function() {
  console.log("Ending call");
  this.localPeerConnection.close();
  this.remotePeerConnection.close();
  this.localPeerConnection = null;
  this.remotePeerConnection = null;
  hangupButton.disabled = true;
  callButton.disabled = false;
};
And cam.js
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
function Cam(){
  this.video = null;
  this.stream = null;
  navigator.getUserMedia ||
    (navigator.getUserMedia = navigator.mozGetUserMedia ||
      navigator.webkitGetUserMedia || navigator.msGetUserMedia);
  if (!navigator.getUserMedia) {
    alert('getUserMedia is not supported in this browser.');
  }
};
Cam.prototype.start = function(){
  var that = this;
  navigator.getUserMedia({
    video: true,
    audio: true
  }, function(stream){that.onSuccess(stream);}, that.onError);
};
Cam.prototype.stop = function(){
  this.stopVideo();
  this.stopSound();
};
Cam.prototype.startSound = function(){
};
Cam.prototype.stopSound = function(){
};
Cam.prototype.startVideo = function(){
};
Cam.prototype.stopVideo = function(){
};
Cam.prototype.onSuccess = function(stream){
  var source = document.getElementById('localVideo');
  var videoSource = null;
  this.stream = stream;
  if (window.webkitURL) {
    videoSource = window.webkitURL.createObjectURL(stream);
  } else {
    videoSource = window.URL.createObjectURL(stream);
  }
  source.autoplay = true;
  source.src = videoSource;
  this.video = source;
};
Cam.prototype.onError = function(err) {
  alert('There has been a problem retrieving the streams - did you allow access? ' + err);
};
Maybe it can help you :).

When you build a constructor function,
var Visio = function () {
  // constructor
};
and add a prototype function,
Visio.prototype.startCall = function () {
  // this === the new Visio object
};
you can use it like this:
var o = new Visio();
o.startCall();
Now, inside your prototype function, this is the object o, which is an instance of Visio.
Confusingly, you named your method call. That name is also used by the built-in Function.prototype.call, with which you can override this.
Example:
Visio.prototype.myThisOverride = function () {
  console.log(this);
};
Now, when you invoke it through Function.prototype.call, you see the console log print "Hello World", because this === 'Hello World':
var o = new Visio();
o.myThisOverride.call("Hello World");
So coming back to your code, you most likely have to rewrite it to:
Visio.prototype.startCall = function () {
  callButton.disabled = true;
  hangupButton.disabled = false;
  console.log("Starting call...");
  if (this.localStream.getVideoTracks().length > 0) {
    this.trace('Using video device: ' + this.localStream.getVideoTracks()[0].label);
  }
  if (this.localStream.getAudioTracks().length > 0) {
    this.trace('Using audio device: ' + this.localStream.getAudioTracks()[0].label);
  }
  var servers = null;
  this.localPeerConnection = new RTCPeerConnection(servers);
  console.log(this.localPeerConnection);
  console.log("Created local peer connection object localPeerConnection");
  this.localPeerConnection.onicecandidate = this.gotLocalIceCandidate;
  this.remotePeerConnection = new RTCPeerConnection(servers);
  console.log("Created remote peer connection object remotePeerConnection");
  this.remotePeerConnection.onicecandidate = this.gotRemoteIceCandidate;
  this.remotePeerConnection.onaddstream = this.gotRemoteStream;
  this.localPeerConnection.addStream(this.localStream);
  console.log("Added localStream to localPeerConnection");
  this.localPeerConnection.createOffer(this.gotLocalDescription);
};
And use it like this:
var theVisio = new Visio();
callButton.onclick = function () { theVisio.startCall(); };
// which is the same as
callButton.onclick = function () { theVisio.startCall.call(theVisio); };
Or if you defined it like this inside the constructor:
var that = this;
callButton.onclick = function () { that.startCall(); };
// which is the same as
callButton.onclick = function () { that.startCall.call(that); };
That said, please treat the names call and apply with respect, because understanding the this keyword isn't the easiest thing in JavaScript! ;)
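One more pitfall worth flagging (my addition, based on the code shown above rather than on the original answer): handlers like this.gotLocalIceCandidate are later invoked by the browser without your Visio instance as this, so inside them this.remotePeerConnection and friends will be undefined. A minimal sketch of pinning this with Function.prototype.bind:
Visio.prototype.startCall = function () {
  // ...same setup as above...
  // bind(this) returns a copy of the handler whose `this` is
  // permanently the Visio instance, no matter who calls it
  this.localPeerConnection.onicecandidate = this.gotLocalIceCandidate.bind(this);
  this.remotePeerConnection.onicecandidate = this.gotRemoteIceCandidate.bind(this);
  this.remotePeerConnection.onaddstream = this.gotRemoteStream.bind(this);
  this.localPeerConnection.createOffer(this.gotLocalDescription.bind(this));
};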

Related

where to add my websocket code in javascript

I am very new to JavaScript; I know some basics but have not yet completely understood the logic behind it (so far I have only worked with Python and a little bit of VBA).
For uni I have to build a browser interface to record audio and transfer it to a server where a speech-to-text application runs. I found some open-source code here (https://github.com/mdn/dom-examples/blob/main/media/web-dictaphone/scripts/app.js) which I wanted to use, but it is missing the WebSocket part, and I don't know where exactly to insert that. So far I have this:
Code of the web dictaphone:
// set up basic variables for app
const record = document.querySelector('.record');
const stop = document.querySelector('.stop');
const soundClips = document.querySelector('.sound-clips');
const canvas = document.querySelector('.visualizer');
const mainSection = document.querySelector('.main-controls');
// disable stop button while not recording
stop.disabled = true;
// visualiser setup - create web audio api context and canvas
let audioCtx;
const canvasCtx = canvas.getContext("2d");
// main block for doing the audio recording
if (navigator.mediaDevices.getUserMedia) {
  console.log('getUserMedia supported.');
  const constraints = { audio: true };
  let chunks = [];
  let onSuccess = function(stream) {
    const mediaRecorder = new MediaRecorder(stream);
    visualize(stream);
    record.onclick = function() {
      mediaRecorder.start();
      console.log(mediaRecorder.state);
      console.log("recorder started");
      record.style.background = "red";
      stop.disabled = false;
      record.disabled = true;
    }
    stop.onclick = function() {
      mediaRecorder.stop();
      console.log(mediaRecorder.state);
      console.log("recorder stopped");
      record.style.background = "";
      record.style.color = "";
      // mediaRecorder.requestData();
      stop.disabled = true;
      record.disabled = false;
    }
    mediaRecorder.onstop = function(e) {
      console.log("data available after MediaRecorder.stop() called.");
      const clipName = prompt('Enter a name for your sound clip?','My unnamed clip');
      const clipContainer = document.createElement('article');
      const clipLabel = document.createElement('p');
      const audio = document.createElement('audio');
      const deleteButton = document.createElement('button');
      clipContainer.classList.add('clip');
      audio.setAttribute('controls', '');
      deleteButton.textContent = 'Delete';
      deleteButton.className = 'delete';
      if(clipName === null) {
        clipLabel.textContent = 'My unnamed clip';
      } else {
        clipLabel.textContent = clipName;
      }
      clipContainer.appendChild(audio);
      clipContainer.appendChild(clipLabel);
      clipContainer.appendChild(deleteButton);
      soundClips.appendChild(clipContainer);
      audio.controls = true;
      const blob = new Blob(chunks, { 'type' : 'audio/ogg; codecs=opus' });
      chunks = [];
      const audioURL = window.URL.createObjectURL(blob);
      audio.src = audioURL;
      console.log("recorder stopped");
      deleteButton.onclick = function(e) {
        e.target.closest(".clip").remove();
      }
      clipLabel.onclick = function() {
        const existingName = clipLabel.textContent;
        const newClipName = prompt('Enter a new name for your sound clip?');
        if(newClipName === null) {
          clipLabel.textContent = existingName;
        } else {
          clipLabel.textContent = newClipName;
        }
      }
    }
    mediaRecorder.ondataavailable = function(e) {
      chunks.push(e.data);
    }
  }
  let onError = function(err) {
    console.log('The following error occured: ' + err);
  }
  navigator.mediaDevices.getUserMedia(constraints).then(onSuccess, onError);
} else {
  console.log('getUserMedia not supported on your browser!');
}
websocket part (client side):
window.addEventListener("DOMContentLoaded", () => {
  // Open the WebSocket connection and register event handlers.
  console.log('DOMContentLoaded done');
  const ws = new WebSocket("ws://localhost:8001/"); // temp moved to mediarecorder.onstop
  dataToBeSent = function (data) {
    ws.send(data);
  };
  console.log('ws is defined');
});
Right now I just stacked both parts on top of each other, but this doesn't work since, as I found out, you can only use a variable (such as ws) within the block where it is defined. This leads to an error saying that ws is not defined when I call the sending function within the if statement.
I already looked for tutorials for hours, but none that I found covered this topic. I also tried moving the WebSocket part into the if statement, but that - unsurprisingly - did not work either, at least not in the way I tried.
I feel like my problem lies in understanding how to define the WebSocket so I can call it within the if statement, or figuring out a way to get the audio to somewhere ws is considered defined. Unfortunately I just can't get past it and have already invested days, which has become really frustrating.
I appreciate any help. If you have any ideas what I could change or move in the code, or maybe just know a tutorial that could help, I'd be really grateful.
Thanks in advance!
You don't need that window.addEventListener("DOMContentLoaded", () => { ... }) wrapper:
const ws = new WebSocket("ws://localhost:8001/"); // temp moved to mediarecorder.onstop
dataToBeSent = function (data) {
  ws.send(data);
};
const record = document.querySelector(".record");
const stop = document.querySelector(".stop");
const soundClips = document.querySelector(".sound-clips");
const canvas = document.querySelector(".visualizer");
const mainSection = document.querySelector(".main-controls");
// disable stop button while not recording
stop.disabled = true;
// visualiser setup - create web audio api context and canvas
let audioCtx;
const canvasCtx = canvas.getContext("2d");
// main block for doing the audio recording
if (navigator.mediaDevices.getUserMedia) {
  console.log("getUserMedia supported.");
  const constraints = { audio: true };
  let chunks = [];
  let onSuccess = function (stream) {
    const mediaRecorder = new MediaRecorder(stream);
    visualize(stream);
    record.onclick = function () {
      mediaRecorder.start();
      console.log(mediaRecorder.state);
      console.log("recorder started");
      record.style.background = "red";
      stop.disabled = false;
      record.disabled = true;
    };
    stop.onclick = function () {
      mediaRecorder.stop();
      console.log(mediaRecorder.state);
      console.log("recorder stopped");
      record.style.background = "";
      record.style.color = "";
      // mediaRecorder.requestData();
      stop.disabled = true;
      record.disabled = false;
    };
    mediaRecorder.onstop = function (e) {
      console.log("data available after MediaRecorder.stop() called.");
      const clipName = prompt(
        "Enter a name for your sound clip?",
        "My unnamed clip"
      );
      const clipContainer = document.createElement("article");
      const clipLabel = document.createElement("p");
      const audio = document.createElement("audio");
      const deleteButton = document.createElement("button");
      clipContainer.classList.add("clip");
      audio.setAttribute("controls", "");
      deleteButton.textContent = "Delete";
      deleteButton.className = "delete";
      if (clipName === null) {
        clipLabel.textContent = "My unnamed clip";
      } else {
        clipLabel.textContent = clipName;
      }
      clipContainer.appendChild(audio);
      clipContainer.appendChild(clipLabel);
      clipContainer.appendChild(deleteButton);
      soundClips.appendChild(clipContainer);
      audio.controls = true;
      const blob = new Blob(chunks, { type: "audio/ogg; codecs=opus" });
      chunks = [];
      const audioURL = window.URL.createObjectURL(blob);
      audio.src = audioURL;
      console.log("recorder stopped");
      deleteButton.onclick = function (e) {
        e.target.closest(".clip").remove();
      };
      clipLabel.onclick = function () {
        const existingName = clipLabel.textContent;
        const newClipName = prompt("Enter a new name for your sound clip?");
        if (newClipName === null) {
          clipLabel.textContent = existingName;
        } else {
          clipLabel.textContent = newClipName;
        }
      };
    };
    mediaRecorder.ondataavailable = function (e) {
      chunks.push(e.data);
    };
  };
  let onError = function (err) {
    console.log("The following error occured: " + err);
  };
  navigator.mediaDevices.getUserMedia(constraints).then(onSuccess, onError);
} else {
  console.log("getUserMedia not supported on your browser!");
}
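To actually ship a recording to the server, one option (my sketch, not part of the original answer; it assumes your server accepts binary WebSocket frames) is to call ws.send once the blob is assembled inside mediaRecorder.onstop:
mediaRecorder.onstop = function (e) {
  // ...build the blob exactly as above...
  const blob = new Blob(chunks, { type: "audio/ogg; codecs=opus" });
  chunks = [];
  // guard against the socket not being open yet
  if (ws.readyState === WebSocket.OPEN) {
    ws.send(blob);
  } else {
    console.log("WebSocket not open, clip not sent");
  }
};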

how to pause and resume voice recording in android using cordova media plugin

I'm using AngularJS in Visual Studio. Using the Cordova media plugin, startRecord() and stopRecord() are working, but I'm not able to pause and resume recording. I'm not using the media capture plugin, as I don't have a default recorder installed.
This is my code:
var audurl = '///storage/emulated/0/New/';
audurl += 'Voice_' + '.amr';
var mediaRec;
function recordAudio() {
  mediaRec = new Media(audurl, onSuccess, onError);
  mediaRec.startRecord();
}
function pauseAudio() {
  mediaRec = new Media(audurl, onSuccess, onError);
  mediaRec.pauseRecord();
}
thanks...
On my phone the method media.resumeRecord was not available, although it is defined in this source code. Nevertheless, you can take advantage of the other methods, like startRecord and stopRecord, to rebuild a kind of resumeRecord function, as is done in the handler below:
var myRecordHandler = function () {
  // ALL RECORDED FILES ARE SAVED IN THIS ARRAY
  var recordedAudioFiles = [];
  // REMEMBER POSITION WHEN PLAYING IS STOPPED
  var currentPosition = {index:0,shift:0};
  // PAUSE-MODE
  var paused = false;
  // SET A SPECIFIC DIRECTORY WHERE THE FILES ARE STORED INTO
  // DEFAULT: ''
  this.setDirectory = function(dir) {this.dir=dir;};
  // SET FILENAME
  // DEFAULT: recoredFilesX
  this.setFilename = function(filename) {this.filename=filename;};
  // SET MIME/TYPE OF THE FILES
  // DEFAULT: mp3
  this.setFileType = function(type) {this.filetype=type;};
  // GET ALL RECORDED FILES
  this.getAllFiles = function() {return recordedAudioFiles;};
  // STOP/PAUSE RECORDED FILES
  var handleRecordedFileHold = function () {
    for (var r = 0; r < recordedAudioFiles.length; r++) {
      var recordedAudioFile = recordedAudioFiles[r];
      if(recordedAudioFile.isBeingRecorded){
        if(paused)recordedAudioFile.media.pause();
        else recordedAudioFile.media.stop();
        continue;
      }
      recordedAudioFile.duration = new Date().getTime() - recordedAudioFile.startTime;
      // call release to free this created file so that it could get deleted for instance
      recordedAudioFile.media.stopRecord();
      recordedAudioFile.media.release();
      recordedAudioFile.isBeingRecorded = true;
    }
  }
  // START RECORDING
  this.startAudioRecording = function() {
    paused = false;
    handleRecordedFileHold();
    var dir = this.dir ? this.dir : '';
    var filename = this.filename ? this.filename : 'recoredFiles';
    var type = this.filetype ? this.filetype : 'mp3';
    var src = dir + filename + (recordedAudioFiles.length + 1) + '.' + type;
    var mediaRec = new Media(src,
      function () {
        console.log('recordAudio():Audio Success');
      },
      function (err) {
        console.log('recordAudio():Audio Error: ' + err.code);
      });
    recordedAudioFiles.push({
      media: mediaRec,
      startTime: new Date().getTime()
    });
    mediaRec.startRecord();
  }
  // PAUSE RECORDING
  this.pauseRecoredFiles = function () {
    if(recordedAudioFiles.length){
      paused = true;
      clearTimeout(currentPosition.timeout);
      handleRecordedFileHold();
      var recoredMedia = recordedAudioFiles[currentPosition.index].media;
      recoredMedia.getCurrentPosition(
        function (position) {
          currentPosition.shift = position;
        },
        function (e) {
          console.log("Error getting pos=" + e);
        }
      );
    }
  }
  // PLAY RECORD
  this.playRecordedFiles = function () {
    handleRecordedFileHold();
    var playNextFile = function () {
      var recoredMedia = recordedAudioFiles[currentPosition.index];
      if (recoredMedia) {
        if(paused){
          recoredMedia.media.seekTo(currentPosition.shift*1000);
          paused = false;
        }
        recoredMedia.media.play();
        currentPosition.timeout = setTimeout(function () {
          currentPosition.index++;
          recoredMedia.media.stop();
          playNextFile();
        }, recoredMedia.duration ? recoredMedia.duration : 0);
      }
      else{
        paused = false;
        currentPosition.index = currentPosition.shift = 0;
      }
    };
    playNextFile();
  }
  // RESET PLAY
  this.stopRecordedFiles = function () {
    currentPosition.index = currentPosition.shift = 0;
    clearTimeout(currentPosition.timeout);
    handleRecordedFileHold();
  }
  // REMOVE ALL RECORDED FILES
  this.removeRecordedFiles = function() {
    paused = false;
    currentPosition.index = currentPosition.shift = 0;
    clearTimeout(currentPosition.timeout);
    handleRecordedFileHold();
    recordedAudioFiles = [];
  }
};
var handler = new myRecordHandler();
// you can use this handler in your functions like this:
function recordAudio() {
  // records one track and stops the former track if there is one
  handler.startAudioRecording();
}
function playAudio() {
  handler.playRecordedFiles();
}
function pauseAudio() {
  handler.pauseRecoredFiles();
}
function resumeAudio() {
  pauseAudio();
  recordAudio();
}
function stopAudio() {
  handler.stopRecordedFiles();
}
Although I could not test your directory/filenames, because I do not have this directory created on my phone, these methods might help you to store your files in a specific directory as well as with certain filenames:
handler.setDirectory('__YOUR_DIR__');
handler.setFilename('__YOUR_FILENAME__');
handler.setFileType('__YOUR_FILETYPE__');
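If a given device does support the native pause, you could prefer it and only fall back to the handler above (a sketch; whether pauseRecord exists depends on your cordova-plugin-media version and platform):
function pauseAudio() {
  // hypothetical guard: use the built-in pauseRecord when the plugin provides it
  if (mediaRec && typeof mediaRec.pauseRecord === 'function') {
    mediaRec.pauseRecord();
  } else {
    handler.pauseRecoredFiles(); // segment-based fallback from above
  }
}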

How to send a blob obtained from getUserMedia() to socket.io, then send it back to the client and put it in a video element

This is the code:
var mediaSource = new MediaSource();
mediaSource.addEventListener('sourceopen', handleSourceOpen, false);
var mediaRecorder;
var recordedBlobs;
var sourceBuffer;
var socket = io();
var recordedVideo = document.querySelector('video#recorded');
var gumVideo = document.querySelector('video#gum');
var translateButton = document.querySelector('button#record');
translateButton.onclick = startTranslate;
recordedVideo.src = window.URL.createObjectURL(mediaSource);
socket.on('video', function (data) {
  sourceBuffer.appendBuffer(data);
});
// `constraints` is assumed to be defined elsewhere, e.g. { video: true, audio: true }
navigator.mediaDevices.getUserMedia(constraints)
  .then(handleSuccess).catch(handleError);
function handleSourceOpen(event) {
  console.log('MediaSource opened');
  sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8"');
  sourceBuffer.onupdate = function(){
    console.log("updating");
  };
  console.log('Source buffer: ', sourceBuffer);
}
function handleSuccess(stream) {
  console.log('getUserMedia() got stream: ', stream);
  window.stream = stream;
  if (window.URL) {
    gumVideo.src = window.URL.createObjectURL(stream);
  } else {
    gumVideo.src = stream;
  }
}
function startTranslate() {
  recordedBlobs = [];
  var options = {mimeType: 'video/webm;codecs=vp9'};
  if (!MediaRecorder.isTypeSupported(options.mimeType)) {
    console.log(options.mimeType + ' is not Supported');
    options = {mimeType: 'video/webm;codecs=vp8'};
    if (!MediaRecorder.isTypeSupported(options.mimeType)) {
      console.log(options.mimeType + ' is not Supported');
      options = {mimeType: 'video/webm'};
      if (!MediaRecorder.isTypeSupported(options.mimeType)) {
        console.log(options.mimeType + ' is not Supported');
        options = {mimeType: ''};
      }
    }
  }
  try {
    mediaRecorder = new MediaRecorder(window.stream, options);
  }
  catch (e) {
    console.error('Exception while creating MediaRecorder: ' + e);
    alert('Exception while creating MediaRecorder: ' + e + '. mimeType: ' + options.mimeType);
    return;
  }
  console.log('Created MediaRecorder', mediaRecorder, 'with options', options);
  recordButton.textContent = 'Stop Recording';
  playButton.disabled = true;
  downloadButton.disabled = true;
  mediaRecorder.onstop = handleStop;
  mediaRecorder.ondataavailable = handleDataAvailable;
  mediaRecorder.start(10); // collect 10ms of data
  console.log('MediaRecorder started', mediaRecorder);
}
function handleDataAvailable(event) {
  if (event.data && event.data.size > 0) {
    socket.emit('video', event.data);
  }
}
When I click the translate button, after 2-3 seconds I get an error like:
Failed to execute 'appendBuffer' on 'SourceBuffer': This SourceBuffer has been removed from the parent media source
What does this error mean? Or is it a bug in WebRTC?

WebRTC Between two pages in the same machine

I'm trying to implement a mechanism to send textual data (JSON, for instance) from page to page, using JavaScript, on the same machine.
I found some code and wrapped it, but it only works within the same page.
At the moment I don't want to use a WebRTC framework, only adapter.js.
//Must include adapter.js before
var WebRTCManager = (function () {
  'use strict';
  //Ctor
  function WebRTCManagerFn() {
    console.log('WebRTCManagerFn ctor reached');
    this._events = {};
    this._localConnection = null;
    this._remoteConnection = null;
    this._sendChannel = null;
    this._receiveChannel = null;
  }
  WebRTCManagerFn.prototype.addEventListener = function (name, handler) {
    if (this._events.hasOwnProperty(name))
      this._events[name].push(handler);
    else
      this._events[name] = [handler];
  };
  WebRTCManagerFn.prototype._fireEvent = function (name, event) {
    if (!this._events.hasOwnProperty(name))
      return;
    if (!event)
      event = {};
    var listeners = this._events[name], l = listeners.length;
    for (var i = 0; i < l; i++) {
      listeners[i].call(null, event);
    }
  };
  WebRTCManagerFn.prototype.createConnection = function () {
    var servers = null;
    var pcConstraint = null;
    var dataConstraint = null;
    console.log('Using SCTP based data channels');
    // SCTP is supported from Chrome 31 and is supported in FF.
    // No need to pass DTLS constraint as it is on by default in Chrome 31.
    // For SCTP, reliable and ordered is true by default.
    // Add localConnection to global scope to make it visible
    // from the browser console.
    window.localConnection = this._localConnection =
      new RTCPeerConnection(servers, pcConstraint);
    console.log('Created local peer connection object localConnection');
    this._sendChannel = this._localConnection.createDataChannel('sendDataChannel',
      dataConstraint);
    console.log('Created send data channel');
    this._localConnection.onicecandidate = this._localIceCallback.bind(this);
    this._sendChannel.onopen = this._onSendChannelStateChange.bind(this);
    this._sendChannel.onclose = this._onSendChannelStateChange.bind(this);
    // Add remoteConnection to global scope to make it visible
    // from the browser console.
    window.remoteConnection = this._remoteConnection =
      new RTCPeerConnection(servers, pcConstraint);
    console.log('Created remote peer connection object remoteConnection');
    this._remoteConnection.onicecandidate = this._remoteIceCallback.bind(this);
    this._remoteConnection.ondatachannel = this._receiveChannelCallback.bind(this);
    this._localConnection.createOffer(this._gotOfferFromLocalConnection.bind(this), this._onCreateSessionDescriptionError.bind(this));
  }
  WebRTCManagerFn.prototype._onCreateSessionDescriptionError = function (error) {
    console.log('Failed to create session description: ' + error.toString());
  }
  WebRTCManagerFn.prototype.sendMessage = function (msgText) {
    var msg = new Message(msgText);
    // Send the msg object as a JSON-formatted string.
    var data = JSON.stringify(msg);
    this._sendChannel.send(data);
    console.log('Sent Data: ' + data);
  }
  WebRTCManagerFn.prototype.closeDataChannels = function () {
    console.log('Closing data channels');
    this._sendChannel.close();
    console.log('Closed data channel with label: ' + this._sendChannel.label);
    this._receiveChannel.close();
    console.log('Closed data channel with label: ' + this._receiveChannel.label);
    this._localConnection.close();
    this._remoteConnection.close();
    this._localConnection = null;
    this._remoteConnection = null;
    console.log('Closed peer connections');
  }
  WebRTCManagerFn.prototype._gotOfferFromLocalConnection = function (desc) {
    console.log('reached _gotOfferFromLocalConnection');
    if (this && this._localConnection != 'undefined' && this._remoteConnection != 'undefined') {
      this._localConnection.setLocalDescription(desc);
      console.log('Offer from localConnection \n' + desc.sdp);
      this._remoteConnection.setRemoteDescription(desc);
      this._remoteConnection.createAnswer(this._gotAnswerFromRemoteConnection.bind(this),
        this._onCreateSessionDescriptionError.bind(this));
    }
  }
  WebRTCManagerFn.prototype._gotAnswerFromRemoteConnection = function (desc) {
    console.log('reached _gotAnswerFromRemoteConnection');
    if (this && this._localConnection != 'undefined' && this._remoteConnection != 'undefined') {
      this._remoteConnection.setLocalDescription(desc);
      console.log('Answer from remoteConnection \n' + desc.sdp);
      this._localConnection.setRemoteDescription(desc);
    }
  }
  WebRTCManagerFn.prototype._localIceCallback = function (event) {
    console.log('local ice callback');
    if (event.candidate) {
      this._remoteConnection.addIceCandidate(event.candidate,
        this._onAddIceCandidateSuccess.bind(this), this._onAddIceCandidateError.bind(this));
      console.log('Local ICE candidate: \n' + event.candidate.candidate);
    }
  }
  WebRTCManagerFn.prototype._remoteIceCallback = function (event) {
    console.log('remote ice callback');
    if (event.candidate) {
      this._localConnection.addIceCandidate(event.candidate,
        this._onAddIceCandidateSuccess.bind(this), this._onAddIceCandidateError.bind(this));
      console.log('Remote ICE candidate: \n ' + event.candidate.candidate);
    }
  }
  WebRTCManagerFn.prototype._onAddIceCandidateSuccess = function (evt) {
    debugger;
    console.log('AddIceCandidate success. evt: ' + evt);
  }
  WebRTCManagerFn.prototype._onAddIceCandidateError = function (error) {
    console.log('Failed to add Ice Candidate: ' + error.toString());
  }
  WebRTCManagerFn.prototype._receiveChannelCallback = function (event) {
    console.log('Receive Channel Callback');
    this._receiveChannel = event.channel;
    this._receiveChannel.onmessage = this._onReceiveMessageCallback.bind(this);
    this._receiveChannel.onopen = this._onReceiveChannelStateChange.bind(this);
    this._receiveChannel.onclose = this._onReceiveChannelStateChange.bind(this);
  }
  WebRTCManagerFn.prototype._onReceiveMessageCallback = function (event) {
    console.log('Received Message: ' + event.data);
    console.log('Received Message this is: ' + this);
    var msgObj = JSON.parse(event.data);
    this._fireEvent("messageRecieved", {
      details: {
        msg: msgObj
      }
    });
  }
  WebRTCManagerFn.prototype._onSendChannelStateChange = function () {
    console.log('_onSendChannelStateChange');
    var readyState = this._sendChannel.readyState;
    console.log('Send channel state is: ' + readyState);
  }
  WebRTCManagerFn.prototype._onReceiveChannelStateChange = function () {
    var readyState = this._receiveChannel.readyState;
    console.log('Receive channel state is: ' + readyState);
  }
  return WebRTCManagerFn;
})();
My question is: how can I pass data between two pages on the same machine using WebRTC?
This WebRTC tab chat demo works across tabs or windows in the same browser without a server: https://jsfiddle.net/f5y48hcd/ (I gave up making it work in a code snippet due to a SecurityError.)
Open the fiddle in two windows and try it out. For reference, here's the WebRTC code:
var pc = new RTCPeerConnection(), dc, enterPressed = e => e.keyCode == 13;
var connect = () => init(dc = pc.createDataChannel("chat"));
pc.ondatachannel = e => init(dc = e.channel);
var init = dc => {
  dc.onopen = e => (dc.send("Hi!"), chat.select());
  dc.onclose = e => log("Bye!");
  dc.onmessage = e => log(e.data);
};
chat.onkeypress = e => {
  if (!enterPressed(e)) return;
  dc.send(chat.value);
  log("> " + chat.value);
  chat.value = "";
};
var sc = new localSocket(), send = obj => sc.send(JSON.stringify(obj));
var incoming = msg => msg.sdp &&
  pc.setRemoteDescription(new RTCSessionDescription(msg.sdp))
    .then(() => pc.signalingState == "stable" || pc.createAnswer()
      .then(answer => pc.setLocalDescription(answer))
      .then(() => send({ sdp: pc.localDescription })))
    .catch(log) || msg.candidate &&
  pc.addIceCandidate(new RTCIceCandidate(msg.candidate)).catch(log);
sc.onmessage = e => incoming(JSON.parse(e.data));
pc.oniceconnectionstatechange = e => log(pc.iceConnectionState);
pc.onicecandidate = e => send({ candidate: e.candidate });
pc.onnegotiationneeded = e => pc.createOffer()
  .then(offer => pc.setLocalDescription(offer))
  .then(() => send({ sdp: pc.localDescription }))
  .catch(log);
var log = msg => div.innerHTML += "<br>" + msg;
I use this for demoing WebRTC data channels. Note that the secret sauce is the localSocket.js that I wrote for this, which looks like this:
function localSocket() {
  localStorage.a = localStorage.b = JSON.stringify([]);
  this.index = 0;
  this.interval = setInterval(() => {
    if (!this.in) {
      if (!JSON.parse(localStorage.a).length) return;
      this.in = "a"; this.out = "b";
    }
    var arr = JSON.parse(localStorage[this.in]);
    if (arr.length <= this.index) return;
    if (this.onmessage) this.onmessage({ data: arr[this.index] });
    this.index++;
  }, 200);
  setTimeout(() => this.onopen && this.onopen({}));
}
localSocket.prototype = {
  send: function(msg) {
    if (!this.out) {
      this.out = "a"; this.in = "b";
    }
    var arr = JSON.parse(localStorage[this.out]);
    arr.push(msg);
    localStorage[this.out] = JSON.stringify(arr);
  },
  close: function() {
    clearInterval(this.interval);
  }
};
It basically uses localStorage to simulate web sockets locally between two tabs. If this is all you want to do, then you don't even need WebRTC data channels.
Disclaimer: It's not very robust, and relies on two pages being ready to communicate, so not production-ready by any means.
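As an aside (my addition, not from the original answer): if local tab-to-tab messaging is really all you need, the browser's storage event already delivers cross-tab notifications on the same origin, without any polling. A minimal sketch:
// Sender tab: a localStorage write fires a `storage` event in OTHER tabs
function broadcast(msg) {
  // the timestamp makes every write distinct, so the event always fires
  localStorage.setItem("tabchat", JSON.stringify({ msg: msg, ts: Date.now() }));
}
// Receiver tab: react to writes made by the other tab
window.addEventListener("storage", function (e) {
  if (e.key === "tabchat" && e.newValue) {
    console.log("received: " + JSON.parse(e.newValue).msg);
  }
});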

How to decode only part of the mp3 for use with WebAudio API?

In my web application, I have a requirement to play part of mp3 file. This is a local web app, so I don't care about downloads etc, everything is stored locally.
My use case is as follows:
determine file to play
determine start and stop of the sound
load the file [I use BufferLoader]
play
Quite simple.
Right now I just grab the mp3 file, decode it in memory for use with WebAudio API, and play it.
Unfortunately, because the mp3 files can get quite long [30 minutes of audio, for example], the decoded file in memory can take up to 900MB. That's a bit too much to handle.
Is there any option where I could decode only part of the file? How can I detect where to start and how far to go?
I cannot anticipate the bitrate; it can be constant, but I would expect variable as well.
Here's an example of what I did:
http://tinyurl.com/z9vjy34
The code [I've tried to make it as compact as possible]:
var MediaPlayerAudioContext = window.AudioContext || window.webkitAudioContext;
var MediaPlayer = function () {
  this.mediaPlayerAudioContext = new MediaPlayerAudioContext();
  this.currentTextItem = 0;
  this.playing = false;
  this.active = false;
  this.currentPage = null;
  this.currentAudioTrack = 0;
};
MediaPlayer.prototype.setPageNumber = function (page_number) {
  this.pageTotalNumber = page_number;
};
MediaPlayer.prototype.generateAudioTracks = function () {
  var audioTracks = [];
  var currentBegin;
  var currentEnd;
  var currentPath;
  audioTracks[0] = {
    begin: 4.300,
    end: 10.000,
    path: "example.mp3"
  };
  this.currentPageAudioTracks = audioTracks;
};
MediaPlayer.prototype.show = function () {
  this.mediaPlayerAudioContext = new MediaPlayerAudioContext();
};
MediaPlayer.prototype.hide = function () {
  if (this.playing) {
    this.stop();
  }
  this.mediaPlayerAudioContext = null;
  this.active = false;
};
MediaPlayer.prototype.play = function () {
  this.stopped = false;
  console.trace();
  this.playMediaPlayer();
};
MediaPlayer.prototype.playbackStarted = function() {
  this.playing = true;
};
MediaPlayer.prototype.playMediaPlayer = function () {
  var instance = this;
  var audioTrack = this.currentPageAudioTracks[this.currentAudioTrack];
  var newBufferPath = audioTrack.path;
  if (this.mediaPlayerBufferPath && this.mediaPlayerBufferPath === newBufferPath) {
    this.currentBufferSource = this.mediaPlayerAudioContext.createBufferSource();
    this.currentBufferSource.buffer = this.mediaPlayerBuffer;
    this.currentBufferSource.connect(this.mediaPlayerAudioContext.destination);
    this.currentBufferSource.onended = function () {
      instance.currentBufferSource.disconnect(0);
      instance.audioTrackFinishedPlaying();
    };
    this.playing = true;
    this.currentBufferSource.start(0, audioTrack.begin, audioTrack.end - audioTrack.begin);
    this.currentAudioStartTimeInAudioContext = this.mediaPlayerAudioContext.currentTime;
    this.currentAudioStartTimeOffset = audioTrack.begin;
    this.currentTrackStartTime = this.mediaPlayerAudioContext.currentTime - (this.currentTrackResumeOffset || 0);
    this.currentTrackResumeOffset = null;
  }
  else {
    function finishedLoading(bufferList) {
      instance.mediaPlayerBuffer = bufferList[0];
      instance.playMediaPlayer();
    }
    if (this.currentBufferSource){
      this.currentBufferSource.disconnect(0);
      this.currentBufferSource.stop(0);
      this.currentBufferSource = null;
    }
    this.mediaPlayerBuffer = null;
    this.mediaPlayerBufferPath = newBufferPath;
    this.bufferLoader = new BufferLoader(this.mediaPlayerAudioContext, [this.mediaPlayerBufferPath], finishedLoading);
    this.bufferLoader.load();
  }
};
MediaPlayer.prototype.stop = function () {
  this.stopped = true;
  if (this.currentBufferSource) {
    this.currentBufferSource.onended = null;
    this.currentBufferSource.disconnect(0);
    this.currentBufferSource.stop(0);
    this.currentBufferSource = null;
  }
  this.bufferLoader = null;
  this.mediaPlayerBuffer = null;
  this.mediaPlayerBufferPath = null;
  this.currentTrackStartTime = null;
  this.currentTrackResumeOffset = null;
  this.currentAudioTrack = 0;
  if (this.currentTextTimeout) {
    clearTimeout(this.currentTextTimeout);
    this.textHighlightFinished();
    this.currentTextTimeout = null;
    this.currentTextItem = null;
  }
  this.playing = false;
};
MediaPlayer.prototype.getNumberOfPages = function () {
  return this.pageTotalNumber;
};
MediaPlayer.prototype.playbackFinished = function () {
  this.currentAudioTrack = 0;
  this.playing = false;
};
MediaPlayer.prototype.audioTrackFinishedPlaying = function () {
  this.currentAudioTrack++;
  if (this.currentAudioTrack >= this.currentPageAudioTracks.length) {
    this.playbackFinished();
  } else {
    this.playMediaPlayer();
  }
};
//
// Buffered Loader
//
// Class used to get the sound files
//
function BufferLoader(context, urlList, callback) {
  this.context = context;
  this.urlList = urlList;
  this.onload = callback;
  this.bufferList = [];
  this.loadCount = 0;
}
// this allows us to handle media files with embedded artwork/id3 tags
function syncStream(node) { // should be done by api itself. and hopefully will.
  var buf8 = new Uint8Array(node.buf);
  buf8.indexOf = Array.prototype.indexOf;
  var i = node.sync, b = buf8;
  while (1) {
    node.retry++;
    i = b.indexOf(0xFF, i);
    // note: the comparison needs its own parentheses, because == binds
    // tighter than & in JavaScript
    if (i == -1 || ((b[i + 1] & 0xE0) == 0xE0)) break;
    i++;
  }
  if (i != -1) {
    var tmp = node.buf.slice(i); // careful there, it returns a copy
    delete(node.buf);
    node.buf = null;
    node.buf = tmp;
    node.sync = i;
    return true;
  }
  return false;
}
BufferLoader.prototype.loadBuffer = function (url, index) {
  // Load buffer asynchronously
  var request = new XMLHttpRequest();
  request.open("GET", url, true);
  request.responseType = "arraybuffer";
  var loader = this;
  function decode(sound) {
    loader.context.decodeAudioData(
      sound.buf,
      function (buffer) {
        if (!buffer) {
          alert('error decoding file data');
          return;
        }
        loader.bufferList[index] = buffer;
        if (++loader.loadCount == loader.urlList.length)
          loader.onload(loader.bufferList);
      },
      function (error) {
        if (syncStream(sound)) {
          decode(sound);
        } else {
          console.error('decodeAudioData error', error);
        }
      }
    );
  }
  request.onload = function () {
    // Asynchronously decode the audio file data in request.response
    var sound = {};
    sound.buf = request.response;
    sound.sync = 0;
    sound.retry = 0;
    decode(sound);
  };
  request.onerror = function () {
    alert('BufferLoader: XHR error');
  };
  request.send();
};
BufferLoader.prototype.load = function () {
  for (var i = 0; i < this.urlList.length; ++i)
    this.loadBuffer(this.urlList[i], i);
};
There is no way of streaming with decodeAudioData(); you need to use a MediaElement with createMediaStreamSource and run your stuff from there. decodeAudioData() cannot decode only part of a file. @zre00ne: and the mp3 will be decoded big. Very big!
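To illustrate that route (my sketch, assuming an audio element is acceptable in this local app; here via createMediaElementSource, a close cousin of the createMediaStreamSource approach mentioned above): the media element decodes incrementally on its own, so you can seek to the start offset, stop at the end offset, and still route the sound through the WebAudio graph:
var ctx = new (window.AudioContext || window.webkitAudioContext)();
var audio = new Audio("example.mp3"); // hypothetical path, as in the question
var source = ctx.createMediaElementSource(audio);
source.connect(ctx.destination);
// play only the slice [begin, end) without decoding the whole file up front;
// in practice you may need to wait for `loadedmetadata` before seeking
function playPart(begin, end) {
  audio.currentTime = begin;
  audio.play();
  audio.ontimeupdate = function () {
    if (audio.currentTime >= end) {
      audio.pause();
      audio.ontimeupdate = null;
    }
  };
}
playPart(4.3, 10.0); // e.g. the first track from generateAudioTracks()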
