WEBRTC - Webcamera Source and View - javascript

i am new to WEBRTC. i have two html pages that supposed to capture and show webcamera respectively. my expectation is that "webcamerasrc.html" page should capture webcamera and through "WEBCAMERAVew.html" page, one can view the camera. the first page is capturing the video but the second page is not showing it. i think webrtc handshake is not getting completed . any suggestion regarding making things work or understand webrtc handshake or sdp exchange between two diffrent pages would be appreciated .
Here are the snippets.
webcamerasrc.html
<!DOCTYPE html>
<html>
<head>
<meta charset="ISO-8859-1">
<title>WEB CAMERA SOURCE</title>
<script src='https://cdn.firebase.com/js/client/2.2.1/firebase.js'></script>
<script src="https://webrtcexperiment-webrtc.netdna-ssl.com/RTCPeerConnection-v1.5.js"> </script>
</head>
<body>
<h1>WEB CAMERA SOURCE</h1>
<div id="container">
<video autoplay="true" id="localVideo">
</video>
<video autoplay="true" id="rVideo">
</video>
</div>
<script>
var socket = new WebSocket('ws://localhost:8080/IntegrateIntoWebTest/websocket');
var mediaConstraints = {
optional: [],
mandatory: {
OfferToReceiveAudio: true,
OfferToReceiveVideo: true
}
};
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia || navigator.oGetUserMedia;
window.RTCPeerConnection = window.mozRTCPeerConnection || window.webkitRTCPeerConnection;
window.RTCSessionDescription = window.mozRTCSessionDescription || window.RTCSessionDescription;
window.RTCIceCandidate = window.mozRTCIceCandidate || window.RTCIceCandidate;
var isChrome = !!navigator.webkitGetUserMedia;
var STUN = {url: isChrome ? 'stun:stun.l.google.com:19302' : 'stun:23.21.150.121' };
var TURN = {url: 'turn:homeo#turn.bistri.com:80', credential: 'homeo'};
var iceServers = {iceServers: [STUN, TURN] };
var DtlsSrtpKeyAgreement = {DtlsSrtpKeyAgreement: true};
var optional = {optional: [DtlsSrtpKeyAgreement]};
var video = document.getElementById('localVideo');
var offerer = new RTCPeerConnection(iceServers);
if (navigator.getUserMedia) {
navigator.getUserMedia({video: true}, VideoSuccess, VideoError);
}
function VideoSuccess(stream) {
video.src = window.URL.createObjectURL(stream);
offerer.addStream(stream);
offerer.onicecandidate = function (event) {
if (!event || !event.candidate) return;
var o_icecandidate = event.candidate;
socket.send(JSON.stringify({o_icecandidate}));
socket.onmessage = function(event) {
var iceData = JSON.parse(event.data);
console.log(iceData);
offerer.addIceCandidate(iceData);
};
};
offerer.onaddstream = function (stream) {
//alert(stream.stream);
video.src = window.URL.createObjectURL(stream.stream);
};
}
offerer.createOffer(function (offerSdp) {
offerer.setLocalDescription(offerSdp);
socket.send(JSON.stringify({offerSdp}));
}, function(e) {console.log(e);}, mediaConstraints);
socket.onmessage = function(event)
{
//alert(JSON.parse(event.data.answerSdp));
// alert(answerSdp);
//if(answerSdp='answer'){var remoteDescription = new RTCSessionDescription(answerSdp);offerer.setRemoteDescription(remoteDescription);}
var actualData = JSON.parse(event.data);
console.log(actualData);
if(actualData.answerSdp.type='answer')
{
console.log(event.data);
var sd = JSON.parse(event.data);
console.log(sd.answerSdp);
var sd1 = new RTCSessionDescription(sd.answerSdp);
console.log(sd1);
offerer.setRemoteDescription(sd1);
}
}
function VideoError(e) {
console.log(e);
}
</script>
</body>
</html>
WEBCAMERAVew.html
<!DOCTYPE html>
<html>
<head>
<meta charset="ISO-8859-1">
<title>REMOTE WEB CAMERA VIEW</title>
</head>
<body>
<h1>REMOTE WEB CAMERA VIEW</h1>
<script src='https://cdn.firebase.com/js/client/2.2.1/firebase.js'></script>
<script src="//cdn.webrtc-experiment.com/RTCPeerConnection-v1.5.js"> </script>
<div id="container">
<video autoplay="true" id="localVideo">
</video>
<video autoplay="true" id="rVideo">
</video>
</div>
<script>
var myDataRef = new WebSocket('ws://localhost:8080/wsTest/websocket');
myDataRef.onmessage = function(event) {
var actualData = JSON.parse(event.data);
if(actualData.offerSdp.type='offer')
{ answererPeer(event);}
};
var mediaConstraints = {
optional: [],
mandatory: {
OfferToReceiveAudio: true,
OfferToReceiveVideo: true
}
};
window.RTCPeerConnection = window.mozRTCPeerConnection || window.webkitRTCPeerConnection;
window.RTCSessionDescription = window.mozRTCSessionDescription || window.RTCSessionDescription;
window.RTCIceCandidate = window.mozRTCIceCandidate || window.RTCIceCandidate;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia || navigator.oGetUserMedia;
var isChrome = !!navigator.webkitGetUserMedia;
var STUN = {url: isChrome ? 'stun:stun.l.google.com:19302' : 'stun:23.21.150.121' };
var TURN = {url: 'turn:homeo#turn.bistri.com:80', credential: 'homeo'};
var iceServers = {iceServers: [STUN, TURN] };
var DtlsSrtpKeyAgreement = {DtlsSrtpKeyAgreement: true};
var optional = {optional: [DtlsSrtpKeyAgreement]};
var answerer = new RTCPeerConnection(iceServers);
var video = document.getElementById('localVideo');
var remoteVideo = document.getElementById('rVideo');
function answererPeer(event) {
//window.alert(offer.sdp);
//window.alert(video_stream);
var sd = JSON.parse(event.data);
var rd = new RTCSessionDescription(sd.offerSdp);
if (navigator.getUserMedia) {
navigator.getUserMedia({video: true}, VideoSuccess, VideoError);
}
function VideoSuccess(mediaStream)
{
//answerer.addIceCandidate(icecandicate.candidate);
answerer.addStream(mediaStream);
answerer.setRemoteDescription(rd);
answerer.createAnswer(function (answerSdp) {
answerer.setLocalDescription(answerSdp);
myDataRef.send(JSON.stringify({answerSdp}));
}, function() {}, mediaConstraints);
answerer.onicecandidate = function (event) {
if (!event || !event.candidate) return;
var a_icecandidate = event.candidate;
myDataRef.send(JSON.stringify({a_icecandidate}));
myDataRef.onmessage = function(event) {
var iceData = JSON.parse(event.data);
console.log('over here : '+iceData);
};
};
answerer.onaddstream = function (mediaStream) {
alert(mediaStream);
remoteVideo.src = URL.createObjectURL(mediaStream);
remoteVideo.play();
};
}
function VideoError(e) {
console.log(e);
}
}
</script>
</body>
</html>

Related

How to save video using web cam?

I have a plug & play web cam and i want to record video by clicking start and stop button. My web cam is iball company and if there are any simple script then please provide me. I want simple php code.
<p><video id="video" autoplay="autoplay"></video></p>
<p><input type="button" id="buttonSnap" value="Take screenshot" disabled="disabled" onclick="snapshot()" /></p>
<p>
<input type="button" id="buttonStart" value="Start" disabled="disabled" onclick="start()" />
<input type="button" id="buttonStop" value="Stop" disabled="disabled" onclick="stop()" />
</p>
<p><canvas id="canvas"></canvas></p>
<script type="text/javascript">
"use strict";
var video = document.getElementById('video');
var canvas = document.getElementById('canvas');
var videoStream = null;
var preLog = document.getElementById('preLog');
function log(text)
{
if (preLog) preLog.textContent += ('\n' + text);
else alert(text);
}
function snapshot()
{
canvas.width = video.videoWidth;
canvas.height = video.videoHeight;
canvas.getContext('2d').drawImage(video, 0, 0);
}
function noStream()
{
log('Access to camera was denied!');
}
function stop()
{
var myButton = document.getElementById('buttonStop');
if (myButton) myButton.disabled = true;
myButton = document.getElementById('buttonSnap');
if (myButton) myButton.disabled = true;
if (videoStream)
{
if (videoStream.stop) videoStream.stop();
else if (videoStream.msStop) videoStream.msStop();
videoStream.onended = null;
videoStream = null;
}
if (video)
{
video.onerror = null;
video.pause();
if (video.mozSrcObject)
video.mozSrcObject = null;
video.src = "";
}
myButton = document.getElementById('buttonStart');
if (myButton) myButton.disabled = false;
}
function gotStream(stream)
{
var myButton = document.getElementById('buttonStart');
if (myButton) myButton.disabled = true;
videoStream = stream;
log('Got stream.');
video.onerror = function ()
{
log('video.onerror');
if (video) stop();
};
stream.onended = noStream;
if (window.webkitURL) video.src = window.webkitURL.createObjectURL(stream);
else if (video.mozSrcObject !== undefined)
{//FF18a
video.mozSrcObject = stream;
video.play();
}
else if (navigator.mozGetUserMedia)
{//FF16a, 17a
video.src = stream;
video.play();
}
else if (window.URL) video.src = window.URL.createObjectURL(stream);
else video.src = stream;
myButton = document.getElementById('buttonSnap');
if (myButton) myButton.disabled = false;
myButton = document.getElementById('buttonStop');
if (myButton) myButton.disabled = false;
}
function start()
{
if ((typeof window === 'undefined') || (typeof navigator === 'undefined')) log('This page needs a Web browser with the objects window.* and navigator.*!');
else if (!(video && canvas)) log('HTML context error!');
else
{
log('Get user media…');
if (navigator.getUserMedia) navigator.getUserMedia({video:true}, gotStream, noStream);
else if (navigator.oGetUserMedia) navigator.oGetUserMedia({video:true}, gotStream, noStream);
else if (navigator.mozGetUserMedia) navigator.mozGetUserMedia({video:true}, gotStream, noStream);
else if (navigator.webkitGetUserMedia) navigator.webkitGetUserMedia({video:true}, gotStream, noStream);
else if (navigator.msGetUserMedia) navigator.msGetUserMedia({video:true, audio:false}, gotStream, noStream);
else log('getUserMedia() not available from your Web browser!');
}
}
start();
</script>
I got this code from https://codepen.io/blaberus/pen/raGdBG . Using This code I can see video but how can I record.
Anyone help?
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<meta name="viewport" content="width=device-width, minimum-scale=1.0, initial-scale=1.0, user-scalable=yes">
<meta name="mobile-web-app-capable" content="yes">
<meta id="theme-color" name="theme-color" content="#fff">
<base target="_blank">
<title>Media Recorder API Demo</title>
<link rel="stylesheet" href="css/main.css" />
<style>
a#downloadLink {
display: block;
margin: 0 0 1em 0;
min-height: 1.2em;
}
p#data {
min-height: 6em;
}
</style>
</head>
<body>
<div id="container">
<div style = "text-align:center;">
<h1>Media Recorder API Demo </h1>
<!-- <h2>Record a 640x480 video using the media recorder API implemented in Firefox and Chrome</h2> -->
<video controls autoplay></video><br>
<button id="rec" onclick="onBtnRecordClicked()">Record</button>
<button id="pauseRes" onclick="onPauseResumeClicked()" disabled>Pause</button>
<button id="stop" onclick="onBtnStopClicked()" disabled>Stop</button>
</div>
<a id="downloadLink" download="mediarecorder.webm" name="mediarecorder.webm" href></a>
<p id="data"></p>
<script src="js/main.js"></script>
</div>
</body>
</html>
<script>
'use strict';
/* globals MediaRecorder */
// Spec is at http://dvcs.w3.org/hg/dap/raw-file/tip/media-stream-capture/RecordingProposal.html
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
if(getBrowser() == "Chrome"){
var constraints = {"audio": true, "video": { "mandatory": { "minWidth": 640, "maxWidth": 640, "minHeight": 480,"maxHeight": 480 }, "optional": [] } };//Chrome
}else if(getBrowser() == "Firefox"){
var constraints = {audio: true,video: { width: { min: 640, ideal: 640, max: 640 }, height: { min: 480, ideal: 480, max: 480 }}}; //Firefox
}
var recBtn = document.querySelector('button#rec');
var pauseResBtn = document.querySelector('button#pauseRes');
var stopBtn = document.querySelector('button#stop');
var videoElement = document.querySelector('video');
var dataElement = document.querySelector('#data');
var downloadLink = document.querySelector('a#downloadLink');
videoElement.controls = false;
function errorCallback(error){
console.log('navigator.getUserMedia error: ', error);
}
/*
var mediaSource = new MediaSource();
mediaSource.addEventListener('sourceopen', handleSourceOpen, false);
var sourceBuffer;
*/
var mediaRecorder;
var chunks = [];
var count = 0;
function startRecording(stream) {
log('Start recording...');
if (typeof MediaRecorder.isTypeSupported == 'function'){
/*
MediaRecorder.isTypeSupported is a function announced in https://developers.google.com/web/updates/2016/01/mediarecorder and later introduced in the MediaRecorder API spec http://www.w3.org/TR/mediastream-recording/
*/
if (MediaRecorder.isTypeSupported('video/webm;codecs=vp9')) {
var options = {mimeType: 'video/webm;codecs=vp9'};
} else if (MediaRecorder.isTypeSupported('video/webm;codecs=h264')) {
var options = {mimeType: 'video/webm;codecs=h264'};
} else if (MediaRecorder.isTypeSupported('video/webm;codecs=vp8')) {
var options = {mimeType: 'video/webm;codecs=vp8'};
}
log('Using '+options.mimeType);
mediaRecorder = new MediaRecorder(stream, options);
}else{
log('Using default codecs for browser');
mediaRecorder = new MediaRecorder(stream);
}
pauseResBtn.textContent = "Pause";
mediaRecorder.start(10);
var url = window.URL || window.webkitURL;
videoElement.src = url ? url.createObjectURL(stream) : stream;
videoElement.play();
mediaRecorder.ondataavailable = function(e) {
//log('Data available...');
//console.log(e.data);
//console.log(e.data.type);
//console.log(e);
chunks.push(e.data);
};
mediaRecorder.onerror = function(e){
log('Error: ' + e);
console.log('Error: ', e);
};
mediaRecorder.onstart = function(){
log('Started & state = ' + mediaRecorder.state);
};
mediaRecorder.onstop = function(){
log('Stopped & state = ' + mediaRecorder.state);
var blob = new Blob(chunks, {type: "video/mp4"});
chunks = [];
var videoURL = window.URL.createObjectURL(blob);
downloadLink.href = videoURL;
videoElement.src = videoURL;
downloadLink.innerHTML = 'Download video file';
var rand = Math.floor((Math.random() * 10000000));
var name = "video_"+rand+".mp4" ;
downloadLink.setAttribute( "download", name);
downloadLink.setAttribute( "name", name);
};
mediaRecorder.onpause = function(){
log('Paused & state = ' + mediaRecorder.state);
}
mediaRecorder.onresume = function(){
log('Resumed & state = ' + mediaRecorder.state);
}
mediaRecorder.onwarning = function(e){
log('Warning: ' + e);
};
}
//function handleSourceOpen(event) {
// console.log('MediaSource opened');
// sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp9"');
// console.log('Source buffer: ', sourceBuffer);
//}
function onBtnRecordClicked (){
if (typeof MediaRecorder === 'undefined' || !navigator.getUserMedia) {
alert('MediaRecorder not supported on your browser, use Firefox 30 or Chrome 49 instead.');
}else {
navigator.getUserMedia(constraints, startRecording, errorCallback);
recBtn.disabled = true;
pauseResBtn.disabled = false;
stopBtn.disabled = false;
}
}
function onBtnStopClicked(){
mediaRecorder.stop();
videoElement.controls = true;
recBtn.disabled = false;
pauseResBtn.disabled = true;
stopBtn.disabled = true;
}
function onPauseResumeClicked(){
if(pauseResBtn.textContent === "Pause"){
console.log("pause");
pauseResBtn.textContent = "Resume";
mediaRecorder.pause();
stopBtn.disabled = true;
}else{
console.log("resume");
pauseResBtn.textContent = "Pause";
mediaRecorder.resume();
stopBtn.disabled = false;
}
recBtn.disabled = true;
pauseResBtn.disabled = false;
}
function log(message){
dataElement.innerHTML = dataElement.innerHTML+'<br>'+message ;
}
//browser ID
function getBrowser(){
var nVer = navigator.appVersion;
var nAgt = navigator.userAgent;
var browserName = navigator.appName;
var fullVersion = ''+parseFloat(navigator.appVersion);
var majorVersion = parseInt(navigator.appVersion,10);
var nameOffset,verOffset,ix;
// In Opera, the true version is after "Opera" or after "Version"
if ((verOffset=nAgt.indexOf("Opera"))!=-1) {
browserName = "Opera";
fullVersion = nAgt.substring(verOffset+6);
if ((verOffset=nAgt.indexOf("Version"))!=-1)
fullVersion = nAgt.substring(verOffset+8);
}
// In MSIE, the true version is after "MSIE" in userAgent
else if ((verOffset=nAgt.indexOf("MSIE"))!=-1) {
browserName = "Microsoft Internet Explorer";
fullVersion = nAgt.substring(verOffset+5);
}
// In Chrome, the true version is after "Chrome"
else if ((verOffset=nAgt.indexOf("Chrome"))!=-1) {
browserName = "Chrome";
fullVersion = nAgt.substring(verOffset+7);
}
// In Safari, the true version is after "Safari" or after "Version"
else if ((verOffset=nAgt.indexOf("Safari"))!=-1) {
browserName = "Safari";
fullVersion = nAgt.substring(verOffset+7);
if ((verOffset=nAgt.indexOf("Version"))!=-1)
fullVersion = nAgt.substring(verOffset+8);
}
// In Firefox, the true version is after "Firefox"
else if ((verOffset=nAgt.indexOf("Firefox"))!=-1) {
browserName = "Firefox";
fullVersion = nAgt.substring(verOffset+8);
}
// In most other browsers, "name/version" is at the end of userAgent
else if ( (nameOffset=nAgt.lastIndexOf(' ')+1) <
(verOffset=nAgt.lastIndexOf('/')) )
{
browserName = nAgt.substring(nameOffset,verOffset);
fullVersion = nAgt.substring(verOffset+1);
if (browserName.toLowerCase()==browserName.toUpperCase()) {
browserName = navigator.appName;
}
}
// trim the fullVersion string at semicolon/space if present
if ((ix=fullVersion.indexOf(";"))!=-1)
fullVersion=fullVersion.substring(0,ix);
if ((ix=fullVersion.indexOf(" "))!=-1)
fullVersion=fullVersion.substring(0,ix);
majorVersion = parseInt(''+fullVersion,10);
if (isNaN(majorVersion)) {
fullVersion = ''+parseFloat(navigator.appVersion);
majorVersion = parseInt(navigator.appVersion,10);
}
return browserName;
}
</script>

Access multiple cameras on Firefox with javascript/HTML5?

I've managed to access 2 cameras simultaneously on Chrome with Javascript and HTML5 but not on Firefox. Anyway to make it works? or Firefox still not support multiple cameras?
I've attached my code below. Please see if anything I have to rework.
** you have to replace your own device ID to make this code work**
<script>
document.addEventListener('DOMContentLoaded', function() {
var video = document.getElementById('cam1');
var video2 = document.getElementById('cam2');
var audio, audioType;
var canvas = document.querySelector('canvas');
var context = canvas.getContext('2d');
var sHeight = video.height/canvas.height;
var sWidth = video.width/canvas.width;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia || navigator.msGetUserMedia;
window.URL = window.URL || window.webkitURL || window.mozURL || window.msURL;
if (navigator.getUserMedia) {
navigator.getUserMedia({video: {deviceId: "8b6cf59198c32c9b3544d9252d96c0d26938780787f0fc04cb162ba978aecf4c"}, audio: false}, onSuccessCallback, onErrorCallback);
navigator.getUserMedia({video: {deviceId: "024ad3a357f5dd716e658ba749ac0bc53a4de31f00aba58c35da2736141f51c1"}, audio: false}, onSuccessCallback2, onErrorCallback);
function onSuccessCallback(stream) {
video.src = window.URL.createObjectURL(stream) || stream;
video.play();
}
function onSuccessCallback2(stream) {
video2.src = window.URL.createObjectURL(stream) || stream;
video2.play();
}
// Display an error
function onErrorCallback(e) {
var expl = 'An error occurred: [Reason: ' + e.code + ']';
console.error(expl);
alert(expl);
return;
}
}
}, false);
</script>

How to make a voice recording in a browser?

I use webrtc in javascript:
function start() {
var constraints = {
audio: true,
video: true
};
navigator.mediaDevices.getUserMedia(constraints)
.then(function (mediaStream) {
console.log(window.URL.createObjectURL(mediaStream));
var video = document.querySelector('#my-video');
video.src = window.URL.createObjectURL(mediaStream);
/*video.srcObject = mediaStream;
video.onloadedmetadata = function (e) {
video.play();
};*/
})
.catch(function (err) {
console.log(err.name + ": " + err.message);
});
}
html:
<video id="my-video" autoplay="true" muted="true"></video>
<br />
<input id="start" type="button" value="Start" onclick="start()" />
Please, tell me, what you need to do to record sound and send it to the server (Asp .NET Core)?
For the record needed library RecorderJS.
HTML:
<h4>Recording audio</h4>
<input type="button" onclick="startRecording(this);" value="Record" />
<input type="button" onclick="stopRecording(this);" value="Stop" />
<h4>Record:</h4>
<div class="newRecord"></div>
JS:
window.onload = function () {
init();
};
var audio_context;
var recorder;
function init() {
try {
// webkit shim
window.AudioContext = window.AudioContext || window.webkitAudioContext;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia;
window.URL = window.URL || window.webkitURL;
audio_context = new AudioContext;
} catch (e) {
alert('No web audio support in this browser!');
console.log(err.name + ": " + err.message);
}
queryToUseMicrophone();
};
function queryToUseMicrophone() {
navigator.mediaDevices.getUserMedia({ audio: true })
.then(function (mediaStream) {
var input = audio_context.createMediaStreamSource(mediaStream);
recorder = new Recorder(input);
}).catch(function (err) {
console.log(err.name + ": " + err.message);
});
}
function startRecording(button) {
recorder && recorder.record();
button.disabled = true;
button.nextElementSibling.disabled = false;
}
function stopRecording(button) {
recorder && recorder.stop();
button.disabled = true;
button.previousElementSibling.disabled = false;
createDownloadLink();
recorder.clear();
}
function createDownloadLink() {
recorder && recorder.exportWAV(function (blob) {
var url = URL.createObjectURL(blob);
var audio = document.createElement('audio');
var a = document.createElement('a');
audio.controls = true;
audio.src = url;
a.href = url;
a.download = new Date().toISOString() + '.wav';
a.innerHTML = a.download;
document.querySelector(".newRecord").appendChild(audio);
document.querySelector(".newRecord").appendChild(a);
});
}

How do I get audio data from microphone using AudioContext HTML5

I'm trying to get a stream of data from my microphone (ex. volume, pitch).
For now, I've been using getUserMedia to access my microphone audio.
But I couldn't find a way to extract the data from it.
My code :
$(function () {
var audioContext = new AudioContext();
var audioInput = null,
realAudioInput = null,
inputPoint = null,
analyserNode = null;
if (!navigator.getUserMedia)
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia || navigator.msGetUserMedia;
if (navigator.getUserMedia){
navigator.getUserMedia({audio:true}, success, function(e) {
alert('Error capturing audio.');
});
} else alert('getUserMedia not supported in this browser.');
function success(stream){
inputPoint = audioContext.createGain();
realAudioInput = audioContext.createMediaStreamSource(stream);
audioInput = realAudioInput;
audioInput.connect(inputPoint);
analyserNode = audioContext.createAnalyser();
analyserNode.fftSize = 2048;
inputPoint.connect( analyserNode );
}
function live(){
requestAnimationFrame(live);
var freqByteData = new Uint8Array(analyserNode.frequencyBinCount);
analyserNode.getByteFrequencyData(freqByteData);
console.log(analyserNode.frequencyBinCount);
}
});
Here is a version of your code which does two things :
retrieves raw PCM audio buffer from the live microphone which is sent to console.log (to show javascript console hit ctrl-shift-i ), this is the PCM raw audio curve of streaming mic audio data in the time domain.
It also runs this same audio data into a FFT (fast Fourier transform) which is also sent to console.log, this is the frequency domain representation of the same Web Audio API event loop buffer
NOTE - either wear headphones OR turn down your speaker volume otherwise you will hear the squeal of audio feedback as the mic will pickup speaker audio a la Jimmy Hendrix !
<html><head><meta http-equiv="Content-Type" content="text/html; charset=ISO-8859-1">
<title>capture microphone then show time & frequency domain output</title>
<script type="text/javascript">
var webaudio_tooling_obj = function () {
var audioContext = new AudioContext();
console.log("audio is starting up ...");
var BUFF_SIZE_RENDERER = 16384;
var audioInput = null,
microphone_stream = null,
gain_node = null,
script_processor_node = null,
script_processor_analysis_node = null,
analyser_node = null;
if (!navigator.getUserMedia)
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia || navigator.msGetUserMedia;
if (navigator.getUserMedia){
navigator.getUserMedia({audio:true},
function(stream) {
start_microphone(stream);
},
function(e) {
alert('Error capturing audio.');
}
);
} else { alert('getUserMedia not supported in this browser.'); }
// ---
function show_some_data(given_typed_array, num_row_to_display, label) {
var size_buffer = given_typed_array.length;
var index = 0;
console.log("__________ " + label);
if (label === "time") {
for (; index < num_row_to_display && index < size_buffer; index += 1) {
var curr_value_time = (given_typed_array[index] / 128) - 1.0;
console.log(curr_value_time);
}
} else if (label === "frequency") {
for (; index < num_row_to_display && index < size_buffer; index += 1) {
console.log(given_typed_array[index]);
}
} else {
throw new Error("ERROR - must pass time or frequency");
}
}
function process_microphone_buffer(event) {
var i, N, inp, microphone_output_buffer;
microphone_output_buffer = event.inputBuffer.getChannelData(0); // just mono - 1 channel for now
}
function start_microphone(stream){
gain_node = audioContext.createGain();
gain_node.connect( audioContext.destination );
microphone_stream = audioContext.createMediaStreamSource(stream);
microphone_stream.connect(gain_node);
script_processor_node = audioContext.createScriptProcessor(BUFF_SIZE_RENDERER, 1, 1);
script_processor_node.onaudioprocess = process_microphone_buffer;
microphone_stream.connect(script_processor_node);
// --- enable volume control for output speakers
document.getElementById('volume').addEventListener('change', function() {
var curr_volume = this.value;
gain_node.gain.value = curr_volume;
console.log("curr_volume ", curr_volume);
});
// --- setup FFT
script_processor_analysis_node = audioContext.createScriptProcessor(2048, 1, 1);
script_processor_analysis_node.connect(gain_node);
analyser_node = audioContext.createAnalyser();
analyser_node.smoothingTimeConstant = 0;
analyser_node.fftSize = 2048;
microphone_stream.connect(analyser_node);
analyser_node.connect(script_processor_analysis_node);
var buffer_length = analyser_node.frequencyBinCount;
var array_freq_domain = new Uint8Array(buffer_length);
var array_time_domain = new Uint8Array(buffer_length);
console.log("buffer_length " + buffer_length);
script_processor_analysis_node.onaudioprocess = function() {
// get the average for the first channel
analyser_node.getByteFrequencyData(array_freq_domain);
analyser_node.getByteTimeDomainData(array_time_domain);
// draw the spectrogram
if (microphone_stream.playbackState == microphone_stream.PLAYING_STATE) {
show_some_data(array_freq_domain, 5, "frequency");
show_some_data(array_time_domain, 5, "time"); // store this to record to aggregate buffer/file
}
};
}
}(); // webaudio_tooling_obj = function()
</script>
</head>
<body>
<p>Volume</p>
<input id="volume" type="range" min="0" max="1" step="0.1" value="0.5"/>
</body>
</html>

webrtc, is it possible convert image to mediastream?

I make webrtc video chating.
we need to sending image instead of video. someone say image can convert mediastream.
I try, image to base64 and call addstream but I am fail. how to do that?
var imagestream = getBase64FromImageUrl('./unown.png');
function getBase64FromImageUrl(URL) {
var img = new Image();
img.src = URL;
img.onload = function () {
var canvas = document.createElement("canvas");
canvas.width =this.width;
canvas.height =this.height;
var ctx = canvas.getContext("2d");
ctx.drawImage(this, 0, 0);
var dataURL = canvas.toDataURL("image/png");
alert( dataURL.replace(/^data:image\/(png|jpg);base64,/, ""));
}
}
Try Whammy.js : A Real Time Javascript WebM Encoder
Try Recorder.js : This is for Audio (if you need) ;)
JS(script.js):
/*Adapating for different vendors*/
window.URL =
window.URL ||
window.webkitURL ||
window.mozURL ||
window.msURL;
window.requestAnimationFrame =
window.requestAnimationFrame ||
window.webkitRequestAnimationFrame ||
window.mozRequestAnimationFrame ||
window.msRequestAnimationFrame ||
window.oRequestAnimationFrame;
window.cancelAnimationFrame =
window.cancelAnimationFrame ||
window.webkitCancelAnimationFrame ||
window.mozCancelAnimationFrame ||
window.msCancelAnimationFrame ||
window.oCancelAnimationFrame;
navigator.getUserMedia =
navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia;
window.AudioContext =
window.AudioContext ||
window.webkitAudioContext;
/*Global stuff*/
var video = get('video');
video.width = 320;
video.height = 240;
var canvas = document.createElement('canvas');
var rafId = null;
var frames = [];
var audioContext = new AudioContext;
var audioRecorder;
/*Save typing :) */
function get(selector) {
return document.querySelector(selector) || null;
}
/*Wrapper for recording*/
function recordIt() {
var record = get('#record');
record.textContent = record.disabled ? 'Record' : 'Recording...';
record.classList.toggle('recording');
record.disabled = !record.disabled;
}
/*Get Media (Video and Audio) from user*/
function getMedia(event) {
event.target.disabled = true;
get('#record').disabled = false;
video.controls = false;
var setVideo = function() {
setTimeout(function() {
video.width = 320;
video.height = 240;
canvas.width = video.width;
canvas.height = video.height;
}, 1000);
};
if (navigator.getUserMedia) {
navigator.getUserMedia({video: true, audio: true}, function(stream) {
if (video.mozSrcObject !== undefined) {
video.mozSrcObject = stream;
} else {
video.src = (window.URL && window.URL.createObjectURL(stream)) || stream;
}
var audioInput = audioContext.createMediaStreamSource(stream);
audioInput.connect(audioContext.destination);
audioRecorder = new Recorder(audioInput);
setVideo();
}, function(e) {
alert('Error'+e);
console.log(e)
});
} else {
console.log('getUserMedia() not supported in this browser.');
}
};
/*Record function: Draws frames and pushes to array*/
function record() {
var context = canvas.getContext('2d');
var CANVAS_HEIGHT = canvas.height;
var CANVAS_WIDTH = canvas.width;
frames = [];
recordIt();
get('#stop').disabled = false;
function draw(time) {
rafId = requestAnimationFrame(draw);
context.drawImage(video, 0, 0, CANVAS_WIDTH, CANVAS_HEIGHT);
var url = canvas.toDataURL('image/webp', 1);
frames.push(url);
};
rafId = requestAnimationFrame(draw);
//Audio stuff
audioRecorder.clear();
audioRecorder.record();
};
/*Stop Recording*/
function stop() {
cancelAnimationFrame(rafId);
get('#stop').disabled = true;
recordIt();
setVideo();
//Audio stuff
audioRecorder.stop();
setAudio();
};
/*Call Whammy for creating video*/
function setVideo(vidUrl) {
var url = vidUrl || null;
var video = get('#recordedDiv video') || null;
if (!video) {
video = document.createElement('video');
video.autoplay = true;
video.controls = true;
video.style.width = canvas.width + 'px';
video.style.height = canvas.height + 'px';
get('#recordedDiv').appendChild(video);
} else {
window.URL.revokeObjectURL(video.src);
}
if (!url) {
var webmBlob = Whammy.fromImageArray(frames, 1000 / 60);
url = window.URL.createObjectURL(webmBlob);
}
video.src = url;
}
function setAudio() {
audioRecorder.exportWAV(function(blob) {
var audio = get('#recordedDiv audio') || null;
var url = URL.createObjectURL(blob);
if(!audio) {
var audio = document.createElement('audio');
audio.autoplay = true;
audio.controls = true;
audio.src = url;
get('#recordedDiv').appendChild(audio);
}
else {
audio.src = url;
}
});
}
/*Fingers Crossed*/
function init() {
get('#camera').addEventListener('click', getMedia);
get('#record').addEventListener('click', record);
get('#stop').addEventListener('click', stop);
}
init();
HTML
<html><head>
<meta charset="utf-8">
<title>Record and Play Simple Messages</title>
<link rel="stylesheet" type="text/css" href="./css/style.css">
<style type="text/css"></style></head>
<body>
Records webm video and audio using WebAudioAPI, whammy.js and recorder.js
Webp images not supported in firefox, hence it fails. Works on Chrome though.
<section>
<div>
<video autoplay="" width="320" height="240"></video><br>
<button id="camera">GetUserMedia</button>
</div>
<div id="recordedDiv">
<button id="record" disabled="">Record</button>
<button id="stop" disabled="">Stop</button><br>
</div>
</section>
<script type="text/javascript" src="./js/whammy.min.js"></script>
<script type="text/javascript" src="./js/recorder.js"></script>
<script type="text/javascript" src="./js/script.js"></script>
</body></html>
DEMO
I know I am answering bit late, and this is only applicable for firefox( 41 and above), you can try and create a mediastream from the canvas using CanvasCaptureMediaStream
Edit: they are implementing this media capture option in chrome as well, you can follow the issue here

Categories

Resources