Is it possible to save the video stream from the webcam to a physical mp4 file? I want to let the user record and send a video from within our web app.
So far I've seen people calling a stream.record() method, but that does not seem to exist:
function onVideoFail(e) {
    console.log('webcam fail!', e);
}

function hasGetUserMedia() {
    // Note: Opera is unprefixed.
    return !!(navigator.getUserMedia || navigator.webkitGetUserMedia ||
              navigator.mozGetUserMedia || navigator.msGetUserMedia);
}

if (hasGetUserMedia()) {
    // Good to go!
} else {
    alert('getUserMedia() is not supported in your browser');
}

window.URL = window.URL || window.webkitURL;
navigator.getUserMedia = navigator.getUserMedia ||
                         navigator.webkitGetUserMedia ||
                         navigator.mozGetUserMedia ||
                         navigator.msGetUserMedia;

var video = document.querySelector('video');
var streamRecorder;
var webcamstream;

if (navigator.getUserMedia) {
    navigator.getUserMedia({audio: true, video: true}, function(stream) {
        video.src = window.URL.createObjectURL(stream);
        webcamstream = stream;
        // streamrecorder = webcamstream.record();
    }, onVideoFail);
} else {
    alert('failed');
}
function sendXHR() {
    // Sends the blob correctly, uninterpreted
    var xhr = new XMLHttpRequest();
    var video = $("#video")[0]; // unwrap to the raw DOM element, which has .src
    xhr.open('GET', video.src, true);
    xhr.responseType = 'blob';
    xhr.onload = function(e) {
        if (this.status == 200) {
            // Note: .response instead of .responseText
            var blob = new Blob([this.response], {type: 'video/webm'});
            console.log(blob.size / 1024);
            console.log(blob.type);
            var form = new FormData(),
                request = new XMLHttpRequest();
            form.append("myblob", blob, "Capture.webm");
            form.append("myname", $("#name_test").val()); // .val() reads the value from a jQuery object
            request.open("POST", "./UploadServlet", true);
            request.send(form);
        }
    };
    xhr.send();
}
function startRecording() {
    sendXHR();
}

function stopRecording() {
    streamRecorder.getRecordedData(postVideoToServer);
}

function postVideoToServer(videoblob) {
    var data = {};
    data.video = videoblob;
    data.metadata = 'test metadata';
    data.action = "upload_video";
    jQuery.post("http://www.kongraju.in/uploadvideo.php", data, onUploadSuccess);
}

function onUploadSuccess() {
    alert('video uploaded');
}
The webcamstream object has no record() function.
Does anyone have a working example, perhaps?
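For reference, the API that eventually standardized this is MediaRecorder (it appears in a later question below). A minimal sketch of recording the webcam stream to a Blob, reusing the webcamstream variable from the code above; note that browsers record WebM, not MP4, so producing an actual .mp4 file would require server-side transcoding:

```javascript
// Sketch: record a getUserMedia stream with the MediaRecorder API.
// This yields a video/webm Blob; an .mp4 would need transcoding on the server.
var recordedChunks = [];
var mediaRecorder = new MediaRecorder(webcamstream);

mediaRecorder.ondataavailable = function (e) {
    if (e.data.size > 0) recordedChunks.push(e.data);
};
mediaRecorder.onstop = function () {
    var blob = new Blob(recordedChunks, { type: 'video/webm' });
    // The blob can now be appended to a FormData and POSTed, as in sendXHR().
};

mediaRecorder.start();
// ...later, to finish the recording:
// mediaRecorder.stop();
```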
Related
I'm trying to get an audio stream using navigator.mediaDevices.getUserMedia.
This works fine in Firefox, but unfortunately not in Chrome.
I have done all of this on a secure page (i.e. HTTPS), but Chrome never invokes the "onAudioProcess" function.
In the console, Firefox shows the audio chunks captured by the microphone, but Chrome does not show anything.
Does anybody have an idea? That would be great.
A working jsfiddle example can be seen here:
https://jsfiddle.net/aminekassir/3fjxq7wr/2/
<script src="https://code.jquery.com/jquery-1.12.4.min.js"></script>
<script src="https://webrtc.github.io/adapter/adapter-latest.js"></script>
<input type="button" id="btn_001" value="Click" />
```html
```javascript
console.log(adapter.browserDetails.browser);

$(function () {
    $('#btn_001').click(function () {
        console.log('start recording');
        startRecording();
    });
});

const mediaConstraints = window.constraints = { audio: true, video: false };
const micBufferSize = 512;
var audioCtx;

function startRecording() {
    if (typeof AudioContext === 'function') {
        console.log('AudioContext defined');
        audioCtx = new AudioContext();
    } else if (typeof webkitAudioContext === 'function') {
        console.log('webkitAudioContext defined');
        audioCtx = new webkitAudioContext();
    } else if (typeof mozAudioContext === 'function') {
        console.log('mozAudioContext defined');
        audioCtx = new mozAudioContext();
    } else {
        console.error('Web Audio not supported!');
    }
    console.log('audioCtx', audioCtx);
    audioCtx.resume();
    navigator.mediaDevices.getUserMedia(mediaConstraints).then(onMicrophoneStream).catch(onMicrophoneStreamError);
    console.log('hasOwnProperty("createScriptProcessor")', window.AudioContext.prototype.hasOwnProperty('createScriptProcessor'));

    function onMicrophoneStream(stream) {
        console.log('onMicrophoneStream', stream);
        let micStream = audioCtx.createMediaStreamSource(stream);
        var scriptProcessorNode = audioCtx.createScriptProcessor(micBufferSize, 1, 1);
        scriptProcessorNode.onaudioprocess = onAudioProcess;
        micStream.connect(scriptProcessorNode);
    }

    function onMicrophoneStreamError(e) {
        console.log('onMicrophoneStreamError', e);
    }

    function onAudioProcess(e) {
        //console.log('onAudioProcess');
        if (audioCtx.state === 'suspended') {
            audioCtx.resume();
        }
        var micOutBuff = e.inputBuffer.getChannelData(0); // incoming microphone stream is Float32
        console.log(micOutBuff);
    }
}
```
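For what it's worth, a common cause of this exact symptom in Chrome is that onaudioprocess only fires once the ScriptProcessorNode is connected onward to a destination. A minimal sketch of that change inside onMicrophoneStream (an assumption based on observed Chrome behavior, not something the spec guarantees):

```javascript
function onMicrophoneStream(stream) {
    let micStream = audioCtx.createMediaStreamSource(stream);
    var scriptProcessorNode = audioCtx.createScriptProcessor(micBufferSize, 1, 1);
    scriptProcessorNode.onaudioprocess = onAudioProcess;
    micStream.connect(scriptProcessorNode);
    // Chrome appears to require the processor node to be wired to a
    // destination before it will deliver onaudioprocess callbacks.
    scriptProcessorNode.connect(audioCtx.destination);
}
```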
I've managed to access 2 cameras simultaneously in Chrome with JavaScript and HTML5, but not in Firefox. Is there any way to make it work, or does Firefox still not support multiple cameras?
I've attached my code below. Please see if there is anything I have to rework.
**You have to substitute your own device IDs to make this code work.**
<script>
document.addEventListener('DOMContentLoaded', function() {
    var video = document.getElementById('cam1');
    var video2 = document.getElementById('cam2');
    var audio, audioType;
    var canvas = document.querySelector('canvas');
    var context = canvas.getContext('2d');
    var sHeight = video.height / canvas.height;
    var sWidth = video.width / canvas.width;

    navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
                             navigator.mozGetUserMedia || navigator.msGetUserMedia;
    window.URL = window.URL || window.webkitURL || window.mozURL || window.msURL;

    if (navigator.getUserMedia) {
        navigator.getUserMedia({video: {deviceId: "8b6cf59198c32c9b3544d9252d96c0d26938780787f0fc04cb162ba978aecf4c"}, audio: false}, onSuccessCallback, onErrorCallback);
        navigator.getUserMedia({video: {deviceId: "024ad3a357f5dd716e658ba749ac0bc53a4de31f00aba58c35da2736141f51c1"}, audio: false}, onSuccessCallback2, onErrorCallback);

        function onSuccessCallback(stream) {
            video.src = window.URL.createObjectURL(stream) || stream;
            video.play();
        }

        function onSuccessCallback2(stream) {
            video2.src = window.URL.createObjectURL(stream) || stream;
            video2.play();
        }

        // Display an error
        function onErrorCallback(e) {
            var expl = 'An error occurred: [Reason: ' + e.code + ']';
            console.error(expl);
            alert(expl);
            return;
        }
    }
}, false);
</script>
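A likely culprit is video.src = window.URL.createObjectURL(stream): Firefox removed support for creating object URLs from a MediaStream. A sketch of the same flow using the promise-based API and srcObject, which works in both browsers; the firstDeviceId and secondDeviceId names are hypothetical placeholders you would fill from navigator.mediaDevices.enumerateDevices():

```javascript
// Sketch: open two cameras via the promise-based API and srcObject.
// firstDeviceId / secondDeviceId are hypothetical placeholders.
function openCamera(deviceId, videoElement) {
    return navigator.mediaDevices
        .getUserMedia({ video: { deviceId: { exact: deviceId } }, audio: false })
        .then(function (stream) {
            videoElement.srcObject = stream; // works in Firefox and Chrome
            return videoElement.play();
        });
}

openCamera(firstDeviceId, document.getElementById('cam1'));
openCamera(secondDeviceId, document.getElementById('cam2'));
```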
I use WebRTC in JavaScript:
function start() {
    var constraints = {
        audio: true,
        video: true
    };
    navigator.mediaDevices.getUserMedia(constraints)
        .then(function (mediaStream) {
            console.log(window.URL.createObjectURL(mediaStream));
            var video = document.querySelector('#my-video');
            video.src = window.URL.createObjectURL(mediaStream);
            /*video.srcObject = mediaStream;
            video.onloadedmetadata = function (e) {
                video.play();
            };*/
        })
        .catch(function (err) {
            console.log(err.name + ": " + err.message);
        });
}
HTML:
<video id="my-video" autoplay="true" muted="true"></video>
<br />
<input id="start" type="button" value="Start" onclick="start()" />
Please tell me what I need to do to record the sound and send it to the server (ASP.NET Core).
For recording, the RecorderJS library is needed.
HTML:
<h4>Recording audio</h4>
<input type="button" onclick="startRecording(this);" value="Record" />
<input type="button" onclick="stopRecording(this);" value="Stop" />
<h4>Record:</h4>
<div class="newRecord"></div>
JS:
window.onload = function () {
    init();
};

var audio_context;
var recorder;

function init() {
    try {
        // webkit shim
        window.AudioContext = window.AudioContext || window.webkitAudioContext;
        navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia;
        window.URL = window.URL || window.webkitURL;
        audio_context = new AudioContext();
    } catch (e) {
        alert('No web audio support in this browser!');
        console.log(e.name + ": " + e.message); // log the caught exception
    }
    queryToUseMicrophone();
}

function queryToUseMicrophone() {
    navigator.mediaDevices.getUserMedia({ audio: true })
        .then(function (mediaStream) {
            var input = audio_context.createMediaStreamSource(mediaStream);
            recorder = new Recorder(input);
        }).catch(function (err) {
            console.log(err.name + ": " + err.message);
        });
}

function startRecording(button) {
    recorder && recorder.record();
    button.disabled = true;
    button.nextElementSibling.disabled = false;
}

function stopRecording(button) {
    recorder && recorder.stop();
    button.disabled = true;
    button.previousElementSibling.disabled = false;
    createDownloadLink();
    recorder.clear();
}

function createDownloadLink() {
    recorder && recorder.exportWAV(function (blob) {
        var url = URL.createObjectURL(blob);
        var audio = document.createElement('audio');
        var a = document.createElement('a');
        audio.controls = true;
        audio.src = url;
        a.href = url;
        a.download = new Date().toISOString() + '.wav';
        a.innerHTML = a.download;
        document.querySelector(".newRecord").appendChild(audio);
        document.querySelector(".newRecord").appendChild(a);
    });
}
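To actually send the exported WAV to ASP.NET Core, one option is to post the blob as multipart form data from the exportWAV callback. This is a sketch; the /api/audio/upload endpoint and the audioFile field name are assumptions, not part of RecorderJS:

```javascript
// Sketch: upload the exported WAV blob to an assumed ASP.NET Core endpoint.
// '/api/audio/upload' and 'audioFile' are hypothetical placeholders.
function uploadRecording(blob) {
    var form = new FormData();
    form.append('audioFile', blob, new Date().toISOString() + '.wav');
    fetch('/api/audio/upload', { method: 'POST', body: form })
        .then(function (response) {
            if (!response.ok) throw new Error('Upload failed: ' + response.status);
            console.log('Upload succeeded');
        })
        .catch(function (err) {
            console.error(err);
        });
}
```

On the server side, a controller action with an IFormFile parameter named audioFile would receive the file through model binding.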
I'm having an issue getting a captured blob from the MediaRecorder API to play back in Chrome (it works in Firefox). I'm not sure if it's a bug in Chrome.
The error it reports:
undefined:1 Uncaught (in promise) DOMException: Unable to decode audio data
window.AudioContext = window.AudioContext || window.webkitAudioContext;
navigator.getUserMedia = (navigator.getUserMedia ||
                          navigator.webkitGetUserMedia ||
                          navigator.mozGetUserMedia ||
                          navigator.msGetUserMedia);

var context = new AudioContext();
var record = document.querySelector('#record');
var stop = document.querySelector('#stop');

if (navigator.getUserMedia) {
    console.log('getUserMedia supported.');
    var constraints = {
        audio: true
    };
    var chunks = [];

    var onSuccess = function(stream) {
        var mediaRecorder = new MediaRecorder(stream);

        record.onclick = function() {
            mediaRecorder.start();
            console.log(mediaRecorder.state);
            console.log("recorder started");
            record.style.background = "red";
            stop.disabled = false;
            record.disabled = true;
        }

        stop.onclick = function() {
            mediaRecorder.stop();
            console.log(mediaRecorder.state);
            console.log("recorder stopped");
            record.style.background = "";
            record.style.color = "";
            stop.disabled = true;
            record.disabled = false;
        }

        mediaRecorder.onstop = function(e) {
            console.log("onstop() called.", e);
            var blob = new Blob(chunks, {
                'type': 'audio/wav'
            });
            chunks = [];
            var reader = new FileReader();
            reader.addEventListener("loadend", function() {
                context.decodeAudioData(reader.result, function(buffer) {
                    playsound(buffer);
                },
                function(e) {
                    console.log("error ", e)
                });
            });
            reader.readAsArrayBuffer(blob);
        }

        mediaRecorder.ondataavailable = function(e) {
            chunks.push(e.data);
        }
    }

    var onError = function(err) {
        console.log('The following error occurred: ' + err);
    }

    navigator.getUserMedia(constraints, onSuccess, onError);
} else {
    console.log('getUserMedia not supported on your browser!');
}

function playsound(thisbuffer) {
    var source = context.createBufferSource();
    source.buffer = thisbuffer;
    source.connect(context.destination);
    source.start(0);
}
<button id="record">record</button>
<button id="stop">stop</button>
I have used your code exactly the way it is, and everything is working fine in the Chrome browser.
This issue was fixed when the patch at https://codereview.chromium.org/1579693006/ was closed and landed in the Chrome release pipeline.
This is no longer an issue.
To close the loop on this, I suspect this was due to the Chrome bug documented in a comment above. It appears this bug was fixed several years ago and should no longer be a problem as WebAudio now uses ffmpeg for decoding.
I've never used WebRTC, so I don't have any understanding of what its syntax looks like.
I came across a couple of statements that I don't think are jQuery, so would anyone mind telling me whether they are WebRTC-specific code?
document.querySelector('#stop-recording').onclick = function() {
    this.disabled = true;
    mediaRecorder.stop();
    mediaRecorder.stream.stop();
    document.querySelector('#pause-recording').disabled = true;
    document.querySelector('#start-recording').disabled = false;
};
What is querySelector?
I got the code from this URL: https://github.com/streamproc/MediaStreamRecorder/blob/master/demos/video-recorder.html
Looking for a bit of info. Thanks.
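For what it's worth, document.querySelector is a standard DOM method, not WebRTC: it takes a CSS selector and returns the first matching element, much like jQuery's $(selector) but without the wrapper. A minimal illustration:

```javascript
// querySelector is plain DOM: CSS selector in, first matching element out (or null).
var stopButton = document.querySelector('#stop-recording');
// Rough jQuery equivalent: $('#stop-recording')[0]
stopButton.disabled = true;
```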
You can refer to the following code:
var audio_context;
var recorder;

$(function () {
    try {
        // Audio Recording
        window.AudioContext = window.AudioContext || window.webkitAudioContext;
        navigator.getUserMedia = (navigator.getUserMedia ||
                                  navigator.webkitGetUserMedia ||
                                  navigator.mozGetUserMedia ||
                                  navigator.msGetUserMedia);
        window.URL = window.URL || window.webkitURL;
        var recorderObject;
        var VM_IDForAudio = "";
        var audio_context = new AudioContext();
        var localMediaStreamForAudio;
        var audioStream;

        // Audio-Video Recording (Firefox)
        var videoFile = !!navigator.mozGetUserMedia ? 'video.gif' : 'video.webm';
        var inner = document.querySelector('.inner');
        var videoElement = document.getElementById('webcamVideo');
        var VM_IDForAudioVideo = "";
        var localMediaStreamForAudioVideo;

        // Disable Live Webcam Button
        $("#btnShowWebcam").prop("disabled", true);
    } catch (e) {
        //alert('No web audio support in this browser!');
        console.log("No web audio support in this browser!");
    }

    // Audio Recording
    $("[id$='btnAudioRecord']").click(function () {
        //VM_IDForAudio = $("[id$='hdVMID']").val();
        VM_IDForAudio = $("[id$='hdPRN']").val() + "_" + $("[id$='hdVMID']").val() + "_" +
            patientDet.visitType + "_" + replateDateString(patientDet.visitDate);
        $this = $(this);
        $recorder = $this.parent();
        if ($("[id$='btnAudioRecord']").val() == "Record Audio") {
            if (VM_IDForAudio != "") {
                $this.attr("value", "Stop Record");
                navigator.getUserMedia({ audio: true }, function (stream) {
                    if (window.IsChrome) stream = new window.MediaStream(stream.getAudioTracks());
                    audioStream = stream;
                    recorder = window.RecordRTC(stream, {
                        type: 'audio'
                    });
                    recorder.startRecording();
                }, function () { });
            } else {
                // Select Patient
            }
        } else {
            $this.attr("value", "Record Audio");
            if (recorder)
                recorder.stopRecording(function (url) {
                    var reader = new window.FileReader();
                    reader.readAsDataURL(recorder.getBlob()); // the callback receives a URL; the recorded blob comes from getBlob()
                    reader.onloadend = function () {
                        var base64data = reader.result;
                        PageMethods.SaveAudioRecording(base64data, VM_IDForAudio);
                        audioStream.stop();
                    }
                });
        }
    });

    // Audio-Video Recording
    $("[id$='btnAudioVideoRecord']").click(function () {
        //VM_IDForAudioVideo = $("[id$='hdVMID']").val();
        VM_IDForAudioVideo = $("[id$='hdPRN']").val() + "_" + $("[id$='hdVMID']").val() + "_" +
            patientDet.visitType + "_" + replateDateString(patientDet.visitDate);
        $this = $(this);
        if ($("[id$='btnAudioVideoRecord']").val() == "Record Aud/Vid") {
            if (VM_IDForAudioVideo != "") {
                $this.attr("value", "Stop Record");
                captureUserMedia(function (stream) {
                    window.audioVideoRecorder = window.RecordRTC(stream, {
                        type: 'video', // don't forget this; otherwise you'll get video/webm instead of audio/ogg
                        canvas: {
                            width: 320,
                            height: 240
                        }
                    });
                    localMediaStreamForAudioVideo = stream;
                    $("#btnShowWebcam").prop("disabled", false);
                    window.audioVideoRecorder.startRecording();
                });
            } else {
                // Select Patient
            }
        } else {
            $this.attr("value", "Record Aud/Vid");
            $("#btnShowWebcam").prop("disabled", true);
            window.audioVideoRecorder.stopRecording(function (url) {
                convertStreams(audioVideoRecorder.getBlob(), videoFile, VM_IDForAudioVideo);
            });
            localMediaStreamForAudioVideo.stop();
        }
    });
});
and use the RecordRTC JavaScript library.
For more, go through this: http://recordrtc.org/RecordRTC.html,
and for a live demo: https://www.webrtc-experiment.com/RecordRTC/AudioVideo-on-Firefox.html
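Note that captureUserMedia is not defined in the snippet above. A plausible definition, sketched under the assumption that it simply wraps the same prefixed getUserMedia shim with audio and video constraints:

```javascript
// Sketch: a captureUserMedia helper as assumed by the click handler above.
// Requests audio and video and hands the stream to the success callback.
function captureUserMedia(successCallback) {
    navigator.getUserMedia({ audio: true, video: true }, successCallback,
        function (error) {
            console.error('captureUserMedia failed:', error);
        });
}
```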
You should check the WebTorrent GitHub repository; it has a detailed description of WebRTC and how it is implemented. Also check out the official WebTorrent website.