How do I get an audio buffer from a video element? I know one way via BaseAudioContext.decodeAudioData(), but that fetches the audio file directly via a request. I need to get the buffer from the video itself so I can manipulate it.
const video = document.createElement('video');
video.src = 'https://www.w3schools.com/html/mov_bbb.mp4';
document.body.appendChild(video);

let audioCtx;
let audioSource;

const play = () => {
  audioCtx = new AudioContext();
  audioSource = audioCtx.createMediaElementSource(video);
  audioSource.connect(audioCtx.destination);
  video.play();
};

video.ontimeupdate = () => {
  let buffer = new AudioBufferSourceNode(audioCtx);
  // Manipulate the audio buffer in real time
  // Problem: buffer.buffer is null
  buffer.connect(audioSource.context.destination);
  console.log(buffer.buffer);
};
<!DOCTYPE html>
<html>
<body>
  <button onclick="play()">Play</button>
</body>
</html>
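A note on why buffer.buffer is null: AudioBufferSourceNode plays an AudioBuffer that you assign to it; it never captures audio from another node or element. One way to manipulate the element's audio in flight is to insert a processing node between the media-element source and the destination. Below is a minimal sketch of that approach (ScriptProcessorNode is deprecated in favor of AudioWorklet, but it keeps the example short):

const video = document.querySelector('video');
const audioCtx = new AudioContext();
const audioSource = audioCtx.createMediaElementSource(video);

// Deprecated but simple; AudioWorkletNode is the modern equivalent.
const processor = audioCtx.createScriptProcessor(4096, 2, 2);
processor.onaudioprocess = (event) => {
  for (let ch = 0; ch < event.outputBuffer.numberOfChannels; ch++) {
    const input = event.inputBuffer.getChannelData(ch);
    const output = event.outputBuffer.getChannelData(ch);
    for (let i = 0; i < input.length; i++) {
      output[i] = input[i] * 0.5; // example manipulation: halve the volume
    }
  }
};

audioSource.connect(processor);
processor.connect(audioCtx.destination);
video.play();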
I have code that plays my camera's view in a video tag and then draws it into a canvas element on the same page. The page has two squares: one for the video tag (playing the camera's view) and one for the canvas (playing the same video as the video tag). How can I edit my code to make the video play in a canvas on another page, so my users can watch it live? If there is a method to let users watch it on the same page, seeing only the canvas, that is also acceptable.
here is my html file:
<html>
<head>
  <meta charset="UTF-8">
  <title>With canvas</title>
</head>
<body>
  <div class="booth">
    <video id="video" width="400" height="300" autoplay></video>
    <canvas id="canvas" width="400" height="300" style="border: 1px solid black;"></canvas>
  </div>
  <script src="video.js"></script>
</body>
</html>
And here is my js file:
(function() {
  var canvas = document.getElementById('canvas'),
      context = canvas.getContext('2d'),
      video = document.getElementById('video'),
      vendorUrl = window.URL || window.webkitURL;

  navigator.getMedia = navigator.getUserMedia ||
      navigator.webkitGetUserMedia ||
      navigator.mozGetUserMedia ||
      navigator.msGetUserMedia;

  navigator.getMedia({
    video: true,
    audio: false
  }, function(stream) {
    //video.src = vendorUrl.createObjectURL(stream);
    if ('srcObject' in video) {
      video.srcObject = stream;
    } else {
      video.src = vendorUrl.createObjectURL(stream);
    }
    video.play();
  }, function(error) {
    // An error occurred
    //error.code
  });

  video.addEventListener('play', function() {
    setInterval(() => {
      draw(this, context, 400, 300);
    }, 100);
  }, false);

  function draw(video, context, width, height) {
    context.drawImage(video, 0, 0, width, height);
  }
})();
Use MediaRecorder to capture live data in chunks as it records from the camera; those chunks can be sent to the other user.
Then use MediaSource on the other user's side to append the chunks and play them live as new chunks arrive.
I know it's a complicated thing, but believe me, when I was experimenting with this I spent a good amount of time understanding it. I believe you will get some benefit from my experiment code below.
Here is a working demo:
var mediasource = new MediaSource(),
    video = document.querySelector("video"),
    mime = 'video/webm;codecs=vp9,opus';

video.src = URL.createObjectURL(mediasource);

mediasource.addEventListener("sourceopen", function(_) {
  var source = mediasource.addSourceBuffer(mime);
  navigator.mediaDevices.getUserMedia({ video: true, audio: true }).then(stream => {
    var recorder = new MediaRecorder(stream, { mimeType: mime });
    recorder.ondataavailable = d => {
      // creating chunks of 5 seconds of video continuously
      var r = new Response(d.data).arrayBuffer(); // convert blob to arraybuffer
      r.then(arraybuffer => {
        source.appendBuffer(arraybuffer);
      });
    };
    recorder.start(5000);
  });
});
<video class="video" controls autoplay></video>
Separated version for gathering live data and playing it (demo):
// gathering camera data into the liveData array
var vid = document.querySelector(".video"),
    mime = 'video/webm;codecs=vp9,opus',
    liveData = [];

navigator.mediaDevices.getUserMedia({ video: true, audio: true }).then(stream => {
  var recorder = new MediaRecorder(stream, { mimeType: mime });
  recorder.ondataavailable = d => {
    console.log("capturing");
    new Response(d.data).arrayBuffer().then(eachChunk => {
      // recording in chunks here and saving them to the liveData array
      liveData.push(eachChunk);
    });
  };
  recorder.start(5000);
});
//--------------------------------------------------------------------------------
// playing from liveData in the video element
var ms = new MediaSource();
vid.src = URL.createObjectURL(ms);

ms.onsourceopen = function() {
  var source = ms.addSourceBuffer(mime),
      chunkIndex = 0;
  setInterval(function() {
    // only append if a new chunk has actually arrived
    if (liveData.length > chunkIndex) {
      var currentChunk = liveData[chunkIndex];
      source.appendBuffer(currentChunk);
      console.log("playing");
      chunkIndex++;
    }
  }, 5000);
};
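One caveat with the interval-based playback loop above: SourceBuffer.appendBuffer() throws an InvalidStateError if it is called while a previous append is still processing. A safer variant of the playback half (a sketch, not part of the original demo) queues chunks and drains them on 'updateend':

// Sketch, assuming the `ms`, `mime`, and `liveData` from the demo above.
ms.onsourceopen = function() {
  var source = ms.addSourceBuffer(mime),
      pending = [],
      chunkIndex = 0;

  // drain the next chunk only once the SourceBuffer is idle
  source.addEventListener('updateend', drainQueue);

  function drainQueue() {
    if (!source.updating && pending.length > 0) {
      source.appendBuffer(pending.shift());
    }
  }

  setInterval(function() {
    if (liveData.length > chunkIndex) {
      pending.push(liveData[chunkIndex]);
      chunkIndex++;
      drainQueue();
    }
  }, 5000);
};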
I'm getting an ArrayBuffer from an HTTP response. The code below plays the audio just once, but I want to use the audio as the src for an audio element!
this.toSpeechService.getAudioFile(this.text).subscribe(res => {
  const context = new AudioContext();
  let source = context.createBufferSource();
  context.decodeAudioData(res, function(buffer) {
    source.buffer = buffer;
  }, null).then(() => {
    source.connect(context.destination);
    this.audioSrc = context.destination; // doesn't fill the audio player
    source.start(0);
  });
  console.log(res);
});
<audio controls="controls">
  <source [src]="audioSrc" type="audio/mp3/wav" />
  Your browser does not support the audio element.
</audio>
If you know the MIME type of the data in the ArrayBuffer, the following should work.
this.toSpeechService.getAudioFile(this.text).subscribe(res => {
  const blob = new Blob([ res ], { type: THE_MIME_TYPE });
  const url = URL.createObjectURL(blob);
  YOUR_AUDIO_ELEMENT.src = url;
  YOUR_AUDIO_ELEMENT.load();
});
THE_MIME_TYPE is a placeholder for the actual MIME type, and YOUR_AUDIO_ELEMENT is a reference to your audio element.
Once you don't need the object URL anymore you can release the memory by calling URL.revokeObjectURL(url).
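For example (a sketch reusing the placeholder names above), you could revoke the URL once playback has finished:

// Sketch: free the blob once the element is done with it.
YOUR_AUDIO_ELEMENT.addEventListener('ended', () => {
  URL.revokeObjectURL(url);
});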
I'm working on an app running in a browser where the user can record his voice. Using the MediaRecorder API I can get the recording and POST it to my server. The problem comes in if the user pauses and restarts the recording. When that happens, the first time they get it from the server it plays fine, but when it is replayed only the last segment is played. If I reload the web page and try again, once again the first time I get the whole recording; subsequent times it is just the last segment.
It uses the Opus codec, so I tried playing it in VLC. There, I get only the first segment, never any of the subsequent ones. Finally, I tried converting to MP3, and when I do that the MP3 has the whole recording! So the whole thing is being saved, but somehow the segments seem to be stored separately in the file and mess up replay. In each case only one segment of the blob plays. I have to say I'm at something of a loss even as to how to attack this. The time shown by the player is the time of the first segment, whether it plays the first, the second, or the whole thing. Any thoughts?
Edited to provide a working example.
How to test: put this code where it can be served and open it (I use a Chrome-based browser). Click Start to begin recording, then Pause, then Start again to continue recording, then Pause again to stop. Then click setup to load the recording, and listen to it. The first time I listen I get the whole recording, though the playback timer only shows the first segment. Subsequent playbacks only play the last segment. Pressing the setup button again will cause it to play the whole recording, but again only the first time.
<!doctype html>
<html>
<head>
<script>
  var audio = null;

  function init() {
    audio = new Recording();
    audio.prepareAudio();
  }

  class Recording {
    recordButton;
    pauseButton;
    recorder;
    mimeType;
    audioChunks;

    constructor() {
      this.mimeType = this.recorder = null;
      this.recordButton = this.pauseButton = null;
      this.audioChunks = [];
    }

    getRecorder() {
      return new Promise(async resolve => {
        const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
        const mediaRecorder = new MediaRecorder(stream);
        var _this = this;
        mediaRecorder.addEventListener('dataavailable', event => {
          _this.audioChunks.push(event.data);
        });
        const start = () => {
          mediaRecorder.start();
        };
        const pause = () =>
          new Promise(resolve => {
            mediaRecorder.addEventListener('stop', () => {
              resolve(_this.audioChunks);
            });
            mediaRecorder.stop();
            _this.mimeType = mediaRecorder.mimeType;
          });
        resolve({ start, pause });
      });
    }

    get codec() {
      if (!this.mimeType) return null;
      let split = this.mimeType.split('=');
      return (split.length > 1) ? split[1] : split;
    }

    prepareAudio() {
      this.recordButton = document.querySelector('#record');
      this.pauseButton = document.querySelector('#pause');
      var _this = this;
      this.recordButton.addEventListener('click', async () => {
        _this.recordButton.setAttribute('disabled', true);
        _this.pauseButton.removeAttribute('disabled');
        if (!_this.recorder) {
          _this.recorder = await this.getRecorder();
        }
        _this.recorder.start();
      });
      this.pauseButton.addEventListener('click', async () => {
        _this.recordButton.removeAttribute('disabled');
        _this.pauseButton.setAttribute('disabled', true);
        await _this.recorder.pause();
      });
    }

    data() {
      return new Promise((resolve, reject) => {
        const reader = new FileReader();
        let codec = this.audioChunks[0].type;
        let audioBlob = new Blob(this.audioChunks, { type: this.codec || codec });
        reader.readAsDataURL(audioBlob);
        reader.onload = () => resolve({ codec: codec, data: reader.result.split(',')[1] });
      });
    }

    blobUrl() {
      let codec = this.audioChunks[0].type;
      let audioBlob = new Blob(this.audioChunks, { type: this.codec || codec });
      let blobUrl = URL.createObjectURL(audioBlob);
      let player = document.getElementById('play-blob');
      player.src = blobUrl;
      player.disabled = false;
      return blobUrl;
    }
  }
</script>
</head>
<body onload="init()">
  <div>
    <button id="record" class="button fill">Start</button>
    <br />
    <button id="pause" class="button fill">Pause</button>
    <br />
    <button class="button fill" onclick="audio.blobUrl()">setup</button>
  </div>
  <audio id="play-blob" controls></audio>
</body>
</html>
This isn't a complete answer, but I'm understanding more of what is happening. The audio player, at least in the versions of Chrome and Firefox I am using (up to date), does not seem to handle streamed audio properly. When the source is loaded it does not know the length (of course). When the blob is created from multiple segments (new Blob([segment1, segment2, ...])), the first time it plays the duration is reported as infinite and the whole clip plays. On subsequent plays the clip time is given as the length of the longest segment and only the last segment is played. The audio object reports the duration as the length of the longest segment.
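A workaround that is often suggested for the duration === Infinity symptom (a sketch, not something from the original post): seek far past the end once metadata loads, which forces the browser to scan the file and compute the real duration.

// Sketch: force a real duration for a MediaRecorder blob that
// reports duration === Infinity (element id from the example above).
const player = document.getElementById('play-blob');
player.addEventListener('loadedmetadata', () => {
  if (player.duration === Infinity) {
    player.currentTime = Number.MAX_SAFE_INTEGER; // seek far past the end
    player.ontimeupdate = () => {
      player.ontimeupdate = null;
      player.currentTime = 0; // duration is now populated
    };
  }
});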
I've also solved my immediate problem by replacing the audio player, using howler.js. It repeatedly plays the entire clip, as I expected.
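For reference, a minimal howler.js sketch under the same assumptions (blobUrl is the value produced by Recording.blobUrl() above); html5: true routes playback through an HTML5 audio element:

// `format` is required because a blob URL carries no file extension;
// 'webm' is an assumption about what MediaRecorder produced.
const sound = new Howl({
  src: [blobUrl],
  format: ['webm'],
  html5: true
});
sound.play();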
I'm trying to test playing audio using an HTMLAudioElement and an AudioSourceNode. For the later application I need two features:
The pitch must be preserved after the playbackRate changed.
The volume needs to be changed to a value greater than 1.
Because of feature 2, I added a workaround with the AudioSourceNode and the GainNode.
I need the audio file as an ArrayBuffer in the later app, which is why I added the file-reading part first.
Problem:
The code works fine in Chrome and Opera, but not in Firefox: playbackRate is set to 2, but the playback rate of the audio signal does not change. Why is that the case, and how can I fix it?
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Timestretching 2</title>
<script type="text/javascript">
  var audioContext = window.AudioContext      // Default
      || window.webkitAudioContext            // Safari and old versions of Chrome
      || window.mozAudioContext
      || false;

  function play() {
    var fileInput = document.getElementById("file");
    var file = fileInput.files[0];
    var reader = new FileReader();
    reader.onload = function (event) {
      console.log("finished!");
      console.log(event.srcElement.result);
      var audioCtxt = new audioContext();
      var url = URL.createObjectURL(new File([event.srcElement.result], "test.wav"));
      var player = new Audio();
      const source = audioCtxt.createMediaElementSource(player);
      player.src = url;
      console.log("wait to play");
      player.addEventListener("canplay", function () {
        // create a gain node to set volume greater than 1
        const gainNode = audioCtxt.createGain();
        gainNode.gain.value = 2.0; // double the volume
        source.connect(gainNode);
        gainNode.connect(audioCtxt.destination);
        player.playbackRate = 2.0;
        player.play();
        player.playbackRate = 2.0;
        console.log("playbackRate is " + player.playbackRate);
      });
    };
    reader.onprogress = function (progress) {
      console.log(progress.loaded / progress.total);
    };
    reader.onerror = function (error) {
      console.error(error);
    };
    reader.readAsArrayBuffer(file);
  }
</script>
</head>
<body>
<input id="file" type="file" value="Select audio file"/>
<button onclick="play()">PLAY</button>
</body>
</html>
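On the pitch-preservation side (feature 1), a hedged note: media elements expose a preservesPitch flag, which was vendor-prefixed in older browsers. A sketch, assuming the `player` element from the code above:

// Sketch: keep pitch constant while changing playbackRate.
// The property names are real, but support varies by browser version.
if ('preservesPitch' in player) {
  player.preservesPitch = true;
} else if ('mozPreservesPitch' in player) {
  player.mozPreservesPitch = true;   // older Firefox
} else if ('webkitPreservesPitch' in player) {
  player.webkitPreservesPitch = true; // older Safari/Chrome
}
player.playbackRate = 2.0;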
I would like to record the video tag so that I can stream the blob data to a WebSocket server. However, when I attempt to start the MediaRecorder, I get the following error:
The MediaRecorder failed to start because there are no audio or video
tracks available.
Is it possible to add the audio/video tracks from the html5 video tag to the media stream?
<script>
  var video = document.getElementById('video');
  var fileElem = document.getElementById('file');
  var mediaStream = video.captureStream(30);
  var mediaRecorder = new MediaRecorder(
    mediaStream,
    {
      mimeType: 'video/webm;codecs=h264',
      videoBitsPerSecond: 3000000
    }
  );

  fileElem.onchange = function () {
    var file = fileElem.files[0],
        canplay = !!video.canPlayType(file.type).replace("no", ""),
        isaudio = file.type.indexOf("audio/") === 0 && file.type !== "audio/mpegurl";
    video.src = URL.createObjectURL(file);
    video.play();
    mediaRecorder.start(1000); // Start recording, and dump data every second
  };
</script>
<p id="choice" class="info"><input type="file" id="file"> File type: <span id="ftype"></span></p>
<video width="640" height="360" id="video" src="" controls="false"></video>
Is it possible to use the MediaRecorder API with html5 video?
Yes, but you need to initialize the MediaRecorder after HTMLVideoElement.readyState has reached HAVE_METADATA.
Here is a sample, but it only works if the video source is from the same origin (captureStream cannot capture from an element with cross-origin data).
Note: in this sample, I use onloadedmetadata to initialize the MediaRecorder after the video gets its metadata.
var mainVideo = document.getElementById("mainVideo"),
    displayVideo = document.getElementById("displayVideo"),
    videoData = [],
    mediaRecorder;

var btnStart = document.getElementById("btnStart"),
    btnStop = document.getElementById("btnStop"),
    btnResult = document.getElementById("btnResult");

var initMediaRecorder = function() {
  // Record with 25 fps
  mediaRecorder = new MediaRecorder(mainVideo.captureStream(25));
  mediaRecorder.ondataavailable = function(e) {
    videoData.push(e.data);
  };
};

function startCapture() {
  videoData = [];
  mediaRecorder.start();
  // Buttons' 'disabled' state
  btnStart.setAttribute('disabled', 'disabled');
  btnStop.removeAttribute('disabled');
  btnResult.setAttribute('disabled', 'disabled');
}

function endCapture() {
  mediaRecorder.stop();
  // Buttons' 'disabled' state
  btnStart.removeAttribute('disabled');
  btnStop.setAttribute('disabled', 'disabled');
  btnResult.removeAttribute('disabled');
}

function showCapture() {
  return new Promise(resolve => {
    setTimeout(() => {
      // Wrapping this in setTimeout so it's processed in the next RAF
      var blob = new Blob(videoData, {
            'type': 'video/mp4'
          }),
          videoUrl = window.URL.createObjectURL(blob);
      displayVideo.src = videoUrl;
      resolve();
    }, 0);
  });
}
video {
  max-width: 300px;
  max-height: 300px;
  display: block;
  margin: 10px 0;
}
<video id="mainVideo" playsinline="" webkit-playsinline="1" controls="1" onloadedmetadata="initMediaRecorder()">
<source src="sampleVideo.mp4" type="video/mp4">
</video>
<button id="btnStart" onclick="startCapture()"> Start </button>
<button id="btnStop" disabled='disabled' onclick="endCapture()"> Stop </button>
<button id="btnResult" disabled='disabled' onclick="showCapture()"> Show Result </button>
<video id="displayVideo" playsinline="" webkit-playsinline="1" controls="1"></video>