ArrayBuffer into audio src - JavaScript

I'm getting an ArrayBuffer from an HTTP response. The code below plays the audio just once, but I want to use the audio as the src for an audio element!
this.toSpeechService.getAudioFile(this.text).subscribe(res => {
  const context = new AudioContext();
  let source = context.createBufferSource();
  context.decodeAudioData(res, function(buffer) {
    source.buffer = buffer;
  }, null).then(() => {
    source.connect(context.destination);
    this.audioSrc = context.destination; // doesn't fill the audio player
    source.start(0);
  });
  console.log(res)
});
<audio controls="controls">
  <source [src]="audioSrc" type="audio/mp3/wav" />
  Your browser does not support the audio element.
</audio>

In case you know the MIME type of the data in the ArrayBuffer, the following should work.
this.toSpeechService.getAudioFile(this.text).subscribe(res => {
  const blob = new Blob([ res ], { type: THE_MIME_TYPE });
  const url = URL.createObjectURL(blob);
  YOUR_AUDIO_ELEMENT.src = url;
  YOUR_AUDIO_ELEMENT.load();
});
THE_MIME_TYPE is a placeholder for the actual MIME type, and YOUR_AUDIO_ELEMENT is a reference to your audio element.
Once you don't need the object URL anymore you can release the memory by calling URL.revokeObjectURL(url).
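For illustration, a minimal sketch of that flow with the cleanup included; 'audio/wav' is an assumed MIME type and the element lookup is a placeholder:

const blob = new Blob([res], { type: 'audio/wav' }); // assumed MIME type
const url = URL.createObjectURL(blob);
const audio = document.querySelector('audio');       // placeholder element lookup
audio.src = url;
// For a short, fully buffered clip it is usually safe to revoke once the
// element can play through; for longer media keep the URL alive until the
// element is discarded.
audio.addEventListener('canplaythrough', () => URL.revokeObjectURL(url), { once: true });
audio.load();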

Related

How to get audio buffer from video element api?

How do I get the audio buffer from a video element? I know a way using BaseAudioContext.decodeAudioData(); it fetches directly from an audio file via a request, but I need to get the buffer from the video directly so I can manipulate it.
const video = document.createElement('video');
video.src = 'https://www.w3schools.com/html/mov_bbb.mp4';
document.body.appendChild(video);

let audioCtx;
let audioSource;

const play = () => {
  audioCtx = new AudioContext();
  audioSource = audioCtx.createMediaElementSource(video);
  audioSource.connect(audioCtx.destination);
  video.play();
};

video.ontimeupdate = () => {
  let buffer = new AudioBufferSourceNode(audioCtx);
  // Manipulate audio buffer realtime
  // Problem is buffer null
  buffer.connect(audioSource.context.destination);
  console.log(buffer.buffer);
};
<!DOCTYPE html>
<html>
  <body>
    <button onclick="play()">Play</button>
  </body>
</html>
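One note on the code above: AudioBufferSourceNode.buffer stays null unless you assign an AudioBuffer to it yourself, so reading it in ontimeupdate will always log null. To tap live samples from a media element you can insert an AnalyserNode between the MediaElementAudioSourceNode and the destination (for actual sample manipulation an AudioWorkletNode would be needed). A minimal sketch under those assumptions:

const audioCtx = new AudioContext();
const source = audioCtx.createMediaElementSource(video); // 'video' as above
const analyser = audioCtx.createAnalyser();
source.connect(analyser);
analyser.connect(audioCtx.destination);

const samples = new Float32Array(analyser.fftSize);
function tick() {
  analyser.getFloatTimeDomainData(samples); // current time-domain samples
  // ...inspect or visualize 'samples' here (a read-only tap)...
  requestAnimationFrame(tick);
}
tick();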

HTML5 video tag not working on iOS's browser (iPhone)

Context
I am trying to play a video in the Edge browser.
The project framework is:
.NET 6 MVC and Vue for the client side
.NET 6 WebApi for the server side
The client's Vue component sends requests with a Range header (1 MB) to fetch fragmented MP4, and uses Media Source Extensions (MSE) to append each ArrayBuffer to the MediaSource behind the blob URL that video.src points to, like this:
var mediaSource = new MediaSource();
mediaSource.addEventListener('sourceopen', sourceOpen);
video.src = URL.createObjectURL(mediaSource);
This works fine in Windows Edge, BUT didn't work on an iPhone (tested in the iPhone SE's Edge browser):
the video tag didn't work, it only showed a blank page.
(Image from iPhone SE, Edge version 100.0.1185.50.)
It works fine in Windows Edge version 100.0.1185.50.
(Image from Windows 10.)
Tried Solution
I have tried adding the playsinline attribute to the video tag and the other solutions in HTML5 Video tag not working in Safari, iPhone and iPad, but it still didn't work.
Code Snippet
The Vue component's method is as below:
/*
 * Check whether videoId equals the current course.
 * If not, fetch a new video stream and create a blob URL for this.videoUrl.
 */
async displayVideo() {
  if (this.videoId != this.course) {
    //common.loader.show("#255AC4", 1.5, "Loading...");
    this.videoId = this.course;
    let video = document.querySelector("video");
    let assetURL = FrontEndUrl + `/Stream/${this.videoId}`;
    let mimeCodec = 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"';
    let sourceBuffer;
    let chunkSize;
    let contentRange;
    let loop;
    let index;
    const token = common.cookieManager.getCookieValue(`signin`);
    if ('MediaSource' in window && MediaSource.isTypeSupported(mimeCodec)) {
      let mediaSource = new MediaSource();
      mediaSource.addEventListener('sourceopen', sourceOpen);
      video.src = URL.createObjectURL(mediaSource);
    } else {
      console.error('Unsupported MIME type or codec: ', mimeCodec);
    }
    function sourceOpen() {
      // chunkSize set to 1MB
      chunkSize = 1024 * 1024 * 1;
      // index set to 1 because fetchNextSegment starts from the second fragment
      index = 1;
      // Grab the mediaSource and attach the updateend event to the sourceBuffer
      let mediaSource = this;
      sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
      sourceBuffer.addEventListener("updateend", fetchNextSegment);
      fetchArrayBuffer(assetURL, chunkSize * 0, appendBuffer);
      // 📌 This line throws a DOM interaction error
      //video.play();
    }
    function fetchNextSegment() {
      if (index > loop) {
        sourceBuffer.removeEventListener("updateend", fetchNextSegment);
        return;
      }
      fetchArrayBuffer(assetURL, chunkSize * index, appendBuffer);
      index++;
    }
    function fetchArrayBuffer(url, range, appendBuffer) {
      fetch(url, {
        headers: {
          "Range": `bytes=${range}-`,
          "Authorization": `Bearer ${token}`,
        }
      })
        .then(response => {
          if (!loop) {
            contentRange = response.headers.get("x-content-range-total");
            loop = Math.floor(contentRange / chunkSize);
          }
          return response.arrayBuffer();
        })
        .then(data => { appendBuffer(data) });
    }
    function appendBuffer(videoChunk) {
      if (videoChunk) {
        sourceBuffer.appendBuffer(videoChunk);
      }
    }
    // 📌 This is the old approach; it could not attach the JWT token
    // this.videoUrl = await this.fetchVideoStream(this.videoId); //FrontEndUrl + `/Stream/${this.videoId}`;
    //common.loader.close();
  }
},
And my Vue template:
<p-card>
  <template #header>
  </template>
  <template #title>
    <h2>Course name: {{courseName}}</h2>
  </template>
  <template #subtitle>
  </template>
  <template #content>
    <video id="video" style="width: 100%; height: auto; overflow: hidden;" :key="videoUrl" v-on:loadeddata="setCurrentTime" v-on:playing="playing" v-on:pause="pause" controls autoplay playsinline>
    </video>
  </template>
  <template #footer>
    All rights reserved. Unauthorized reproduction is prohibited.
  </template>
</p-card>
I also checked the logs on the machine, but they were normal.
Does anyone know why it doesn't work in iOS Edge? Thanks
Summary
After checking Can I use MSE, I found the reason: iOS on the iPhone does not support MSE (Media Source Extensions), so the MSE approach didn't work.
(Image from Can I use MSE.)
Solution
iOS on the iPhone supports HLS natively (see the Apple dev docs), so you need to convert the MP4 to HLS format (you can use Bento4's HLS tooling).
📌 The source MP4 must be in fragmented MP4 (fMP4) format.
After conversion, the HLS output directory contains the generated playlists and segments (directory screenshot omitted).
The filename extension should be .m3u8, and master.m3u8 is a manifest that describes all of the video's information.
Then point the video tag's src attribute at the HLS resource's URL (master.m3u8), like this sample code:
<video src="https://XXX/master.m3u8">
</video>
You can also use the video.js library; the source object's type is "application/x-mpegURL":
var player = videojs("videoId");
var url, srcObject;
if (this.iOS()) {
  // ⚠ Authentication fails here for a reason that is still unclear;
  //   the back-end validation has been confirmed to work correctly.
  //const token = common.cookieManager.getCookieValue(`signin`);
  url = FrontEndUrl + `/Media/Video/${this.videoId}/master.m3u8`;
  srcObject = { src: url, type: "application/x-mpegURL" };
}
try {
  player.src(srcObject);
  player.play();
} catch (e) {
  alert(e.message);
  console.log(e);
}
A simple HLS demo is on this GitHub page (note that this demo uses hls.js, not video.js):
<iframe width="auto" src="https://s780609.github.io/hlsJsDemo/">
</iframe>
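For reference, a minimal hls.js sketch (the URL is a placeholder and the markup is assumed; this is not code from the demo). Safari/iOS plays HLS natively, while other browsers need hls.js, which feeds the stream through MSE:

// Assumes hls.js is loaded, e.g. <script src="https://cdn.jsdelivr.net/npm/hls.js@1"></script>
var video = document.querySelector('video');
var src = 'https://XXX/master.m3u8'; // placeholder URL

if (video.canPlayType('application/vnd.apple.mpegurl')) {
  video.src = src;          // native HLS (Safari, iOS)
} else if (Hls.isSupported()) {
  var hls = new Hls();
  hls.loadSource(src);
  hls.attachMedia(video);   // MSE-based playback elsewhere
}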
Reference for Bento4
Example command to convert to HLS format:
📌 -f: force replacing the old output
     -o: output directory
mp4hls -f -o [output directory] [source mp4 file]

HTML canvas video streaming

I have this code which plays my camera's view in a video tag and then draws it into a canvas element on the same page. The page has two squares: one for the video tag (playing the camera's view) and the other for the canvas (playing the same video as the video tag). How can I edit my code to make the video play in a canvas on another page so my users can watch it live? If there is a method to let users watch it on the same page and see only the canvas, that is also acceptable.
Here is my HTML file:
<html>
  <head>
    <meta charset="UTF-8">
    <title>With canvas</title>
  </head>
  <body>
    <div class="booth">
      <video id="video" width="400" height="300" autoplay></video>
      <canvas id="canvas" width="400" height="300" style="border: 1px solid black;"></canvas>
    </div>
    <script src="video.js"></script>
  </body>
</html>
And here is my JS file:
(function() {
  var canvas = document.getElementById('canvas'),
      context = canvas.getContext('2d'),
      video = document.getElementById('video'),
      vendorUrl = window.URL || window.webkitURL;

  navigator.getMedia = navigator.getUserMedia ||
      navigator.webkitGetUserMedia ||
      navigator.mozGetUserMedia ||
      navigator.msGetUserMedia;

  navigator.getMedia({
    video: true,
    audio: false
  }, function(stream) {
    //video.src = vendorUrl.createObjectURL(stream);
    if ('srcObject' in video) {
      video.srcObject = stream;
    } else {
      video.src = vendorUrl.createObjectURL(stream);
    }
    video.play();
  }, function(error) {
    // An error occurred
    //error.code
  });

  video.addEventListener('play', function() {
    setInterval(() => {
      draw(this, context, 400, 300);
    }, 100);
  }, false);

  function draw(video, context, width, height) {
    context.drawImage(video, 0, 0, width, height);
  }
})();
Use MediaRecorder to get live data in chunks as it records from the camera; those chunks can then be sent to the other user.
Then use MediaSource on the other user's side to append the chunks as they arrive and play them live.
I know it's a complicated thing, but believe me, when I was experimenting with this I spent a good amount of time understanding it. I believe you will get some benefit from my experiment code below.
Here is a working demo:
var mediasource = new MediaSource(),
    video = document.querySelector("video"),
    mime = 'video/webm;codecs=vp9,opus';

video.src = URL.createObjectURL(mediasource);

mediasource.addEventListener("sourceopen", function(_) {
  var source = mediasource.addSourceBuffer(mime);
  navigator.mediaDevices.getUserMedia({ video: true, audio: true }).then(stream => {
    var recorder = new MediaRecorder(stream, { mimeType: mime });
    recorder.ondataavailable = d => {
      // creating chunks of 5 seconds of video continuously
      var r = new Response(d.data).arrayBuffer(); // convert blob to arraybuffer
      r.then(arraybuffer => {
        source.appendBuffer(arraybuffer);
      });
    };
    recorder.start(5000);
  });
});
<video class="video" controls autoplay></video>
A separated version for gathering the live data and playing it (demo):
// gathering camera data into the liveData array
var vid = document.querySelector(".video"),
    mime = 'video/webm;codecs=vp9,opus',
    liveData = [];

navigator.mediaDevices.getUserMedia({ video: true, audio: true }).then(stream => {
  var recorder = new MediaRecorder(stream, { mimeType: mime });
  recorder.ondataavailable = d => {
    console.log("capturing");
    new Response(d.data).arrayBuffer().then(eachChunk => {
      // recording in chunks here and saving them to the liveData array
      liveData.push(eachChunk);
    });
  };
  recorder.start(5000);
});

//--------------------------------------------------------------------------------

// playing from liveData in the video
var ms = new MediaSource();
vid.src = URL.createObjectURL(ms);
ms.onsourceopen = function() {
  var source = ms.addSourceBuffer(mime),
      chunkIndex = 0;
  setInterval(function() {
    // only append when an unplayed chunk is actually available
    if (chunkIndex < liveData.length) {
      var currentChunk = liveData[chunkIndex];
      source.appendBuffer(currentChunk);
      console.log("playing");
      chunkIndex++;
    }
  }, 5000);
};
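The demos above keep the recorded chunks inside one page; to let another user watch, each chunk has to travel over some transport and a server has to relay it. A hypothetical WebSocket sketch under those assumptions (the wss://example.test/live endpoint and the relaying server are placeholders, not part of the original answer):

// Sender page: ship each recorded chunk over a WebSocket.
var mime = 'video/webm;codecs=vp9,opus';
var ws = new WebSocket('wss://example.test/live'); // placeholder endpoint
ws.binaryType = 'arraybuffer';
navigator.mediaDevices.getUserMedia({ video: true, audio: true }).then(stream => {
  var recorder = new MediaRecorder(stream, { mimeType: mime });
  recorder.ondataavailable = d => {
    d.data.arrayBuffer().then(chunk => {
      if (ws.readyState === WebSocket.OPEN) ws.send(chunk);
    });
  };
  recorder.start(5000);
});

// Viewer page: queue incoming chunks and append them as the SourceBuffer frees up.
var viewerWs = new WebSocket('wss://example.test/live'); // placeholder endpoint
viewerWs.binaryType = 'arraybuffer';
var ms = new MediaSource(), sourceBuf, queue = [];
document.querySelector('video').src = URL.createObjectURL(ms);
ms.onsourceopen = function() {
  sourceBuf = ms.addSourceBuffer(mime);
  sourceBuf.addEventListener('updateend', pump);
};
viewerWs.onmessage = function(msg) {
  queue.push(msg.data);
  pump();
};
function pump() {
  if (sourceBuf && !sourceBuf.updating && queue.length) {
    sourceBuf.appendBuffer(queue.shift());
  }
}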

Is it possible to use the MediaRecorder API with html5 video?

I would like to record the video tag so that I can stream the blob data to a WebSocket server; however, when I attempt to start the MediaRecorder I get the following error:
The MediaRecorder failed to start because there are no audio or video
tracks available.
Is it possible to add the audio/video tracks from the html5 video tag to the media stream?
<script>
  var video = document.getElementById('video');
  var fileElem = document.getElementById('file'); // reference to the file input below
  var mediaStream = video.captureStream(30);
  var mediaRecorder = new MediaRecorder(
    mediaStream,
    {
      mimeType: 'video/webm;codecs=h264',
      videoBitsPerSecond: 3000000
    }
  );
  fileElem.onchange = function () {
    var file = fileElem.files[0],
        canplay = !!video.canPlayType(file.type).replace("no", ""),
        isaudio = file.type.indexOf("audio/") === 0 && file.type !== "audio/mpegurl";
    video.src = URL.createObjectURL(file);
    video.play();
    mediaRecorder.start(1000); // Start recording, and dump data every second
  };
</script>
<p id="choice" class="info"><input type="file" id="file"> File type: <span id="ftype"></span></p>
<video width="640" height="360" id="video" src="" controls="false"></video>
Is it possible to use the MediaRecorder API with html5 video?
Yes, but you need to initialize the MediaRecorder after HTMLVideoElement.readyState indicates that the metadata has loaded.
Here is a sample, but it only works if the video source is from the same origin (since captureStream cannot capture from an element with cross-origin data).
Note: in this sample, I use onloadedmetadata to initialize the MediaRecorder after the video gets its metadata.
var mainVideo = document.getElementById("mainVideo"),
    displayVideo = document.getElementById("displayVideo"),
    videoData = [],
    mediaRecorder;

var btnStart = document.getElementById("btnStart"),
    btnStop = document.getElementById("btnStop"),
    btnResult = document.getElementById("btnResult");

var initMediaRecorder = function() {
  // Record with 25 fps
  mediaRecorder = new MediaRecorder(mainVideo.captureStream(25));
  mediaRecorder.ondataavailable = function(e) {
    videoData.push(e.data);
  };
}

function startCapture() {
  videoData = [];
  mediaRecorder.start();
  // Buttons 'disabled' state
  btnStart.setAttribute('disabled', 'disabled');
  btnStop.removeAttribute('disabled');
  btnResult.setAttribute('disabled', 'disabled');
};

function endCapture() {
  mediaRecorder.stop();
  // Buttons 'disabled' state
  btnStart.removeAttribute('disabled');
  btnStop.setAttribute('disabled', 'disabled');
  btnResult.removeAttribute('disabled');
};

function showCapture() {
  return new Promise(resolve => {
    setTimeout(() => {
      // Wrapping this in setTimeout, so it's processed in the next RAF
      var blob = new Blob(videoData, {
            'type': 'video/mp4'
          }),
          videoUrl = window.URL.createObjectURL(blob);
      displayVideo.src = videoUrl;
      resolve();
    }, 0);
  });
};

video {
  max-width: 300px;
  max-height: 300px;
  display: block;
  margin: 10px 0;
}
<video id="mainVideo" playsinline="" webkit-playsinline="1" controls="1" onloadedmetadata="initMediaRecorder()">
<source src="sampleVideo.mp4" type="video/mp4">
</video>
<button id="btnStart" onclick="startCapture()"> Start </button>
<button id="btnStop" disabled='disabled' onclick="endCapture()"> Stop </button>
<button id="btnResult" disabled='disabled' onclick="showCapture()"> Show Result </button>
<video id="displayVideo" playsinline="" webkit-playsinline="1" controls="1"></video>

How to generate Initialization Segment of webm video to use with Media Source API

I'm building a small application that uses the MediaRecorder API to split the video recorded from the webcam and upload all the parts to the server.
I see that with the Media Source API, I need to play the first part before I can play any other part.
According to http://www.w3.org/TR/media-source/#examples I need the "Initialization Segment" at the beginning of the file.
How can I generate the "Initialization Segment" of a WebM file in JS so I can play any part I choose? Is there any lib for that? (I have no knowledge of the WebM byte stream format.)
You need to use Media Source Extensions.
Please refer to the example below. (Note that it follows an early draft of the spec: the sourceAddId/sourceAppend calls on the media element were replaced in the final API by MediaSource and SourceBuffer objects.)
<script>
  var appendID = "123";

  function onOpen(e) {
    var video = e.target;
    var headers = GetStreamHeaders();
    if (headers == null) {
      // Error fetching headers. Signal end of stream with an error.
      video.sourceEndOfStream(HTMLMediaElement.EOS_NETWORK_ERR);
    }
    video.sourceAddId(appendID, 'video/webm; codecs="vorbis,vp8"');
    // Append the stream headers (i.e. WebM Header, Info, and Tracks elements)
    video.sourceAppend(appendID, headers);
    // Append some initial media data.
    video.sourceAppend(appendID, GetNextCluster());
  }

  function onSeeking(e) {
    var video = e.target;
    // Abort current segment append.
    video.sourceAbort(appendID);
    // Notify the cluster loading code to start fetching data at the
    // new playback position.
    SeekToClusterAt(video.currentTime);
    // Append clusters from the new playback position.
    video.sourceAppend(appendID, GetNextCluster());
    video.sourceAppend(appendID, GetNextCluster());
  }

  function onProgress(e) {
    var video = e.target;
    if (video.sourceState == video.SOURCE_ENDED)
      return;
    // If we have run out of stream data, then signal end of stream.
    if (!HaveMoreClusters()) {
      video.sourceEndOfStream(HTMLMediaElement.EOS_NO_ERROR);
      return;
    }
    video.sourceAppend(appendID, GetNextCluster());
  }

  var video = document.getElementById('v');
  video.addEventListener('sourceopen', onOpen);
  video.addEventListener('seeking', onSeeking);
  video.addEventListener('progress', onProgress);
</script>
<video id="v" autoplay> </video>
<script>
  var video = document.getElementById('v');
  video.src = video.mediaSourceURL;
</script>
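For comparison, a minimal sketch against the final MediaSource API (my sketch, with placeholder URLs). With MediaRecorder, the first chunk it emits contains the WebM initialization segment (EBML header, Info, and Tracks elements), so if you store that first chunk and append it before any later part, each later part becomes playable on its own:

const mime = 'video/webm; codecs="vorbis,vp8"';
const video = document.querySelector('video');
const mediaSource = new MediaSource();
video.src = URL.createObjectURL(mediaSource);

mediaSource.addEventListener('sourceopen', async () => {
  const sb = mediaSource.addSourceBuffer(mime);
  // Placeholder URLs: the first is the saved initialization segment
  // (MediaRecorder's first chunk), the second is any later part.
  for (const url of ['/segments/init.webm', '/segments/part7.webm']) {
    const buf = await (await fetch(url)).arrayBuffer();
    await new Promise(resolve => {
      sb.addEventListener('updateend', resolve, { once: true });
      sb.appendBuffer(buf);
    });
  }
});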
