How do I keep playing audio files in the browser - JavaScript

I am trying to play audio clips received from the server through socket.io. The code works, but only the first audio clip received is played; any clip received afterwards is not played. I confirmed their receipt, but I don't know why only the first received audio file plays.
socketio.on('audio_playback_results', function (data) {
    console.log("I received an audio playback!", data);
    playOutput(data);
});
function playOutput(arrayBuffer) {
    let audioContext = new AudioContext();
    let outputSource;
    try {
        if (arrayBuffer.byteLength > 0) {
            console.log(arrayBuffer.byteLength);
            audioContext.decodeAudioData(arrayBuffer,
                function (buffer) {
                    audioContext.resume();
                    outputSource = audioContext.createBufferSource();
                    outputSource.connect(audioContext.destination);
                    outputSource.buffer = buffer;
                    outputSource.start(0);
                },
                function (err) {
                    console.log(`playOutput decode error: ${err}`);
                });
        }
    } catch (e) {
        console.log(`ERROR PLAYING OUTPUT ${e}`);
    }
}
On the server side I perform text-to-speech into an audio buffer and then stream it back to the client:
// requestTTS, ttsClient and io are assumed to be configured elsewhere
// (e.g. a Google Cloud Text-to-Speech client and the socket.io server)
async function textToAudioBuffer(text) {
    requestTTS.input = { text: text }; // text or SSML
    const response = await ttsClient.synthesizeSpeech(requestTTS);
    console.log("RESPONSE # ttsClient, textToAudioBuffer():", response[0].audioContent);
    var results = response[0].audioContent;
    io.emit('audio_playback_results', results);
    return response[0].audioContent;
}
What is causing it not to play the audio clips received after the first one, and how can I fix this?
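
One possible cause, offered as an assumption rather than a confirmed diagnosis: playOutput creates a brand-new AudioContext for every clip. Some browsers (Chrome in particular) cap the number of live AudioContext instances per page, and contexts created outside a user gesture start suspended, so clips after the first can fail silently. A minimal sketch that reuses one shared context, and copies the incoming buffer because decodeAudioData detaches the ArrayBuffer it is given:

// Minimal sketch: one shared AudioContext for all clips.
// Assumes `socketio` is an already-connected socket.io client.
const sharedContext = new (window.AudioContext || window.webkitAudioContext)();

socketio.on('audio_playback_results', function (data) {
    playOutput(data);
});

function playOutput(arrayBuffer) {
    if (!arrayBuffer || arrayBuffer.byteLength === 0) return;
    const copy = arrayBuffer.slice(0); // decodeAudioData detaches its input
    sharedContext.resume()             // needed if created before a user gesture
        .then(() => sharedContext.decodeAudioData(copy))
        .then(buffer => {
            const source = sharedContext.createBufferSource();
            source.buffer = buffer;
            source.connect(sharedContext.destination);
            source.start(0);
        })
        .catch(err => console.log(`error playing output: ${err}`));
}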

Related

Trying to load a video element with Node.js. Getting Error: Not implemented: HTMLMediaElement.prototype.load

Trying to render an mp4 video with jsdom and fabric:
// runs inside a Promise executor; videoElement, source, cls, Media, options,
// url, callback, resolve and reject are in scope from the surrounding code
const object = new (cls || Media)(videoElement, options);
object.set("url", url);
object.set("__options", options);
videoElement.loop = true;

const onSeeked = () => {
    videoElement.pause();
    // middle frame loaded and we can play
    // now we can return ready video
    callback && callback(object);
    resolve(object);
    videoElement.removeEventListener("canplay", onSeeked);
};

const onLoad = () => {
    // Wait for video to load
    videoElement.addEventListener("canplay", onSeeked);
    // Seek middle frame - for sake of paused preview
    // we will get result after another canplay event
    videoElement.currentTime = videoElement.duration * 0.5;
    videoElement.width = videoElement.videoWidth;
    videoElement.height = videoElement.videoHeight;
    videoElement.removeEventListener("canplay", onLoad);
    videoElement.load();
    videoElement.play();
};

videoElement.addEventListener("canplay", onLoad);
videoElement.addEventListener('error', (e) => {
    callback && callback(null, true);
    reject(e);
});
source.addEventListener('error', (e) => {
    callback && callback(null, true);
    reject(e);
});
videoElement.load();
Since this is server-side Node.js, I am using jsdom to create DOM elements.
Once it reaches videoElement.load(), it gives the following error. I looked up the jsdom architecture and it seems they don't support mp4 video load and play.
Getting Error:
Not implemented: HTMLMediaElement.prototype.load
UPDATED:
I was able to get video frames with the ffmpeg library, but now how can I generate the video on a canvas with Node?
Current implementation with ffmpeg:
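
For the canvas side, a hedged sketch of drawing an ffmpeg-extracted frame onto a canvas in Node with the node-canvas package; the package choice and file paths are assumptions, not from the post:

// Hedged sketch: paint an extracted PNG frame onto a node-canvas.
// Assumes the "canvas" npm package is installed; paths are hypothetical.
const { createCanvas, loadImage } = require('canvas');
const fs = require('fs');

async function drawFrame(framePath, outPath) {
    const image = await loadImage(framePath);                // PNG from ffmpeg
    const canvas = createCanvas(image.width, image.height);
    const ctx = canvas.getContext('2d');
    ctx.drawImage(image, 0, 0);                              // paint the frame
    fs.writeFileSync(outPath, canvas.toBuffer('image/png')); // save the result
}

drawFrame('frames/1.png', 'out/1.png').catch(console.error);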

Webcam recordings in .mp4 or .webm do not play in the macOS Safari browser or on iOS devices

I record a video through VideoJS. The code looks like this:
// Video recording via webcam
var videoMaxLengthInSeconds = 180;

// Initialize the video player
let videoBlob;
var player = videojs("myVideo", {
    controls: true,
    width: 720,
    height: 480,
    fluid: false,
    plugins: {
        record: {
            audio: true,
            video: true,
            maxLength: videoMaxLengthInSeconds,
            debug: true,
            videoMimeType: "video/mp4"
        }
    }
}, function() {
    // print version information at startup
    videojs.log(
        'Using video.js', videojs.VERSION,
        'with videojs-record', videojs.getPluginVersion('record'),
        'and recordrtc', RecordRTC.version
    );
});

// error handling for getUserMedia
player.on('deviceError', function() {
    console.log('device error:', player.deviceErrorCode);
});

// Handle error events of the video player
player.on('error', function(error) {
    console.log('error:', error);
});

// user clicked the record button and started recording!
player.on('startRecord', function() {
    console.log('started recording! Do whatever you need to');
});

// user completed recording and stream is available
// Upload the Blob to your server or download it locally!
let recording;
let recordingData;
player.on('finishRecord', function() {
    // the blob object contains the recorded data that
    // can be downloaded by the user, stored on server etc.
    recordingData = player.recordedData;
    videoBlob = player.recordedData.video;
    //let myblob = new Blob(player.recordedData, { type: "video/webm" });
    let objectURL = window.URL.createObjectURL(player.recordedData);
    let downloadButton = document.getElementById('downloadButton');
    downloadButton.href = objectURL;
    downloadButton.download = "Vlog.webm";
    //recording = new File(myBlob, 'vlog.webm')
    console.log(recording);
    console.log('finished recording: ', videoBlob);
});

// Sending recorded video to server
// (ytUrl and tempFile are set elsewhere in the page's code)
$('#postButton').click(function() {
    // Get form data
    form = document.querySelectorAll('#form');
    let formData = new FormData(form[0]);
    let disabled = document.getElementById("commentsDisable").checked;
    console.log("Comments Enabled: " + disabled);
    formData.append('commentsDisabled', disabled);
    let selection = document.getElementById('categorySelect');
    let selected = selection.options[selection.selectedIndex].value;
    // Append selected category
    formData.append('category', selected);
    // Append YouTube embed link
    if (ytUrl) {
        formData.append('ytlink', ytUrl);
    }
    // Append recordedBlob to form data as file
    if (recordingData) {
        console.log('Recording detected: ' + recordingData);
        formData.append('videoFile', recordingData, recordingData.name);
    }
    // Append video from local upload
    if (tempFile) {
        formData.append('videoFile', tempFile);
    }
    // Send POST request via AJAX to server
    $.ajax({
        type: "POST",
        url: "/make_vlog/",
        processData: false,
        contentType: false,
        data: formData,
        success: function(response) {
            alert(response);
            //location.href = "/vlogs";
        }
    });
});
On the server side I have a Django app which stores the file as .mp4 and creates a new Vlog model.
When I open the page, the video is loaded and can be played in all browsers, except that Safari and iOS devices don't play the video (format not supported).
When I upload a video from a file instead of a webcam recording, and the file is a valid mp4 video (for example from here: example_video), the file is played on every device and browser.
I think the problem is the video encoding in my JS code. The same problem occurs with a .webm file as well.
When I download the webm, convert it to mp4 in VLC, and upload it to the server, the video plays correctly.
Does anyone have experience with such a problem?
Thanks
You need to convert the webm videos to mp4 server-side for playback in Safari.
With web-based webcam recording, each browser saves in a specific native format (MIME type): Safari saves mp4/mp3, while other browsers usually save webm.
Changing the file extension does not help; you need to convert the video.
You can convert the webm to mp4 with ffmpeg, server side.
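
A minimal sketch of that conversion with fluent-ffmpeg on a Node server; the file paths and the H.264/AAC codec choice are assumptions, and the same flags work when invoking the ffmpeg binary directly from the Django side:

// Hedged sketch: convert a recorded webm into an mp4 Safari/iOS can play.
// Assumes fluent-ffmpeg is installed and the ffmpeg binary is on the PATH.
const ffmpeg = require('fluent-ffmpeg');

ffmpeg('uploads/vlog.webm')                 // hypothetical input path
    .videoCodec('libx264')                  // H.264 video for Safari/iOS
    .audioCodec('aac')                      // AAC audio for Safari/iOS
    .outputOptions('-movflags +faststart')  // moov atom first, for web playback
    .on('end', () => console.log('conversion done'))
    .on('error', err => console.log('conversion error:', err))
    .save('uploads/vlog.mp4');              // hypothetical output path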

HTML 5 video and audio appended to a SourceBuffer become de-synchronised

I'm using the Media Source Extensions API to capture the desktop / an application / a Chrome tab with its audio, and stream that data to multiple clients through Socket.io.
I am using a MediaRecorder which records the stream in 10 ms chunks, sending each chunk to my server, which relays it back to every client to be appended to a SourceBuffer attached to a MediaSource attached to a video tag. Both the audio and video are successfully sent, received and displayed by the video tag; however, after a short time the audio gets ahead of the video and the two gradually drift further apart.
// videoBuffer, playing, queue and socket are declared elsewhere
let videoMimeType = 'video/webm;codecs="vp9,opus"';
let player = document.getElementById("player");
let mediaSource = new MediaSource();
player.src = window.URL.createObjectURL(mediaSource);

mediaSource.addEventListener('sourceopen', function(e) {
    if (videoBuffer == null) {
        videoBuffer = mediaSource.addSourceBuffer(videoMimeType);
        videoBuffer.mode = 'sequence';
    }
});

navigator.mediaDevices.getDisplayMedia({
    video: {
        width: {
            max: 1280
        },
        height: {
            max: 720
        },
        frameRate: 30
    },
    audio: {
        echoCancellation: false,
        googEchoCancellation: false,
        googAutoGainControl: false,
        googAutoGainControl2: false,
        googNoiseSuppression: false,
        googHighpassFilter: false
    }
}).then(
    stream => {
        let videoRecorder = new MediaRecorder(new MediaStream(stream), {
            mimeType: videoMimeType
        });
        stream.getTracks()[0].addEventListener("ended", () => {
            stream.getTracks().forEach(track => track.stop());
            videoRecorder.stop();
            playing = false;
            player.src = "";
            socket.emit("end");
            return;
        });
        videoRecorder.ondataavailable = function(e) {
            if (e.data.size > 0) {
                socket.emit('update', e.data);
            }
        };
        videoRecorder.start(10);
    },
    error => {
        console.log("Unable to acquire screen capture", error);
    });

socket.on("update", (arrayBuffer) => {
    if (playing) {
        player.play();
        if (!videoBuffer.updating && queue.length == 0) {
            videoBuffer.appendBuffer(arrayBuffer);
        }
    }
});
I expected the video and audio to remain synchronised throughout playback, but instead the audio gets ahead of the video over time. The video also buffers at times, which seems to herald the de-synchronisation, though during buffering both the audio and video stop and then start again afterwards.
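
One detail worth flagging in the snippet above (an observation, not from the original thread): the queue array is checked but never filled, so any chunk that arrives while the SourceBuffer is still updating is dropped, which can itself cause drift and buffering. A hedged sketch of the usual append-queue pattern, using the same variable names:

// Hedged sketch of the standard SourceBuffer append-queue pattern.
// Assumes videoBuffer is the SourceBuffer created in the 'sourceopen' handler.
const queue = [];

function appendNext() {
    if (videoBuffer && !videoBuffer.updating && queue.length > 0) {
        videoBuffer.appendBuffer(queue.shift());
    }
}

socket.on("update", (arrayBuffer) => {
    queue.push(arrayBuffer); // buffer chunks instead of dropping them
    appendNext();
});

// inside the 'sourceopen' handler, after addSourceBuffer():
// videoBuffer.addEventListener("updateend", appendNext);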

How to open HTML5 video fullscreen if it was fullscreen before

I'm watching a series of videos on a website organised in a playlist. Each video is about 2 minutes long.
The website uses an HTML5 video player and it supports auto-play. That is, each time a video ends, the next video is loaded and automatically played, which is great.
However, with fullscreen, even if I fullscreened a video previously, when the next video loads in the playlist the screen goes back to normal, and I have to click the fullscreen button again.
I've tried writing a simple JavaScript extension with Tampermonkey to load the video fullscreen automatically:
$(document).ready(function() {
    function makefull() {
        var vid = $('video')[0];
        if (vid.requestFullscreen) {
            vid.requestFullscreen();
        } else if (vid.mozRequestFullScreen) {
            vid.mozRequestFullScreen();
        } else if (vid.webkitRequestFullscreen) {
            vid.webkitRequestFullscreen();
        }
        //var vid = $('button.vjs-fullscreen-control').click();
    }
    makefull();
});
But I'm getting this error:
Failed to execute 'requestFullscreen' on 'Element': API can only be initiated by a user gesture.
It's extremely annoying to have to manually click fullscreen after each 2 min video. Is there a way I can achieve this in my own browser? I'm using Chrome.
If you can get the list of URLs, then you can create your own playlist. The code cannot be accurately tested within a cross-origin <iframe>, for example at plnkr.co, but it can be tested from the console at this very document. To test the code, you can use the urls variable at MediaFragmentRecorder and substitute the "pause" event for the "ended" event in .addEventListener().
If you have no control over the HTML or JavaScript used at the site, I am not sure how to provide any code that will be able to solve the inquiry.
const video = document.createElement("video");
video.controls = true;
video.autoplay = true;

const urls = [{
    src: "/path/to/video/"
}, {
    src: "/path/to/video/"
}];

(async() => {
    try {
        video.requestFullscreen = video.requestFullscreen
            || video.mozRequestFullScreen
            || video.webkitRequestFullscreen;
        let fullScreen = await video.requestFullscreen().catch(e => { throw e });
        console.log(fullScreen);
    } catch (e) {
        console.error(e.message);
    }
    for (const { src } of urls) {
        await new Promise(resolve => {
            video.addEventListener("canplay", e => {
                video.load();
                video.play();
            }, {
                once: true
            });
            video.addEventListener("ended", resolve, {
                once: true
            });
            video.src = src;
        });
    }
})();

Fluent-ffmpeg: merging video and audio = wrong frames

I'm trying to merge a video (mp4) without an audio stream with an audio file (mp3). I'm developing video software under node-webkit, which means that I have to use ogg files, so when the user uploads a video or an audio file it is converted to ogg whatever its format. Then, when the user wants to export the video, I export frames from a canvas to PNG images. Once this is done I create a video from the frames at 30 fps with the following code:
// Q (promises), FS (fs), Ffmpeg (fluent-ffmpeg), ffmpegPath, $rootScope and the
// services are set up elsewhere in the application
var videoMaker = function () {
    console.log('videoMaker');
    var deffered = Q.defer();
    if (!FS.existsSync($rootScope.project.path + '/video')) {
        filestorageService.createFolder($rootScope.project.path + '/video');
    }
    audioMaker().then(function () {
        var commandVideo = new Ffmpeg({
            source: $rootScope.project.path + '/frames/%d.png'
        });
        commandVideo.setFfmpegPath(ffmpegPath);
        commandVideo.addOptions(['-c:v libx264', '-r 30']).withFpsInput(30).format('mp4').on('error', function (err) {
            console.log('video', err);
        }).on('end', function () {
            console.log('video win');
            deffered.resolve();
        }).save($rootScope.project.path + '/video/rendu.mp4');
    });
    return deffered.promise;
};
Then I'm reconverting the audio which has been uploaded by the user to mp3:
var audioMaker = function () {
    console.log('audioMaker');
    var deffered = Q.defer();
    if ($rootScope.project.settings.music.path !== '') {
        FS.writeFileSync($rootScope.project.path + '/music/finalMusic.mp3', null);
        var commandAudio = new Ffmpeg({
            source: $rootScope.project.settings.music.path
        });
        commandAudio.setFfmpegPath(ffmpegPath);
        if ($rootScope.project.settings.music.fadeIn) {
            commandAudio.audioFilters('afade=t=in:ss=0:d=0.5');
        }
        console.log($rootScope.project.settings.music.fadeOut, $rootScope.project.settings.music.fadeIn);
        if ($rootScope.project.settings.music.fadeOut) {
            var time = sceneService.getTotalDuration() - 0.5;
            commandAudio.audioFilters('afade=t=out:st=' + time + ':d=0.5');
        }
        commandAudio.toFormat('mp3').on('end', function () {
            console.log('audio win');
            deffered.resolve();
        }).on('error', function (err) {
            console.log('audio', err);
        }).save($rootScope.project.path + '/music/finalMusic.mp3');
    } else {
        deffered.resolve();
    }
    return deffered.promise;
};
Up to that point everything is all right and those files work well, but then I do this:
var command = new Ffmpeg({
    source: $rootScope.project.path + '/video/rendu.mp4'
});
command.setFfmpegPath(ffmpegPath);
console.log($rootScope.project.settings.music.path !== '');
if ($rootScope.project.settings.music.path !== '') {
    command.addInput($rootScope.project.path + '/music/finalMusic.mp3');
    command.addOptions(['-c:v copy', '-c:a copy']);
    if ($rootScope.project.settings.music.duration > sceneService.getTotalDuration()) {
        command.addOptions(['-shortest']);
    }
    command.on('error', function (err) {
        console.log(err);
    }).on('end', function () {
        console.log("win");
        //filestorageService.rmFolder($rootScope.project.path + '/frames');
    }).save($rootScope.project.path + '/video/rendu.mp4');
} else {
    filestorageService.rmFolder($rootScope.project.path + '/frames');
}
And my final file has the music and the right duration, but the frames aren't right. Any ideas?
I finally found out how to fix this. The final video wasn't good because I was merging the video and the audio into the same video file, which means that I was reading the video data via the output stream while I was writing to the file. So in my program I created a temp folder which contains the video and the audio, then I merge them into a new video file in a separate final folder, and finally delete the temp folder.
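
A minimal sketch of that fix, reusing the fluent-ffmpeg setup from above; the temp and final folder names are placeholders:

// Hedged sketch: read inputs from a temp folder and merge into a NEW file,
// instead of saving over the same rendu.mp4 that is being read.
var command = new Ffmpeg({
    source: $rootScope.project.path + '/temp/rendu.mp4'
});
command.setFfmpegPath(ffmpegPath);
command.addInput($rootScope.project.path + '/temp/finalMusic.mp3');
command.addOptions(['-c:v copy', '-c:a copy']);
command.on('error', function (err) {
    console.log(err);
}).on('end', function () {
    // safe to clean up only after the merge has fully finished
    filestorageService.rmFolder($rootScope.project.path + '/temp');
}).save($rootScope.project.path + '/final/rendu.mp4'); // different output file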
