Recording Audio from Google Colab using JavaScript

The following function records audio from Colab perfectly; the issue is that it only stops and saves the recording when the stop-recording button is clicked manually. I want the recording to end at a set time (for example, 5 seconds), without me clicking anything after running the cell.
def get_audio2():
    from IPython.display import HTML, Audio
    from google.colab.output import eval_js
    from base64 import b64decode
    import numpy as np
    from scipy.io.wavfile import read as wav_read
    import io
    import ffmpeg

    # Module to record audio from Colab
    AUDIO_HTML = """
    <script>
    var my_div = document.createElement("DIV");
    var my_p = document.createElement("P");
    var my_btn = document.createElement("BUTTON");
    var t = document.createTextNode("Press to start recording");
    my_btn.appendChild(t);
    //my_p.appendChild(my_btn);
    my_div.appendChild(my_btn);
    document.body.appendChild(my_div);

    var base64data = 0;
    var reader;
    var recorder, gumStream;
    var recordButton = my_btn;

    var handleSuccess = function(stream) {
      gumStream = stream;
      var options = {
        //bitsPerSecond: 8000, // Chrome seems to ignore this, always 48k
        mimeType: 'audio/webm;codecs=opus'
        //mimeType: 'audio/webm;codecs=pcm'
      };
      //recorder = new MediaRecorder(stream, options);
      recorder = new MediaRecorder(stream);
      recorder.ondataavailable = function(e) {
        var url = URL.createObjectURL(e.data);
        var preview = document.createElement('audio');
        preview.controls = true;
        preview.src = url;
        document.body.appendChild(preview);

        reader = new FileReader();
        reader.readAsDataURL(e.data);
        reader.onloadend = function() {
          base64data = reader.result;
          //console.log("Inside FileReader:" + base64data);
        }
      };
      recorder.start();
    };

    recordButton.innerText = "Recording... press to stop";
    navigator.mediaDevices.getUserMedia({audio: true}).then(handleSuccess);

    function toggleRecording() {
      if (recorder && recorder.state == "recording") {
        recorder.stop();
        gumStream.getAudioTracks()[0].stop();
        recordButton.innerText = "Saving the recording... pls wait!"
      }
    }

    // https://stackoverflow.com/a/951057
    function sleep(ms) {
      return new Promise(resolve => setTimeout(resolve, ms));
    }

    var data = new Promise(resolve => {
      //recordButton.addEventListener("click", toggleRecording);
      recordButton.onclick = () => {
        toggleRecording();
        sleep(2000).then(() => {
          // wait 2000 ms for the data to be available...
          // ideally this should use something like await...
          //console.log("Inside data:" + base64data)
          resolve(base64data.toString());
        });
      };
    });
    </script>
    """

    display(HTML(AUDIO_HTML))
    data = eval_js("data")
    binary = b64decode(data.split(',')[1])

    process = (ffmpeg
        .input('pipe:0')
        .output('pipe:1', format='wav')
        .run_async(pipe_stdin=True, pipe_stdout=True, pipe_stderr=True,
                   quiet=True, overwrite_output=True)
    )
    output, err = process.communicate(input=binary)

    riff_chunk_size = len(output) - 8
    # Break up the chunk size into four bytes, held in b.
    q = riff_chunk_size
    b = []
    for i in range(4):
        q, r = divmod(q, 256)
        b.append(r)

    # Replace bytes 4:8 in the output with the actual size of the RIFF chunk.
    riff = output[:4] + bytes(b) + output[8:]

    sr, audio = wav_read(io.BytesIO(riff))
    return audio, sr
I tried changing recordButton.onclick to variations of sleep, thinking the stop would run after a set time, but either the recording never stopped (even when I clicked the stop button, and the cell never finished) or I kept getting this error:
MessageError: ReferenceError: data is not defined
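A minimal sketch of the timed variant, assuming the rest of get_audio2 stays as above: drop the click handler and schedule toggleRecording() inside the data promise itself. (The ReferenceError typically means the edited script threw before var data was assigned, so eval_js("data") found nothing in the page; keeping the promise assignment intact avoids that.) RECORD_MS is an assumed name for the fixed recording length.

var RECORD_MS = 5000;  // record for 5 seconds, then stop automatically
var data = new Promise(resolve => {
  sleep(RECORD_MS).then(() => {
    toggleRecording();                 // stops the recorder and the mic track
    sleep(2000).then(() => {           // same hack as above: wait for FileReader
      resolve(base64data.toString());  // hand the base64 payload to eval_js("data")
    });
  });
});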

Related

How to cut an audio file from input in the browser with JS and lamejs?

I have an input in my Vue component where I upload an audio file via @change. Next I need to cut it: for example, from a 30-second audio file I need to receive the audio from second 5 to second 15.
I have installed the lamejs package to do that.
But after all my operations with the audio I receive the cut audio but without any sound, and I don't know the reason for that. Need help!
Method which uploads the file:
async onUploadFile(event) {
  const fileData = event.target.files[0];
  this.file = fileData;
  await this.decodeFile(fileData);
},
Method which decodes the file to an AudioBuffer:
async onUploadFile(event) {
  const fileData = event.target.files[0];
  this.file = fileData;
  await this.decodeFile(fileData);
},
Method which cuts the audio, encodes it to MP3, and then receives a blob URL:
async audioBufferSlice(buffer, begin, end) {
  const audioContext = new AudioContext();
  const channels = buffer.numberOfChannels;
  const rate = buffer.sampleRate;
  const duration = buffer.duration;
  const startOffset = rate * begin;
  const endOffset = rate * end;
  const frameCount = endOffset - startOffset;
  const audioLength = endOffset - startOffset;

  let trimmedAudio = audioContext.createBuffer(
    buffer.numberOfChannels,
    audioLength,
    rate
  );

  for (var i = 0; i < buffer.numberOfChannels; i++) {
    trimmedAudio.copyToChannel(buffer.getChannelData(i).slice(begin, end), i);
  }

  var audioData = this.serializeAudioBuffer(trimmedAudio);

  let mp3Data = [];
  const sampleBlockSize = 1152;
  let mp3encoder = new lamejs.Mp3Encoder(2, audioData.sampleRate, 128);

  var left = new Int8Array(audioData.channels[0].length);
  var right = new Int8Array(audioData.channels[1].length);

  for (var i = 0; i < audioData.channels[0].length; i += sampleBlockSize) {
    var leftChunk = left.subarray(i, i + sampleBlockSize);
    var rightChunk = right.subarray(i, i + sampleBlockSize);
    var mp3buf = await mp3encoder.encodeBuffer(leftChunk, rightChunk);
    if (mp3buf.length > 0) {
      mp3Data.push(mp3buf);
    }
  }

  let buf = await mp3encoder.flush();
  if (buf.length > 0) {
    mp3Data.push(buf);
  }

  var blob = new Blob(mp3Data, {type: 'audio/mp3'});
  var url = window.URL.createObjectURL(blob);
  console.log('MP3 URl: ', url);
},
What did I do wrong that I receive the cut audio but without any sound?
I looked at this repository as an example: https://github.com/Vinit-Dantkale/AudioFy
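For what it's worth, two details in the snippet above would produce exactly this symptom: left and right are freshly allocated Int8Arrays that are never filled from audioData.channels, so the encoder only ever sees zeros, and lamejs expects 16-bit samples (Int16Array), not 8-bit. Also, getChannelData(i).slice(begin, end) slices by sample index, not by seconds, so the sample offsets (startOffset/endOffset) are what belong there. A minimal sketch of the encoding side, assuming audioData.channels holds Float32Array channel data as in the original:

// Hypothetical fix sketch: convert Float32 samples to Int16 before encoding.
function floatTo16(floatChannel) {
  const out = new Int16Array(floatChannel.length);
  for (let i = 0; i < floatChannel.length; i++) {
    const s = Math.max(-1, Math.min(1, floatChannel[i])); // clamp to [-1, 1]
    out[i] = s < 0 ? s * 0x8000 : s * 0x7FFF;             // scale to 16-bit range
  }
  return out;
}

const left = floatTo16(audioData.channels[0]);   // actually populate the buffers
const right = floatTo16(audioData.channels[1]);
// ...then feed leftChunk/rightChunk to mp3encoder.encodeBuffer() exactly as in
// the loop above, and slice the source with sample offsets:
// buffer.getChannelData(i).slice(startOffset, endOffset)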

Append blobs to url and show

What I am trying to achieve is to have a "stream" of blobs from my socket and append them to a video.
Currently, I have something like this:
const socket = io();
const videoGrid = document.getElementById("video-grid");
const video = document.createElement("video");
video.muted = true;
videoGrid.append(video);

let blobArray = [];
socket.on("view-stream-10", (data) => {
  blobArray.push(
    new Blob([new Uint8Array(data)], {
      type: "video/x-matroska;codecs=avc1,opus",
    })
  );
  let currentTime = video.currentTime;
  let blob = new Blob(blobArray, { type: "video/x-matroska;codecs=avc1,opus" });
  video.src = window.URL.createObjectURL(blob);
  video.currentTime = currentTime;
  video.play();
});
It works, but the video pauses for some milliseconds at the point where the new blob is created and the URL is swapped, and the stutter is quite visible.
Is there any better way?
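One commonly suggested alternative is the Media Source Extensions API: append each incoming chunk to a SourceBuffer instead of rebuilding the whole blob and replacing video.src. A minimal sketch, assuming the socket delivers fragmented WebM segments that MSE accepts and that the MIME string below is supported by the browser (the video/x-matroska type from the snippet generally is not):

const mediaSource = new MediaSource();
const mime = 'video/webm; codecs="vp8,opus"';  // assumed container/codecs
const queue = [];
let sourceBuffer;

video.src = URL.createObjectURL(mediaSource);
mediaSource.addEventListener("sourceopen", () => {
  sourceBuffer = mediaSource.addSourceBuffer(mime);
  sourceBuffer.addEventListener("updateend", () => {
    if (queue.length) sourceBuffer.appendBuffer(queue.shift()); // drain queue
  });
});

socket.on("view-stream-10", (data) => {
  const chunk = new Uint8Array(data);
  // appendBuffer() throws while a previous append is in flight, so queue chunks.
  if (!sourceBuffer || sourceBuffer.updating || queue.length) {
    queue.push(chunk);
  } else {
    sourceBuffer.appendBuffer(chunk);
  }
  video.play();
});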

Is it possible to mix multiple audio files on top of each other, preferably with JavaScript?

I want to combine audio clips, layered on top of each other, so that they play synchronously and are saved in a new audio file. Any help would be much appreciated. I've done some digging online, but couldn't find a definitive answer as to whether many of the available JavaScript audio editing libraries (Mix.js, for example) are capable of this.
Yes, it is possible using OfflineAudioContext() or AudioContext.createChannelMerger() and creating a MediaStream. See Phonegap mixing audio files and the Web Audio API.
You can use fetch() or XMLHttpRequest() to retrieve each audio resource as an ArrayBuffer, and AudioContext.decodeAudioData() to create an AudioBufferSourceNode from the response; an OfflineAudioContext() to render the merged audio; and AudioContext, AudioContext.createBufferSource(), AudioContext.createMediaStreamDestination() and MediaRecorder() to record the stream. Promise.all(), the Promise() constructor and .then() handle the asynchronous fetch() and decodeAudioData() calls, and the resulting mixed-audio Blob is passed along at the stop event of MediaRecorder.
Connect each AudioBufferSourceNode to OfflineAudioContext.destination and call .start() on each node; then call OfflineAudioContext.startRendering(). Create a new AudioContext node and connect the renderedBuffer to it; call .createMediaStreamDestination() on the AudioContext to create a MediaStream from the merged audio buffers and pass its .stream to MediaRecorder(). At the stop event of MediaRecorder, create a Blob URL of the recorded audio mix with URL.createObjectURL(), which can be downloaded using an <a> element with the download attribute and href set to the Blob URL.
var sources = ["https://upload.wikimedia.org/wikipedia/commons/b/be/"
               + "Hidden_Tribe_-_Didgeridoo_1_Live.ogg",
               "https://upload.wikimedia.org/wikipedia/commons/6/6e/"
               + "Micronesia_National_Anthem.ogg"];
var description = "HiddenTribeAnthem";
var context;
var recorder;
var div = document.querySelector("div");
var duration = 60000;
var chunks = [];
var audio = new AudioContext();
var mixedAudio = audio.createMediaStreamDestination();
var player = new Audio();
player.controls = "controls";

function get(src) {
  return fetch(src)
    .then(function(response) {
      return response.arrayBuffer()
    })
}

function stopMix(duration, ...media) {
  setTimeout(function(media) {
    media.forEach(function(node) {
      node.stop()
    })
  }, duration, media)
}

Promise.all(sources.map(get)).then(function(data) {
  var len = Math.max.apply(Math, data.map(function(buffer) {
    return buffer.byteLength
  }));
  context = new OfflineAudioContext(2, len, 44100);
  return Promise.all(data.map(function(buffer) {
    return audio.decodeAudioData(buffer)
      .then(function(bufferSource) {
        var source = context.createBufferSource();
        source.buffer = bufferSource;
        source.connect(context.destination);
        return source.start()
      })
  }))
  .then(function() {
    return context.startRendering()
  })
  .then(function(renderedBuffer) {
    return new Promise(function(resolve) {
      var mix = audio.createBufferSource();
      mix.buffer = renderedBuffer;
      mix.connect(audio.destination);
      mix.connect(mixedAudio);
      recorder = new MediaRecorder(mixedAudio.stream);
      recorder.start(0);
      mix.start(0);
      div.innerHTML = "playing and recording tracks..";
      // stop playback and recorder in 60 seconds
      stopMix(duration, mix, recorder)
      recorder.ondataavailable = function(event) {
        chunks.push(event.data);
      };
      recorder.onstop = function(event) {
        var blob = new Blob(chunks, {
          "type": "audio/ogg; codecs=opus"
        });
        console.log("recording complete");
        resolve(blob)
      };
    })
  })
  .then(function(blob) {
    console.log(blob);
    div.innerHTML = "mixed audio tracks ready for download..";
    var audioDownload = URL.createObjectURL(blob);
    var a = document.createElement("a");
    a.download = description + "." + blob.type.replace(/.+\/|;.+/g, "");
    a.href = audioDownload;
    a.innerHTML = a.download;
    document.body.appendChild(a);
    a.insertAdjacentHTML("afterend", "<br>");
    player.src = audioDownload;
    document.body.appendChild(player);
  })
})
.catch(function(e) {
  console.log(e)
});
<!DOCTYPE html>
<html>
<head>
</head>
<body>
  <div>loading audio tracks.. please wait</div>
</body>
</html>
You can alternatively utilize AudioContext.createChannelMerger() and AudioContext.createChannelSplitter():
var sources = ["/path/to/audio1", "/path/to/audio2"];
var description = "mix";
var chunks = [];
var channels = [[0, 1], [1, 0]];
var audio = new AudioContext();
var player = new Audio();
var merger = audio.createChannelMerger(2);
var splitter = audio.createChannelSplitter(2);
var mixedAudio = audio.createMediaStreamDestination();
var duration = 60000;
var context;
var recorder;
var audioDownload;
player.controls = "controls";

function get(src) {
  return fetch(src)
    .then(function(response) {
      return response.arrayBuffer()
    })
}

function stopMix(duration, ...media) {
  setTimeout(function(media) {
    media.forEach(function(node) {
      node.stop()
    })
  }, duration, media)
}

Promise.all(sources.map(get)).then(function(data) {
  return Promise.all(data.map(function(buffer, index) {
    return audio.decodeAudioData(buffer)
      .then(function(bufferSource) {
        var channel = channels[index];
        var source = audio.createBufferSource();
        source.buffer = bufferSource;
        source.connect(splitter);
        splitter.connect(merger, channel[0], channel[1]);
        return source
      })
  }))
  .then(function(audionodes) {
    merger.connect(mixedAudio);
    merger.connect(audio.destination);
    recorder = new MediaRecorder(mixedAudio.stream);
    recorder.start(0);
    audionodes.forEach(function(node) {
      node.start(0)
    });
    stopMix(duration, ...audionodes, recorder);
    recorder.ondataavailable = function(event) {
      chunks.push(event.data);
    };
    recorder.onstop = function(event) {
      var blob = new Blob(chunks, {
        "type": "audio/ogg; codecs=opus"
      });
      audioDownload = URL.createObjectURL(blob);
      var a = document.createElement("a");
      a.download = description + "." + blob.type.replace(/.+\/|;.+/g, "");
      a.href = audioDownload;
      a.innerHTML = a.download;
      player.src = audioDownload;
      document.body.appendChild(a);
      document.body.appendChild(player);
    };
  })
})
.catch(function(e) {
  console.log(e)
});

Sending a base64 image to Firebase Storage from PhoneGap

I am having a problem sending my base64 image from PhoneGap (iOS) to Firebase Storage. The main problem is that Firebase Storage only accepts a Blob or File as an attachment.
Here's my code for the camera function (cordova-plugin-camera):
function GetCamera() {
  navigator.camera.getPicture(cameraSuccess, cameraError, {
    quality: 50,
    destinationType: Camera.DestinationType.DATA_URL,
    encodingType: Camera.EncodingType.JPEG,
    saveToPhotoAlbum: true
  });
}
Function to convert base64 to a Blob:
function b64toblob(b64_data, content_type) {
  content_type = content_type || '';
  var slice_size = 512;
  var byte_characters = atob(b64_data);
  var byte_arrays = [];
  for (var offset = 0; offset < byte_characters.length; offset += slice_size) {
    var slice = byte_characters.slice(offset, offset + slice_size);
    var byte_numbers = new Array(slice.length);
    for (var i = 0; i < slice.length; i++) {
      byte_numbers[i] = slice.charCodeAt(i);
    }
    var byte_array = new Uint8Array(byte_numbers);
    byte_arrays.push(byte_array);
  }
  var blob = new Blob(byte_arrays, {type: content_type});
  return blob;
};
Camera success function (note that imageblob is a global variable):
function cameraSuccess(imageData) {
  document.getElementById('Attachment1').innerHTML = "Attachment: True";
  var image = imageData;
  imageblob = b64toblob(image, "image/jpeg");
}
Putting the blob into Firebase Storage:
try {
  var storageRef = storage.ref().child('fire');
  var uploadTask = storageRef.put(imageblob);
  uploadTask.on('state_changed', null, null, function() {
    var downloadURL = uploadTask.snapshot.downloadURL;
    console.log("downloadURL :" + downloadURL);
  });
} catch (error) {
  console.log(error);
}
I have tried every single thing, but it's not working. I really need your help; I am out of ideas.
The Cordova camera plugin doesn't return a File object; that is a limitation of the plugin. But it does return all the details about the image, and using those you can create a Blob or File object yourself.
Reference for creating a Blob from a file URL:
var getFileBlob = function(url, cb) {
  var xhr = new XMLHttpRequest();
  xhr.open("GET", url);
  xhr.responseType = "blob";
  xhr.addEventListener('load', function() {
    cb(xhr.response);
  });
  xhr.send();
};

var blobToFile = function(blob, name) {
  blob.lastModifiedDate = new Date();
  blob.name = name;
  return blob;
};

var getFileObject = function(filePathOrUrl, cb) {
  getFileBlob(filePathOrUrl, function(blob) {
    cb(blobToFile(blob, 'test.jpg')); // Second argument is the name of the image
  });
};
Calling the function to get the file blob:
getFileObject('img/test.jpg', function(fileObject) { // First argument is the path of the file
  console.log(fileObject);
});
In your camera success function, try this:
function cameraSuccess(imageData) {
  document.getElementById('Attachment1').innerHTML = "Attachment: True";
  getFileObject(imageData.nativeURL, function(fileObject) {
    console.log(fileObject);
    var imgName = fileObject.name;
    var metadata = { contentType: fileObject.type };
    var uploadFile = storageRef.child("images/" + imgName).put(fileObject, metadata);
    uploadFile.on(firebase.storage.TaskEvent.STATE_CHANGED, function(snapshot) {
      var progress = (snapshot.bytesTransferred / snapshot.totalBytes) * 100;
      console.log(progress);
    }, function(error) {
      console.log(error);
    }, function() {
      var imgFirebaseURL = uploadFile.snapshot.downloadURL;
      console.log(imgFirebaseURL);
    });
  });
}
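One caveat, as an aside: in newer versions of the Firebase JS SDK the snapshot.downloadURL property used above was removed, so if the completion callback logs undefined, the download URL has to be fetched asynchronously instead. A sketch, assuming a namespaced (v5+) Firebase SDK:

// snapshot.downloadURL no longer exists in Firebase JS SDK 5+;
// getDownloadURL() on the storage reference is the replacement.
uploadFile.snapshot.ref.getDownloadURL().then(function(url) {
  console.log(url);
});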

Get video duration when a video file is selected via an input

I'm doing a project with HTML and JavaScript that will run locally with local files.
I need to select a file via an input, get its information, and then decide whether to add it to my list and play it. If I decide to use it, I'll put it in a queue to use later; otherwise I'll just discard it and select another file.
The problem I'm facing is that I can't find a way to get the video duration just by selecting it in the input.
I've searched a lot and didn't find any method to get the duration. In the code below I tried to use 'file.duration', but it didn't work; it just returns 'undefined'.
This is my input, normal as you can see:
<div id="input-upload-file" class="box-shadow">
  <span>upload! (ღ˘⌣˘ღ)</span> <!-- ignore the text face lol -->
  <input type="file" class="upload" id="fileUp" name="fileUpload" onchange="setFileInfo()">
</div>
And this is the function that I'm using to get all the information.
function setFileInfo() {
  showInfo(); // change div content
  var file = document.getElementById("fileUp").files[0];
  var pid = 1;
  var Pname = file.name;
  Pname = Pname.slice(0, Pname.indexOf(".")); // get filename without extension
  var Ptype = file.type;
  var Psize = bytesToSize(file.size); // turns into KB, MB, etc...
  var Pprior = setPriority(Ptype); // returns 1, 2 or 3
  var Pdur = file.duration;
  var Pmem = getMemory(Psize); // returns size * (100 || 10 || 1)
  var Pown = 'user';
  /* a lot of stuff throwing this info to the HTML */
  console.log(Pdur);
}
Is there way to do this? If not, what are the alternatives that can help me?
In modern browsers, you can use the URL API's URL.createObjectURL() with a non-appended video element to load the content of your file.
var myVideos = [];
window.URL = window.URL || window.webkitURL;
document.getElementById('fileUp').onchange = setFileInfo;

function setFileInfo() {
  var files = this.files;
  myVideos.push(files[0]);
  var video = document.createElement('video');
  video.preload = 'metadata';
  video.onloadedmetadata = function() {
    window.URL.revokeObjectURL(video.src);
    var duration = video.duration;
    myVideos[myVideos.length - 1].duration = duration;
    updateInfos();
  }
  video.src = URL.createObjectURL(files[0]);
}

function updateInfos() {
  var infos = document.getElementById('infos');
  infos.textContent = "";
  for (var i = 0; i < myVideos.length; i++) {
    infos.textContent += myVideos[i].name + " duration: " + myVideos[i].duration + '\n';
  }
}
<div id="input-upload-file" class="box-shadow">
  <span>upload! (ღ˘⌣˘ღ)</span>
  <input type="file" class="upload" id="fileUp" name="fileUpload">
</div>
<pre id="infos"></pre>
I needed to validate a single file before continuing to execute more code; here is my method, with the help of Kaiido's answer!
The onchange event when a user uploads a file:
$("input[type=file]").on("change", function(e) {
var file = this.files[0]; // Get uploaded file
validateFile(file) // Validate Duration
e.target.value = ''; // Clear value to allow new uploads
})
Now validate duration:
function validateFile(file) {
  var video = document.createElement('video');
  video.preload = 'metadata';
  video.onloadedmetadata = function() {
    window.URL.revokeObjectURL(video.src);
    if (video.duration < 1) {
      console.log("Invalid Video! video is less than 1 second");
      return;
    }
    methodToCallIfValid();
  }
  video.src = URL.createObjectURL(file);
}
Here is an async/await Promise version:
const loadVideo = file => new Promise((resolve, reject) => {
  try {
    let video = document.createElement('video')
    video.preload = 'metadata'
    video.onloadedmetadata = function () {
      resolve(this)
    }
    video.onerror = function () {
      reject("Invalid video. Please select a video file.")
    }
    video.src = window.URL.createObjectURL(file)
  } catch (e) {
    reject(e)
  }
})
Can be used as follows:
const video = await loadVideo(e.currentTarget.files[0])
console.log(video.duration)
It's simple to get the video duration with FileReader, and it's easy to manage with async/await.
const getVideoDuration = file =>
  new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = () => {
      const media = new Audio(reader.result);
      media.onloadedmetadata = () => resolve(media.duration);
    };
    reader.readAsDataURL(file);
    reader.onerror = error => reject(error);
  });
const duration = await getVideoDuration(file);
where file is a File object.
Live Example
const getVideoDuration = (file) =>
  new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = () => {
      const media = new Audio(reader.result);
      media.onloadedmetadata = () => resolve(media.duration);
    };
    reader.readAsDataURL(file);
    reader.onerror = (error) => reject(error);
  });

const handleChange = async (e) => {
  const duration = await getVideoDuration(e.target.files[0]);
  document.querySelector("#duration").innerText = `Duration: ${duration}`;
};

<div>
  <input type="file" onchange="handleChange(event)" />
  <p id="duration">Duration: </p>
</div>
This is how I managed to get the video duration before pushing it to S3.
I am using this code to upload video files of 4+ GB.
Note: for formats like .avi, .flv, .vob, .mpeg etc., the duration will not be found, so handle that case with a simple message to the user.
// same method can be used for images/audio too, making some little changes
getVideoDuration = async (f) => {
  const fileCallbackToPromise = (fileObj) => {
    return Promise.race([
      new Promise((resolve) => {
        if (fileObj instanceof HTMLImageElement) fileObj.onload = resolve;
        else fileObj.onloadedmetadata = resolve;
      }),
      new Promise((_, reject) => {
        setTimeout(reject, 1000);
      }),
    ]);
  };

  const objectUrl = URL.createObjectURL(f);
  // const isVideo = type.startsWith('video/');
  const video = document.createElement("video");
  video.src = objectUrl;
  await fileCallbackToPromise(video);
  return {
    duration: video.duration,
    width: video.videoWidth,
    height: video.videoHeight,
  };
}
// Call the function (unrelated code removed for clarity):
const meta = await this.getVideoDuration(file);
// meta.width, meta.height and meta.duration are now ready for you to use
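Since fileCallbackToPromise rejects after one second when no metadata event fires (which is what happens for the .avi/.flv/.vob formats mentioned above), the call site presumably wants a try/catch to surface that message to the user; a small sketch:

let meta;
try {
  meta = await this.getVideoDuration(file);
} catch (e) {
  // Hypothetical handling: the timeout race above rejects with no error value.
  alert("Could not read the metadata of this video format.");
  return;
}
console.log(meta.duration, meta.width, meta.height);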
I've implemented getting the video duration in the nano-metadata package: https://github.com/kalashnikovisme/nano-metadata.
You can do something like this:
import nanoMetadata from 'nano-metadata'

const change = (e) => {
  const file = e.target.files[0]

  nanoMetadata.video.duration(file).then((duration) => {
    console.log(duration) // will show you the video duration in seconds
  })
}
