onaudioprocess not fired in an HTML5 voice recorder - javascript

I'm building an HTML5 voice recorder with a visualizer, but when the program runs it keeps giving me an error that the variables used in onaudioprocess are not declared; this is because onaudioprocess is not firing. What is going wrong? Here is my full code:
// variables
var leftchannel = [];
var rightchannel = [];
var recorder = null;
var recording = false;
var recordingLength = 0;
var volume = null;
var audioInput = null;
var sampleRate = 44100;
var audioContext = null;
var context = null;
var outputString;
if (!navigator.getUserMedia)
navigator.getUserMedia = navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia;
if (navigator.getUserMedia){
navigator.getUserMedia({audio:true}, success, function(e) {
alert('Error capturing audio.');
});
} else alert('getUserMedia not supported in this browser.');
function getVal(value)
{
if ( value == "record"){
recording = true;
// reset the buffers for the new recording
leftchannel.length = rightchannel.length = 0;
recordingLength = 0;
document.getElementById('output').innerHTML="Recording now...";
} else if ( value == "stop" ){
// we stop recording
recording = false;
document.getElementById('output').innerHTML="Building wav file...";
// we flat the left and right channels down
var leftBuffer = mergeBuffers ( leftchannel, recordingLength );
var rightBuffer = mergeBuffers ( rightchannel, recordingLength );
// we interleave both channels together
var interleaved = interleave ( leftBuffer, rightBuffer );
var buffer = new ArrayBuffer(44 + interleaved.length * 2);
var view = new DataView(buffer);
// RIFF chunk descriptor
writeUTFBytes(view, 0, 'RIFF');
view.setUint32(4, 44 + interleaved.length * 2, true);
writeUTFBytes(view, 8, 'WAVE');
// FMT sub-chunk
writeUTFBytes(view, 12, 'fmt ');
view.setUint32(16, 16, true);
view.setUint16(20, 1, true);
// stereo (2 channels)
view.setUint16(22, 2, true);
view.setUint32(24, sampleRate, true);
view.setUint32(28, sampleRate * 4, true);
view.setUint16(32, 4, true);
view.setUint16(34, 16, true);
// data sub-chunk
writeUTFBytes(view, 36, 'data');
view.setUint32(40, interleaved.length * 2, true);
var lng = interleaved.length;
var index = 44;
var volume = 1;
for (var i = 0; i < lng; i++){
view.setInt16(index, interleaved[i] * (0x7FFF * volume), true);
index += 2;
}
var blob = new Blob ( [ view ], { type : 'audio/wav' } );
// let's save it locally
document.getElementById('output').innerHTML='Handing off the file now...';
var url = (window.URL || window.webkitURL).createObjectURL(blob);
var li = document.createElement('li');
var au = document.createElement('audio');
var hf = document.createElement('a');
au.controls = true;
au.src = url;
hf.href = url;
hf.download = 'audio_recording_' + new Date().getTime() + '.wav';
hf.innerHTML = hf.download;
li.appendChild(au);
li.appendChild(hf);
recordingList.appendChild(li);
}
}
function success(e){
audioContext = window.AudioContext || window.webkitAudioContext;
context = new audioContext();
volume = context.createGain();
// creates an audio node from the microphone incoming stream(source)
source = context.createMediaStreamSource(e);
// connect the stream(source) to the gain node
source.connect(volume);
var bufferSize = 2048;
recorder = context.createScriptProcessor(bufferSize, 2, 2);
//node for the visualizer
analyser = context.createAnalyser();
analyser.smoothingTimeConstant = 0.3;
analyser.fftSize = 1024;
analyser2 = context.createAnalyser();
analyser2.smoothingTimeConstant = 0.0;
analyser2.fftSize = 1024;
splitter = context.createChannelSplitter();
//when recording happens
recorder.onaudioprocess = function(e){
if (!recording) return;
var left = e.inputBuffer.getChannelData (0);
var right = e.inputBuffer.getChannelData (1);
// get the average of the first channel, bincount is fftsize / 2
var array = new Uint8Array(analyser.frequencyBinCount);
analyser.getByteFrequencyData(array);
var average = getAverageVolume(array);
// get the average for the second channel
var array2 = new Uint8Array(analyser2.frequencyBinCount);
analyser2.getByteFrequencyData(array2);
var average2 = getAverageVolume(array2);
// clear the current state
context.clearRect(0, 0, 60, 130);
// set the fill style
context.fillStyle=gradient;
// create the meters
context.fillRect(0,130-average,25,130);
context.fillRect(30,130-average2,25,130);
}
function getAverageVolume(array) {
var values = 0;
var average;
var length = array.length;
// get all the frequency amplitudes
for (var i = 0; i < length; i++) {
values += array[i];
}
average = values / length;
return average;
}
leftchannel.push (new Float32Array (left));
rightchannel.push (new Float32Array (right));
recordingLength += bufferSize;
// we connect the recorder(node to destination(speakers))
volume.connect(splitter);
splitter.connect(analyser, 0, 0);
splitter.connect(analyser2, 1, 0);
analyser.connect(recorder);
recorder.connect(context.destination);
}
function mergeBuffers(channelBuffer, recordingLength){
var result = new Float32Array(recordingLength);
var offset = 0;
var lng = channelBuffer.length;
for (var i = 0; i < lng; i++){
var buffer = channelBuffer[i];
result.set(buffer, offset);
offset += buffer.length;
}
return result;
}
function interleave(leftChannel, rightChannel){
var length = leftChannel.length + rightChannel.length;
var result = new Float32Array(length);
var inputIndex = 0;
for (var index = 0; index < length; ){
result[index++] = leftChannel[inputIndex];
result[index++] = rightChannel[inputIndex];
inputIndex++;
}
return result;
}
function writeUTFBytes(view, offset, string){
var lng = string.length;
for (var i = 0; i < lng; i++){
view.setUint8(offset + i, string.charCodeAt(i));
}
}
Please help.

Problem solved: pushing the left and right channels has to happen inside the onaudioprocess handler:
leftchannel.push (new Float32Array (left));
rightchannel.push (new Float32Array (right));
recordingLength += bufferSize;
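In other words, the handler itself has to capture the channel data while the buffer is still valid. A trimmed sketch of the corrected onaudioprocess (meter drawing omitted):
recorder.onaudioprocess = function (e) {
    if (!recording) return;
    var left = e.inputBuffer.getChannelData(0);
    var right = e.inputBuffer.getChannelData(1);
    // copy the samples here; the underlying buffers are reused between callbacks
    leftchannel.push(new Float32Array(left));
    rightchannel.push(new Float32Array(right));
    recordingLength += bufferSize;
    // ...the analyser/canvas visualizer code continues here...
};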

Related

FF and Opera not opening encoded files with "URL Blob"

I have encoded a song into a Blob.
But in FF (38) and Opera it doesn't work:
"Media resource blob: site.com/75f35d17-9aaf-4c7a-a52c-943d62ffd40f could not be decoded."
In FF 37 it works well.
http://jsfiddle.net/mLq0uptw/1/
Please help: how can I play this Blob URL song in FF and Opera?
window.requestAnimFrame = (function(){
return window.requestAnimationFrame ||
window.webkitRequestAnimationFrame ||
window.mozRequestAnimationFrame ||
function( callback ){
window.setTimeout(callback, 1000 / 60);
};
})();
// old draw --------------------------------------------------------
//var context = new AudioContext();
var audioBuffer;
var sourceNode;
var analyser;
var javascriptNode;
var actx = new(AudioContext || webkitAudioContext)(), tid,
url = "https://cdn.rawgit.com/epistemex/free-music-for-test-and-demo/master/music/kf_colibris.mp3";
// url = "hello.mp3";
var canvas = document.getElementById('c');
var canvas_context = canvas.getContext('2d');
var gradient = canvas_context.createLinearGradient(0, 0, 0, canvas.height);
gradient.addColorStop(1, '#550000');
gradient.addColorStop(0.75, '#ff0000');
gradient.addColorStop(0.25, '#ffff00');
gradient.addColorStop(0, '#ffff00');
canvas_context.fillStyle = gradient;
function playSound(buffer) {
sourceNode.buffer = buffer;
sourceNode.start(0);
}
// log if an error occurs
function onError(e) {
console.log(e);
}
// STEP 1: Load audio file using AJAX ----------------------------------
loadXHR(url, decode);
tid = setInterval(function() {document.querySelector("div").innerHTML += "."}, 500);
function loadXHR(url, callback) {
try {
var xhr = new XMLHttpRequest();
xhr.open("GET", url);
xhr.responseType = "arraybuffer";
xhr.onerror = function() {console.log("Network error.")};
xhr.onload = function() {
if (xhr.status === 200) callback(xhr.response);
else console.log("Loading error:" + xhr.statusText);
};
xhr.send();
} catch (err) {console.log(err.message)}
}
// STEP 2: Decode the audio file ---------------------------------------
function decode(buffer) {
clearInterval(tid);
document.querySelector("div").innerHTML = "Decoding file...";
actx.decodeAudioData(buffer, split);
}
// STEP 3: Split the buffer --------------------------------------------
function split(abuffer) {
document.querySelector("div").innerHTML = "Splitting...";
setTimeout(function() { // to allow DOM to update status-text
// calc number of segments and segment length
var channels = abuffer.numberOfChannels,
duration = abuffer.duration,
rate = abuffer.sampleRate,
segmentLen = 10,
count = Math.floor(duration / segmentLen),
offset = 0,
// block = 10 * rate;
block = abuffer.length;
// while(count--) {
var url = URL.createObjectURL(bufferToWave(abuffer, offset, block));
var audio = new Audio(url);
audio.controls = true;
audio.volume = 0.5;
audio.autoplay = true;
//playSound(abuffer);
document.body.appendChild(audio);
var analyser = actx.createAnalyser();
analyser.fftSize = 2048;
// connect the stuff up to each other
var source = actx.createMediaElementSource(audio);
source.connect(analyser);
analyser.connect(actx.destination);
freqAnalyser();
function freqAnalyser() {
window.requestAnimFrame(freqAnalyser);
var sum;
var average;
var bar_width;
var scaled_average;
var num_bars = 60;
var data = new Uint8Array(64);
analyser.getByteFrequencyData(data);
//ctx.clearRect(0, 0, 1000, 325);
// clear canvas
canvas_context.clearRect(0, 0, canvas.width, canvas.height);
var bin_size = Math.floor(data.length / num_bars);
for (var i = 0; i < num_bars; i += 1) {
sum = 0;
for (var j = 0; j < bin_size; j += 1) {
sum += data[(i * bin_size) + j];
}
average = sum / bin_size;
bar_width = canvas.width / num_bars;
scaled_average = (average / 256) * canvas.height;
canvas_context.fillRect(i * bar_width, canvas.height, bar_width - 2, - scaled_average);
}
}
//offset += block;
// }
document.querySelector("div").innerHTML = "Ready!";
}, 60)
}
// Convert a audio-buffer segment to a Blob using WAVE representation
function bufferToWave(abuffer, offset, len) {
var numOfChan = abuffer.numberOfChannels,
length = len * numOfChan * 2 + 44,
buffer = new ArrayBuffer(length),
view = new DataView(buffer),
channels = [], i, sample,
pos = 0;
// write WAVE header
setUint32(0x46464952); // "RIFF"
setUint32(length - 8); // file length - 8
setUint32(0x45564157); // "WAVE"
setUint32(0x20746d66); // "fmt " chunk
setUint32(16); // length = 16
setUint16(1); // PCM (uncompressed)
setUint16(numOfChan);
setUint32(abuffer.sampleRate);
setUint32(abuffer.sampleRate * 2 * numOfChan); // avg. bytes/sec
setUint16(numOfChan * 2); // block-align
setUint16(16); // 16-bit (hardcoded in this demo)
setUint32(0x61746164); // "data" - chunk
setUint32(length - pos - 4); // chunk length
// write interleaved data
for(i = 0; i < abuffer.numberOfChannels; i++)
channels.push(abuffer.getChannelData(i));
while(pos < length) {
for(i = 0; i < numOfChan; i++) { // interleave channels
sample = Math.max(-1, Math.min(1, channels[i][offset])); // clamp
sample = (0.5 + sample < 0 ? sample * 32768 : sample * 32767)|0; // scale to 16-bit signed int
view.setInt16(pos, sample, true); // update data chunk
pos += 2;
}
offset++ // next source sample
}
// create Blob
return new Blob([buffer], {type: "audio/mpeg"});
function setUint16(data) {
view.setUint16(pos, data, true);
pos += 2;
}
function setUint32(data) {
view.setUint32(pos, data, true);
pos += 4;
}
}
<canvas id="c" width="1000" height="225" style="display: block;"></canvas>
<div>Loading.</div>
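One thing worth checking (an assumption on my part, not confirmed above): bufferToWave writes a RIFF/WAVE file, yet the Blob is labelled audio/mpeg; declaring the matching MIME type may let Firefox 38 and Opera decode it. A minimal sketch, assuming buffer already holds the complete WAV data:
// Hypothetical fix: label the Blob with the MIME type that matches its contents
var wavBlob = new Blob([buffer], { type: "audio/wav" });
var audio = new Audio(URL.createObjectURL(wavBlob));
audio.controls = true;
audio.volume = 0.5;
document.body.appendChild(audio);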

Audio recording implementation in web2py using recorder.js

I am developing a web app in which I want to use the recorder.js file to implement audio recording. I have this code but I am unable to use it in the web2py framework. Can you give me details on how to use it?
There is a .bower.json file which gets hidden when uploaded to web2py. How can we use it and link all the files so that recording can be done?
This is my .bower.json script:
{
"name": "Recorderjs",
"homepage": "https://github.com/mattdiamond/Recorderjs",
"_release": "f814ac7b3f",
"_resolution": {
"type": "branch",
"branch": "master",
"commit": "f814ac7b3f4ed4f62729860d5a02720f167480b3"
},
"_source": "git://github.com/mattdiamond/Recorderjs.git",
"_target": "*",
"_originalSource": "Recorderjs",
"_direct": true
}
This is my html code:
<body>
<script src="bower_components/Recorderjs/recorder.js">
</script>
<script src="app.js"></script>
<button onclick="record()">Record</button>
<button onclick="stop()">Stop</button>
These are my JavaScript files: /app.js
var navigator = window.navigator;
var Context = window.AudioContext || window.webkitAudioContext;
var context = new Context();
// audio
var mediaStream;
var rec;
navigator.getUserMedia = (
navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia
);
function record() {
navigator.getUserMedia({audio: true}, function(localMediaStream){
mediaStream = localMediaStream;
var mediaStreamSource = context.createMediaStreamSource(localMediaStream);
rec = new Recorder(mediaStreamSource, {
workerPath: '/bower_components/Recorderjs/recorderWorker.js'
});
rec.record();
}, function(err){
console.log('Not supported');
});
}
function stop() {
mediaStream.stop();
rec.stop();
rec.exportWAV(function(e){
rec.clear();
Recorder.forceDownload(e, "test.wav");
});
}
/recorder.js
(function(window){
var WORKER_PATH = 'recorderWorker.js';
var Recorder = function(source, cfg){
var config = cfg || {};
var bufferLen = config.bufferLen || 4096;
this.context = source.context;
this.node = (this.context.createScriptProcessor ||
this.context.createJavaScriptNode).call(this.context,
bufferLen, 2, 2);
var worker = new Worker(config.workerPath || WORKER_PATH);
worker.postMessage({
command: 'init',
config: {
sampleRate: this.context.sampleRate
}
});
var recording = false,
currCallback;
this.node.onaudioprocess = function(e){
if (!recording) return;
worker.postMessage({
command: 'record',
buffer: [
e.inputBuffer.getChannelData(0),
e.inputBuffer.getChannelData(1)
]
});
}
this.configure = function(cfg){
for (var prop in cfg){
if (cfg.hasOwnProperty(prop)){
config[prop] = cfg[prop];
}
}
}
this.record = function(){
recording = true;
}
this.stop = function(){
recording = false;
}
this.clear = function(){
worker.postMessage({ command: 'clear' });
}
this.getBuffer = function(cb) {
currCallback = cb || config.callback;
worker.postMessage({ command: 'getBuffer' })
}
recorderWorker.js:
var recLength = 0,
recBuffersL = [],
recBuffersR = [],
sampleRate;
this.onmessage = function(e){
switch(e.data.command){
case 'init':
init(e.data.config);
break;
case 'record':
record(e.data.buffer);
break;
case 'exportWAV':
exportWAV(e.data.type);
break;
case 'getBuffer':
getBuffer();
break;
case 'clear':
clear();
break;
}
};
function init(config){
sampleRate = config.sampleRate;
}
function record(inputBuffer){
recBuffersL.push(inputBuffer[0]);
recBuffersR.push(inputBuffer[1]);
recLength += inputBuffer[0].length;
}
function exportWAV(type){
var bufferL = mergeBuffers(recBuffersL, recLength);
var bufferR = mergeBuffers(recBuffersR, recLength);
var interleaved = interleave(bufferL, bufferR);
var dataview = encodeWAV(interleaved);
var audioBlob = new Blob([dataview], { type: type });
this.postMessage(audioBlob);
}
function getBuffer() {
var buffers = [];
buffers.push( mergeBuffers(recBuffersL, recLength) );
buffers.push( mergeBuffers(recBuffersR, recLength) );
this.postMessage(buffers);
}
function clear(){
recLength = 0;
recBuffersL = [];
recBuffersR = [];
}
function mergeBuffers(recBuffers, recLength){
var result = new Float32Array(recLength);
var offset = 0;
for (var i = 0; i < recBuffers.length; i++){
result.set(recBuffers[i], offset);
offset += recBuffers[i].length;
}
return result;
}
function interleave(inputL, inputR){
var length = inputL.length + inputR.length;
var result = new Float32Array(length);
var index = 0,
inputIndex = 0;
while (index < length){
result[index++] = inputL[inputIndex];
result[index++] = inputR[inputIndex];
inputIndex++;
}
return result;
}
function floatTo16BitPCM(output, offset, input){
for (var i = 0; i < input.length; i++, offset+=2){
var s = Math.max(-1, Math.min(1, input[i]));
output.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
}
}
function writeString(view, offset, string){
for (var i = 0; i < string.length; i++){
view.setUint8(offset + i, string.charCodeAt(i));
}
}
function encodeWAV(samples){
var buffer = new ArrayBuffer(44 + samples.length * 2);
var view = new DataView(buffer);
/* RIFF identifier */
writeString(view, 0, 'RIFF');
/* file length */
view.setUint32(4, 32 + samples.length * 2, true);
/* RIFF type */
writeString(view, 8, 'WAVE');
/* format chunk identifier */
writeString(view, 12, 'fmt ');
/* format chunk length */
view.setUint32(16, 16, true);
/* sample format (raw) */
view.setUint16(20, 1, true);
/* channel count */
view.setUint16(22, 2, true);
/* sample rate */
view.setUint32(24, sampleRate, true);
/* byte rate (sample rate * block align) */
view.setUint32(28, sampleRate * 4, true);
/* block align (channel count * bytes per sample) */
view.setUint16(32, 4, true);
/* bits per sample */
view.setUint16(34, 16, true);
/* data chunk identifier */
writeString(view, 36, 'data');
/* data chunk length */
view.setUint32(40, samples.length * 2, true);
floatTo16BitPCM(view, 44, samples);
return view;
}
I don't know if this is the only problem, but the following script source references are relative URLs:
<script src="bower_components/Recorderjs/recorder.js"></script>
<script src="app.js"></script>
Because the URLs do not start with a /, they are resolved relative to the URL of the current page. So, if you are at /myapp/default/index, the first URL will reference /myapp/default/index/bower_components/Recorderjs/recorder.js, which will not be correct.
You should keep your JavaScript files in the /web2py/applications/myapp/static folder. In that case, it is best to use the web2py URL() function to generate proper URLs:
<script src="{{=URL('static', 'js/bower_components/Recorderjs/recorder.js')}}"></script>
<script src="{{=URL('static', 'js/app.js')}}"></script>
Regarding internal URLs, assuming you move everything into the /myapp/static/js folder, a URL like:
/bower_components/Recorderjs/recorderWorker.js
would change to:
/myapp/static/js/bower_components/Recorderjs/recorderWorker.js
Relative internal URLs should continue to work without alteration.
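So, in app.js, the Recorder construction would then point at the static path. A sketch of record() with the updated workerPath (assuming the app is named myapp and the Recorderjs files live under static/js):
function record() {
    navigator.getUserMedia({ audio: true }, function (localMediaStream) {
        mediaStream = localMediaStream;
        var mediaStreamSource = context.createMediaStreamSource(localMediaStream);
        rec = new Recorder(mediaStreamSource, {
            // hypothetical path; adjust to wherever the worker actually sits
            workerPath: '/myapp/static/js/bower_components/Recorderjs/recorderWorker.js'
        });
        rec.record();
    }, function (err) {
        console.log('getUserMedia failed:', err);
    });
}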

Using ffmpeg to convert voice recording captured by HTML5

I'm building HTML5 voice recording software with a visualizer. After the voice is recorded and uploaded as a WAV blob to the server, the user should be able to choose the audio format the file is converted to using ffmpeg. What I have achieved so far is uploading the file as WAV. What I still want to do is (a rough server-side sketch follows this list):
On the server side, pick your preferred web programming framework
The web programming framework accepts the upload and stores the file on the server
The web programming framework runs ffmpeg (command line) to process the file
The user can download the processed file
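For example, a minimal Node.js sketch of those four steps could look like this (Express, multer and the ffmpeg command line are just assumed choices for illustration, not something I already have):
// Rough server-side sketch: accept the WAV upload, convert it with the ffmpeg
// command line, then let the user download the result. Express and multer are
// assumptions for illustration; any framework would do.
const express = require('express');
const multer = require('multer');
const { execFile } = require('child_process');

const app = express();
const upload = multer({ dest: 'uploads/' });

app.post('/upload', upload.single('audio'), (req, res) => {
    const input = req.file.path;                       // uploaded WAV file
    const format = req.body.format || 'mp3';           // format chosen by the user
    const output = input + '.' + format;
    // e.g. ffmpeg -i uploads/abc123 uploads/abc123.mp3
    execFile('ffmpeg', ['-i', input, output], (err) => {
        if (err) return res.status(500).send('ffmpeg conversion failed');
        res.download(output);                          // user downloads the processed file
    });
});

app.listen(3000);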
Here is my client-side code so far:
// variables
var leftchannel = [];
var rightchannel = [];
var recorder = null;
var recording = false;
var recordingLength = 0;
var volume = null;
var audioInput = null;
var sampleRate = 44100;
var audioContext = null;
var context = null;
var outputString;
if (!navigator.getUserMedia)
navigator.getUserMedia = navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia;
if (navigator.getUserMedia) {
navigator.getUserMedia({
audio: true
}, success, function (e) {
alert('Error capturing audio.');
});
} else alert('getUserMedia not supported in this browser.');
function getVal(value) {
// if R is pressed, we start recording
if (value == "record") {
recording = true;
// reset the buffers for the new recording
leftchannel.length = rightchannel.length = 0;
recordingLength = 0;
document.getElementById('output').innerHTML = "Recording now...";
// if S is pressed, we stop the recording and package the WAV file
} else if (value == "stop") {
// we stop recording
recording = false;
document.getElementById('output').innerHTML = "Building wav file...";
// we flat the left and right channels down
var leftBuffer = mergeBuffers(leftchannel, recordingLength);
var rightBuffer = mergeBuffers(rightchannel, recordingLength);
// we interleave both channels together
var interleaved = interleave(leftBuffer, rightBuffer);
var buffer = new ArrayBuffer(44 + interleaved.length * 2);
var view = new DataView(buffer);
// RIFF chunk descriptor
writeUTFBytes(view, 0, 'RIFF');
view.setUint32(4, 44 + interleaved.length * 2, true);
writeUTFBytes(view, 8, 'WAVE');
// FMT sub-chunk
writeUTFBytes(view, 12, 'fmt ');
view.setUint32(16, 16, true);
view.setUint16(20, 1, true);
// stereo (2 channels)
view.setUint16(22, 2, true);
view.setUint32(24, sampleRate, true);
view.setUint32(28, sampleRate * 4, true);
view.setUint16(32, 4, true);
view.setUint16(34, 16, true);
// data sub-chunk
writeUTFBytes(view, 36, 'data');
view.setUint32(40, interleaved.length * 2, true);
var lng = interleaved.length;
var index = 44;
var volume = 1;
for (var i = 0; i < lng; i++) {
view.setInt16(index, interleaved[i] * (0x7FFF * volume), true);
index += 2;
}
var blob = new Blob([view], {
type: 'audio/wav'
});
// let's save it locally
document.getElementById('output').innerHTML = 'Handing off the file now...';
var url = (window.URL || window.webkitURL).createObjectURL(blob);
var li = document.createElement('li');
var au = document.createElement('audio');
var hf = document.createElement('a');
au.controls = true;
au.src = url;
hf.href = url;
hf.download = 'audio_recording_' + new Date().getTime() + '.wav';
hf.innerHTML = hf.download;
li.appendChild(au);
li.appendChild(hf);
recordingList.appendChild(li);
}
}
function success(e) {
audioContext = window.AudioContext || window.webkitAudioContext;
context = new audioContext();
volume = context.createGain();
// creates an audio node from the microphone incoming stream(source)
source = context.createMediaStreamSource(e);
// connect the stream(source) to the gain node
source.connect(volume);
var bufferSize = 2048;
recorder = context.createScriptProcessor(bufferSize, 2, 2);
//node for the visualizer
analyser = context.createAnalyser();
analyser.smoothingTimeConstant = 0.3;
analyser.fftSize = 512;
splitter = context.createChannelSplitter();
//when recording happens
recorder.onaudioprocess = function (e) {
if (!recording) return;
var left = e.inputBuffer.getChannelData(0);
var right = e.inputBuffer.getChannelData(1);
leftchannel.push(new Float32Array(left));
rightchannel.push(new Float32Array(right));
recordingLength += bufferSize;
// get the average for the first channel
var array = new Uint8Array(analyser.frequencyBinCount);
analyser.getByteFrequencyData(array);
var c = document.getElementById("myCanvas");
var ctx = c.getContext("2d");
// clear the current state
ctx.clearRect(0, 0, 1000, 325);
var gradient = ctx.createLinearGradient(0, 0, 0, 300);
gradient.addColorStop(1, '#000000');
gradient.addColorStop(0.75, '#ff0000');
gradient.addColorStop(0.25, '#ffff00');
gradient.addColorStop(0, '#ffffff');
// set the fill style
ctx.fillStyle = gradient;
drawSpectrum(array);
function drawSpectrum(array) {
for (var i = 0; i < (array.length); i++) {
var value = array[i];
ctx.fillRect(i * 5, 325 - value, 3, 325);
}
}
}
function getAverageVolume(array) {
var values = 0;
var average;
var length = array.length;
// get all the frequency amplitudes
for (var i = 0; i < length; i++) {
values += array[i];
}
average = values / length;
return average;
}
// we connect the recorder(node to destination(speakers))
volume.connect(splitter);
splitter.connect(analyser, 0, 0);
analyser.connect(recorder);
recorder.connect(context.destination);
}
function mergeBuffers(channelBuffer, recordingLength) {
var result = new Float32Array(recordingLength);
var offset = 0;
var lng = channelBuffer.length;
for (var i = 0; i < lng; i++) {
var buffer = channelBuffer[i];
result.set(buffer, offset);
offset += buffer.length;
}
return result;
}
function interleave(leftChannel, rightChannel) {
var length = leftChannel.length + rightChannel.length;
var result = new Float32Array(length);
var inputIndex = 0;
for (var index = 0; index < length;) {
result[index++] = leftChannel[inputIndex];
result[index++] = rightChannel[inputIndex];
inputIndex++;
}
return result;
}
function writeUTFBytes(view, offset, string) {
var lng = string.length;
for (var i = 0; i < lng; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}

HTML5 voice recording with visualization [closed]

I'm building HTML5 software that records a voice, and when that voice plays back a visualizer should be in action.
Here is my code:
// variables
var leftchannel = [];
var rightchannel = [];
var recorder = null;
var recording = false;
var recordingLength = 0;
var volume = null;
var audioInput = null;
var sampleRate = 44100;
var audioContext = null;
var context = null;
var outputString;
if (!navigator.getUserMedia) navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
if (navigator.getUserMedia) {
navigator.getUserMedia({
audio: true
}, success, function (e) {
alert('Error capturing audio.');
});
} else alert('getUserMedia not supported in this browser.');
// when pressing record
function getVal(value) {
// if R is pressed, we start recording
if (value == "record") {
recording = true;
// reset the buffers for the new recording
leftchannel.length = rightchannel.length = 0;
recordingLength = 0;
document.getElementById('output').innerHTML = "Recording now...";
// if S is pressed, we stop the recording and package the WAV file
} else if (value == "stop") {
// we stop recording
recording = false;
document.getElementById('output').innerHTML = "Building wav file...";
// we flat the left and right channels down
var leftBuffer = mergeBuffers(leftchannel, recordingLength);
var rightBuffer = mergeBuffers(rightchannel, recordingLength);
// we interleave both channels together
var interleaved = interleave(leftBuffer, rightBuffer);
var buffer = new ArrayBuffer(44 + interleaved.length * 2);
var view = new DataView(buffer);
// RIFF chunk descriptor
writeUTFBytes(view, 0, 'RIFF');
view.setUint32(4, 44 + interleaved.length * 2, true);
writeUTFBytes(view, 8, 'WAVE');
// FMT sub-chunk
writeUTFBytes(view, 12, 'fmt ');
view.setUint32(16, 16, true);
view.setUint16(20, 1, true);
// stereo (2 channels)
view.setUint16(22, 2, true);
view.setUint32(24, sampleRate, true);
view.setUint32(28, sampleRate * 4, true);
view.setUint16(32, 4, true);
view.setUint16(34, 16, true);
// data sub-chunk
writeUTFBytes(view, 36, 'data');
view.setUint32(40, interleaved.length * 2, true);
var lng = interleaved.length;
var index = 44;
var volume = 1;
for (var i = 0; i < lng; i++) {
view.setInt16(index, interleaved[i] * (0x7FFF * volume), true);
index += 2;
}
var blob = new Blob([view], {
type: 'audio/wav'
});
// let's save it locally
document.getElementById('output').innerHTML = 'Handing off the file now...';
var url = (window.URL || window.webkitURL).createObjectURL(blob);
var li = document.createElement('li');
var au = document.createElement('audio');
var hf = document.createElement('a');
au.controls = true;
au.src = url;
hf.href = url;
hf.download = 'audio_recording_' + new Date().getTime() + '.wav';
hf.innerHTML = hf.download;
li.appendChild(au);
li.appendChild(hf);
recordingList.appendChild(li);
}
}
function success(e) {
audioContext = window.AudioContext || window.webkitAudioContext;
context = new audioContext();
volume = context.createGain();
// creates an audio node from the microphone incoming stream(source)
source = context.createMediaStreamSource(e);
// connect the stream(source) to the gain node
source.connect(volume);
var bufferSize = 2048;
recorder = context.createScriptProcessor(bufferSize, 2, 2);
//node for the visualizer
analyser = context.createAnalyser();
analyser.smoothingTimeConstant = 0.3;
analyser.fftSize = 1024;
analyser2 = context.createAnalyser();
analyser2.smoothingTimeConstant = 0.0;
analyser2.fftSize = 1024;
splitter = context.createChannelSplitter();
//when recording happens
recorder.onaudioprocess = function (e) {
if (!recording) return;
var left = e.inputBuffer.getChannelData(0);
var right = e.inputBuffer.getChannelData(1);
// get the average of the first channel, bincount is fftsize / 2
var array = new Uint8Array(analyser.frequencyBinCount);
analyser.getByteFrequencyData(array);
var average = getAverageVolume(array);
// get the average for the second channel
var array2 = new Uint8Array(analyser2.frequencyBinCount);
analyser2.getByteFrequencyData(array2);
var average2 = getAverageVolume(array2);
// clear the current state
ctx.clearRect(0, 0, 60, 130);
// set the fill style
ctx.fillStyle = gradient;
// create the meters
ctx.fillRect(0, 130 - average, 25, 130);
ctx.fillRect(30, 130 - average2, 25, 130);
}
function getAverageVolume(array) {
var values = 0;
var average;
var length = array.length;
// get all the frequency amplitudes
for (var i = 0; i < length; i++) {
values += array[i];
}
average = values / length;
return average;
}
leftchannel.push(new Float32Array(left));
rightchannel.push(new Float32Array(right));
recordingLength += bufferSize;
}
// we connect the recorder(node to destination(speakers))
volume.connect(splitter);
splitter.connect(analyser, 0, 0);
splitter.connect(analyser2, 1, 0);
analyser.connect(recorder);
recorder.connect(context.destination);
function mergeBuffers(channelBuffer, recordingLength) {
var result = new Float32Array(recordingLength);
var offset = 0;
var lng = channelBuffer.length;
for (var i = 0; i < lng; i++) {
var buffer = channelBuffer[i];
result.set(buffer, offset);
offset += buffer.length;
}
return result;
}
function interleave(leftChannel, rightChannel) {
var length = leftChannel.length + rightChannel.length;
var result = new Float32Array(length);
var inputIndex = 0;
for (var index = 0; index < length;) {
result[index++] = leftChannel[inputIndex];
result[index++] = rightChannel[inputIndex];
inputIndex++;
}
return result;
}
function writeUTFBytes(view, offset, string) {
var lng = string.length;
for (var i = 0; i < lng; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
My problem is that when it runs, it gives me an error:
cannot read property 'connect' of null in this statement: volume.connect(splitter);
What is going wrong?
The volume gain node is created only after getUserMedia succeeds, inside the success function.
By the time the code reaches the volume.connect call, volume has not been allocated yet.
You have to chain all your node connections starting from success.
Quick fix: just put these lines:
// we connect the recorder(node to destination(speakers))
volume.connect(splitter);
splitter.connect(analyser, 0, 0);
splitter.connect(analyser2, 1, 0);
analyser.connect(recorder);
recorder.connect(context.destination);
at the end of the success function.
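For clarity, the tail of success() then looks roughly like this (a sketch; the node setup above it stays unchanged):
function success(e) {
    // ...create context, volume, source, recorder, analyser, analyser2, splitter as above...
    recorder.onaudioprocess = function (e) { /* ...unchanged... */ };
    // connect the graph only after every node has been created
    volume.connect(splitter);
    splitter.connect(analyser, 0, 0);
    splitter.connect(analyser2, 1, 0);
    analyser.connect(recorder);
    recorder.connect(context.destination);
}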

