I am in the process of doing a slight modification (color keying) to an inline HTML5 video using canvas. This video is a loop, and I'm thinking there are more optimal ways than having a looping video evaluated and repainted every single frame. What optimizations, if any, can I do to either a) pre-render the entire animation or b) cache the original loop so that no more processing/evaluation needs to occur? As it stands, I'm being totally inefficient with CPU and memory by letting it run constantly.
I am currently using the following code (note: I'm using Vue.js, so assume that all the current function and variable assignments already work correctly):
loadVideo() {
  this.video = document.getElementById('globe');
  this.c1 = document.getElementById("c1");
  this.ctx1 = this.c1.getContext("2d");
  let that = this;
  this.video.addEventListener("play", function() {
    that.vWidth = that.video.videoWidth / 2;
    that.vHeight = that.video.videoHeight / 2;
    that.fpsInterval = 1000 / 120;
    that.then = Date.now();
    that.startTime = that.then;
    that.computeFrame();
  }, false);
}
computeFrame() {
  requestAnimationFrame(this.computeFrame);
  this.now = Date.now();
  this.elapsed = this.now - this.then;
  if (this.elapsed > this.fpsInterval) {
    this.ctx1.canvas.width = this.video.offsetWidth;
    this.ctx1.canvas.height = this.video.offsetHeight;
    if (this.video.offsetWidth > 0 && this.video.offsetHeight > 0) {
      this.then = this.now - (this.elapsed % this.fpsInterval);
      this.ctx1.drawImage(this.video, 0, 0, this.ctx1.canvas.width, this.ctx1.canvas.height);
      let frame = this.ctx1.getImageData(0, 0, this.ctx1.canvas.width, this.ctx1.canvas.height);
      let l = frame.data.length / 4;
      let primaryColor = this.ctx1.getImageData(0, 0, 8, 8).data;
      let primaryR = primaryColor[60];
      let primaryG = primaryColor[61];
      let primaryB = primaryColor[62];
      for (let i = 0; i < l; i++) {
        let r = frame.data[i * 4 + 0];
        let g = frame.data[i * 4 + 1];
        let b = frame.data[i * 4 + 2];
        if (r == primaryR && g == primaryG && b == primaryB) {
          frame.data[i * 4 + 1] = 255;
          frame.data[i * 4 + 2] = 0;
          frame.data[i * 4 + 3] = 0;
        }
      }
      this.ctx1.putImageData(frame, 0, 0);
    }
  }
}
loadVideo();
You can use a MediaRecorder to record the stream returned by your canvas's captureStream() method during the first pass, and then play the resulting recording directly in a looping <video>:
btn.onclick = e => {
  // initialise the <video>
  const vid = document.createElement('video');
  vid.muted = true;
  vid.crossOrigin = true;
  vid.src = "https://upload.wikimedia.org/wikipedia/commons/transcoded/a/a4/BBH_gravitational_lensing_of_gw150914.webm/BBH_gravitational_lensing_of_gw150914.webm.480p.webm";
  vid.playbackRate = 2;
  vid.onplaying = startProcessing;
  vid.play();
  btn.remove();
  log.textContent = 'fetching';
};
function startProcessing(evt) {
  // when the video is playing
  const vid = evt.target;
  const canvas = document.createElement('canvas');
  const ctx = canvas.getContext('2d');
  canvas.width = vid.videoWidth;
  canvas.height = vid.videoHeight;
  // show the canvas for the first round
  document.body.appendChild(canvas);
  // force first frame
  anim();
  const chunks = []; // we'll store our recorder's data here
  const canvasStream = canvas.captureStream();
  const vp = 'video/webm; codecs="vp',
    vp8 = vp + '8"',
    vp9 = vp + '9"';
  const recorder = new MediaRecorder(canvasStream, {
    mimeType: MediaRecorder.isTypeSupported(vp9) ? vp9 : vp8,
    videoBitsPerSecond: 5000000
  });
  // every time new data is available
  recorder.ondataavailable = evt => chunks.push(evt.data);
  // record until the video ends
  vid.onended = evt => {
    recorder.stop();
  };
  recorder.onstop = exportVid;
  recorder.start();
  log.textContent = "recording";
  function anim() {
    if (vid.paused) {
      console.log('stop drawing');
      return;
    }
    ctx.drawImage(vid, 0, 0);
    applyFilter(ctx);
    requestAnimationFrame(anim);
  }
  function exportVid() {
    // concatenate all our chunks in a single Blob
    const blob = new Blob(chunks);
    const url = URL.createObjectURL(blob);
    // we reuse the same <video> (for Safari autoplay)
    vid.onplaying = vid.onended = null;
    vid.src = url;
    vid.loop = true;
    vid.playbackRate = 1;
    log.textContent = "playing";
    vid.play().then(() => canvas.replaceWith(vid));
  }
}
function applyFilter(ctx) {
  const img = ctx.getImageData(0, 0, ctx.canvas.width, ctx.canvas.height);
  const data = new Uint32Array(img.data.buffer);
  for (let i = 0; i < data.length; i++) {
    if (data[i] < 0xFF111111) data[i] = ((Math.random() * 0xFFFFFF) + 0xFF000000 | 0);
  }
  ctx.putImageData(img, 0, 0);
}
canvas,video{
max-height: 100vh;
max-width: 100vw;
}
<button id="btn">start</button><pre id="log"></pre>
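To adapt this to the looping globe video and c1 canvas from the question, a hedged sketch (the element ids and the keying loop are the question's; turning loop off for the first pass so 'ended' fires, and the plain 'video/webm' mime type, are my assumptions):
// Hypothetical adaptation: record one pass of the color-keyed canvas, then swap the
// processed recording in and let it loop natively with no further per-frame work.
const video = document.getElementById('globe');
const canvas = document.getElementById('c1');
video.loop = false;                          // let 'ended' fire once so we know the pass is done
const chunks = [];
const recorder = new MediaRecorder(canvas.captureStream(), { mimeType: 'video/webm' });
recorder.ondataavailable = (e) => chunks.push(e.data);
recorder.onstop = () => {
  const processed = document.createElement('video');
  processed.src = URL.createObjectURL(new Blob(chunks, { type: 'video/webm' }));
  processed.muted = true;
  processed.loop = true;                     // the recording now loops with zero processing
  video.pause();
  canvas.replaceWith(processed);             // swap the canvas for the pre-rendered loop
  processed.play();
};
video.onended = () => recorder.stop();
recorder.start();
video.play();                                // the existing computeFrame() keeps painting keyed frames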
I looked at similar problems from other people on Stack Overflow and they have very similar solutions, but all the examples are done in three.js (not my case).
Jumping works reasonably well at the moment; I boosted the gravity (doubled it).
Stairs also work fine even without jumping, which is okay for me.
I put in one big cube as a blocking volume, but I don't know how to make the player simply stop against it, because I use WASD to set the body position directly. Against the big cube my player just makes a big jump; I want to prevent the WASD position-setting at the moment of first contact.
Some solutions [not yet tested]:
1] Move the player only with forces [not by setting the position] => [possibly unnatural movement]; see the velocity sketch after the example code.
2] Detect the collision, then prevent the position from being set => [possible angle problems]; a rough sketch of this idea follows right below.
3] Some other solution...
All related code is visible in the example.
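For reference, a minimal cannon.js sketch of idea 2] (the `collide` event is standard cannon.js; `wallBody` stands for the blocking cube's body, b5 / WALL_BLOCK in the example below, and the way the flag is consumed by the WASD sync is an assumption, not matrix-engine API):
// Hypothetical sketch: flag contact with the blocking cube and skip the
// camera -> body position write while the contact is fresh.
let wallContact = false;

// collisionBox is the player body, wallBody the blocking volume (both CANNON.Body).
collisionBox.addEventListener('collide', (e) => {
  if (e.body === wallBody) {
    wallContact = true;
    // 'collide' only fires on contact begin, so clear the flag shortly afterwards
    setTimeout(() => { wallContact = false; }, 100);
  }
});

// In the UPDATE code, only copy the camera position into the body when not blocked.
function syncPlayerBody() {
  if (wallContact) return; // let the physics body stop the player
  collisionBox.position.set(
    matrixEngine.Events.camera.xPos,
    matrixEngine.Events.camera.zPos,   // Z/Y swapped, as in the example
    matrixEngine.Events.camera.yPos);
}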
Matrix-engine example for FPS:
import App from '../program/manifest';
import * as CANNON from 'cannon';
import {ENUMERATORS, ORBIT_FROM_ARRAY, OSCILLATOR, randomFloatFromTo} from '../lib/utility';
export var runThis = (world) => {
// Camera
canvas.style.cursor = 'none';
App.camera.FirstPersonController = true;
matrixEngine.Events.camera.fly = false;
App.camera.speedAmp = 0.01;
matrixEngine.Events.camera.yPos = 2;
// Audio effects
App.sounds.createAudio('shoot', 'res/music/single-gunshot.mp3', 5);
// Prevent right click context menu
window.addEventListener("contextmenu", (e) => {
e.preventDefault();
});
// Override mouse up
App.events.CALCULATE_TOUCH_UP_OR_MOUSE_UP = () => {
App.scene.FPSTarget.glBlend.blendParamSrc = matrixEngine.utility.ENUMERATORS.glBlend.param[4];
App.scene.FPSTarget.glBlend.blendParamDest = matrixEngine.utility.ENUMERATORS.glBlend.param[4];
App.scene.FPSTarget.geometry.setScale(0.1);
App.scene.xrayTarget.visible = false;
};
// Override right mouse down
matrixEngine.Events.SYS.MOUSE.ON_RIGHT_BTN_PRESSED = (e) => {
App.scene.FPSTarget.geometry.setScale(0.6);
App.scene.FPSTarget.glBlend.blendParamSrc = matrixEngine.utility.ENUMERATORS.glBlend.param[5];
App.scene.FPSTarget.glBlend.blendParamDest = matrixEngine.utility.ENUMERATORS.glBlend.param[5];
App.scene.xrayTarget.visible = true;
};
// Override mouse down
App.events.CALCULATE_TOUCH_DOWN_OR_MOUSE_DOWN = (ev, mouse) => {
// `checkingProcedure` takes a second, optional argument
// for a custom ray origin target.
if(mouse.BUTTON_PRESSED == 'RIGHT') {
// Zoom
} else {
// This call represents the `SHOOT` action,
// aimed at the center of the screen.
matrixEngine.raycaster.checkingProcedure(ev, {
clientX: ev.target.width / 2,
clientY: ev.target.height / 2
});
App.sounds.play('shoot');
}
};
window.addEventListener('ray.hit.event', (ev) => {
console.log("You shoot the object! Nice!", ev);
// Apply a physics force and also change the ambient light.
if(ev.detail.hitObject.physics.enabled == true) {
// Shoot the object - apply force
ev.detail.hitObject.physics.currentBody.force.set(0, 0, 1000);
// Apply random diff color
if(ev.detail.hitObject.LightsData) ev.detail.hitObject.LightsData.ambientLight.set(
randomFloatFromTo(0, 2), randomFloatFromTo(0, 2), randomFloatFromTo(0, 2));
}
});
// Load obj seq animation
const createObjSequence = (objName) => {
function onLoadObj(meshes) {
for(let key in meshes) {
matrixEngine.objLoader.initMeshBuffers(world.GL.gl, meshes[key]);
}
var textuteImageSamplers2 = {
source: [
"res/bvh-skeletal-base/swat-guy/textures/Ch15_1001_Diffuse.png" // ,
// "res/bvh-skeletal-base/swat-guy/textures/Ch15_1001_Diffuse.png"
],
mix_operation: "multiply", // ENUM : multiply , divide
};
var animArg = {
id: objName,
meshList: meshes,
currentAni: 0,
animations: {
active: 'walk',
walk: {
from: 0,
to: 20,
speed: 3
}
}
};
world.Add("obj", 1, objName, textuteImageSamplers2, meshes[objName], animArg);
// Fix object orientation - this can be fixed also in blender.
matrixEngine.Events.camera.yaw = 0;
// Add collision cube to the local player.
world.Add("cube", 0.5, "playerCollisonBox");
var collisionBox = new CANNON.Body({
mass: 500,
linearDamping: 0.001,
position: new CANNON.Vec3(0, 0, 0),
shape: new CANNON.Box(new CANNON.Vec3(3, 3, 3))// new CANNON.Sphere(2)
});
physics.world.addBody(collisionBox);
App.scene.playerCollisonBox.physics.currentBody = collisionBox;
App.scene.playerCollisonBox.physics.enabled = true;
App.scene.playerCollisonBox.physics.currentBody.fixedRotation = true;
App.scene.playerCollisonBox.geometry.setScale(0.02);
App.scene.playerCollisonBox.glBlend.blendEnabled = true;
App.scene.playerCollisonBox.glBlend.blendParamSrc = ENUMERATORS.glBlend.param[5];
App.scene.playerCollisonBox.glBlend.blendParamDest = ENUMERATORS.glBlend.param[6];
// test
addEventListener('hit.keyDown', (e) => {
// console.log('Bring to the top level', e.detail.keyCode);
// dont mess in events
if(e.detail.keyCode == 32) {
setTimeout(() => {
App.scene.playerCollisonBox.physics.currentBody.force.set(0, 0, 111)
}, 250)
} else if(e.detail.keyCode == 87) {
// Good place for blocking volume
// App.scene.playerCollisonBox.physics.currentBody.force.set(0,10,0)
setTimeout(() => {
// App.scene.playerCollisonBox.physics.currentBody.force.set(0,100,0)
}, 100);
}
});
var playerUpdater = {
UPDATE: () => {
App.scene[objName].rotation.rotateY(
matrixEngine.Events.camera.yaw + 180)
var detPitch;
var limit = 2;
if(matrixEngine.Events.camera.pitch < limit &&
matrixEngine.Events.camera.pitch > -limit) {
detPitch = matrixEngine.Events.camera.pitch * 2;
} else if(matrixEngine.Events.camera.pitch > limit) {
detPitch = limit * 2;
} else if(matrixEngine.Events.camera.pitch < -(limit + 2)) {
detPitch = -(limit + 2) * 2;
}
if(matrixEngine.Events.camera.virtualJumpActive == true) {
// invert logic
// Scene object set
App.scene[objName].rotation.rotateX(-detPitch);
var detPitchPos = matrixEngine.Events.camera.pitch;
if(detPitchPos > 4) detPitchPos = 4;
App.scene.playerCollisonBox.physics.currentBody.mass = 10;
App.scene.playerCollisonBox.physics.currentBody.force.set(0, 0, 700);
App.scene[objName].position.setPosition(
App.scene.playerCollisonBox.physics.currentBody.position.x,
App.scene.playerCollisonBox.physics.currentBody.position.z,
App.scene.playerCollisonBox.physics.currentBody.position.y
)
// Cannonjs object set / Switched Z - Y
matrixEngine.Events.camera.xPos = App.scene.playerCollisonBox.physics.currentBody.position.x;
matrixEngine.Events.camera.zPos = App.scene.playerCollisonBox.physics.currentBody.position.y;
matrixEngine.Events.camera.yPos = App.scene.playerCollisonBox.physics.currentBody.position.z;
App.scene.playerCollisonBox.
physics.currentBody.angularVelocity.set(0, 0, 0);
setTimeout(() => {
matrixEngine.Events.camera.virtualJumpActive = false;
App.scene.playerCollisonBox.physics.currentBody.mass = 550;
}, 1350);
} else {
// Make more stable situation
App.scene.playerCollisonBox.physics.currentBody.mass = 500;
App.scene.playerCollisonBox.physics.currentBody.quaternion.setFromEuler(0,0,0)
// There, above the rainbow, the sky ripens...
// Cannonjs object set
// Switched Z - Y
matrixEngine.Events.camera.yPos = App.scene.playerCollisonBox.physics.currentBody.position.z;
// Scene object set
App.scene[objName].rotation.rotateX(-detPitch);
var detPitchPos = matrixEngine.Events.camera.pitch;
if(detPitchPos > 4) detPitchPos = 4;
App.scene[objName].position.setPosition(
matrixEngine.Events.camera.xPos,
matrixEngine.Events.camera.yPos, // - 0.3 + detPitchPos / 50,
// App.scene.playerCollisonBox.physics.currentBody.position.y,
matrixEngine.Events.camera.zPos,
)
// Cannonjs object set
// Switched Z - Y
App.scene.playerCollisonBox.
physics.currentBody.position.set(
matrixEngine.Events.camera.xPos,
matrixEngine.Events.camera.zPos,
matrixEngine.Events.camera.yPos)
// App.scene.playerCollisonBox.physics.currentBody.position.y)
}
}
};
App.updateBeforeDraw.push(playerUpdater);
// Player Energy status
App.scene.player.energy = {};
for(let key in App.scene.player.meshList) {
App.scene.player.meshList[key].setScale(1.85);
}
// Target scene object
var texTarget = {
source: [
"res/bvh-skeletal-base/swat-guy/target.png",
"res/bvh-skeletal-base/swat-guy/target.png"
],
mix_operation: "multiply",
};
world.Add("squareTex", 0.25, 'FPSTarget', texTarget);
App.scene.FPSTarget.position.setPosition(0, 0, -4);
App.scene.FPSTarget.glBlend.blendEnabled = true;
App.scene.FPSTarget.glBlend.blendParamSrc = matrixEngine.utility.ENUMERATORS.glBlend.param[4];
App.scene.FPSTarget.glBlend.blendParamDest = matrixEngine.utility.ENUMERATORS.glBlend.param[4];
App.scene.FPSTarget.isHUD = true;
App.scene.FPSTarget.geometry.setScale(0.1);
// Energy active bar
// Custom generic textures. Micro Drawing.
// Example for arg shema square for now only.
var options = {
squareShema: [8, 8],
pixels: new Uint8Array(8 * 8 * 4)
};
// options.pixels.fill(0);
App.scene.player.energy.value = 8;
App.scene.player.updateEnergy = function(v) {
this.energy.value = v;
var t = App.scene.energyBar.preparePixelsTex(App.scene.energyBar.specialValue);
App.scene.energyBar.textures.pop()
App.scene.energyBar.textures.push(App.scene.energyBar.createPixelsTex(t));
};
function preparePixelsTex(options) {
var I = 0, R = 0, G = 0, B = 0, localCounter = 0;
for(var funny = 0;funny < 8 * 8 * 4;funny += 4) {
if(localCounter > 7) {
localCounter = 0;
}
if(localCounter < App.scene.player.energy.value) {
I = 128;
if(App.scene.player.energy.value < 3) {
R = 255;
G = 0;
B = 0;
I = 0;
} else if(App.scene.player.energy.value > 2 && App.scene.player.energy.value < 5) {
R = 255;
G = 255;
B = 0;
} else {
R = 0;
G = 255;
B = 0;
}
} else {
I = 0;
R = 0;
G = 0;
B = 0;
}
options.pixels[funny] = R;
options.pixels[funny + 1] = G;
options.pixels[funny + 2] = B;
options.pixels[funny + 3] = 0;
localCounter++;
}
return options;
}
var tex2 = {
source: [
"res/images/hud/energy-bar.png",
"res/images/hud/energy-bar.png"
],
mix_operation: "multiply",
};
world.Add("squareTex", 1, 'energyBar', tex2);
App.scene.energyBar.glBlend.blendEnabled = true;
App.scene.energyBar.glBlend.blendParamSrc = matrixEngine.utility.ENUMERATORS.glBlend.param[5];
App.scene.energyBar.glBlend.blendParamDest = matrixEngine.utility.ENUMERATORS.glBlend.param[5];
App.scene.energyBar.isHUD = true;
// App.scene.energy.visible = false;
App.scene.energyBar.position.setPosition(0, 1.1, -3);
App.scene.energyBar.geometry.setScaleByX(1)
App.scene.energyBar.geometry.setScaleByY(0.05)
App.scene.energyBar.preparePixelsTex = preparePixelsTex;
options = preparePixelsTex(options);
// App.scene.energyBar.textures[0] = App.scene.energyBar.createPixelsTex(options);
App.scene.energyBar.textures.push(App.scene.energyBar.createPixelsTex(options));
App.scene.energyBar.specialValue = options;
}
matrixEngine.objLoader.downloadMeshes(
matrixEngine.objLoader.makeObjSeqArg(
{
id: objName,
// path: "res/bvh-skeletal-base/swat-guy/anims/swat-multi",
path: "res/bvh-skeletal-base/swat-guy/FPShooter-hands/FPShooter-hands",
from: 1,
to: 20
}),
onLoadObj
);
};
let promiseAllGenerated = [];
const objGenerator = (n) => {
for(var j = 0;j < n;j++) {
promiseAllGenerated.push(new Promise((resolve) => {
setTimeout(() => {
world.Add("cubeLightTex", 1, "CUBE" + j, tex);
var b2 = new CANNON.Body({
mass: 1,
linearDamping: 0.01,
position: new CANNON.Vec3(1, -14.5, 15),
shape: new CANNON.Box(new CANNON.Vec3(1, 1, 1))
});
physics.world.addBody(b2);
App.scene['CUBE' + j].physics.currentBody = b2;
App.scene['CUBE' + j].physics.enabled = true;
resolve();
}, 1000 * j);
}));
}
}
// objGenerator(15);
createObjSequence('player');
Promise.all(promiseAllGenerated).then((what) => {
console.info(`Runtime waits for generation of some scene objects,
then swaps the scene array index for the target ->
this must be set up manually for now!`, what);
// swap(5, 19, matrixEngine.matrixWorld.world.contentList);
});
// Add ground for physics bodies.
var tex = {
source: ["res/images/complex_texture_1/diffuse.png"],
mix_operation: "multiply",
};
// Load Physics world.
// let gravityVector = [0, 0, -9.82];
let gravityVector = [0, 0, -29.82];
let physics = world.loadPhysics(gravityVector);
// Add ground
var groundBody = new CANNON.Body({
mass: 0, // mass == 0 makes the body static
position: new CANNON.Vec3(0, -15, -2)
});
var groundShape = new CANNON.Plane();
groundBody.addShape(groundShape);
physics.world.addBody(groundBody);
// Matrix engine visual
world.Add("squareTex", 1, "FLOOR_STATIC", tex);
App.scene.FLOOR_STATIC.geometry.setScaleByX(20);
App.scene.FLOOR_STATIC.geometry.setScaleByY(20);
App.scene.FLOOR_STATIC.position.SetY(-2);
App.scene.FLOOR_STATIC.position.SetZ(-15);
App.scene.FLOOR_STATIC.rotation.rotx = 90;
// Target x-ray
// See through the objects.
// In a WebGL context, this is simply an object that is drawn before the others.
var texTarget = {
source: [
"res/bvh-skeletal-base/swat-guy/target-night.png"
],
mix_operation: "multiply",
};
world.Add("squareTex", 0.18, 'xrayTarget', texTarget);
App.scene.xrayTarget.glBlend.blendEnabled = true;
App.scene.xrayTarget.glBlend.blendParamSrc = matrixEngine.utility.ENUMERATORS.glBlend.param[5];
App.scene.xrayTarget.glBlend.blendParamDest = matrixEngine.utility.ENUMERATORS.glBlend.param[5];
App.scene.xrayTarget.isHUD = true;
App.scene.xrayTarget.visible = false;
App.scene.xrayTarget.position.setPosition(-0.3, 0.27, -4);
// Energy
var tex1 = {
source: [
"res/images/hud/energy.png"
],
mix_operation: "multiply",
};
world.Add("squareTex", 0.5, 'energy', tex1);
App.scene.energy.glBlend.blendEnabled = true;
App.scene.energy.glBlend.blendParamSrc = matrixEngine.utility.ENUMERATORS.glBlend.param[5];
App.scene.energy.glBlend.blendParamDest = matrixEngine.utility.ENUMERATORS.glBlend.param[5];
App.scene.energy.isHUD = true;
// App.scene.energy.visible = false;
App.scene.energy.position.setPosition(-1, 1.15, -3);
App.scene.energy.geometry.setScaleByX(0.35)
App.scene.energy.geometry.setScaleByY(0.1)
// good for fix rotation in future
world.Add("cubeLightTex", 1, "FLOOR2", tex);
var b2 = new CANNON.Body({
mass: 0,
linearDamping: 0.01,
position: new CANNON.Vec3(1, -14.5, -1),
shape: new CANNON.Box(new CANNON.Vec3(3, 1, 1))
});
physics.world.addBody(b2);
App.scene['FLOOR2'].position.setPosition(1, -1, -14.5)
App.scene['FLOOR2'].geometry.setScaleByX(3);
App.scene['FLOOR2'].physics.currentBody = b2;
App.scene['FLOOR2'].physics.enabled = true;
world.Add("cubeLightTex", 2, "FLOOR3", tex);
var b3 = new CANNON.Body({
mass: 0,
linearDamping: 0.01,
position: new CANNON.Vec3(0, -19, 0),
shape: new CANNON.Box(new CANNON.Vec3(3, 3, 3))
});
physics.world.addBody(b3);
App.scene['FLOOR3'].position.setPosition(0, 0, -19)
// App.scene['FLOOR3'].geometry.setScaleByX(3);
App.scene['FLOOR3'].physics.currentBody = b3;
App.scene['FLOOR3'].physics.enabled = true;
world.Add("cubeLightTex", 5, "WALL_BLOCK", tex);
var b5 = new CANNON.Body({
mass: 0,
linearDamping: 0.01,
position: new CANNON.Vec3(10, -19, 0),
shape: new CANNON.Box(new CANNON.Vec3(5, 5, 5))
});
physics.world.addBody(b5);
App.scene['WALL_BLOCK'].position.setPosition(10, 0, -19)
// App.scene['WALL_BLOCK'].geometry.setScaleByX(3);
App.scene['WALL_BLOCK'].physics.currentBody = b5;
App.scene['WALL_BLOCK'].physics.enabled = true;
};
Any suggestions?
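And a hedged sketch of idea 1], driving the body with velocity instead of writing positions (plain cannon.js plus standard key events; the key codes and how the camera then follows the body, using the Z/Y-swapped pattern from the example, are assumptions):
// Hypothetical sketch: move the body with velocity from WASD and make the camera
// follow the body, instead of writing the camera position into the body.
const SPEED = 6;
const keys = {};
addEventListener('keydown', (e) => { keys[e.code] = true; });
addEventListener('keyup', (e) => { keys[e.code] = false; });

function movePlayer(body, yawRad) {
  // forward/strafe in the horizontal plane, rotated by the camera yaw
  const fwd = (keys['KeyW'] ? 1 : 0) - (keys['KeyS'] ? 1 : 0);
  const strafe = (keys['KeyD'] ? 1 : 0) - (keys['KeyA'] ? 1 : 0);
  body.velocity.x = (Math.sin(yawRad) * fwd + Math.cos(yawRad) * strafe) * SPEED;
  body.velocity.y = (Math.cos(yawRad) * fwd - Math.sin(yawRad) * strafe) * SPEED;
  // leave body.velocity.z alone so gravity and jumping still work;
  // then copy body.position back into the camera each frame (Z/Y swapped as in the example)
}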
I need to make a decibel meter.
I tried to use two libraries, but they don't work and they are very old.
I found a function that does this, but it doesn't work as expected.
First of all, the issue is inside this function, where I have:
" let decibelNumString = decibelNum.toString().substring(0, 2); "
which gives me the decibel number and truncates it to two digits (an integer).
But I want to make minNumber and maxNumber state, because the component should re-render after they change. The problem is that this causes a lot of re-renders, because the numbers change every 100 ms. I also display the current decibel, so it needs to re-render every 100 ms, and that is not good for performance.
// const [minNumber, setMinNumber] = useState(25);
// const [maxNumber, setMaxNumber] = useState(30);
let arr = [];
let volumeCallback = null;
let volumeInterval = null;
(async () => {
// Initialize
try {
const audioStream = await navigator.mediaDevices.getUserMedia({
audio: {
echoCancellation: true,
},
});
const audioContext = new AudioContext();
const audioSource = audioContext.createMediaStreamSource(audioStream);
const analyser = audioContext.createAnalyser();
analyser.fftSize = 512;
analyser.minDecibels = -127;
analyser.maxDecibels = 0;
analyser.smoothingTimeConstant = 0.4;
audioSource.connect(analyser);
const volumes = new Uint8Array(analyser.frequencyBinCount);
volumeCallback = () => {
analyser.getByteFrequencyData(volumes);
let volumeSum = 0;
for (const volume of volumes) volumeSum += volume;
const averageVolume = volumeSum / volumes.length;
// Value range: 127 = analyser.maxDecibels - analyser.minDecibels;
let decibelNum = (averageVolume * 100) / 127;
let decibelNumString = decibelNum.toString().substring(0, 2);
if (Number(decibelNumString) > maxNumber) {
// (maxNumber = Number(decibelNum))
// setMaxNumber((prev) => {
// return (prev = Number(decibelNumString));
// });
}
if (Number(decibelNumString) < minNumber) {
// setMinNumber((prev) => {
// return (prev = Number(decibelNumString));
// });
}
console.log(decibelNumString);
// console.log(minNumber + "min");
// console.log(maxNumber + "max");
arr.push(Number(decibelNumString));
// console.log(arr)
};
} catch (e) {
console.error("Failed to initialize volume visualizer, simulating instead...", e);
// Simulation
//TODO remove in production!
let lastVolume = 50;
volumeCallback = () => {
const volume = Math.min(Math.max(Math.random() * 100, 0.8 * lastVolume), 1.2 * lastVolume);
lastVolume = volume;
};
}
// Use
// start();
})();
const stop = () => {
clearInterval(volumeInterval);
// volumeInterval = null;
};
const start = () => {
// Updating every 100ms (should be same as CSS transition speed)
if (volumeCallback !== null && volumeInterval === null) volumeInterval = setInterval(volumeCallback, 100);
};
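One hedged approach, assuming a React function component: keep the running min/max and the latest reading in refs, and commit them to state on a slower interval, so the 100 ms analyser callback never triggers a render by itself. The analyser callback from the question would write Number(decibelNumString) into currentRef.current instead of calling setState; useVolumeStats and the 500 ms commit interval are my own names and numbers:
import { useEffect, useRef, useState } from "react";

function useVolumeStats() {
  const minRef = useRef(25);    // same starting values as the commented-out useState calls
  const maxRef = useRef(30);
  const currentRef = useRef(0); // the analyser callback writes the latest reading here
  const [stats, setStats] = useState({ min: 25, max: 30, current: 0 });

  useEffect(() => {
    // track min/max every 100 ms without touching state (no re-render)
    const sampleId = setInterval(() => {
      const v = currentRef.current;
      if (v < minRef.current) minRef.current = v;
      if (v > maxRef.current) maxRef.current = v;
    }, 100);
    // commit to state, and therefore re-render, only a few times per second
    const renderId = setInterval(() => {
      setStats({ min: minRef.current, max: maxRef.current, current: currentRef.current });
    }, 500);
    return () => { clearInterval(sampleId); clearInterval(renderId); };
  }, []);

  return { stats, currentRef };
}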
I'm on Firefox 84.0.1, Windows 10, x86_64. I have a very basic Web Audio (AudioWorklet) synthesiser that maps keys to the frequencies of musical notes. It generates very choppy audio when a key is held down. This makes me think that there are not enough audio samples being queued for the speaker to play, hence the audio dropping in and out. However, in audio-processing terms I'm performing a very low-intensity task, so I feel like the default worklet setup should be able to handle this. Here is my code:
syn.js
(async() => {
let a2_hertz = 110.0;
let twelfth_root_of_two = Math.pow(2.0, 1.0 / 12.0);
let audio_cxt = new AudioContext();
await audio_cxt.audioWorklet.addModule("syn-worklet.js", {credentials: "omit"});
let audio_worklet_options = {
numberOfInputs: 0,
numberOfOutputs: 1,
outputChannelCount: [audio_cxt.destination.channelCount]
};
let audio_worklet = new AudioWorkletNode(audio_cxt, "synthesiser", audio_worklet_options);
audio_worklet.connect(audio_cxt.destination);
document.addEventListener("keydown", (evt) => {
for (let key = 0; key < 12; ++key) {
if (evt.code == "Key" + "QWERTYUIOPAS"[key]) {
audio_worklet.port.postMessage(a2_hertz * Math.pow(twelfth_root_of_two, key));
}
}
});
document.addEventListener("keyup", (evt) => {
audio_worklet.port.postMessage(0.0);
});
})();
syn-worklet.js
function angular_frequency(hertz) {
return hertz * 2 * Math.PI;
}
let OSC_TYPES = {"sine": 0, "square": 1, "triangle": 2};
function oscillator(hertz, osc_type) {
switch (osc_type) {
case OSC_TYPES.sine: {
return Math.sin(angular_frequency(hertz) * currentTime);
} break;
case OSC_TYPES.square: {
return Math.sin(angular_frequency(hertz) * currentTime) > 0.0 ? 1.0 : -1.0;
} break;
case OSC_TYPES.triangle: {
return Math.asin(Math.sin(angular_frequency(hertz) * currentTime)) * (2.0 / Math.PI);
} break;
default: {
return 0.0;
}
}
}
class Synthesiser extends AudioWorkletProcessor {
constructor() {
super();
this.hertz = 0.0;
this.port.onmessage = (evt) => {
this.hertz = evt.data;
};
}
process(inputs, outputs) {
let channels = outputs[0];
let num_samples_per_channel = channels[0].length;
for (let pcm_i = 0; pcm_i < num_samples_per_channel; ++pcm_i) {
let volume = 0.1;
let pcm_value = volume * oscillator(this.hertz, OSC_TYPES.sine);
for (let channel_i = 0; channel_i < channels.length; ++channel_i) {
channels[channel_i][pcm_i] = pcm_value;
}
}
return true;
}
}
registerProcessor("synthesiser", Synthesiser);
I think the problem is that currentTime seems to be the only thing which influences the output of your oscillator() function. But currentTime doesn't change during the invocation of the process() function.
I would recommend using currentFrame instead. It will give you an integer value which represents the currentTime in frames. If you combine that with pcm_i you get the actual index of the sample that you're processing.
const currentSample = currentFrame + pcm_i;
const currentSampleInSeconds = (currentFrame + pcm_i) / sampleRate;
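Applied to the process() method from the question, a minimal sketch (currentFrame and sampleRate are globals inside the AudioWorkletGlobalScope; I inline the sine case here rather than changing the oscillator() signature):
process(inputs, outputs) {
  const channels = outputs[0];
  const num_samples_per_channel = channels[0].length;
  for (let pcm_i = 0; pcm_i < num_samples_per_channel; ++pcm_i) {
    const volume = 0.1;
    // per-sample time instead of the block-constant currentTime
    const t = (currentFrame + pcm_i) / sampleRate;
    const pcm_value = volume * Math.sin(angular_frequency(this.hertz) * t);
    for (let channel_i = 0; channel_i < channels.length; ++channel_i) {
      channels[channel_i][pcm_i] = pcm_value;
    }
  }
  return true;
}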
I am trying to receive data from multiple JSON files. I have the data I need, but I get it multiple times, even though I use a return statement. I need the data from the getData() function, which then needs to be passed on to the loadImage() function. I have no clue why the code returns the same thing multiple times.
Thanks in advance.
My code:
// var defaultHairData = require(['json!../character/hair/data.json'], function(data) {
// console.log("1", data);
// });
// var defaultHeadData = require("../character/head/data.json");
// var defaultLeftArmData = require("../character/leftArm/data.json");
// var defaultLegsData = require("../character/legs/data.json");
// var defaultRightArmData = require("../character/rightArm/data.json");
// var defaultTorsoData = require("../character/torso/data.json");
// var defaultHairImage = require("../character/hair/0/hair.png");
// var defaultHeadImage = require("../character/head/0/head.png");
// var defaultLeftArmImage = require("../character/leftArm/0/leftArm.png");
// var defaultLeftArmJumpImage = require("../character/leftArm_jump/0/leftArm_jump.png"); // Jump!
// var defaultLegsImage = require("../character/legs/0/legs.png");
// var defaultLegsJumpImage = require("../character/legs_jump/0/legs_jump.png"); // Jump!
// var defaultRightArmImage = require("../character/rightArm/0/rightArm.png");
// var defaultRightArmJumpImage = require("../character/rightArm_jump/0/rightArm_jump.png"); // Jump!
// var defaultTorsoImage = require("../character/torso/0/torso.png");
var character = {
name: "Homie",
jumping: false
}
var totalResources = 9;
var loadedResources = 0;
var fps = 30;
var characterXPos = 245;
var characterYPos = 185;
var characterEyesOpenTime = 0;
var characterMaxEyesHeight = 14;
var characterCursiveEyeHeight = characterMaxEyesHeight;
var characterTimeBetweenBlinks = 4000;
var characterBlinkUpdateTime = 200;
var characterBlinkTimer = setInterval(updateBlink, characterBlinkUpdateTime);
var FPSInterval = setInterval(updateFPS, 1000);
var characterCanvas;
var characterCanvasContext;
var breathAmount = 0;
var breathMax = 2;
var breathIncrease = 0.1;
var breathDirection = 1;
var breathInterval = setInterval(updateBreathing, 1000 / fps);
var cursiveFPS = 0;
var framesDrawn = 0;
window.defaultHairData = {};
function updateFPS() {
cursiveFPS = framesDrawn;
framesDrawn = 0;
}
// This is where I need the data from \/
function getData(characterPart) {
var xhr = new XMLHttpRequest();
xhr.open("GET", `./character/${characterPart}/data.json`);
xhr.onreadystatechange = function() {
if (xhr.status === 200) {
console.log("Yoink", window.defaultData);
return window.defaultData = JSON.parse(xhr.response);
} else {
throw new Error(`Could not get file with the name "data.json" in the directory "${characterPart}".`);
}
}
xhr.send();
}
async function prepareCharacterCanvas(canvasContainer, canvasWidth, canvasHeight) {
characterCanvas = document.createElement('canvas');
characterCanvas.setAttribute('width', canvasWidth);
characterCanvas.setAttribute('height', canvasHeight);
characterCanvas.setAttribute('id', 'gameCanvas');
canvasContainer.appendChild(characterCanvas);
if (typeof G_vmlCanvasManager != 'undefined') {
characterCanvas = G_vmlCanvasManager.initElement(characterCanvas);
};
characterCanvasContext = characterCanvas.getContext('2d');
characterCanvas.width = characterCanvas.width;
characterCanvasContext.fillText("Loading...", 40, 140);
// This is where the data needs to come from
await getData("hair");
await getData("head");
await getData("leftArm");
await getData("legs");
await getData("rigthArm");
await getData("torso");
loadImage(`${defaultData.filename}`);
loadImage(`${defaultHeadData.filename}`);
loadImage(`${defaultLeftArmData.filename}`);
loadImage(`${defaultLegsData.filename}`);
loadImage(`${defaultRightArmData.filename}`);
loadImage(`${defaultTorsoData.filename}`);
}
function loadImage(datatags, picture) {
var generateButtons = Boolean;
if (datatags.filename !== picture.name) {
throw new Error("Datatag 'filename' must be the same as the picture name!");
} else {
if (datatags.fileExtension !== "png") {
throw new Error("Datatag 'fileExtension' must be png!");
} else {
if (datatags.customizeable === false) {
generateButtons = false;
} else {
generateButtons = true;
}
if (generateButtons === true) {
// Generate buttons here!
}
if (datatags.changesIfJumping === true) {
// Load normal image first here
var pic = new Image();
pic.onload = function() {
resourceIsLoaded();
}
pic.src = `../character/${datatags.filename}/0/${datatags.filename}.png`;
// Load image if jumping next
var jumpingPic = new Image();
jumpingPic.onload = function() {
resourceIsLoaded();
}
jumpingPic.src = `../character/${datatags.filename}_jump/0/${datatags.filename}_jump.png`;
}
var pic = new Image();
pic.onload = function() {
resourceIsLoaded();
}
pic.src = `../character/${datatags.filename}/0/${datatags.filename}.png`;
}
}
var pic = new Image();
pic.onload = function() {
resourceIsLoaded();
};
pic.src = `${pic}`;
};
function resourceIsLoaded() {
loadedResources += 1;
if (loadedResources === totalResources) {
setInterval(redrawCharacter(), 1000 / userFPS);
};
};
function redrawCharacter() {
var x = characterXPos;
var y = characterYPos;
var jumpHeight = 45;
characterCanvas.width = characterCanvas.width;
if (character.jumping === true) {
drawEyes(x + 40, y + 29, 100 - breathAmount, 4);
y -= jumpHeight;
characterCanvasContext.drawImage("leftArm_jump", x + 40, y - 42 - breathAmount);
characterCanvasContext.drawImage("legs_jump", x, y - 6);
characterCanvasContext.drawImage("rightArm_jump", x - 35, y - 42 - breathAmount);
} else {
drawEyes(x + 40, y + 29, 160 - breathAmount, 6);
characterCanvasContext.drawImage("leftArm", x + 40, y - 42 - breathAmount);
characterCanvasContext.drawImage("legs", x, y);
characterCanvasContext.drawImage("rightArm", x - 15, y - 42 - breathAmount);
};
characterCanvasContext.drawImage("torso", x, y - 50);
characterCanvasContext.drawImage("head", x - 10, y - 125 - breathAmount);
characterCanvasContext.drawImage("hair", x - 37, y - 138 - breathAmount);
characterCanvasContext.drawEyes(x + 47, y - 68 - breathAmount, 8, characterCursiveEyeHeight);
characterCanvasContext.drawEyes(x + 58, y - 68 - breathAmount, 8, characterCursiveEyeHeight);
};
function drawEyes(centerX, centerY, width, height) {
characterCanvasContext.beginPath();
characterCanvasContext.moveTo(centerX, centerY - height / 2);
characterCanvasContext.bezierCurveTo(
centerX + width / 2, centerY - height - 2,
centerX + width / 2, centerY + height / 2,
centerX, centerY + height / 2);
characterCanvasContext.bezierCurveTo(
centerX - width / 2, centerY + height / 2,
centerX - width / 2, centerY - height / 2,
centerX, centerY - height / 2);
characterCanvasContext.fillStyle = "black";
characterCanvasContext.fill();
characterCanvasContext.closePath();
};
function updateBreathing() {
if (breathDirection === 1) {
breathAmount -= breathIncrease;
if (breathAmount < -breathMax) {
breathDirection = -1;
};
} else {
breathAmount += breathIncrease;
if (breathAmount > breathMax) {
breathDirection = 1;
};
};
};
function updateBlink() {
characterEyesOpenTime += characterBlinkUpdateTime;
if (characterEyesOpenTime >= characterTimeBetweenBlinks) {
blink();
};
};
function blink() {
characterCursiveEyeHeight -= 1;
if (characterCursiveEyeHeight <= 0) {
characterEyesOpenTime = 0;
characterCursiveEyeHeight = characterMaxEyesHeight;
} else {
setTimeout(blink, 10);
};
};
function jump() {
if (character.jumping === false) {
character.jumping = true;
setTimeout(land, 500);
};
};
function land() {
character.jumping = false;
}
The JSON file structure:
{
"filename": "legs",
"fileExtension": "png",
"changesIfJumping": true,
"customizeable": true
}
Use a more modern way: fetch instead of XHR.
async function getData(characterPart) {
try {
let response = await fetch(`./character/${characterPart}/data.json`);
return response.json()
} catch (e) {
throw new Error(`Could not get file with the name "data.json" in the directory "${characterPart}".`);
}
}
// TEST
async function start() {
window.defaultData = await getData('xxx');
}
start();
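A hedged sketch of how this could plug into the question's prepareCharacterCanvas, loading all six parts in parallel (the part names come from the question; loadCharacterData and byPart are names I made up):
async function loadCharacterData() {
  const parts = ["hair", "head", "leftArm", "legs", "rightArm", "torso"];
  // fetch every data.json in parallel instead of awaiting them one by one
  const datas = await Promise.all(parts.map(getData));
  // e.g. { hair: {...}, head: {...}, ... }
  const byPart = Object.fromEntries(parts.map((p, i) => [p, datas[i]]));
  for (const part of parts) {
    // the question's loadImage expects the whole datatags object (plus a picture argument)
    loadImage(byPart[part]);
  }
  return byPart;
}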
So I am creating a piano with Web Audio and am having trouble implementing a volume control. Whenever a key is clicked, the volume control should dictate the volume it is played at. I have used the code from html5rocks and modified it for my own purposes. Basically, instead of a VolumeSample array I have all of my sound clips loaded into a BUFFERS array. Whenever I try to move the slider and change the gain of the clip, I get a 'cannot read property 'gain' of null' error. Stepping through with the debugger, everything runs fine up until the this.gainNode.gain.value = fraction * fraction; part of my code. Take a look at my code and hopefully you can see what I am missing. I'd like to call attention to the playSound(buffer) method, which is where I create and connect the gain node, and the changeVolume method at the bottom, which is where the actual change to the gain node happens:
var context;
var bufferLoader;
var BUFFERS = {};
var VolumeMain = {};
var LowPFilter = {FREQ_MUL: 7000,
QUAL_MUL: 30};
var BUFFERS_TO_LOAD = {
Down1: 'mp3/0C.mp3',
Down2: 'mp3/0CS.mp3',
Down3: 'mp3/0D.mp3',
Down4: 'mp3/0DS.mp3',
Down5: 'mp3/0E.mp3',
Down6: 'mp3/0F.mp3',
Down7: 'mp3/0FS.mp3',
Down8: 'mp3/0G.mp3',
Down9: 'mp3/0GS.mp3',
Down10: 'mp3/0A.mp3',
Down11: 'mp3/0AS.mp3',
Down12: 'mp3/0B.mp3',
Up13: 'mp3/1C.mp3',
Up14: 'mp3/1CS.mp3',
Up15: 'mp3/1D.mp3',
Up16: 'mp3/1DS.mp3',
Up17: 'mp3/1E.mp3',
Up18: 'mp3/1F.mp3',
Up19: 'mp3/1FS.mp3',
Up20: 'mp3/1G.mp3',
Up21: 'mp3/1GS.mp3',
Up22: 'mp3/1A.mp3',
Up23: 'mp3/1AS.mp3',
Up24: 'mp3/1B.mp3',
Beat1: 'mp3/beat1.mp3',
Beat2: 'mp3/beat2.mp3'
};
function loadBuffers() {
var names = [];
var paths = [];
for (var name in BUFFERS_TO_LOAD) {
var path = BUFFERS_TO_LOAD[name];
names.push(name);
paths.push(path);
}
bufferLoader = new BufferLoader(context, paths, function(bufferList) {
for (var i = 0; i < bufferList.length; i++) {
var buffer = bufferList[i];
var name = names[i];
BUFFERS[name] = buffer;
}
});
bufferLoader.load();
}
document.addEventListener('DOMContentLoaded', function() {
try {
// Fix up prefixing
window.AudioContext = window.AudioContext || window.webkitAudioContext;
context = new AudioContext();
}
catch(e) {
alert("Web Audio API is not supported in this browser");
}
loadBuffers();
});
function playSound(buffer) {
var source = context.createBufferSource();
source.buffer = buffer;
var filter1 = context.createBiquadFilter();
filter1.type = 0;
filter1.frequency.value = 5000;
var gainNode = context.createGain();
source.connect(gainNode);
source.connect(filter1);
gainNode.connect(context.destination);
filter1.connect(context.destination);
source.start(0);
}
//volume control
VolumeMain.gainNode = null;
VolumeMain.changeVolume = function(element) {
var volume = element.value;
var fraction = parseInt(element.value) / parseInt(element.max);
this.gainNode.gain.value = fraction * fraction; //error occurs here
};
// Start off by initializing a new context.
context = new (window.AudioContext || window.webkitAudioContext)();
if (!context.createGain)
context.createGain = context.createGainNode;
if (!context.createDelay)
context.createDelay = context.createDelayNode;
if (!context.createScriptProcessor)
context.createScriptProcessor = context.createJavaScriptNode;
// shim layer with setTimeout fallback
window.requestAnimFrame = (function(){
return window.requestAnimationFrame ||
window.webkitRequestAnimationFrame ||
window.mozRequestAnimationFrame ||
window.oRequestAnimationFrame ||
window.msRequestAnimationFrame ||
function( callback ){
window.setTimeout(callback, 1000 / 60);
};
})();
function BufferLoader(context, urlList, callback) {
this.context = context;
this.urlList = urlList;
this.onload = callback;
this.bufferList = new Array();
this.loadCount = 0;
}
BufferLoader.prototype.loadBuffer = function(url, index) {
// Load buffer asynchronously
var request = new XMLHttpRequest();
request.open("GET", url, true);
request.responseType = "arraybuffer";
var loader = this;
request.onload = function() {
// Asynchronously decode the audio file data in request.response
loader.context.decodeAudioData(
request.response,
function(buffer) {
if (!buffer) {
alert('error decoding file data: ' + url);
return;
}
loader.bufferList[index] = buffer;
if (++loader.loadCount == loader.urlList.length)
loader.onload(loader.bufferList);
},
function(error) {
console.error('decodeAudioData error', error);
}
);
}
request.onerror = function() {
alert('BufferLoader: XHR error');
}
request.send();
};
BufferLoader.prototype.load = function() {
for (var i = 0; i < this.urlList.length; ++i)
this.loadBuffer(this.urlList[i], i);
}
LowPFilter.changeFrequency = function(element) {
// Clamp the frequency between the minimum value (40 Hz) and half of the
// sampling rate.
var minValue = 40;
var maxValue = context.sampleRate / 2;
// Logarithm (base 2) to compute how many octaves fall in the range.
var numberOfOctaves = Math.log(maxValue / minValue) / Math.LN2;
// Compute a multiplier from 0 to 1 based on an exponential scale.
var multiplier = Math.pow(2, numberOfOctaves * (element.value - 1.0));
// Get back to the frequency value between min and max.
this.filter1.frequency.value = maxValue * multiplier;
};
LowPFilter.changeQuality = function(element) {
this.filter1.Q.value = element.value * this.QUAL_MUL;
};
LowPFilter.toggleFilter = function(element) {
this.source.disconnect(0);
this.filter1.disconnect(0);
// Check if we want to enable the filter.
if (element.checked) {
// Connect through the filter.
this.source.connect(this.filter1);
this.filter1.connect(context.destination);
} else {
// Otherwise, connect directly.
this.source.connect(context.destination);
}
};
function Beat1() {
this.isPlaying = false;
};
Beat1.prototype.play = function() {
this.gainNode = context.createGain();
this.source = context.createBufferSource();
this.source.buffer = BUFFERS.Beat1;
// Connect source to a gain node
this.source.connect(this.gainNode);
// Connect gain node to destination
this.gainNode.connect(context.destination);
// Start playback in a loop
this.source.loop = true;
this.source[this.source.start ? 'start' : 'noteOn'](0);
};
Beat1.prototype.changeVolume = function(element) {
var volume = element.value;
var fraction = parseInt(element.value) / parseInt(element.max);
// Let's use an x*x curve (x-squared) since simple linear (x) does not
// sound as good.
this.gainNode.gain.value = fraction * fraction;
};
Beat1.prototype.stop = function() {
this.source[this.source.stop ? 'stop' : 'noteOff'](0);
};
Beat1.prototype.toggle = function() {
this.isPlaying ? this.stop() : this.play();
this.isPlaying = !this.isPlaying;
};
function Beat2() {
this.isPlaying = false;
};
Beat2.prototype.play = function() {
this.gainNode = context.createGain();
this.source = context.createBufferSource();
this.source.buffer = BUFFERS.Beat2;
// Connect source to a gain node
this.source.connect(this.gainNode);
// Connect gain node to destination
this.gainNode.connect(context.destination);
// Start playback in a loop
this.source.loop = true;
this.source[this.source.start ? 'start' : 'noteOn'](0);
};
Beat2.prototype.changeVolume = function(element) {
var volume = element.value;
var fraction = parseInt(element.value) / parseInt(element.max);
// Let's use an x*x curve (x-squared) since simple linear (x) does not
// sound as good.
this.gainNode.gain.value = fraction * fraction;
};
Beat2.prototype.stop = function() {
this.source[this.source.stop ? 'stop' : 'noteOff'](0);
};
Beat2.prototype.toggle = function() {
this.isPlaying ? this.stop() : this.play();
this.isPlaying = !this.isPlaying;
};
This is where I create the piano, check which key was clicked, and play the appropriate sound (separate JS file):
// keyboard creation function
window.onload = function () {
// Keyboard Height
var keyboard_height = 120;
// Keyboard Width
var keyboard_width = 980;
// White Key Color
var white_color = 'white';
// Black Key Color
var black_color = 'black';
// Number of octaves
var octaves = 2;
// ID of containing Div
var div_id = 'keyboard';
//------------------------------------------------------------
var paper = Raphael(div_id, keyboard_width, keyboard_height);
// Define white key specs
var white_width = keyboard_width / 14;
// Define black key specs
var black_width = white_width/2;
var black_height = keyboard_height/1.6;
var repeat = 0;
var keyboard_keys = [];
//define white and black key names
var wkn = ['C', 'D', 'E', 'F', 'G', 'A', 'B'];
var bkn = ['Csharp', 'Dsharp', 'Fsharp', 'Gsharp', 'Asharp'];
//create octave groups
for (i=0;i<octaves;i++) {
//create white keys first
for (var w=0; w <= 6 ; w++) {
keyboard_keys[wkn[w]+i] = paper.rect(white_width*(repeat + w), 0, white_width, keyboard_height).attr("fill", white_color);
};
//set multiplier for black key placement
var bw_multiplier = 1.5;
//then black keys on top
for (var b=0; b <= 4 ; b++) {
keyboard_keys[bkn[b]+i] = paper.rect((white_width*repeat) + (black_width*bw_multiplier), 0, black_width, black_height).attr("fill", black_color);
bw_multiplier = (b == 1) ? bw_multiplier + 4 : bw_multiplier + 2;
};
repeat = repeat + 7;
}
for (var i in keyboard_keys) {
(function (st) {
st.node.onclick = function(event) {
var newColor = '#'+(0x1000000+(Math.random())*0xffffff).toString(16).substr(1,6);
st.animate({fill:newColor}, 100);
var testKey = st.paper.getElementByPoint(event.pageX, event.pageY);
var indexOfKey = testKey.id;
if (indexOfKey == 0)
{
playSound(BUFFERS.Down1);
}
else if (indexOfKey == 1)
{
playSound(BUFFERS.Down3);
}
else if (indexOfKey == 2)
{
playSound(BUFFERS.Down5);
}
else if (indexOfKey == 3)
{
playSound(BUFFERS.Down6);
}
else if (indexOfKey == 4)
{
playSound(BUFFERS.Down8);
}
else if (indexOfKey == 5)
{
playSound(BUFFERS.Down10);
}
else if (indexOfKey == 6)
{
playSound(BUFFERS.Down12);
}
else if (indexOfKey == 7)
{
playSound(BUFFERS.Down2);
}
else if (indexOfKey == 8)
{
playSound(BUFFERS.Down4);
}
else if (indexOfKey == 9)
{
playSound(BUFFERS.Down7);
}
else if (indexOfKey == 10)
{
playSound(BUFFERS.Down9);
}
else if (indexOfKey == 11)
{
playSound(BUFFERS.Down11);
}
else if (indexOfKey == 12)
{
playSound(BUFFERS.Up13);
}
else if (indexOfKey == 13)
{
playSound(BUFFERS.Up15);
}
else if (indexOfKey == 14)
{
playSound(BUFFERS.Up17);
}
else if (indexOfKey == 15)
{
playSound(BUFFERS.Up18);
}
else if (indexOfKey == 16)
{
playSound(BUFFERS.Up20);
}
else if (indexOfKey == 17)
{
playSound(BUFFERS.Up22);
}
else if (indexOfKey == 18)
{
playSound(BUFFERS.Up24);
}
else if (indexOfKey == 19)
{
playSound(BUFFERS.Up14);
}
else if (indexOfKey == 20)
{
playSound(BUFFERS.Up16)
}
else if (indexOfKey == 21)
{
playSound(BUFFERS.Up19);
}
else if (indexOfKey == 22)
{
playSound(BUFFERS.Up21);
}
else
{
playSound(BUFFERS.Up23);
}
};
})(keyboard_keys[i]);
}
};
Here's where I define the range slider for the volume control in my HTML (don't worry, it is formatted correctly in my code):
<div id="keyboard">
<script>
loadBuffers();
var beat1 = new Beat1();
var beat2 = new Beat2();
</script>
</div>
<div>Volume: <input type="range" min="0" max="100" value="100" oninput="VolumeMain.changeVolume(this);" /></div>
<div>Low Pass Filter on: <input type="checkbox" checked="false" oninput="LowPFilter.toggleFilter(this);" />
Frequency: <input type="range" min="0" max="1" step="0.01" value="1" oninput="LowPFilter.changeFrequency(this);" />
Quality: <input type="range" min="0" max="1" step="0.01" value="0" oninput="LowPFilter.changeQuality(this);" /></div>
<div>Beat 1: <input type="button" onclick="beat1.toggle();" value="Play/Pause"/>
Volume: <input type="range" min="0" max="100" value="100" onchange="beat1.changeVolume(this);"></div>
<div>Beat 2: <input type="button" onclick="beat2.toggle();" value="Play/Pause"/>
Volume: <input type="range" min="0" max="100" value="100" onchange="beat2.changeVolume(this);"></div>
</div>
The issue seems to be that the volume control used for the keyboard itself somehow cannot determine which sound buffer to use and modify. The code you supplied works when you know exactly which source you are going to adjust the volume for, as with my Beat1 and Beat2 (both of those volume controls work fine). I need the code to be able to modify the volume of any source in the buffer array. I'm using the Raphael package to create the keyboard, if that helps (it probably doesn't). I would call attention to the playSound(buffer) method and the VolumeMain.changeVolume function. None of the LowPFilter methods work either, but once we figure out how to adjust the volume for any given source, that problem will also be fixed.
Edit (update). This removes the error and allows you to access the gainNode value
var gainNode = context.createGain();
function playSound(buffer) {
var source = context.createBufferSource();
source.buffer = buffer;
var filter1 = context.createBiquadFilter();
filter1.type = "lowpass"; // modern browsers expect a string type, not the legacy numeric constant
filter1.frequency.value = 5000;
source.connect(gainNode);
source.connect(filter1);
gainNode.connect(context.destination);
filter1.connect(context.destination);
source.start(context.currentTime);
}
//volume control
VolumeMain.changeVolume = function(element) {
var volume = element.value;
var fraction = parseInt(element.value) / parseInt(element.max);
gainNode.gain.value = fraction * fraction;
console.log(gainNode.gain.value); // Console log of gain value when slider is moved
};
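One hedged follow-up on the updated playSound: the source also connects to filter1, and filter1 connects straight to the destination, so the filtered signal bypasses the gain node. If the slider should also govern the filtered path, a variant is to chain it through the shared gainNode:
// sketch: route both paths through the shared gainNode
source.connect(filter1);
filter1.connect(gainNode);  // filtered path now respects the volume slider
source.connect(gainNode);   // optional dry path; drop this line to hear only the filtered signal
gainNode.connect(context.destination);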
Previous reply
I don't really understand the problem, but if you just want an example of setting up a gain node with an HTML range slider, here's one with an oscillator. You might want to do a little spike test, see if something like this works in your code with an oscillator, and then try to apply it to your audio buffer code.
http://jsfiddle.net/vqb9dmrL/
<input id="gainSlider" type="range" min="0" max="1" step="0.05" value="0.5"/>
var audioContext = new (window.AudioContext || window.webkitAudioContext)();
var osc = audioContext.createOscillator();
osc.start(audioContext.currentTime);
var gainChan1 = audioContext.createGain();
osc.connect(gainChan1);
gainChan1.connect(audioContext.destination);
var gainSlider = document.getElementById("gainSlider");
gainSlider.addEventListener('change', function() {
gainChan1.gain.value = this.value;
});
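A small usage note on the fiddle: 'change' on a range input typically fires only when the slider is released, so listening for 'input' instead updates the gain continuously while dragging:
gainSlider.addEventListener('input', function() {
  gainChan1.gain.value = parseFloat(this.value); // the input value is a string; coerce it
});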