Reactjs recorder-js download - javascript

I am trying to make a React app that records voice samples and saves them as WAV files. I used recorder-js to make the recording, but I am still struggling with the part that saves it as a WAV file: I get "exportWAV is not a function" every time.
If you've been there, please help.

Sample code is here
// create
var record_recorder = new RecorderV2(input, { sampleRate: 44100, numChannels: 2 });
// start
record_recorder.record();
// stop
record_recorder.stop();
// export
record_recorder.exportWAV(function(blob) {
  var url = URL.createObjectURL(blob);
  var li = document.createElement('div');
  var au = document.createElement('audio');
  var hf = document.createElement('a');
  var br = document.createElement('br');
  record_result_blob = blob;
  au.controls = true;
  au.src = url;
  hf.href = url;
  hf.download = 'record.wav';
  hf.innerHTML = hf.download;
  li.appendChild(au);
  li.appendChild(br);
  li.appendChild(hf);
  if (result) result(li);
});
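A common cause of "exportWAV is not a function" is calling it on something that is not actually a recorder instance, for example because the recorder script never loaded or a different object got assigned. A minimal defensive sketch, reusing the RecorderV2 names from the snippet above:

// Guard the export so a missing or incomplete recorder fails with a clear message.
if (record_recorder && typeof record_recorder.exportWAV === 'function') {
  record_recorder.exportWAV(function(blob) {
    console.log('Got WAV blob of ' + blob.size + ' bytes');
  });
} else {
  console.error('Recorder not initialized or exportWAV missing; check that the recorder script is loaded.');
}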
See the example website:
https://voice-recorder-online.com
code:
https://voice-recorder-online.com/js/index_editor.js

Here's how you can draw waves that respond to your voice's frequency, and record and save the audio as a WAV file.
(async() => {
let leftchannel = [];
let rightchannel = [];
let recorder = null;
let recording = false;
let recordingLength = 0;
let volume = null;
let audioInput = null;
let sampleRate = null;
let AudioContext = window.AudioContext || window.webkitAudioContext;
let context = null;
let analyser = null;
let canvas = document.querySelector("canvas");
let canvasCtx = canvas.getContext("2d");
let visualSelect = document.querySelector("#visSelect");
let micSelect = document.querySelector("#micSelect");
let stream = null;
let tested = false;
try {
window.stream = stream = await getStream();
console.log("Got stream");
} catch (err) {
alert("Issue getting mic: " + err);
}
const deviceInfos = await navigator.mediaDevices.enumerateDevices();
var mics = [];
for (let i = 0; i !== deviceInfos.length; ++i) {
let deviceInfo = deviceInfos[i];
if (deviceInfo.kind === "audioinput") {
mics.push(deviceInfo);
let label = deviceInfo.label || "Microphone " + mics.length;
console.log("Mic ", label + " " + deviceInfo.deviceId);
const option = document.createElement("option");
option.value = deviceInfo.deviceId;
option.text = label;
micSelect.appendChild(option);
}
}
function getStream(constraints) {
if (!constraints) {
constraints = {
audio: true,
video: false
};
}
return navigator.mediaDevices.getUserMedia(constraints);
}
setUpRecording();
function setUpRecording() {
context = new AudioContext();
sampleRate = context.sampleRate;
// creates a gain node
volume = context.createGain();
// creates an audio node from the microphone's incoming stream
audioInput = context.createMediaStreamSource(stream);
// Create analyser
analyser = context.createAnalyser();
// connect audio input to the analyser
audioInput.connect(analyser);
// connect analyser to the volume control
// analyser.connect(volume);
let bufferSize = 2048;
recorder = context.createScriptProcessor(bufferSize, 2, 2); // assign the outer "recorder" (a new "let" here would shadow it)
// we connect the volume control to the processor
// volume.connect(recorder);
analyser.connect(recorder);
// finally connect the processor to the output
recorder.connect(context.destination);
recorder.onaudioprocess = function(e) {
// Check
if (!recording) return;
// Do something with the data, i.e Convert this to WAV
console.log("recording");
let left = e.inputBuffer.getChannelData(0);
let right = e.inputBuffer.getChannelData(1);
if (!tested) {
tested = true;
// if this reduces to 0 we are not getting any sound
if (!left.reduce((a, b) => a + b)) {
alert("There seems to be an issue with your Mic");
// clean up;
stop();
stream.getTracks().forEach(function(track) {
track.stop();
});
context.close();
}
}
// we clone the samples
leftchannel.push(new Float32Array(left));
rightchannel.push(new Float32Array(right));
recordingLength += bufferSize;
};
visualize();
}
function mergeBuffers(channelBuffer, recordingLength) {
let result = new Float32Array(recordingLength);
let offset = 0;
let lng = channelBuffer.length;
for (let i = 0; i < lng; i++) {
let buffer = channelBuffer[i];
result.set(buffer, offset);
offset += buffer.length;
}
return result;
}
function interleave(leftChannel, rightChannel) {
let length = leftChannel.length + rightChannel.length;
let result = new Float32Array(length);
let inputIndex = 0;
for (let index = 0; index < length;) {
result[index++] = leftChannel[inputIndex];
result[index++] = rightChannel[inputIndex];
inputIndex++;
}
return result;
}
function writeUTFBytes(view, offset, string) {
let lng = string.length;
for (let i = 0; i < lng; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
function start() {
recording = true;
document.querySelector("#msg").style.visibility = "visible";
// reset the buffers for the new recording
leftchannel.length = rightchannel.length = 0;
recordingLength = 0;
console.log("context: ", !!context);
if (!context) setUpRecording();
}
function stop() {
console.log("Stop");
recording = false;
document.querySelector("#msg").style.visibility = "hidden";
// we flat the left and right channels down
let leftBuffer = mergeBuffers(leftchannel, recordingLength);
let rightBuffer = mergeBuffers(rightchannel, recordingLength);
// we interleave both channels together
let interleaved = interleave(leftBuffer, rightBuffer);
///////////// WAV Encode /////////////////
// from http://typedarray.org/from-microphone-to-wav-with-getusermedia-and-web-audio/
//
// we create our wav file
let buffer = new ArrayBuffer(44 + interleaved.length * 2);
let view = new DataView(buffer);
// RIFF chunk descriptor
writeUTFBytes(view, 0, "RIFF");
view.setUint32(4, 36 + interleaved.length * 2, true); // file length minus the 8-byte RIFF header
writeUTFBytes(view, 8, "WAVE");
// FMT sub-chunk
writeUTFBytes(view, 12, "fmt ");
view.setUint32(16, 16, true);
view.setUint16(20, 1, true);
// stereo (2 channels)
view.setUint16(22, 2, true);
view.setUint32(24, sampleRate, true);
view.setUint32(28, sampleRate * 4, true);
view.setUint16(32, 4, true);
view.setUint16(34, 16, true);
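// (Derived fields: blockAlign = numChannels * bytesPerSample = 2 * 2 = 4,
// written at byte 32, and byteRate = sampleRate * blockAlign = sampleRate * 4,
// written at byte 28; the 16 at byte 34 is the bits per sample.)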
// data sub-chunk
writeUTFBytes(view, 36, "data");
view.setUint32(40, interleaved.length * 2, true);
// write the PCM samples
let lng = interleaved.length;
let index = 44;
let volume = 1;
for (let i = 0; i < lng; i++) {
view.setInt16(index, interleaved[i] * (0x7fff * volume), true);
index += 2;
}
// our final binary blob
const blob = new Blob([view], {
type: "audio/wav"
});
const audioUrl = URL.createObjectURL(blob);
console.log("BLOB ", blob);
console.log("URL ", audioUrl);
document.querySelector("#audio").setAttribute("src", audioUrl);
const link = document.querySelector("#download");
link.setAttribute("href", audioUrl);
link.download = "output.wav";
}
// Visualizer function from
// https://webaudiodemos.appspot.com/AudioRecorder/index.html
//
function visualize() {
WIDTH = canvas.width;
HEIGHT = canvas.height;
CENTERX = canvas.width / 2;
CENTERY = canvas.height / 2;
let visualSetting = visualSelect.value;
console.log(visualSetting);
if (!analyser) return;
if (visualSetting === "sinewave") {
analyser.fftSize = 2048;
var bufferLength = analyser.fftSize;
console.log(bufferLength);
var dataArray = new Uint8Array(bufferLength);
canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);
var draw = function() {
drawVisual = requestAnimationFrame(draw);
analyser.getByteTimeDomainData(dataArray);
canvasCtx.fillStyle = "rgb(200, 200, 200)";
canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);
canvasCtx.lineWidth = 2;
canvasCtx.strokeStyle = "rgb(0, 0, 0)";
canvasCtx.beginPath();
var sliceWidth = (WIDTH * 1.0) / bufferLength;
var x = 0;
for (var i = 0; i < bufferLength; i++) {
var v = dataArray[i] / 128.0;
var y = (v * HEIGHT) / 2;
if (i === 0) {
canvasCtx.moveTo(x, y);
} else {
canvasCtx.lineTo(x, y);
}
x += sliceWidth;
}
canvasCtx.lineTo(canvas.width, canvas.height / 2);
canvasCtx.stroke();
};
draw();
} else if (visualSetting == "frequencybars") {
analyser.fftSize = 64;
var bufferLengthAlt = analyser.frequencyBinCount;
console.log(bufferLengthAlt);
var dataArrayAlt = new Uint8Array(bufferLengthAlt);
canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);
var drawAlt = function() {
drawVisual = requestAnimationFrame(drawAlt);
analyser.getByteFrequencyData(dataArrayAlt);
canvasCtx.fillStyle = "rgb(0, 0, 0)";
canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);
var barWidth = WIDTH / bufferLengthAlt;
var barHeight;
var x = 0;
for (var i = 0; i < bufferLengthAlt; i++) {
barHeight = dataArrayAlt[i];
canvasCtx.fillStyle = "rgb(" + (barHeight + 100) + ",50,50)";
canvasCtx.fillRect(
x,
HEIGHT - barHeight / 2,
barWidth,
barHeight / 2
);
x += barWidth + 1;
}
};
drawAlt();
} else if (visualSetting == "circle") {
analyser.fftSize = 32;
let bufferLength = analyser.frequencyBinCount;
console.log(bufferLength);
let dataArray = new Uint8Array(bufferLength);
canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);
let draw = () => {
drawVisual = requestAnimationFrame(draw);
analyser.getByteFrequencyData(dataArray);
canvasCtx.fillStyle = "rgb(0, 0, 0)";
canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);
// let radius = dataArray.reduce((a,b) => a + b) / bufferLength;
let radius = dataArray[2] / 2;
if (radius < 20) radius = 20;
if (radius > 100) radius = 100;
// console.log('Radius ', radius)
canvasCtx.beginPath();
canvasCtx.arc(CENTERX, CENTERY, radius, 0, 2 * Math.PI, false);
// canvasCtx.fillStyle = 'rgb(50,50,' + (radius+100) +')';
// canvasCtx.fill();
canvasCtx.lineWidth = 6;
canvasCtx.strokeStyle = "rgb(50,50," + (radius + 100) + ")";
canvasCtx.stroke();
};
draw();
}
}
// Hook up the UI (the snippet defines start()/stop() above but never
// attaches them; the #record and #stop ids come from the HTML below).
document.querySelector("#record").addEventListener("click", start);
document.querySelector("#stop").addEventListener("click", stop);
})();
#msg {
visibility: hidden;
color: red;
font-weight: bold;
font-size: 22px;
font-family: Verdana;
}
button {
padding: 5px 10px;
border: 1px solid grey;
font-size: 18px;
background: white;
}
.audio-controls {
display: flex;
align-items: center;
padding-top: 20px;
justify-content: center;
}
.audio-controls button {
margin: 0px 5px;
}
canvas {
margin-top: 10px;
background-color: black;
}
select {
height: 25px;
margin: 0px 5px;
}
a {
margin-left: 20px;
}
.app {
text-align: center;
padding-top: 20px;
}
<div class="app">
<select name="" id="micSelect"></select>
<select id="visSelect">
<option value="frequencybars">Bar</option>
<option value="sinewave">Wave</option>
<option value="circle">Circle</option>
</select>
<a id="download">Download</a>
<div class="audio-controls">
<button id="record">Record</button>
<button id="stop">Stop</button>
<audio id="audio" controls></audio>
</div>
<div id="msg">Recording...</div>
<canvas width="500" height="300"></canvas>
</div>
Live Demo: https://codepen.io/furki911/pen/jOYpvMx

Related

Audio API center visualisation on frequency

I am working on an audio visualizer for the web that also lets the user "tune" the raw-audio-signal visualizer to a frequency. This is a feature of many hardware oscilloscopes. Basically, when a user centers on 440 Hz and I have a 440 Hz sine wave, the wave should stay still on the canvas and not move left or right. My plan was to move the graph to the left according to the frequency (440 Hz = 1/440 s to the left per second, because the wave should repeat every 1/440 s), but this does not seem to work.
I could not find the units used by the AnalyserNode's time-domain data. I guess it's in milliseconds, but I am not certain.
"use strict";
// Oscillator instead of mic for debugging
const USE_OSCILLATOR = true;
// Compatibility
if (!window.AudioContext)
window.AudioContext = window.webkitAudioContext;
if (!navigator.getUserMedia)
navigator.getUserMedia =
navigator.mozGetUserMedia ||
navigator.webkitGetUserMedia ||
navigator.msGetUserMedia;
// Main
class App {
constructor(visualizerElement, optionsElement) {
this.visualizerElement = visualizerElement;
this.optionsElement = optionsElement;
// HTML elements
this.canvas = document.createElement("canvas");
// Context
this.context = new AudioContext({
// Low latency
latencyHint: "interactive",
});
this.canvasCtx = this.canvas.getContext("2d", {
// Low latency
desynchronized: true,
alpha: false,
});
// Audio nodes
this.audioAnalyser = this.context.createAnalyser();
this.audioBuffer = new Uint8Array(this.audioAnalyser.frequencyBinCount);
this.audioInputStream = null;
this.audioInputNode = null;
if (this.canvasCtx === null)
throw new Error("2D rendering Context not supported by browser.");
this.updateCanvasSize();
window.addEventListener("resize", () => this.updateCanvasSize());
this.drawVisualizer();
this.visualizerElement.appendChild(this.canvas);
if (USE_OSCILLATOR) {
let oscillator = this.context.createOscillator();
oscillator.type = "sine";
oscillator.frequency.setValueAtTime(440, this.context.currentTime);
oscillator.connect(this.audioAnalyser);
oscillator.start();
}
else {
navigator.getUserMedia({ audio: true }, (stream) => {
this.audioInputStream = stream;
this.audioInputNode = this.context.createMediaStreamSource(stream);
this.audioInputNode.channelCountMode = "explicit";
this.audioInputNode.channelCount = 1;
this.audioBuffer = new Uint8Array(this.audioAnalyser.frequencyBinCount);
this.audioInputNode.connect(this.audioAnalyser);
}, (err) => console.error(err));
}
}
updateCanvasSize() {
var _a;
this.canvas.width = window.innerWidth;
this.canvas.height = window.innerHeight;
(_a = this.canvasCtx) === null || _a === void 0 ? void 0 : _a.setTransform(1, 0, 0, -1, 0, this.canvas.height * 0.5);
}
drawVisualizer() {
if (this.canvasCtx === null)
return;
const ctx = this.canvasCtx;
ctx.fillStyle = "black";
ctx.fillRect(0, -0.5 * this.canvas.height, this.canvas.width, this.canvas.height);
// Draw FFT
this.audioAnalyser.getByteFrequencyData(this.audioBuffer);
const step = this.canvas.width / this.audioBuffer.length;
const scale = this.canvas.height / (2 * 255);
ctx.beginPath();
ctx.moveTo(-step, this.audioBuffer[0] * scale);
this.audioBuffer.forEach((sample, index) => {
ctx.lineTo(index * step, scale * sample);
});
ctx.strokeStyle = "white";
ctx.stroke();
// Get the highest dominant frequency
let highestFreqHalfHz = 0;
{
/**
* Highest frequency in 0.5Hz
*/
let highestFreq = NaN;
let highestFreqAmp = NaN;
let remSteps = NaN;
for (let i = this.audioBuffer.length - 1; i >= 0; i--) {
const sample = this.audioBuffer[i];
if (sample > 20 && (isNaN(highestFreqAmp) || sample > highestFreqAmp)) {
highestFreq = i;
highestFreqAmp = sample;
if (isNaN(remSteps))
remSteps = 500;
}
if (!isNaN(remSteps)) {
if (remSteps-- < 0)
break;
}
}
if (!isNaN(highestFreq)) {
ctx.beginPath();
ctx.moveTo(highestFreq * step, 0);
ctx.lineTo(highestFreq * step, scale * 255);
ctx.strokeStyle = "green";
ctx.stroke();
highestFreqHalfHz = highestFreq;
}
}
// Draw Audio
this.audioAnalyser.getByteTimeDomainData(this.audioBuffer);
{
const bufferSize = this.audioBuffer.length;
const offsetY = -this.canvas.height * 0.5;
// I don't know what I am doing here:
const offsetX = highestFreqHalfHz == 0
? 0
: bufferSize -
Math.round(((this.context.currentTime * 1000) % (1 / 440)) % bufferSize);
// Draw the audio graph with the given offset
ctx.beginPath();
ctx.moveTo(-step, this.audioBuffer[0] * scale + offsetY);
for (let i = 0; i < bufferSize; i++) {
const index = (offsetX + i) % bufferSize;
const sample = this.audioBuffer[index];
ctx.lineTo(i * step, scale * sample + offsetY);
}
ctx.strokeStyle = "white";
ctx.stroke();
}
}
}
window.addEventListener("load", () => {
const app = new App(document.getElementById("visualizer"), document.getElementById("options"));
requestAnimationFrame(draw);
function draw() {
requestAnimationFrame(draw);
app.drawVisualizer();
}
});
html {
background: black;
}
body {
width: 100vw;
height: 100vh;
margin: 0;
overflow: hidden;
}
#visualizer {
position: fixed;
inset: 0;
}
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Equalizer</title>
</head>
<body>
<div id="visualizer"></div>
<div id="options"></div>
</body>
</html>
The above snippet was generated from TypeScript. You can find the source here. If it worked as intended, the oscillating graph (bottom) would not be moving.
I was able to solve this problem thanks to Raymond Toy's comment and my maths teacher (thank you, Mr. Klein). The solution is Math.round((this.context.currentTime % iv) * sampleRate), where iv is the period of the frequency (1/Hz). The wave is not perfectly centered, and the FFT approximation is not very accurate. In the following example I forced the detected frequency to be the specified one.
"use strict";
// Oscillator instead of mic for debugging
const USE_OSCILLATOR = true;
const OSCILLATOR_HZ = 1000;
// Compatibility
if (!window.AudioContext)
window.AudioContext = window.webkitAudioContext;
if (!navigator.getUserMedia)
navigator.getUserMedia =
navigator.mozGetUserMedia ||
navigator.webkitGetUserMedia ||
navigator.msGetUserMedia;
// Main
class App {
constructor(visualizerElement, optionsElement) {
this.visualizerElement = visualizerElement;
this.optionsElement = optionsElement;
// HTML elements
this.canvas = document.createElement("canvas");
// Context
this.context = new AudioContext({
// Low latency
latencyHint: "interactive",
});
this.canvasCtx = this.canvas.getContext("2d", {
// Low latency
desynchronized: true,
alpha: false,
});
// Audio nodes
this.audioAnalyser = this.context.createAnalyser();
this.audioBuffer = new Uint8Array(0);
this.audioInputStream = null;
this.audioInputNode = null;
if (this.canvasCtx === null)
throw new Error("2D rendering Context not supported by browser.");
this.updateCanvasSize();
window.addEventListener("resize", () => this.updateCanvasSize());
this.drawVisualizer();
this.visualizerElement.appendChild(this.canvas);
this.audioAnalyser.fftSize = 2048;
this.audioAnalyser.maxDecibels = -10;
this.audioBuffer = new Uint8Array(this.audioAnalyser.frequencyBinCount * 2);
this.audioFilter = this.context.createBiquadFilter();
this.audioFilter.type = "bandpass";
this.audioFilter.frequency.value = 900;
this.audioFilter.Q.value = 20;
this.audioAmplifier = this.context.createGain();
this.audioAmplifier.gain.value = 5;
this.audioFilter.connect(this.audioAmplifier);
this.audioAmplifier.connect(this.audioAnalyser);
if (USE_OSCILLATOR) {
let oscillator = this.context.createOscillator();
oscillator.type = "sine";
oscillator.frequency.setValueAtTime(OSCILLATOR_HZ, this.context.currentTime);
oscillator.connect(this.audioFilter);
oscillator.start();
}
else {
navigator.getUserMedia({ audio: true }, (stream) => {
this.audioInputStream = stream;
this.audioInputNode = this.context.createMediaStreamSource(stream);
this.audioInputNode.channelCountMode = "explicit";
this.audioInputNode.channelCount = 1;
this.audioBuffer = new Uint8Array(this.audioAnalyser.frequencyBinCount);
this.audioInputNode.connect(this.audioFilter);
}, (err) => console.error(err));
}
}
updateCanvasSize() {
var _a;
this.canvas.width = window.innerWidth;
this.canvas.height = window.innerHeight;
(_a = this.canvasCtx) === null || _a === void 0 ? void 0 : _a.setTransform(1, 0, 0, -1, 0, this.canvas.height * 0.5);
}
drawVisualizer() {
if (this.canvasCtx === null)
return;
const ctx = this.canvasCtx;
ctx.globalAlpha = 0.5;
ctx.fillStyle = "black";
ctx.fillRect(0, -0.5 * this.canvas.height, this.canvas.width, this.canvas.height);
ctx.globalAlpha = 1;
// Draw FFT
this.audioAnalyser.getByteFrequencyData(this.audioBuffer);
const scale = this.canvas.height / (2 * 255);
const { frequencyBinCount } = this.audioAnalyser;
const { sampleRate } = this.context;
{
const step = this.canvas.width / frequencyBinCount;
ctx.beginPath();
ctx.moveTo(-step, this.audioBuffer[0] * scale);
for (let index = 0; index < frequencyBinCount; index++) {
ctx.lineTo(index * step, scale * this.audioBuffer[index]);
}
ctx.strokeStyle = "white";
ctx.stroke();
}
// Get the highest dominant frequency
const step = this.canvas.width / frequencyBinCount;
let highestFreqHz = 0;
{
/**
* Highest frequency index in the buffer
*/
let highestFreqIndex = NaN;
let highestFreqAmp = NaN;
let remSteps = NaN;
for (let i = frequencyBinCount - 1; i >= 0; i--) {
const sample = this.audioBuffer[i];
if (sample > 30) {
if (isNaN(highestFreqAmp)) {
highestFreqIndex = i;
highestFreqAmp = sample;
}
else {
if (sample > highestFreqAmp) {
highestFreqIndex = i;
highestFreqAmp = sample;
}
}
//if (isNaN(remSteps)) remSteps = 100;
}
if (!isNaN(remSteps)) {
if (remSteps-- < 0)
break;
}
}
if (!isNaN(highestFreqIndex)) {
// Force exact value: (not necessary)
highestFreqIndex =
(OSCILLATOR_HZ * (2 * frequencyBinCount)) / sampleRate;
ctx.beginPath();
ctx.moveTo(highestFreqIndex * step, 0);
ctx.lineTo(highestFreqIndex * step, scale * 255);
ctx.strokeStyle = "green";
ctx.stroke();
highestFreqHz =
(highestFreqIndex * sampleRate) / (2 * frequencyBinCount);
window.HZ = highestFreqHz;
}
}
// Draw Audio
this.audioAnalyser.getByteTimeDomainData(this.audioBuffer);
{
const iv = highestFreqHz == 0 ? 0 : 1 / highestFreqHz;
const bufferSize = this.audioBuffer.length;
const offsetY = -this.canvas.height / 2.4;
const startIndex = Math.round(iv * sampleRate);
const step = this.canvas.width / (this.audioBuffer.length - startIndex);
const scale = this.canvas.height / (3 * 255);
const offsetX = highestFreqHz == 0
? 0
: Math.round((this.context.currentTime % iv) * sampleRate) %
bufferSize;
// Draw the audio graph with the given offset
ctx.beginPath();
ctx.moveTo(-step, this.audioBuffer[startIndex - offsetX] * scale + offsetY);
for (let i = startIndex; i < bufferSize; i += 4) {
const index = (i - offsetX) % bufferSize;
const sample = this.audioBuffer[index];
ctx.lineTo((i - startIndex) * step, scale * sample + offsetY);
}
ctx.strokeStyle = "white";
ctx.stroke();
}
}
}
window.addEventListener("load", () => {
const app = new App(document.getElementById("visualizer"), document.getElementById("options"));
requestAnimationFrame(draw);
function draw() {
requestAnimationFrame(draw);
app.drawVisualizer();
}
});
html {
background: black;
}
body {
width: 100vw;
height: 100vh;
margin: 0;
overflow: hidden;
}
#visualizer {
position: fixed;
inset: 0;
}
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Equalizer</title>
</head>
<body>
<div id="visualizer"></div>
<div id="options"></div>
</body>
</html>
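As a standalone illustration of the fix, here is the core phase-alignment computation on its own (a minimal sketch; analyser, context, and a detected freqHz are assumed to already exist):

// Sample-align the time-domain read so a periodic wave appears to stand still.
const buffer = new Uint8Array(analyser.fftSize);
analyser.getByteTimeDomainData(buffer);
const iv = 1 / freqHz; // period of the detected frequency, in seconds
// How far into the current period we are, converted to samples:
const offsetX = Math.round((context.currentTime % iv) * context.sampleRate) % buffer.length;
for (let i = 0; i < buffer.length; i++) {
  // Shifting the read index by offsetX starts every frame at the same phase.
  const sample = buffer[(i - offsetX + buffer.length) % buffer.length];
  // ... plot sample ...
}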

Signature pad imprint text size change within canvas ratio

I have implemented the Signature Pad canvas on my website. I want to add an imprint when saving the signature after the user has signed. The problem I'm facing is making that imprint size fit the canvas width. The canvas size depends on the scale of the user's signature.
This is how it is saved:
As you can see, I trim the canvas to the area drawn on after the signature is made. I'm using another function to print a value on top of the signature canvas before downloading it as an image. I want to adjust the text size to the canvas width so that the whole text value fits inside the canvas area.
var signaturePad = new SignaturePad(document.getElementById('signature-pad'), {
backgroundColor: 'rgba(255, 255, 255)',
penColor: 'rgb(0, 0, 0)'
});
var saveButton = document.getElementById('save');
var cancelButton = document.getElementById('clear');
function dataURLToBlob(dataURL) {
// Code taken from https://github.com/ebidel/filer.js
var parts = dataURL.split(';base64,');
var contentType = parts[0].split(":")[1];
var raw = window.atob(parts[1]);
var rawLength = raw.length;
var uInt8Array = new Uint8Array(rawLength);
for (var i = 0; i < rawLength; ++i) {
uInt8Array[i] = raw.charCodeAt(i);
}
return new Blob([uInt8Array], {
type: contentType
});
}
SignaturePad.prototype.removeBlanks = function() {
var cif = "0000885458";
var shortName = "/MR Jhon Doe";
var ref = "0854220190000001585/";
var imprintValue1 = "";
var imprintValue = "";
var imgWidth = this._ctx.canvas.width;
var imgHeight = this._ctx.canvas.height;
var imageData = this._ctx.getImageData(0, 0, imgWidth, imgHeight),
data = imageData.data,
getAlpha = function(x, y) {
return {
red: data[(imgWidth * y + x) * 4],
green: data[(imgWidth * y + x) * 4 + 1],
blue: data[(imgWidth * y + x) * 4 + 2]
};
},
isWhite = function(rgb) {
return rgb.red == 255 && rgb.green == 255 && rgb.blue == 255;
},
scanY = function(fromTop) {
var offset = fromTop ? 1 : -1;
// loop through each row
for (var y = fromTop ? 0 : imgHeight - 1; fromTop ? (y < imgHeight) : (y > -1); y += offset) {
// loop through each column
for (var x = 0; x < imgWidth; x++) {
if (!isWhite(getAlpha(x, y))) {
return y;
}
}
}
return null; // all image is white
},
scanX = function(fromLeft) {
var offset = fromLeft ? 1 : -1;
// loop through each column
for (var x = fromLeft ? 0 : imgWidth - 1; fromLeft ? (x < imgWidth) : (x > -1); x += offset) {
// loop through each row
for (var y = 0; y < imgHeight; y++) {
if (!isWhite(getAlpha(x, y))) {
return x;
}
}
}
return null; // all image is white
};
var cropTop = scanY(false),
cropBottom = scanY(true),
cropLeft = scanX(true),
cropRight = scanX(false);
cropTop += 30;
cropRight += 20;
var relevantData = this._ctx.getImageData(cropLeft - 10, cropTop - 20, cropRight - cropLeft, cropBottom - cropTop);
this._ctx.canvas.width = cropRight - cropLeft;
this._ctx.canvas.height = cropBottom - cropTop;
if (cif && shortName && ref) {
imprintValue1 = cif.concat("" + shortName);
imprintValue = ref.concat(imprintValue1);
}
this._ctx.clearRect(0, 0, cropRight - cropLeft, cropBottom - cropTop);
this._ctx.putImageData(relevantData, 0, 0);
var canvas_width = this._ctx.canvas.width;
var fontBase = canvas_width, // selected default width for canvas
fontSize = 70;
var ratio = fontSize / fontBase; // calc ratio
var size = canvas_width * ratio; // get font size based on current width
this._ctx.font = size + "px sans-serif"; // ctx.font needs a CSS font string with units and a family; a bare number is ignored
// draw the imprintValue
this._ctx.fillStyle = "#e42f2f";
this._ctx.fillText(imprintValue, 0, 15);
};
saveButton.addEventListener('click', function(event) {
signaturePad.removeBlanks();
var dataURL = signaturePad.toDataURL('image/png');
download(dataURL, "signature.png");
});
function download(dataURL, filename) {
if (navigator.userAgent.indexOf("Safari") > -1 && navigator.userAgent.indexOf("Chrome") === -1) {
window.open(dataURL);
} else {
var blob = dataURLToBlob(dataURL);
var url = window.URL.createObjectURL(blob);
var a = document.createElement("a");
a.style = "display: none";
a.href = url;
a.download = filename;
document.body.appendChild(a);
a.click();
window.URL.revokeObjectURL(url);
}
}
cancelButton.addEventListener('click', function(event) {
signaturePad.clear();
});
.wrapper {
position: relative;
width: 400px;
height: 200px;
-moz-user-select: none;
-webkit-user-select: none;
-ms-user-select: none;
user-select: none;
}
img {
position: absolute;
left: 0;
top: 0;
}
.signature-pad {
position: absolute;
left: 0;
top: 0;
width: 400px;
height: 200px;
}
<script src="https://cdn.jsdelivr.net/npm/signature_pad@3.0.0-beta.3/dist/signature_pad.umd.min.js"></script>
<h1>
Put Signature
</h1>
<div class="wrapper">
<canvas id="signature-pad" class="signature-pad" width=400 height=200></canvas>
</div>
<div>
<button id="save">Save</button>
<button id="clear">Clear</button>
</div>
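One way to make the imprint fit, sketched under the assumption that the text should span the full (cropped) canvas width: measure the string once at a reference size, then scale proportionally.

// Fit a label to the canvas width by measuring at a known size first.
function fitFontSize(ctx, text, targetWidth, family) {
  var referenceSize = 100;
  ctx.font = referenceSize + "px " + family;
  var measured = ctx.measureText(text).width;
  return Math.floor(referenceSize * targetWidth / measured);
}
// usage inside removeBlanks, after the crop:
// var size = fitFontSize(this._ctx, imprintValue, this._ctx.canvas.width, "sans-serif");
// this._ctx.font = size + "px sans-serif";
// this._ctx.fillText(imprintValue, 0, 15);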

Detect the pitch of a live audio input in the browser

How can you detect the pitch of a live audio input in the browser?
The code below will get you 1,024 frequency values. However, I don't know how to go from these to actual pitches (e.g. A#).
const audioContext = new window.AudioContext();
const analyser = audioContext.createAnalyser();
navigator.getUserMedia(
{ audio: true },
stream => {
audioContext.createMediaStreamSource(stream).connect(analyser);
const dataArray = new Uint8Array(analyser.frequencyBinCount);
analyser.getByteTimeDomainData(dataArray);
// Log the contents of the analyser every 500 ms.
setInterval(() => {
console.log(dataArray.length);
}, 500);
},
err => console.log(err)
);
What you are currently accessing is the time-domain data, and it cannot be used to retrieve a note (which seems to be what you want).
What you want is the frequency domain, using AnalyserNode.getByteFrequencyData or getFloatFrequencyData, from which you can tell which frequencies are louder or quieter.
However, since most sound is made of harmonics, you can't reliably retrieve the note that was played from a microphone. Add to this that we only have access to limited resolution, and not only will you be unable to retrieve a note from a microphone, you won't even manage it with a virtual oscillator.
The example below was built from this Q/A and examples from MDN:
const canvasCtx = canvas.getContext('2d');
const WIDTH = canvas.width = 500;
const HEIGHT = canvas.height = 150;
const audioCtx = new (window.AudioContext || window.webkitAudioContext);
const analyser = audioCtx.createAnalyser();
const nyquist = audioCtx.sampleRate / 2;
// highest precision
analyser.fftSize = 32768;
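// Each FFT bin spans sampleRate / fftSize Hz, so even at this maximum
// fftSize the resolution is 44100 / 32768 ≈ 1.35 Hz per bin (assuming a
// 44.1 kHz context); that bounds how precisely the peak can be located.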
const bufferLength = analyser.frequencyBinCount;
const dataArray = new Uint8Array(bufferLength);
const osc = audioCtx.createOscillator();
osc.frequency.value = 400;
osc.connect(analyser);
osc.connect(audioCtx.destination);
range.oninput = e => {
osc.frequency.value = range.value;
};
if(!audioCtx.state || audioCtx.state === 'running') {
begin();
}
else {
log.textContent = 'click anywhere to begin';
onclick = e => {
onclick = null;
begin()
}
}
function begin() {
osc.start(0);
draw();
}
function draw() {
requestAnimationFrame(draw);
// get the Frequency Domain
analyser.getByteFrequencyData(dataArray);
canvasCtx.fillStyle = 'rgb(0, 0, 0)';
canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);
const barWidth = (WIDTH / bufferLength) * 2.5;
let max_val = -Infinity;
let max_index = -1;
let x = 0;
for(let i = 0; i < bufferLength; i++) {
let barHeight = dataArray[i];
if(barHeight > max_val) {
max_val = barHeight;
max_index = i;
}
canvasCtx.fillStyle = 'rgb(' + (barHeight+100) + ',50,50)';
canvasCtx.fillRect(x,HEIGHT-barHeight/2,barWidth,barHeight/2);
x += barWidth;
}
log.textContent = `loudest freq: ${max_index * (nyquist / bufferLength)}
real value: ${range.value}`;
}
#log{display: inline-block; margin:0 12px}
#canvas{display: block;}
<input id="range" type="range" min="0" max="1000" value="400"><pre id="log"></pre>
<canvas id="canvas"></canvas>
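To get from a frequency in Hz to a note name (the "e.g. A#" the question asks about), you can round to the nearest semitone in 12-tone equal temperament with A4 = 440 Hz. A minimal sketch:

// Map a frequency to the nearest equal-tempered note name.
const NOTE_NAMES = ['C', 'C#', 'D', 'D#', 'E', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B'];
function freqToNote(freqHz) {
  // MIDI note number 69 is A4 (440 Hz); there are 12 semitones per octave.
  const midi = Math.round(69 + 12 * Math.log2(freqHz / 440));
  const name = NOTE_NAMES[((midi % 12) + 12) % 12];
  const octave = Math.floor(midi / 12) - 1;
  return name + octave;
}
console.log(freqToNote(466.16)); // "A#4"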

(Three.js) Change color on particles forming image

I'm currently working on a particle system where the particles move around and form pictures, with a few seconds in between (almost like a slideshow). I know where to put the color code to change the color of the particles forming the pictures, but for some reason it doesn't work. I therefore suspect that the problem is somewhere else in the script, but the question is where...
You can see the code below (I tried to make a CodePen but it didn't work):
var dispersed = false;
var firstDone = false;
var secondDone = false;
var thirdDone = false;
var fRgba = []; // first img rgba data
var sRgba = []; // second img rgba data
var tRgba = []; // third img rgba data
var WIDTH = window.innerWidth,
HEIGHT = window.innerHeight;
var VIEW_ANGLE = 45,
ASPECT = WIDTH / HEIGHT,
NEAR = 0.01,
FAR = 10000;
var $container = $("#container");
var renderer = new THREE.WebGLRenderer();
var camera = new THREE.PerspectiveCamera(
VIEW_ANGLE,
ASPECT,
NEAR,
FAR);
var scene = new THREE.Scene();
scene.add(camera);
camera.position.z = 900;
renderer.setSize(WIDTH, HEIGHT);
$container.append(renderer.domElement);
var particleCount = 5200,
particles = new THREE.Geometry();
var pMaterial = new THREE.PointsMaterial({
size: 6,
map: createCircleTexture('#CACACA', 256),
transparent: true,
depthWrite: false
});
function createCircleTexture(color, size) {
var matCanvas = document.createElement('canvas');
matCanvas.width = matCanvas.height = size;
var matContext = matCanvas.getContext('2d');
var texture = new THREE.Texture(matCanvas);
var center = size / 2;
matContext.beginPath();
matContext.arc(center, center, size/2, 0, 2 * Math.PI, false);
matContext.closePath();
matContext.fillStyle = color;
matContext.fill();
texture.needsUpdate = true;
return texture;
}
for (var i = 0; i < particleCount; i++) {
var x = Math.random() * 1600 - 800;
var y = getRandomInt(600, 1500)
var z = Math.random() * 30 - 15;
var particle = new THREE.Vector3(x, y, z);
particle.updated = 0;
particles.vertices.push(particle);
};
var particleSystem = new THREE.Points(particles, pMaterial);
particleSystem.sortParticles = true;
scene.add(particleSystem);
function drawImage(imageObj, array) {
var canvas = $("#canvas")[0];
var context = canvas.getContext("2d");
var imageX = 0;
var imageY = 0;
var imageWidth = imageObj.width;
var imageHeight = imageObj.height;
context.drawImage(imageObj, imageX, imageY);
var imageData = context.getImageData(imageX, imageY, imageWidth, imageHeight);
var data = imageData.data;
for(var y = 0; y < imageHeight; y+= 4) {
for(var x = 0; x < imageWidth; x+= 4) {
var red = data[((imageWidth * y) + x) * 4];
var green = data[((imageWidth * y) + x) * 4 + 1];
var blue = data[((imageWidth * y) + x) * 4 + 2];
var alpha = data[((imageWidth * y) + x) * 4 + 3];
if (red < 100) {
var pX = (x % 500) - 249;
var pY = 249 - y;
array.push([pX, pY, red, green, blue, alpha]);
}
}
}
};
var addDestination = function(particle, x, y, z) {
var dest = new THREE.Vector3(x, y, z);
particle.destination = dest;
};
var addVelocity = function(particle) {
var xDiff = (particle.destination.x - particle.x) / 180;
var yDiff = (particle.destination.y - particle.y) / 180;
var zDiff = (particle.destination.z - particle.z) / 180;
var vel = new THREE.Vector3(xDiff, yDiff, zDiff);
particle.velocity = vel;
};
var move = function(particle) {
particle.x += particle.velocity.x;
particle.y += particle.velocity.y;
particle.z += particle.velocity.z;
particle.updated += 1;
};
var slowDown = function(particle) {
particle.velocity.x -= (particle.velocity.x / 300)
particle.velocity.y -= (particle.velocity.y / 300)
particle.velocity.z -= (particle.velocity.z / 160)
};
var resetProperties = function() {
var pCount = particleCount;
while (pCount--) {
var particle = particles.vertices[pCount];
particle.destination = null
particle.updated = 0;
};
};
function getRandomInt(min, max) {
return Math.floor(Math.random() * (max - min)) + min;
};
var distributedZ = function(level) {
var z;
if (level === 1) {
z = getRandomInt(50, 100);
} else if (level === 2) {
z = getRandomInt(350, 400);
} else {
z = getRandomInt(650, 700);
}
return z;
};
function shuffle(array) {
var currentIndex = array.length, temporaryValue, randomIndex;
while (0 !== currentIndex) {
randomIndex = Math.floor(Math.random() * currentIndex);
currentIndex -= 1;
temporaryValue = array[currentIndex];
array[currentIndex] = array[randomIndex];
array[randomIndex] = temporaryValue;
}
return array;
};
var disperse = function() {
pCount = particleCount;
for (var i = 0; i < pCount; i++) {
var particle = particles.vertices[i];
if (typeof(particle.destination) === "undefined") {
var nums = [-1, 1];
var x = particle.x + nums[Math.round(Math.random())];
var y = particle.y - 1000;
var z = Math.random() * 30 - 15;
addDestination(particle, x, y, z);
particle.velocity = new THREE.Vector3(x - particle.x, -3, z - particle.z);
}
if (particle.updated <= 300) {
move(particle);
} else {
particles.vertices = shuffle(particles.vertices);
resetProperties();
dispersed = true;
return;
}
}
}
var morphImageParticles = function(imageParticles, rgba) {
for (var i = 0; i < imageParticles.length; i++) {
var particle = imageParticles[i]
if (particle.destination === null) {
var pixelData = rgba[i];
var x = pixelData[0];
var y = pixelData[1];
var z = Math.random() * 15 - 7;
addDestination(particle, x, y, z);
addVelocity(particle);
}
if (particle.updated <= 180) {
move(particle);
}
}
};
var morphOuterParticles = function(outerParticles, ord) {
for (var i = 0; i < outerParticles.length; i++) {
var nums = [-1, 1];
var particle = outerParticles[i];
if (particle.destination === null) {
var x = Math.random() * 1000 - 500;
var y = Math.random() * 1000 - 500;
var z;
if (i <= Math.round(outerParticles.length * 0.6)) {
z = distributedZ(1)
} else if (i > Math.round(outerParticles.length * 0.6) && i < Math.round(outerParticles.length * 0.9)) {
z = distributedZ(2)
} else {
z = distributedZ(3);
}
addDestination(particle, x, y, z);
addVelocity(particle);
}
if (particle.updated <= 600) {
move(particle);
slowDown(particle);
} else {
particles.vertices = shuffle(particles.vertices);
resetProperties();
if (ord === 1) {
firstDone = true;
} else if (ord === 2) {
secondDone = true;
} else {
thirdDone = true;
}
return;
}
}
};
var makeImg = function(rgba, ord) {
var pCount = particleCount;
var imagePs = particles.vertices.slice(0, rgba.length);
var outerPs = particles.vertices.slice(rgba.length, pCount);
morphImageParticles(imagePs, rgba);
morphOuterParticles(outerPs, ord);
};
var update = function() {
if (thirdDone) {
} else if (secondDone) {
makeImg(tRgba, 3);
} else if (firstDone) {
makeImg(sRgba, 2);
} else if (dispersed) {
makeImg(fRgba, 1);
} else {
disperse();
}
particleSystem.geometry.verticesNeedUpdate = true;
renderer.render(scene, camera);
requestAnimationFrame(update);
TWEEN.update();
};
var rotXScale = d3.scale.linear().domain([0, window.innerHeight]).range([15, -15]);
var rotYScale = d3.scale.linear().domain([0, window.innerWidth]).range([25, -25]);
d3.select("body").on("mousemove", function() {
var scaledX = rotXScale(d3.mouse(this)[1]) * Math.PI / 180;
var scaledY = rotYScale(d3.mouse(this)[0]) * Math.PI / 180;
var tween = new TWEEN.Tween(particleSystem.rotation).to({ x: scaledX, y:
scaledY, z: 0 });
tween.easing( TWEEN.Easing.Quartic.Out);
tween.start();
transparency: true
});
var img1 = new Image();
var img2 = new Image();
var img3 = new Image();
img1.onload = function() {
drawImage(this, fRgba);
img2.onload = function() {
drawImage(this, sRgba);
img3.onload = function() {
drawImage(this, tRgba);
}
img3.src = "images/p1.png";
}
img2.src = "images/p2.png";
update();
}
img1.src = "images/p3.png";
update();
I thought I only needed to add the color code below, for example ['0xffffff']; that's how it should work, at least, but it didn't. Therefore I guess the problem is somewhere else in the script.
var fRgba = []; // first img rgba data
var sRgba = []; // second img rgba data
var tRgba = []; // third img rgba data
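For what it's worth, in this script the rendered particle color comes from the sprite texture and the material, not from those rgba arrays (they only hold target positions and pixel data). A sketch of the two places that actually control the color, using the names from the code above:

// The material's color tints (multiplies) the circle texture, so either
// the tint or the baked-in sprite color changes what you see on screen.
var pMaterial = new THREE.PointsMaterial({
  color: 0xffffff,                           // tint applied on top of the map
  size: 6,
  map: createCircleTexture('#ff0000', 256),  // color baked into the sprite
  transparent: true,
  depthWrite: false
});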

Slowing movement of image on canvas

The Problem
I am creating a game that involves dodging projectiles. The player controls an image of a ship, and I don't want the ship to follow the mouse exactly, as this looks very unrealistic.
The Question
Is there a way to control how fast the image moves? How can I slow the movement of the image down?
The Code
var game = create_game();
game.init();
//music
var snd = new Audio("Menu.mp3");
snd.loop = true;
snd.play();
document.getElementById('mute').addEventListener('click', function (evt) {
if ( snd.muted ) {
snd.muted = false
evt.target.innerHTML = 'mute'
}
else {
snd.muted = true
evt.target.innerHTML = 'unmute'
}
})
function create_game() {
debugger;
var level = 1;
var projectiles_per_level = 1;
var min_speed_per_level = 1;
var max_speed_per_level = 2;
var last_projectile_time = 0;
var next_projectile_time = 0;
var width = 600;
var height = 500;
var delay = 1000;
var item_width = 30;
var item_height = 30;
var total_projectiles = 0;
var projectile_img = new Image();
var projectile_w = 30;
var projectile_h = 30;
var player_img = new Image();
var c, ctx;
var projectiles = [];
var player = {
x: 200,
y: 400,
score: 0
};
function init() {
projectile_img.src = "projectile.png";
player_img.src = "player.png";
level = 1;
total_projectiles = 0;
projectiles = [];
c = document.getElementById("c");
ctx = c.getContext("2d");
ctx.fillStyle = "#ff6600";
ctx.fillRect(0, 0, 500, 600);
c.addEventListener("mousemove", function (e) {
//moving over the canvas.
var bounding_box = c.getBoundingClientRect();
player.x = (e.clientX - bounding_box.left) * (c.width / bounding_box.width) - player_img.width / 2;
}, false);
setupProjectiles();
requestAnimationFrame(tick);
}
function setupProjectiles() {
var max_projectiles = level * projectiles_per_level;
while (projectiles.length < max_projectiles) {
initProjectile(projectiles.length);
}
}
function initProjectile(index) {
var max_speed = max_speed_per_level * level;
var min_speed = min_speed_per_level * level;
projectiles[index] = {
x: Math.round(Math.random() * (width - 2 * projectile_w)) + projectile_w,
y: -projectile_h,
v: Math.round(Math.random() * (max_speed - min_speed)) + min_speed,
delay: Date.now() + Math.random() * delay
}
total_projectiles++;
}
function collision(projectile) {
if (projectile.y + projectile_img.height < player.y + 74) {
return false;
}
if (projectile.y > player.y + 74) {
return false;
}
if (projectile.x + projectile_img.width < player.x + 177) {
return false;
}
if (projectile.x > player.x + 177) {
return false;
}
return true;
}
function maybeIncreaseDifficulty() {
level = Math.max(1, Math.ceil(player.score / 10));
setupProjectiles();
}
function tick() {
var i;
var projectile;
var dateNow = Date.now();
c.width = c.width;
for (i = 0; i < projectiles.length; i++) {
projectile = projectiles[i];
if (dateNow > projectile.delay) {
projectile.y += projectile.v;
if (collision(projectile)) {
initProjectile(i);
player.score++;
} else if (projectile.y > height) {
initProjectile(i);
} else {
ctx.drawImage(projectile_img, projectile.x, projectile.y);
}
}
}
ctx.font = "bold 24px sans-serif";
ctx.fillStyle = "#ff6600";
ctx.fillText(player.score, c.width - 50, 50);
ctx.fillText("Level: " + level, 20, 50);
ctx.drawImage(player_img, player.x, player.y);
maybeIncreaseDifficulty();
requestAnimationFrame(tick);
}
return {
init: init
};
}
https://jsfiddle.net/a6nmy804/4/ (Broken)
Throttle the player's movement using a "timeout" countdown, as sketched below.
Create a global var playerFreezeCountdown = 0.
In mousemove, change player.x only if playerFreezeCountdown <= 0; if playerFreezeCountdown > 0, leave player.x alone.
When playerFreezeCountdown <= 0, change player.x and also reset playerFreezeCountdown to the desired "tick timeout" value, e.g. playerFreezeCountdown = 5. This prevents the player from moving the ship again until 5 ticks have passed.
In tick, always decrement playerFreezeCountdown--. This indirectly allows player.x to change again once playerFreezeCountdown has dropped to zero or below.
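A minimal sketch of that countdown, applied to the mousemove handler and tick() loop from the question:

var playerFreezeCountdown = 0; // ticks left before the ship may move again

c.addEventListener("mousemove", function (e) {
  if (playerFreezeCountdown > 0) return; // still frozen: ignore this move
  var bounding_box = c.getBoundingClientRect();
  player.x = (e.clientX - bounding_box.left) * (c.width / bounding_box.width) - player_img.width / 2;
  playerFreezeCountdown = 5; // freeze movement for the next 5 ticks
}, false);

function tick() {
  playerFreezeCountdown--; // count down once per frame
  // ... the rest of the original tick() body stays unchanged ...
  requestAnimationFrame(tick);
}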
