I've been searching around looking for some solutions but haven't really found much. I'd like something really simple along the lines of the image below. Has anyone ever used one in a project? Any advice or any API's I could use? Thanks.
Here is base:
You need a canvas
You need canvas context
You need audio context
var canvas = document.createElement("canvas");
canvas.width = 500;
canvas.height = 180;
var ctx = canvas.getContext("2d");
ctx.fillStyle = "black";
ctx.strokeStyle = "white";
ctx.lineCap = "round";
var auctx;
window.onload = () => {
document.body.appendChild(canvas);
auctx = new(window.AudioContext || window.webkitAudioContext)();
startAudio();
}
var buffer, src, analyser, buffLen;
var barWidth, dataArray;
function startAudio() {
var url = "https://cf-media.sndcdn.com/cTGZiRbnSouE.128.mp3?Policy=eyJTdGF0ZW1lbnQiOlt7IlJlc291cmNlIjoiKjovL2NmLW1lZGlhLnNuZGNkbi5jb20vY1RHWmlSYm5Tb3VFLjEyOC5tcDMiLCJDb25kaXRpb24iOnsiRGF0ZUxlc3NUaGFuIjp7IkFXUzpFcG9jaFRpbWUiOjE1MTk5NTQ5NjB9fX1dfQ__&Signature=JmNkAHzih0~f3lQVwvPXFeTIUVuMXbwlbqizsXbCtc6lFIxjRlqa3wUGp5-xAkt7AUlhiYxu~Wscc6MfQTTc527DHJURMpdqvdXv61ll-WJqoV1V-tpWSa~qR-NEAWGCGBvrge0BkRRAsOHFljeLNCvO3DjzH7lSTPMlV-MtbFV2k-PiY0vrY1LuicAOcfEtXYTiMBkg-rhzkeHFcNHYt2Nb2hmIvmWFI1cFG74FBIXTnVPAg2Yo0r-LeiirWvSgewkIu~zPzaVYjnPaN1y-ZGnPBFiBSC1mpVhtB5wkhTXF5LFthkGUHnUK2ybESr-1uOH9GLye-7dxdIXx~A1LDA__&Key-Pair-Id=APKAJAGZ7VMH2PFPW6UQ"; // nice url
var request = new XMLHttpRequest();
request.open('GET', url, true);
request.responseType = 'arraybuffer';
request.onload = function() {
auctx.decodeAudioData(request.response, function(buffer) {
buffer = buffer;
src = auctx.createBufferSource();
src.buffer = buffer;
src.loop = false;
src.connect(auctx.destination);
src.start(0);
analyser = auctx.createAnalyser();
src.connect(analyser);
analyser.connect(auctx.destination);
analyser.fftSize = 256;
buffLen = analyser.frequencyBinCount;
dataArray = new Uint8Array(buffLen);
barWidth = (500 - 2 * buffLen - 4) / buffLen * 2.5;
ctx.lineWidth = barWidth;
draw();
});
}
request.send();
}
function draw() {
ctx.fillRect(0, 0, 500, 180);
analyser.getByteFrequencyData(dataArray);
for (var i = 0; i < buffLen; i++) {
ctx.beginPath();
ctx.moveTo(4 + 2 * i * barWidth + barWidth / 2, 178 - barWidth / 2);
ctx.lineTo(4 + 2 * i * barWidth + barWidth / 2, 178 - dataArray[i] * 0.65 - barWidth / 2);
ctx.stroke();
}
requestAnimationFrame(draw);
}
canvas {
background: black;
}
This code should work. You can add some images and tweak settings.
I think that this is maybe what you’re looking for. Sorry I’m a bit late.
// AUDIO CONTEXT
window.AudioContext = (window.AudioContext ||
window.webkitAudioContext ||
window.mozAudioContext ||
window.oAudioContext ||
window.msAudioContext);
if (!AudioContext) alert('This site cannot be run in your Browser. Try a recent Chrome or Firefox. ');
var audioContext = new AudioContext();
var currentBuffer = null;
// CANVAS
var canvasWidth = window.innerWidth, canvasHeight = 120 ;
var newCanvas = createCanvas (canvasWidth, canvasHeight);
var context = null;
window.onload = appendCanvas;
function appendCanvas() { document.body.appendChild(newCanvas);
context = newCanvas.getContext('2d'); }
// the function that loads the sound file
//NOTE this program for some reason won’t load sound files from like a weebly website so you’ll have to add the files to your github or whatever and use that raw audio file
function loadMusic(url) {
var req = new XMLHttpRequest();
req.open( "GET", url, true );
req.responseType = "arraybuffer";
req.onreadystatechange = function (e) {
if (req.readyState == 4) {
if(req.status == 200)
audioContext.decodeAudioData(req.response,
function(buffer) {
currentBuffer = buffer;
displayBuffer(buffer);
}, onDecodeError);
else
alert('error during the load.Wrong url or cross origin issue');
}
} ;
req.send();
}
function onDecodeError() { alert('error while decoding your file.'); }
// MUSIC DISPLAY
function displayBuffer(buff /* is an AudioBuffer */) {
var drawLines = 500;
var leftChannel = buff.getChannelData(0); // Float32Array describing left channel
var lineOpacity = canvasWidth / leftChannel.length ;
context.save();
context.fillStyle = '#080808' ;
context.fillRect(0,0,canvasWidth,canvasHeight );
context.strokeStyle = '#46a0ba';
context.globalCompositeOperation = 'lighter';
context.translate(0,canvasHeight / 2);
//context.globalAlpha = 0.6 ; // lineOpacity ;
context.lineWidth=1;
var totallength = leftChannel.length;
var eachBlock = Math.floor(totallength / drawLines);
var lineGap = (canvasWidth/drawLines);
context.beginPath();
for(var i=0;i<=drawLines;i++){
var audioBuffKey = Math.floor(eachBlock * i);
var x = i*lineGap;
var y = leftChannel[audioBuffKey] * canvasHeight / 2;
context.moveTo( x, y );
context.lineTo( x, (y*-1) );
}
context.stroke();
context.restore();
}
// Creates the Canvas
function createCanvas ( w, h ) {
var newCanvas = document.createElement('canvas');
newCanvas.width = w; newCanvas.height = h;
return newCanvas;
};
// The program runs the url you put into the line below
loadMusic('||YOUR LINK||');
Happy Coding!!
If you by chance have a better solution on this, may you send me the code because I’m also having a bit of trouble with this. I’m trying to create one like soundcloud without using external libraries.
EDIT 1
So i thought that it would be nice if i give an example of what it would look like in action so, here —>
// AUDIO CONTEXT
window.AudioContext = (window.AudioContext ||
window.webkitAudioContext ||
window.mozAudioContext ||
window.oAudioContext ||
window.msAudioContext);
if (!AudioContext) alert('This site cannot be run in your Browser. Try a recent Chrome or Firefox. ');
var audioContext = new AudioContext();
var currentBuffer = null;
// CANVAS
var canvasWidth = window.innerWidth, canvasHeight = 120 ;
var newCanvas = createCanvas (canvasWidth, canvasHeight);
var context = null;
window.onload = appendCanvas;
function appendCanvas() { document.body.appendChild(newCanvas);
context = newCanvas.getContext('2d'); }
// the function that loads the sound file
//NOTE this program for some reason won’t load sound files from like a weebly website so you’ll have to add the files to your github or whatever and use that raw audio file
function loadMusic(url) {
var req = new XMLHttpRequest();
req.open( "GET", url, true );
req.responseType = "arraybuffer";
req.onreadystatechange = function (e) {
if (req.readyState == 4) {
if(req.status == 200)
audioContext.decodeAudioData(req.response,
function(buffer) {
currentBuffer = buffer;
displayBuffer(buffer);
}, onDecodeError);
else
alert('error during the load.Wrong url or cross origin issue');
}
} ;
req.send();
}
function onDecodeError() { alert('error while decoding your file.'); }
// MUSIC DISPLAY
function displayBuffer(buff /* is an AudioBuffer */) {
var drawLines = 500;
var leftChannel = buff.getChannelData(0); // Float32Array describing left channel
var lineOpacity = canvasWidth / leftChannel.length ;
context.save();
context.fillStyle = '#080808' ;
context.fillRect(0,0,canvasWidth,canvasHeight );
context.strokeStyle = '#46a0ba';
context.globalCompositeOperation = 'lighter';
context.translate(0,canvasHeight / 2);
//context.globalAlpha = 0.6 ; // lineOpacity ;
context.lineWidth=1;
var totallength = leftChannel.length;
var eachBlock = Math.floor(totallength / drawLines);
var lineGap = (canvasWidth/drawLines);
context.beginPath();
for(var i=0;i<=drawLines;i++){
var audioBuffKey = Math.floor(eachBlock * i);
var x = i*lineGap;
var y = leftChannel[audioBuffKey] * canvasHeight / 2;
context.moveTo( x, y );
context.lineTo( x, (y*-1) );
}
context.stroke();
context.restore();
}
// Creates the Canvas
function createCanvas ( w, h ) {
var newCanvas = document.createElement('canvas');
newCanvas.width = w; newCanvas.height = h;
return newCanvas;
};
// The program runs the url you put into the line below
loadMusic('https://raw.githubusercontent.com/lightning417techa/Music/master/images/lil%20dicky%20-%20freaky%20friday%20(lyrics)%20ft.%20chris%20brown.mp3');
Note: that’s the kind of file that this thing needs, it’s annoying at times.
Related
I wrote the following to draw a sound wave from AudioBuffer but what I get is a canvas with a straight horizontal line:
const { audioContext, analyser } = this.getAudioContext();
const source = audioContext.createBufferSource();
source.buffer = audioBuffer;
source.connect(audioContext.destination);
const canvas = document.getElementById('canvas');
const canvasCtx = canvas.getContext("2d");
let sinewaveDataArray = new Uint8Array(analyser.fftSize);
const drawSinewave = function() {
analyser.getByteTimeDomainData(sinewaveDataArray);
requestAnimationFrame(drawSinewave);
canvasCtx.fillStyle = 'white';
canvasCtx.fillRect(0, 0, canvas.width, canvas.height);
canvasCtx.lineWidth = 2;
canvasCtx.strokeStyle = "black";
canvasCtx.beginPath();
const sliceWidth = canvas.width * 1.0 / analyser.fftSize;
let x = 0;
for(let i = 0; i < analyser.fftSize; i++) {
const v = sinewaveDataArray[i] / 128.0; // byte / 2 || 256 / 2
const y = v * canvas.height / 2;
if(i === 0) {
canvasCtx.moveTo(x, y);
} else {
canvasCtx.lineTo(x, y);
}
x += sliceWidth;
}
canvasCtx.lineTo(canvas.width, canvas.height / 2);
canvasCtx.stroke();
};
source.start();
drawSinewave();
getAudioContext = () => {
AudioContext = window.AudioContext || window.webkitAudioContext;
const audioContext = new AudioContext();
const analyser = audioContext.createAnalyser();
return { audioContext, analyser };
};
The end result I am looking for is something like the first image clip here: https://meyda.js.org/
Any idea what I am missing?
I think two little changes are necessary to make it work.
source.start() needs to be executed in response to a user event. A simple button with a click handler can be used to achieve that. This is necessary to deal with the autoplay policy which is present in some browsers.
aReferenceToAButton.addEventListener('click', async () => {
await audioContext.resume();
source.start();
});
Last but not least you need to pipe the signal through the AnalyserNode.
source
.connect(analyser)
.connect(audioContext.destination);
I hope this helps.
I'm continuing with my canvas hobby a-a-and hit another problem while trying to offload most of the things with web workers. This is the current design:
[creating and starting the core web worker core.js from index.html]
var canvas = document.getElementById('testcanvas');
canvas.imageSmoothingEnabled = false;
const offscreenCanvas = canvas.transferControlToOffscreen();
const core = new Worker('core.js');
//transfer offscreenCanvas just ONCE or will get a 'An OffscreenCanvas could not be cloned because it was detached' error
core.postMessage({canvas: offscreenCanvas, msg: 'start'}, [offscreenCanvas]);
[in the core.js I'm creating two web workers]
'use strict'; //meh
const render = new Worker('render.js');
const mainloop = new Worker('mainloop.js');
onmessage = function(ev) {
if (ev.data.msg === 'start') {
mainloop.postMessage({msg: 'start'});
}
if (ev.data.canvas) {
render.postMessage({canvas: ev.data.canvas}, [ev.data.canvas]);
}
if (ev.data.posX && ev.data.posY) {
render.postMessage({posX: ev.data.posX, posY: ev.data.posY});
}
}
each web worker sends the results to the core worker
[mainloop.js]
'use strict';
var canvas;
var ctx;
var speed = 100;
var currentTime = 0; var timeDiff = 0; var lastTime = 0;
var timeProduct = 0; var dTime = 0; var timeScale = 1; var timeStep = 0.01;
var posX = 10; var posY = 10;
function main() {
currentTime = performance.now();
timeDiff = (Math.abs(currentTime - lastTime) * 0.001);
dTime += Math.min(timeDiff, 1);
timeProduct = timeStep * timeScale;
while (dTime > timeProduct) {
postMessage({posX: posX, posY: posY});
dTime -= timeProduct;
}
lastTime = currentTime;
posX += speed * timeDiff;
posY += speed * timeDiff;
if (posX > 500) posX = 10;
if (posY > 500) posY = 10;
requestAnimationFrame(main);
//setTimeout(main, 0);
}
onmessage = function(ev) {
if(ev.data.msg === 'start') {
main();
}
}
[render.js]
'use strict';
var canvas;
var ctx;
function draw(posX, posY) {
//clear
ctx.setTransform(1,0,0,1,0,0);
ctx.clearRect(0, 0, canvas.width, canvas.height);
ctx.fillStyle = "#000000";
ctx.fillRect(0, 0, canvas.width, canvas.height);
//draw
ctx.beginPath();
ctx.moveTo(posX, posY);
ctx.ellipse(posX,
posY,
5,
5,
0.7854,
0,
2 * Math.PI,
false);
ctx.strokeStyle = "white";
ctx.stroke();
}
onmessage = function(ev) {
if(ev.data) {
if (!canvas) canvas = ev.data.canvas;
if (!ctx) ctx = canvas.getContext('2d');
draw(ev.data.posX, ev.data.posY);
}
}
The problem is that when requestAnimationFrame(main) is called in the mainloop.js I'm receiving 'requestAnimationFrame not supported in this Worker' though offscreenCanvas has been transfered between core and render workers. When changed with setTimeout(main, 0) error disappears but nothing is drawn on the canvas ...
I am Trying to Build a simple web page to display a feed from my cameras that pulls a still image from the camera via the cameras api and then regrabs the image with the API (so i can configure the frame rate of the cameras to cut down on mobile data)
I have managed to build a simple website with just one of the displays, but i want to be able to display all 8 of my cameras, IP addresses 192.168.0.157 - 165
My current code is
<html>
<head>
<script type="text/JavaScript">
var refreshInterval = 1000;
var url1 = "http://192.168.0.157/api/still?passwd=pass&"
var drawDate = true;
var img1;
function init() {
var canvas = document.getElementById("canvas");
var context = canvas.getContext("2d");
img = new Image();
img.onload = function() {
canvas.setAttribute("width", img.width)
canvas.setAttribute("height", img.height)
context.drawImage(this, 0, 0);
if(drawDate) {
var now = new Date();
var text = now.toLocaleDateString() + " " + now.toLocaleTimeString();
var maxWidth = 100;
var x = img.width-10-maxWidth;
var y = img.height-10;
context.strokeStyle = 'black';
context.lineWidth = 2;
context.strokeText(text, x, y, maxWidth);
context.fillStyle = 'white';
context.fillText(text, x, y, maxWidth);
}
};
refresh();
}
function refresh()
{
img.src = img.src = url1 + "t=" + new Date().getTime();
setTimeout("refresh()",refreshInterval);
}
</script>
<title>Test4</title>
</head>
<body onload="JavaScript:init();">
<canvas id="canvas"/>
</body>
</html>
Thanks in advance
I'm thinking make an array for every camera IP, and do all the API stuff for each of those.
var ip = [
"192.168.0.157",
"192.168.0.158",
"192.168.0.159",
"192.168.0.160",
"192.168.0.161",
"192.168.0.162",
"192.168.0.163",
"192.168.0.164",
"192.168.0.165"
];
var img = [];
function init() {
var canvas = document.getElementById("canvas");
var context = canvas.getContext("2d");
for (var i = 0; i < ip.length; i++) {
img[i] = new Image();
img[i].onload = (function() {
canvas.setAttribute("width", img[i].width);
canvas.setAttribute("height", img[i].height);
context.drawImage(this, 0, 0);
if (drawDate) {
var now = new Date();
var text = now.toLocaleDateString() + " " + now.toLocaleTimeString();
var maxWidth = 100;
var x = img[i].width - 10 - maxWidth;
var y = img[i].height - 10;
context.strokeStyle = 'black';
context.lineWidth = 2;
context.strokeText(text, x, y, maxWidth);
context.fillStyle = 'white';
context.fillText(text, x, y, maxWidth);
}
})();
}
refresh();
};
function refresh() {
for (var i = 0; i < img.length; i++) {
img[i].src = "http://" + ip[i] + "/api/still?passwd=pass&t=" + new Date().getTime();
}
setTimeout("refresh()",refreshInterval);
}
Is there way to get the video from webcamera (getUserMedia), pass it to canvas, make some effects with this video and get back in video format (mp4 for example)?
I'm looking for a way to be done on client side only. And also it should be a real time process.
Yes you can, in modern browsers, but you'll loose every audio stream (Actually you can save it too), and I think that the mp4 requirement is not here yet and you should not wait for it (damn royalties) only webm or ogv export is currently available.
You can use a MediaRecorder and the canvas.captureStream() method.
var mediaRecorder = new MediaRecorder(canvas.captureStream(30));
Here is an example code (which will only work on latests FF and chrome !).
var startRecording = function(){
// make something happen on the canvas
var stopCanvas = initCanvasDrawing();
// really, that's it..
var recorder = new MediaRecorder(canvas.captureStream(30))
var chunks = [];
recorder.ondataavailable = function(e){
if(e.data.size){
chunks.push(e.data)
}
};
recorder.onstop = function(){
var blob = new Blob(chunks);
var url = URL.createObjectURL(blob);
unrelatedDOMStuff(url);
stopCanvas();
};
recorder.start();
setTimeout(function(){recorder.stop()}, 5000);
}
// the rest of the code is just for the demo
var unrelatedDOMStuff = function(url){
rec.style.display = canvas.style.display = 'none';
var vid = document.createElement('video');
vid.src = url
vid.controls = true;
document.body.appendChild(vid);
vid.play();
var a = document.createElement('a');
a.innerHTML = 'you can also get it here';
a.download = 'awesomeCanvasVid.webm';
a.href = url;
document.body.appendChild(a);
}
var initCanvasDrawing = function() {
var ctx = canvas.getContext('2d');
var objects = [];
ctx.fillStyle = 'ivory';
// taken from https://stackoverflow.com/a/23486828/3702797
for (var i = 0; i < 100; i++) {
objects.push({
angle: Math.random() * 360,
x: 100 + (Math.random() * canvas.width / 2),
y: 100 + (Math.random() * canvas.height / 2),
radius: 10 + (Math.random() * 40),
speed: 1 + Math.random() * 20
});
}
var stop = false;
var draw = function() {
ctx.fillRect(0, 0, canvas.width, canvas.height);
for (var n = 0; n < 100; n++) {
var entity = objects[n],
velY = Math.cos(entity.angle * Math.PI / 180) * entity.speed,
velX = Math.sin(entity.angle * Math.PI / 180) * entity.speed;
entity.x += velX;
entity.y -= velY;
ctx.drawImage(img, entity.x, entity.y, entity.radius, entity.radius);
entity.angle++;
}
if (!stop) {
requestAnimationFrame(draw);
}
}
var img = new Image();
img.onload = draw;
img.crossOrigin = 'anonymous';
img.src = "https://dl.dropboxusercontent.com/s/4e90e48s5vtmfbd/aaa.png";
return function() {
stop = true;
};
}
startRecording();
#rec{ width:2em; height:2em; border-radius:50%; background-color:red; position: absolute; top: 0; left: 0;}
a{display: block;}
video{border:1px solid green;}
<canvas id="canvas" width="500" height="250"></canvas>
<div id="rec"></div>
Note that since webm video format doesn't support transparency, every transparent pixels will be set to opaque black.
I am working on script that converts audio into a waveform using canvas and then uploads it to server as png. I got the javascript to create the waveform and it looks great, I am just trying to now figure out how to take that canvas amd upload it to server as png using toDataUrl(); and Ajax. For some reason it is not uploading to server.
Javascript:
<script>
// AUDIO CONTEXT
window.AudioContext = (window.AudioContext ||
window.webkitAudioContext ||
window.mozAudioContext ||
window.oAudioContext ||
window.msAudioContext);
if (!AudioContext) alert('This site cannot be run in your Browser. Try a recent Chrome or Firefox. ');
var audioContext = new AudioContext();
var currentBuffer = null;
// CANVAS
var canvasWidth = canvasWidth = 500, canvasHeight = 50 ;
var newCanvas = createCanvas (canvasWidth, canvasHeight);
var context = null;
window.onload = appendCanvas;
function appendCanvas() { document.body.appendChild(newCanvas);
context = newCanvas.getContext('2d'); }
// MUSIC LOADER + DECODE
function loadMusic(url) {
var req = new XMLHttpRequest();
req.open( "GET", url, true );
req.responseType = "arraybuffer";
req.onreadystatechange = function (e) {
if (req.readyState == 4) {
if(req.status == 200)
audioContext.decodeAudioData(req.response,
function(buffer) {
currentBuffer = buffer;
displayBuffer(buffer);
}, onDecodeError);
else
alert('error during the load.Wrong url or cross origin issue');
}
} ;
req.send();
}
function onDecodeError() { alert('error while decoding your file.'); }
// MUSIC DISPLAY
function displayBuffer(buff /* is an AudioBuffer */) {
var drawLines = 10000;
var leftChannel = buff.getChannelData(0); // Float32Array describing left channel
var lineOpacity = canvasWidth / leftChannel.length ;
context.save();
context.fillStyle = '#000' ;
context.fillRect(0,0,canvasWidth,canvasHeight );
context.strokeStyle = 'lightgreen';
context.globalCompositeOperation = 'lighter';
context.translate(0,canvasHeight / 2);
context.globalAlpha = 0.1 ;
context.lineWidth=1;
var totallength = leftChannel.length;
var eachBlock = Math.floor(totallength / drawLines);
var lineGap = (canvasWidth/drawLines);
context.beginPath();
for(var i=0;i<=drawLines;i++){
var audioBuffKey = Math.floor(eachBlock * i);
var x = i*lineGap;
var y = leftChannel[audioBuffKey] * canvasHeight / 2;
context.moveTo( x, y );
context.lineTo( x, (y*-1) );
}
context.stroke();
context.restore();
}
function createCanvas ( w, h ) {
var newCanvas = document.createElement('canvas');
newCanvas.width = w; newCanvas.height = h;
};
loadMusic('wavformtest.mp3');
function uploadtoserver(myCanvas) {
var photo = myCanvas.toDataURL('image/jpeg');
$.ajax({
method: 'POST',
url: 'photo_upload.php',
data: {
photo: photo
}
});
}
</script>
photo_upload.php
<?php
$data = $_POST['photo'];
list($type, $data) = explode(';', $data);
list(, $data) = explode(',', $data);
$data = base64_decode($data);
mkdir($_SERVER['DOCUMENT_ROOT'] . "/images/song");
file_put_contents($_SERVER['DOCUMENT_ROOT'] . "/images/song/".time().'.png', $data);
die;
?>