I have the web-dictaphone working: https://github.com/mdn/web-dictaphone/
My goal is to have it work just like it does by default, but I want to add a save button that will save the recorded file onto the WordPress server.
I imagine I need to do something with the audioURL and use something like file_put_contents() in PHP, but I'm not sure whether I need to convert it to Base64 or anything. Does anyone have suggestions on how to do this? Any help is appreciated, thanks. Here is the code that executes after recording:
mediaRecorder.onstop = function(e) {
  console.log("data available after MediaRecorder.stop() called.");

  const clipName = prompt('Enter a name for your sound clip?', 'My unnamed clip');
  const clipContainer = document.createElement('article');
  const clipLabel = document.createElement('p');
  const audio = document.createElement('audio');
  const deleteButton = document.createElement('button');

  clipContainer.classList.add('clip');
  audio.setAttribute('controls', '');
  deleteButton.textContent = 'Delete';
  deleteButton.className = 'delete';

  if(clipName === null) {
    clipLabel.textContent = 'My unnamed clip';
  } else {
    clipLabel.textContent = clipName;
  }

  clipContainer.appendChild(audio);
  clipContainer.appendChild(clipLabel);
  clipContainer.appendChild(deleteButton);
  soundClips.appendChild(clipContainer);

  audio.controls = true;
  const blob = new Blob(chunks, { 'type' : 'audio/ogg; codecs=opus' });
  chunks = [];
  const audioURL = window.URL.createObjectURL(blob);
  audio.src = audioURL;
  console.log("recorder stopped");

  deleteButton.onclick = function(e) {
    let evtTgt = e.target;
    evtTgt.parentNode.parentNode.removeChild(evtTgt.parentNode);
  }

  clipLabel.onclick = function() {
    const existingName = clipLabel.textContent;
    const newClipName = prompt('Enter a new name for your sound clip?');
    if(newClipName === null) {
      clipLabel.textContent = existingName;
    } else {
      clipLabel.textContent = newClipName;
    }
  }
}
There is no need to convert to Base64; just append the blob to an instance of the FormData class and send it:
var formData = new FormData();
formData.append('attachment', blob);
var request = new XMLHttpRequest();
request.open('POST', 'URL', true); // edit URL
request.onload = function() { console.log("Status: %s", request.responseText) };
request.send(formData);
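One small addition that may help on the PHP side: a Blob appended without a name arrives with the generic filename "blob", so if the server should see a real name, FormData.append() accepts an optional third filename argument. A minimal variation of the append call above (the .ogg name is just an assumption based on the Blob's "audio/ogg; codecs=opus" type):
// Same field as above, but with an explicit filename for the server side.
formData.append('attachment', blob, 'clip.ogg');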
On the backend, the upload can be received with $_FILES and move_uploaded_file(), just like a normal file upload.
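A minimal PHP sketch of such a receiving endpoint, assuming the 'attachment' field name used above and a writable uploads/ directory; the paths and the (very light) validation are placeholders, not part of the original answer:
<?php
// save-clip.php - minimal receiver sketch for the FormData upload above.
if (isset($_FILES['attachment']) && $_FILES['attachment']['error'] === UPLOAD_ERR_OK) {
    // NOTE: real code should validate type/size and sanitise the name properly.
    $name   = basename($_FILES['attachment']['name'] ?: 'clip.ogg');
    $target = __DIR__ . '/uploads/' . $name;   // assumed target directory
    if (move_uploaded_file($_FILES['attachment']['tmp_name'], $target)) {
        echo 'saved: ' . $name;
    } else {
        http_response_code(500);
        echo 'could not move uploaded file';
    }
} else {
    http_response_code(400);
    echo 'no file received';
}
In a WordPress install you would typically route this through admin-ajax.php or a custom REST endpoint rather than a standalone script, but the $_FILES handling is the same.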
Related
I am very new to JavaScript; I know some basics but have not yet completely understood the logic behind it (so far I have only worked with Python and a little bit of VBA).
For uni I have to build a browser interface to record audio and transfer it to a server where a speech-to-text application runs. I found some open-source code here (https://github.com/mdn/dom-examples/blob/main/media/web-dictaphone/scripts/app.js) which I wanted to use, but it is missing the WebSocket part, and I don't know where exactly to insert that. So far I have this:
Code of the Web Dictaphone:
// set up basic variables for app
const record = document.querySelector('.record');
const stop = document.querySelector('.stop');
const soundClips = document.querySelector('.sound-clips');
const canvas = document.querySelector('.visualizer');
const mainSection = document.querySelector('.main-controls');

// disable stop button while not recording
stop.disabled = true;

// visualiser setup - create web audio api context and canvas
let audioCtx;
const canvasCtx = canvas.getContext("2d");

// main block for doing the audio recording
if (navigator.mediaDevices.getUserMedia) {
  console.log('getUserMedia supported.');

  const constraints = { audio: true };
  let chunks = [];

  let onSuccess = function(stream) {
    const mediaRecorder = new MediaRecorder(stream);

    visualize(stream);

    record.onclick = function() {
      mediaRecorder.start();
      console.log(mediaRecorder.state);
      console.log("recorder started");
      record.style.background = "red";
      stop.disabled = false;
      record.disabled = true;
    }

    stop.onclick = function() {
      mediaRecorder.stop();
      console.log(mediaRecorder.state);
      console.log("recorder stopped");
      record.style.background = "";
      record.style.color = "";
      // mediaRecorder.requestData();
      stop.disabled = true;
      record.disabled = false;
    }

    mediaRecorder.onstop = function(e) {
      console.log("data available after MediaRecorder.stop() called.");

      const clipName = prompt('Enter a name for your sound clip?', 'My unnamed clip');
      const clipContainer = document.createElement('article');
      const clipLabel = document.createElement('p');
      const audio = document.createElement('audio');
      const deleteButton = document.createElement('button');

      clipContainer.classList.add('clip');
      audio.setAttribute('controls', '');
      deleteButton.textContent = 'Delete';
      deleteButton.className = 'delete';

      if(clipName === null) {
        clipLabel.textContent = 'My unnamed clip';
      } else {
        clipLabel.textContent = clipName;
      }

      clipContainer.appendChild(audio);
      clipContainer.appendChild(clipLabel);
      clipContainer.appendChild(deleteButton);
      soundClips.appendChild(clipContainer);

      audio.controls = true;
      const blob = new Blob(chunks, { 'type' : 'audio/ogg; codecs=opus' });
      chunks = [];
      const audioURL = window.URL.createObjectURL(blob);
      audio.src = audioURL;
      console.log("recorder stopped");

      deleteButton.onclick = function(e) {
        e.target.closest(".clip").remove();
      }

      clipLabel.onclick = function() {
        const existingName = clipLabel.textContent;
        const newClipName = prompt('Enter a new name for your sound clip?');
        if(newClipName === null) {
          clipLabel.textContent = existingName;
        } else {
          clipLabel.textContent = newClipName;
        }
      }
    }

    mediaRecorder.ondataavailable = function(e) {
      chunks.push(e.data);
    }
  }

  let onError = function(err) {
    console.log('The following error occurred: ' + err);
  }

  navigator.mediaDevices.getUserMedia(constraints).then(onSuccess, onError);
} else {
  console.log('getUserMedia not supported on your browser!');
}
WebSocket part (client side):
window.addEventListener("DOMContentLoaded", () => {
  // Open the WebSocket connection and register event handlers.
  console.log('DOMContentLoaded done');

  const ws = new WebSocket("ws://localhost:8001/"); // temp moved to mediarecorder.onstop

  dataToBeSent = function (data) {
    ws.send(data);
  };

  console.log('ws is defined');
})
Right now I have just stacked both parts on top of each other, but this doesn't work, since, as I found out, you can only use variables (such as ws) within the block they are defined in. This leads to an error saying that ws is not defined when I call the sending function within the if-statement.
I have already looked for tutorials for hours, but none that I found covered this topic. I also tried moving the WebSocket part into the if-statement, but that also did not work, unsurprisingly, at least not in the way I tried it.
I feel like my problem lies in understanding how to define the WebSocket so that I can call it within the if-statement, or in figuring out a way to get the audio somewhere where ws is considered defined. Unfortunately I just can't wrap my head around it and have already invested days, which has become really frustrating.
I appreciate any help. If you have any ideas what I could change or move in the code, or maybe just know a tutorial that covers this, I'd be really grateful.
Thanks in advance!
You don't need that window.addEventListener("DOMContentLoaded", () => { ... }) wrapper. Define the WebSocket at the top level of the script, so ws is in scope for the recorder callbacks:
const ws = new WebSocket("ws://localhost:8001/"); // temp moved to mediarecorder.onstop

dataToBeSent = function (data) {
  ws.send(data);
};

const record = document.querySelector(".record");
const stop = document.querySelector(".stop");
const soundClips = document.querySelector(".sound-clips");
const canvas = document.querySelector(".visualizer");
const mainSection = document.querySelector(".main-controls");

// disable stop button while not recording
stop.disabled = true;

// visualiser setup - create web audio api context and canvas
let audioCtx;
const canvasCtx = canvas.getContext("2d");

// main block for doing the audio recording
if (navigator.mediaDevices.getUserMedia) {
  console.log("getUserMedia supported.");

  const constraints = { audio: true };
  let chunks = [];

  let onSuccess = function (stream) {
    const mediaRecorder = new MediaRecorder(stream);

    visualize(stream);

    record.onclick = function () {
      mediaRecorder.start();
      console.log(mediaRecorder.state);
      console.log("recorder started");
      record.style.background = "red";
      stop.disabled = false;
      record.disabled = true;
    };

    stop.onclick = function () {
      mediaRecorder.stop();
      console.log(mediaRecorder.state);
      console.log("recorder stopped");
      record.style.background = "";
      record.style.color = "";
      // mediaRecorder.requestData();
      stop.disabled = true;
      record.disabled = false;
    };

    mediaRecorder.onstop = function (e) {
      console.log("data available after MediaRecorder.stop() called.");

      const clipName = prompt(
        "Enter a name for your sound clip?",
        "My unnamed clip"
      );
      const clipContainer = document.createElement("article");
      const clipLabel = document.createElement("p");
      const audio = document.createElement("audio");
      const deleteButton = document.createElement("button");

      clipContainer.classList.add("clip");
      audio.setAttribute("controls", "");
      deleteButton.textContent = "Delete";
      deleteButton.className = "delete";

      if (clipName === null) {
        clipLabel.textContent = "My unnamed clip";
      } else {
        clipLabel.textContent = clipName;
      }

      clipContainer.appendChild(audio);
      clipContainer.appendChild(clipLabel);
      clipContainer.appendChild(deleteButton);
      soundClips.appendChild(clipContainer);

      audio.controls = true;
      const blob = new Blob(chunks, { type: "audio/ogg; codecs=opus" });
      chunks = [];
      const audioURL = window.URL.createObjectURL(blob);
      audio.src = audioURL;
      console.log("recorder stopped");

      deleteButton.onclick = function (e) {
        e.target.closest(".clip").remove();
      };

      clipLabel.onclick = function () {
        const existingName = clipLabel.textContent;
        const newClipName = prompt("Enter a new name for your sound clip?");
        if (newClipName === null) {
          clipLabel.textContent = existingName;
        } else {
          clipLabel.textContent = newClipName;
        }
      };
    };

    mediaRecorder.ondataavailable = function (e) {
      chunks.push(e.data);
    };
  };

  let onError = function (err) {
    console.log("The following error occurred: " + err);
  };

  navigator.mediaDevices.getUserMedia(constraints).then(onSuccess, onError);
} else {
  console.log("getUserMedia not supported on your browser!");
}
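Note that the code above defines ws but never actually transmits the recording. A minimal sketch of the missing piece, assuming the server at ws://localhost:8001/ accepts binary frames, is to send the finished blob at the end of mediaRecorder.onstop (the browser WebSocket API accepts a Blob directly):
// Sketch only: add this at the end of mediaRecorder.onstop, after the blob is built.
if (ws.readyState === WebSocket.OPEN) {
  ws.send(blob); // Blob/ArrayBuffer payloads are sent as binary frames
} else {
  console.log("WebSocket not open, clip not sent");
}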
I can record audio to an ogg file in Electron and Chrome by creating the blob this way:
const blob = new Blob(chunks, { 'type' : 'audio/ogg; codecs=opus' });
The full example code is this:
if (navigator.mediaDevices) {
  console.log('getUserMedia supported.');

  const constraints = { audio: true };
  let chunks = [];

  navigator.mediaDevices.getUserMedia(constraints)
    .then(function(stream) {
      const mediaRecorder = new MediaRecorder(stream);

      visualize(stream);

      record.onclick = function() {
        mediaRecorder.start();
        console.log(mediaRecorder.state);
        console.log("recorder started");
        record.style.background = "red";
        record.style.color = "black";
      }

      stop.onclick = function() {
        mediaRecorder.stop();
        console.log(mediaRecorder.state);
        console.log("recorder stopped");
        record.style.background = "";
        record.style.color = "";
      }

      mediaRecorder.onstop = function(e) {
        console.log("data available after MediaRecorder.stop() called.");

        const clipName = prompt('Enter a name for your sound clip');
        const clipContainer = document.createElement('article');
        const clipLabel = document.createElement('p');
        const audio = document.createElement('audio');
        const deleteButton = document.createElement('button');

        clipContainer.classList.add('clip');
        audio.setAttribute('controls', '');
        deleteButton.innerHTML = "Delete";
        clipLabel.innerHTML = clipName;

        clipContainer.appendChild(audio);
        clipContainer.appendChild(clipLabel);
        clipContainer.appendChild(deleteButton);
        soundClips.appendChild(clipContainer);

        audio.controls = true;
        const blob = new Blob(chunks, { 'type' : 'audio/ogg; codecs=opus' });
        chunks = [];
        const audioURL = URL.createObjectURL(blob);
        audio.src = audioURL;
        console.log("recorder stopped");

        deleteButton.onclick = function(e) {
          const evtTgt = e.target;
          evtTgt.parentNode.parentNode.removeChild(evtTgt.parentNode);
        }
      }

      mediaRecorder.ondataavailable = function(e) {
        chunks.push(e.data);
      }
    })
    .catch(function(err) {
      console.log('The following error occurred: ' + err);
    })
}
I want to create a FLAC file, so I tried:
const blob = new Blob(chunks, { 'type': 'audio/flac; codecs=flac' });
When I check the output file type using the Linux file command, I get WebM in both cases.
How can I get the output file in FLAC format?
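For context (not part of the original question): the type passed to the Blob constructor only labels the bytes, it does not transcode them; the actual container and codec are chosen by the MediaRecorder itself. A minimal sketch of how one might ask the browser what it can record follows; audio/flac is generally not on Chromium's supported list, so producing FLAC would likely require transcoding on the server or via WebAssembly.
// Check which MIME types this browser's MediaRecorder can actually produce.
['audio/webm;codecs=opus', 'audio/ogg;codecs=opus', 'audio/flac'].forEach((mimeType) => {
  console.log(mimeType, MediaRecorder.isTypeSupported(mimeType));
});

// If a type is supported, request it explicitly when creating the recorder
// (stream here is the getUserMedia stream from the example above):
const recorderWithType = new MediaRecorder(stream, { mimeType: 'audio/webm;codecs=opus' });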
This must be a simple question, but I've been struggling with it for a week. I have a super simple jQuery-based audio capture; all I want is to save the recording as a file via a controller action. The problem is that I can't figure out how to pass the blob to the controller. The code I have to capture audio is below. With an image I can just use
document.getElementById("canvas").toDataURL("image/png");
then pass it to the controller and save it as an image, something like this:
using (FileStream fs = new FileStream(fileNameWitPath, FileMode.Create))
{
    using (BinaryWriter bw = new BinaryWriter(fs))
    {
        byte[] data = Convert.FromBase64String(imageData);
        bw.Write(data);
        bw.Close();
    }
    fs.Close();
}
So ideally I would want something akin to how I save images. Here is the audio-capture code:
$(function () {
  $('body').append(
    $('<button/>')
      .attr("id", "start")
      .html("start")
  ).append(
    $('<button/>')
      .attr("id", "stop")
      .html("stop")
  ).append(
    $('<div/>')
      .attr("id", "ul")
  )

  let log = console.log.bind(console),
    ul = $('#ul')[0],
    start = $('#start')[0],
    stop = $('#stop')[0],
    stream,
    recorder,
    counter = 1,
    chunks,
    media;

  media = {
    tag: 'audio',
    type: 'audio/ogg',
    ext: '.ogg',
    gUM: { audio: true }
  }

  navigator.mediaDevices.getUserMedia(media.gUM).then(_stream => {
    stream = _stream;
    recorder = new MediaRecorder(stream);
    recorder.ondataavailable = e => {
      chunks.push(e.data);
      if (recorder.state == 'inactive') makeLink();
    };
    log('got media successfully');
  }).catch(log);

  start.onclick = e => {
    start.disabled = true;
    stop.removeAttribute('disabled');
    chunks = [];
    recorder.start();
  }

  stop.onclick = e => {
    stop.disabled = true;
    recorder.stop();
    start.removeAttribute('disabled');
  }

  function makeLink() {
    let blob = new Blob(chunks, { type: media.type })
      , url = URL.createObjectURL(blob)
      , div = document.createElement('div')
      , mt = document.createElement(media.tag)
      , hf = document.createElement('a')
      ;
    mt.controls = true;
    mt.src = url;
    hf.href = url;
    hf.download = `${counter++}${media.ext}`;
    hf.innerHTML = `download ${hf.download}`;
    div.appendChild(mt);
    div.appendChild(hf); // append the download link as well
    ul.appendChild(div);
  }
});
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
Much appreciated
So just in case anyone else stumbles on this, as expected it was quite simple (not the cleanest code, but here you go).
Create a new Blob value:
recorder.ondataavailable = e => {
  chunks.push(e.data);
  superBuffer = new Blob(chunks, { type: 'audio/ogg' });
  if (recorder.state == 'inactive') makeLink(); //console.log(e.data)
};
Then use Ajax to send it to the server:
var reader = new window.FileReader();
reader.readAsDataURL(superBuffer);
reader.onloadend = function () {
  base64 = reader.result;
  base64 = base64.split(',')[1];
  $.ajax({
    url: 'MyController/Action1',
    type: 'POST',
    data: {
      audioname: "hello", // obviously change to something dynamic
      chunks: base64
    },
    success: function (response) { console.log(response); },
    error: function (response) { console.log(response); }
  });
};
Then in the code-behind:
[HttpPost]
public IActionResult Action1(string audioname, string chunks)
{
    string fileNameWitPath = Path.Combine(_hostingEnvironment.WebRootPath, "audio", "test.ogg");
    using (FileStream fs = new FileStream(fileNameWitPath, FileMode.Create))
    {
        using (BinaryWriter bw = new BinaryWriter(fs))
        {
            byte[] data = Convert.FromBase64String(chunks);
            bw.Write(data);
            bw.Close();
        }
        fs.Close();
    }
    return Content(chunks); // for testing: returns the full chunk on success; in practice just return a confirmation message
}
Note this is obviously a work in progress with things still to fill in, but in general it works.
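As a possible refinement (not part of the original answer): Base64 inflates the payload by roughly a third, so the blob could also be posted directly as multipart form data, assuming the controller action is changed to accept an uploaded file (for example an IFormFile parameter in ASP.NET Core). A sketch of the client side under that assumption:
// Sketch: send the recorded blob without Base64 encoding.
var formData = new FormData();
formData.append('audioname', 'hello');                  // placeholder name, as above
formData.append('audiofile', superBuffer, 'clip.ogg');  // 'audiofile' would map to an IFormFile parameter
$.ajax({
  url: 'MyController/Action1',
  type: 'POST',
  data: formData,
  processData: false,  // let jQuery pass the FormData through untouched
  contentType: false,  // let the browser set the multipart boundary
  success: function (response) { console.log(response); },
  error: function (response) { console.log(response); }
});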
I am playing audio with the Web Audio API and want to record it at half speed, then speed it back up to normal on the back end. The output I get is pitched down.
var mr = null;
var source = null;

var stop = function(){
  mr.stop();
  source.stop();
}

var save = function(blob, filename){
  var link = document.createElement('a');
  link.href = URL.createObjectURL(blob);
  link.download = filename || 'data.json';
  link.click();
}

var context = new ( window.AudioContext || window.webkitAudioContext )();
var dest = context.createMediaStreamDestination();

function loadSound(url) {
  var request = new XMLHttpRequest();
  request.open('GET', url, true);
  request.responseType = 'arraybuffer';

  // Decode asynchronously
  request.onload = function() {
    context.decodeAudioData(request.response, function(buffer) {
      source = context.createBufferSource();
      source.buffer = buffer;
      source.playbackRate.value = 0.5;
      source.connect(dest);

      mr = new MediaRecorder(dest.stream);
      mr.start();
      source.start(0);

      var chunks = [];
      mr.ondataavailable = function(e){
        chunks.push(e.data);
      }
      mr.onstop = function(e) {
        var blob = new Blob(chunks, { 'type' : 'video/webm' });
        save(blob, "a.webm");
      }

      setTimeout(stop, 10000);
    }, null);
  }

  request.send();
}
loadSound("https://storage.googleapis.com/frulix-dev.appspot.com/words");
I have created a fiddle for this case: https://jsfiddle.net/thenectorgod/v3ogtwvp/4/.
This does not happen for a normal recording of the audio; that audio comes out clear after speeding it up to 2x.
Can anyone suggest how to get proper audio in this case?
I'm wondering how to upload a file to Firebase Storage via a URL instead of an input (for example). I'm scraping images from a website and retrieving their URLs. I want to pass those URLs through a forEach loop and upload them to Firebase Storage. Right now I have the Firebase upload-via-input working with this code:
var auth = firebase.auth();
var storageRef = firebase.storage().ref();

function handleFileSelect(evt) {
  evt.stopPropagation();
  evt.preventDefault();

  var file = evt.target.files[0];
  var metadata = {
    'contentType': file.type
  };

  // Push to child path.
  var uploadTask = storageRef.child('images/' + file.name).put(file, metadata);

  // Listen for errors and completion of the upload.
  // [START oncomplete]
  uploadTask.on('state_changed', null, function(error) {
    // [START onfailure]
    console.error('Upload failed:', error);
    // [END onfailure]
  }, function() {
    console.log('Uploaded', uploadTask.snapshot.totalBytes, 'bytes.');
    console.log(uploadTask.snapshot.metadata);
    var url = uploadTask.snapshot.metadata.downloadURLs[0];
    console.log('File available at', url);
    // [START_EXCLUDE]
    document.getElementById('linkbox').innerHTML = '<a href="' + url + '">Click For File</a>';
    // [END_EXCLUDE]
  });
}
Question: what do I replace
var file = evt.target.files[0];
with, to make it work with an external URL instead of a manual upload process?
var file = "http://i.imgur.com/eECefMJ.jpg"; doesn't work!
There's no need to use Firebase Storage if all you're doing is saving a URL path. Firebase Storage is for physical files, while the Firebase Realtime Database can be used for structured data.
Example: once you get the image URL from the external site, this is all you need:
var externalImageUrl = 'https://foo.com/images/image.png';
Then you would store this in your JSON-structured database:
databaseReference.child('whatever').set(externalImageUrl);
OR
If you want to actually download the physical images straight from the external site into Storage, that will require making an HTTP request and receiving a blob response, or it may require a server-side language.
JavaScript solution: How to save a file from a url with javascript
PHP solution: Saving image from PHP URL
This answer is similar to #HalesEnchanted's answer, but with less code. In this case it's done with a Cloud Function, but I assume the same can be done from the front end. Notice too how createWriteStream() has an options parameter similar to bucket.upload().
const fetch = require("node-fetch");

const bucket = admin.storage().bucket('my-bucket');
const file = bucket.file('path/to/image.jpg');

fetch('https://example.com/image.jpg').then((res: any) => {
  const contentType = res.headers.get('content-type');
  const writeStream = file.createWriteStream({
    metadata: {
      contentType,
      metadata: {
        myValue: 123
      }
    }
  });
  res.body.pipe(writeStream);
});
JavaScript solution using the fetch API:
var remoteimageurl = "https://example.com/images/photo.jpg"
var filename = "images/photo.jpg"
fetch(remoteimageurl).then(res => {
  return res.blob();
}).then(blob => {
  // uploading blob to firebase storage
  firebase.storage().ref().child(filename).put(blob).then(function(snapshot) {
    return snapshot.ref.getDownloadURL()
  }).then(url => {
    console.log("Firebase storage image uploaded : ", url);
  })
}).catch(error => {
  console.error(error);
});
Hopefully this helps somebody else :)
// Download a file from a url.
function saveFile(url) {
  // Get file name from url.
  var filename = url.substring(url.lastIndexOf("/") + 1).split("?")[0];

  var xhr = new XMLHttpRequest();
  xhr.addEventListener("load", transferComplete);
  xhr.addEventListener("error", transferFailed);
  xhr.addEventListener("abort", transferCanceled);
  xhr.responseType = 'blob';

  xhr.onload = function() {
    var a = document.createElement('a');
    a.href = window.URL.createObjectURL(xhr.response); // xhr.response is a blob
    a.download = filename; // Set the file name.
    a.style.display = 'none';
    document.body.appendChild(a);
    a.click();
    a.remove();

    if (this.status === 200) {
      // `blob` response
      console.log(this.response);

      var reader = new FileReader();
      reader.onload = function(e) {
        var auth = firebase.auth();
        var storageRef = firebase.storage().ref();
        var metadata = {
          'contentType': 'image/jpeg'
        };

        var file = e.target.result;
        var base64result = reader.result.split(',')[1];
        var blob = b64toBlob(base64result);
        console.log(blob);

        var uploadTask = storageRef.child('images/' + filename).put(blob, metadata);
        uploadTask.on('state_changed', null, function(error) {
          // [START onfailure]
          console.error('Upload failed:', error);
          // [END onfailure]
        }, function() {
          console.log('Uploaded', uploadTask.snapshot.totalBytes, 'bytes.');
          console.log(uploadTask.snapshot.metadata);
          var download = uploadTask.snapshot.metadata.downloadURLs[0];
          console.log('File available at', download);
          // [START_EXCLUDE]
          document.getElementById('linkbox').innerHTML = '<a href="' + download + '">Click For File</a>';
          // [END_EXCLUDE]
        });
        // `data-uri`
      };
      reader.readAsDataURL(this.response);
    }
  };

  xhr.open('GET', url);
  xhr.send();
}

function b64toBlob(b64Data, contentType, sliceSize) {
  contentType = contentType || '';
  sliceSize = sliceSize || 512;

  var byteCharacters = atob(b64Data);
  var byteArrays = [];

  for (var offset = 0; offset < byteCharacters.length; offset += sliceSize) {
    var slice = byteCharacters.slice(offset, offset + sliceSize);

    var byteNumbers = new Array(slice.length);
    for (var i = 0; i < slice.length; i++) {
      byteNumbers[i] = slice.charCodeAt(i);
    }

    var byteArray = new Uint8Array(byteNumbers);
    byteArrays.push(byteArray);
  }

  var blob = new Blob(byteArrays, {type: contentType});
  return blob;
}

function transferComplete(evt) {
  window.onload = function() {
    // Sign the user in anonymously since accessing Storage requires the user to be authorized.
    auth.signInAnonymously().then(function(user) {
      console.log('Anonymous Sign In Success', user);
      document.getElementById('file').disabled = false;
    }).catch(function(error) {
      console.error('Anonymous Sign In Error', error);
    });
  }
}

function transferFailed(evt) {
  console.log("An error occurred while transferring the file.");
}

function transferCanceled(evt) {
  console.log("The transfer has been canceled by the user.");
}
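For completeness, the helper above is only defined, never called; invoking it with the image URL from the question would look like this (the imgur URL is just the example from the question):
// Kick off the download-and-upload for a single external image.
saveFile("http://i.imgur.com/eECefMJ.jpg");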