AudioWorklet - Set output to Float32Array to stream live audio? - javascript

I have audio data streaming from the server to the client. It starts as a Node.js Buffer (which is a Uint8Array) and is sent to the AudioWorkletProcessor via port.postMessage(), where it is converted into a Float32Array and stored in this.data. I have spent hours trying to set the output to the audio data contained in the Float32Array. Logging the Float32Array before processing shows accurate data, but logging it during processing shows that it does not change when a new message is posted. This is probably a gap in my low-level audio-programming knowledge.
When data arrives in the client, the following function is called:
process = (data) => {
  this.node.port.postMessage(data)
}
As an aside (and you can let me know), maybe I should be using parameter descriptors instead of postMessage? Anyway, here's my AudioWorkletProcessor:
class BypassProcessor extends AudioWorkletProcessor {
  constructor() {
    super();
    this.isPlaying = true;
    this.port.onmessage = this.onmessage.bind(this)
  }

  static get parameterDescriptors() {
    return [{ // Maybe we should use parameters. This is not utilized at present.
      name: 'stream',
      defaultValue: 0.707
    }];
  }

  convertBlock = (incomingData) => { // incoming data is a UInt8Array
    let i, l = incomingData.length;
    let outputData = new Float32Array(incomingData.length);
    for (i = 0; i < l; i++) {
      outputData[i] = (incomingData[i] - 128) / 128.0;
    }
    return outputData;
  }

  onmessage(event) {
    const { data } = event;
    let ui8 = new Uint8Array(data);
    this.data = this.convertBlock(ui8)
  }

  process(inputs, outputs) {
    const input = inputs[0];
    const output = outputs[0];
    if (this.data) {
      for (let channel = 0; channel < output.length; ++channel) {
        const inputChannel = input[channel]
        const outputChannel = output[channel]
        for (let i = 0; i < inputChannel.length; ++i) {
          outputChannel[i] = this.data[i]
        }
      }
    }
    return true;
  }
}
registerProcessor('bypass-processor', BypassProcessor);
How can I simply set the output of the AudioWorkletProcessor to the data coming through?

The AudioWorkletProcessor only processes 128 frames per call, so you need to manage your own buffering to feed it data in blocks of that size, typically by adding a FIFO.
I solved something like this using a RingBuffer (FIFO) implemented in WebAssembly; in my case I was receiving buffers of 160 bytes.
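Before the full WebAssembly version below, here is a minimal plain-JavaScript sketch of the same FIFO idea, so the 128-frame constraint is clear. It assumes mono Float32 samples arrive over the port; the class and variable names are made up for illustration:
class QueueProcessor extends AudioWorkletProcessor {
  constructor() {
    super();
    this.queue = new Float32Array(0); // pending samples not yet played
    this.port.onmessage = (e) => {
      // Append the incoming block to the queue (assumes Float32 samples arrive over the port).
      const incoming = new Float32Array(e.data);
      const merged = new Float32Array(this.queue.length + incoming.length);
      merged.set(this.queue, 0);
      merged.set(incoming, this.queue.length);
      this.queue = merged;
    };
  }

  process(inputs, outputs) {
    const outputChannel = outputs[0][0]; // mono output, 128 frames per callback
    const frames = Math.min(outputChannel.length, this.queue.length);
    outputChannel.set(this.queue.subarray(0, frames));
    this.queue = this.queue.subarray(frames); // keep the remainder for the next callback
    // Frames we could not fill stay at 0 (silence) until more data arrives.
    return true;
  }
}

registerProcessor('queue-processor', QueueProcessor);
A production version would pre-allocate a fixed-size ring buffer instead of reallocating on every message, which is essentially what the RingBuffer in the implementation below does.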
Here is my AudioWorkletProcessor implementation:
import Module from './buffer-kernel.wasmodule.js';
import { HeapAudioBuffer, RingBuffer, ALAW_TO_LINEAR } from './audio-helper.js';

class SpeakerWorkletProcessor extends AudioWorkletProcessor {
  constructor(options) {
    super();
    this.payload = null;
    this.bufferSize = options.processorOptions.bufferSize; // Getting the buffer size from options
    this.channelCount = options.processorOptions.channelCount;
    this.inputRingBuffer = new RingBuffer(this.bufferSize, this.channelCount);
    this.outputRingBuffer = new RingBuffer(this.bufferSize, this.channelCount);
    this.heapInputBuffer = new HeapAudioBuffer(Module, this.bufferSize, this.channelCount);
    this.heapOutputBuffer = new HeapAudioBuffer(Module, this.bufferSize, this.channelCount);
    this.kernel = new Module.VariableBufferKernel(this.bufferSize);
    this.port.onmessage = this.onmessage.bind(this);
  }

  alawToLinear(incomingData) {
    const outputData = new Float32Array(incomingData.length);
    for (let i = 0; i < incomingData.length; i++) {
      outputData[i] = (ALAW_TO_LINEAR[incomingData[i]] * 1.0) / 32768;
    }
    return outputData;
  }

  onmessage(event) {
    const { data } = event;
    if (data) {
      this.payload = this.alawToLinear(new Uint8Array(data)); // Data from my socket listener; in my case, converting A-law PCM to linear
    } else {
      this.payload = null;
    }
  }

  process(inputs, outputs) {
    const output = outputs[0];
    if (this.payload) {
      this.inputRingBuffer.push([this.payload]); // Pushing data from my socket
      if (this.inputRingBuffer.framesAvailable >= this.bufferSize) { // Once enough frames have accumulated, they can be output
        this.inputRingBuffer.pull(this.heapInputBuffer.getChannelData());
        this.kernel.process(
          this.heapInputBuffer.getHeapAddress(),
          this.heapOutputBuffer.getHeapAddress(),
          this.channelCount,
        );
        this.outputRingBuffer.push(this.heapOutputBuffer.getChannelData());
      }
      this.outputRingBuffer.pull(output); // Retrieving data from the FIFO and writing it to the output
    }
    return true;
  }
}

registerProcessor('speaker-worklet-processor', SpeakerWorkletProcessor);
Here is how I create the AudioContext and AudioWorklet instances:
this.audioContext = new AudioContext({
  latencyHint: 'interactive',
  sampleRate: this.sampleRate,
  sinkId: audioinput || "default"
});

this.audioBuffer = this.audioContext.createBuffer(1, this.audioSize, this.sampleRate);
this.audioSource = this.audioContext.createBufferSource();
this.audioSource.buffer = this.audioBuffer;
this.audioSource.loop = true;

this.audioContext.audioWorklet
  .addModule('workers/speaker-worklet-processor.js')
  .then(() => {
    this.speakerWorklet = new AudioWorkletNode(
      this.audioContext,
      'speaker-worklet-processor',
      {
        channelCount: 1,
        processorOptions: {
          bufferSize: 160, // Here I'm passing the size of my output; I'm just telling the RingBuffer what size I need
          channelCount: 1,
        },
      },
    );
    this.audioSource.connect(this.speakerWorklet).connect(this.audioContext.destination);
  }).catch((err) => {
    console.log("Receiver ", err);
  })
Here is how I receive the data from the socket and send it to the AudioWorklet:
protected onMessage(e: any): void { // My socket message listener
  const { data: serverData } = e;
  const socketId = e.socketId;
  if (this.audioWalking && this.ws && !this.ws.isPaused() && this.ws.info.socketId === socketId) {
    const buffer = arrayBufferToBuffer(serverData);
    const rtp = RTPParser.parseRtpPacket(buffer);
    const sharedPayload = new Uint8Array(new SharedArrayBuffer(rtp.payload.length)); // Sharing buffer memory between the main thread and the worklet thread
    sharedPayload.set(rtp.payload, 0);
    this.speakerWorklet.port.postMessage(sharedPayload); // Sending data to the worklet
  }
}
To help others, I put the important pieces of this solution on GitHub:
audio-worklet-processor-wasm-example
I followed this example; it has a full explanation of how the RingBuffer works:
wasm-ring-buffer

Related

How to append Blob Data of Video coming in chunks?

I have an API which sends video data in chunks for large files. How can I append those chunks into a single Blob so that I can create a final URL?
I have tried pushing the data to the blob parts, and the final Blob reports the correct total size, but the video only plays for 7 seconds out of 31. I think only the first chunk is actually being appended.
var MyBlobBuilder = function (this: any) {
  this.parts = []
}

MyBlobBuilder.prototype.append = function (part) {
  this.parts.push(part)
  this.blob = undefined
}

MyBlobBuilder.prototype.getBlob = function () {
  if (!this.blob) {
    this.blob = new Blob(this.parts, { type: 'video/mp4' })
  }
  return this.blob
}

let myBlobBuilder = new MyBlobBuilder()

function handleResourceClick(resource: string) {
  dispatch(fetchFileType(resource)).then((x: any) => {
    let group = x.payload.data.sizeInKB / 5
    let start = 0
    for (let i = 0; i < 5 && start <= x.payload.data.sizeInKB; i++) {
      dispatch(
        getVideoUrl({ pdfUrl: resource, offset: start, length: group })
      ).then((res: any) => {
        if (isApiSuccess(res)) {
          myBlobBuilder.append(res.payload?.data)
        }
      })
      start = start + group
    }
  })
}

Javascript audio stream over Websocket to C# server

I want to stream audio from a web client to a C# server over WebSocket, but I receive the same bytes every time.
Client JavaScript code:
function audioplayer() {
  var stream;
  var bufferSize = 1024 * 16;
  var audioContext = new AudioContext();
  var processor = audioContext.createScriptProcessor(bufferSize, 1, 1);
  processor.connect(audioContext.destination);
  navigator.mediaDevices.getUserMedia({
    video: false,
    audio: true
  }).then(streamObj => {
    stream = streamObj;
    input = audioContext.createMediaStreamSource(stream);
    input.connect(processor);
    processor.onaudioprocess = e => {
      microphoneProcess(e);
    };
  });
  //var obj = { command:"mic" , audio : btoa(new Blob(recording)) };
  //ws.send(JSON.stringify(obj));
}

function microphoneProcess(e) {
  const left = e.inputBuffer.getChannelData(0); // get only one audio channel
  const left16 = convertFloat32ToInt16(left); // skip if you don't need this
  var obj = { command: "mic", audio: btoa(left16) }
  ws.send(JSON.stringify(obj)) // send to server via web socket
}

function convertFloat32ToInt16(buffer) {
  let l = buffer.length;
  const buf = new Int16Array(l / 3);
  while (l--) {
    if (l % 3 === 0) {
      buf[l / 3] = buffer[l] * 0xFFFF;
    }
  }
  return buf.buffer;
}
C# server code:
if (data == null) return;

using (MemoryStream ms = new MemoryStream(data)) {
    try {
        SoundPlayer player = new SoundPlayer(ms);
        player.Stream = ms;
        player.Play();
    } catch {
    }
}
DataRecv.Text = data.Length.ToString();
The server gives me this error:
System.InvalidOperationException: 'The wave header is corrupt.'
Try resetting the stream position before playing it:
if (data == null) return;

using (MemoryStream ms = new MemoryStream(data)) {
    try {
        ms.Position = 0;
        SoundPlayer player = new SoundPlayer(ms);
        player.Stream = ms;
        player.Play();
    } catch {}
}
DataRecv.Text = data.Length.ToString();

Can I import a library from node modules in a web worker?

I am working on a small app in which I pass some data from the main class to a web worker to train a model using the RandomForestClassifier method from the 'ml-random-forest' module.
I tried to import the above method into the web worker, but I get the following error: Uncaught ReferenceError: ml_random_forest__WEBPACK_IMPORTED_MODULE_0__ is not defined at onmessage
worker.js
import { RandomForestClassifier as RFClassifier } from 'ml-random-forest';

export default () => {
  onmessage = (e) => {
    console.log('in worker')
    const classifier = new RFClassifier(e.data[0]);
    console.log('classifier ' + classifier)
    classifier.train(e.data[1], e.data[2])
    var result = classifier.predict(e.data[3]);
    var accuracy = calculateAccuracy(result, e.data[4]);
    console.log(accuracy)
    postMessage(accuracy)
  }

  const calculateAccuracy = (result, test_labels) => {
    let hits = 0;
    for (let i = 0; i < result.length; i++) {
      if (result[i] == test_labels[i]) {
        hits++;
      }
    }
    let rcl = hits / result.length;
    return rcl;
  }
}
main.js
const myworker = new webWorker(worker);

// some extra code in between

setTimeout(async () => {
  // training_labels = transformLabelsIntoEnums(training_labels);
  // test_labels = transformLabelsIntoEnums(test_labels);
  const options = {
    seed: 132456,
    maxFeatures: props.state.rf_config.rf_maxFeatures,
    replacement: false,
    nEstimators: props.state.rf_config.rf_nEstimators
  };
  const classifier = new RFClassifier(options);
  // await sleep(1000000);
  console.log('classifier ' + classifier)
  myworker.onmessage = function (event) {
    accuracy = event.data;
    console.log(event.data);
  }
  console.log('calling worker')
  myworker.postMessage([options, training_dataset, training_labels, test_dataset, test_labels]);
}, 100);

Vue js send data by chunks

I would like to send the data in chunks.
Right now, what I'm sending to the server looks like this:
for loop - 1, 2, 3
What the server receives: 3, 1, 2 -> asynchronous.
I need to send it synchronously so the server receives the chunks in my for-loop order: 1, 2, 3.
How can I do it?
//52428800
const chunkSize = 1377628
let beginUpload = data;
let component = this;
let start = 0;
let startCount = 0;
let callStoreCouunt = 0;

for (start; start < zipedFile.length; start += chunkSize) {
  const chunk = zipedFile.slice(start, start + chunkSize + 1)
  startCount += 1;
  // debugger
  // var base64Image = new Buffer( zipedFile ).toString('base64');
  var base64Image = new Buffer(chunk).toString('base64');
  console.log(chunk, startCount);
  let uploadPackage: documentInterfaces.UploadPackage = {
    transaction: {
      documentId: { value: data.documentId.value },
      transactionId: data.transactionId,
      fileGuid: data.fileGuid
    },
    packageBuffer: base64Image
  };
  // debugger
  component.$store.dispatch('documents/uploadPackage', uploadPackage)
    .then(({ data, status }: { data: documentInterfaces.ReciveAttachScene, status: number }) => {
      // debugger
      if (status !== 200) {
        component.$message({
          message: data,
          type: "error"
        });
        component.rejectUpload(beginUpload);
      }
      else {
        callStoreCouunt += 1;
        console.log(chunk, "res" + callStoreCouunt)
        debugger
        if (callStoreCouunt === startCount) {
          let commitPackage = {
            transaction: {
              documentId: { value: uploadPackage.transaction.documentId.value },
              transactionId: uploadPackage.transaction.transactionId,
              fileGuid: uploadPackage.transaction.fileGuid
            }
          };
          debugger
          component.commitUpload(commitPackage);
        }
      }
    });
}
You cannot control which chunk of data reaches the server first. If there's a network problem somewhere on its way, it might go around the planet multiple times before it reaches the server.
Even if the 1st chunk was sent 5 ms earlier than the 2nd one, the 2nd chunk might reach the server earlier.
But there are a few ways you can solve this:
Method 1:
Wait for the server response before sending the next chunk:
let state = {
  isPaused: false
}
let sentChunks = 0
let totalChunks = getTotalChunksAmount()
let chunkToSend = ...

setInterval(() => {
  if (!state.isPaused && sentChunks < totalChunks) {
    state.isPaused = true
    send(chunkToSend)
    sentChunks += 1
  }
}, 100)

onServerReachListener(response => {
  if (response === ...) {
    state.isPaused = false
  }
})
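Since the upload call in the question returns a promise (the Vuex dispatch), the same idea can also be written more directly with async/await. This is just a sketch; uploadChunk is a hypothetical helper that resolves once the server has acknowledged the chunk:
async function uploadInOrder(chunks) {
  for (const [index, chunk] of chunks.entries()) {
    // Await each upload before starting the next one,
    // so chunks arrive at the server in loop order.
    await uploadChunk({ id: index, payload: chunk });
  }
}
The trade-off is that uploads are fully serialized, so the total upload time grows compared to firing all requests at once.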
Method 2:
If you don't need to process chunks sequentially in real time, you can just wait for all of them to arrive on the server, then sort them before processing:
let chunks = []

onChunkReceived (chunk) {
  if (chunk.isLast) {
    chunks.push(chunk)
    chunks.sort((a, b) => a.id - b.id) // assumes each chunk carries an ordering key, e.g. an id
    processChunks()
  }
  else {
    chunks.push(chunk)
  }
}
Method 3:
If you do need to process chunks sequentially in real time, give all the chunks an id property, process them in order, and store out-of-order chunks for later:
let chunksToProcess = []
let lastProcessedChunkId = -1

onChunkReceived (chunk) {
  if (chunk.id === lastProcessedChunkId + 1) {
    processChunk(chunk)
    lastProcessedChunkId += 1
    processStoredChunks()
  }
  else {
    chunksToProcess.push(chunk)
  }
}

JSON array to nodejs Readable object stream

I have an event that fires every time a packet is received from a bluetooth device. The packet comes in as a JSON array and always contains 10 objects similar to the ones below.
var s1_data = [{"record":0,"sensor":1,"timestamp":26566,"date":{"day":7,"hour":10,"minute":45,"month":5,"second":38,"year":18}},
{"record":1,"sensor":1,"timestamp":26567,"date":{"day":7,"hour":10,"minute":45,"month":5,"second":38,"year":18}},
{"record":2,"sensor":1,"timestamp":26568,"date":{"day":7,"hour":10,"minute":45,"month":5,"second":38,"year":18}},
{"record":3,"sensor":1,"timestamp":26569,"date":{"day":7,"hour":10,"minute":45,"month":5,"second":38,"year":18}},
{"record":4,"sensor":1,"timestamp":26570,"date":{"day":7,"hour":10,"minute":45,"month":5,"second":38,"year":18}},
{"record":5,"sensor":1,"timestamp":26571,"date":{"day":7,"hour":10,"minute":45,"month":5,"second":38,"year":18}},
{"record":6,"sensor":1,"timestamp":26572,"date":{"day":7,"hour":10,"minute":45,"month":5,"second":38,"year":18}},
{"record":7,"sensor":1,"timestamp":26573,"date":{"day":7,"hour":10,"minute":45,"month":5,"second":38,"year":18}},
{"record":8,"sensor":1,"timestamp":26574,"date":{"day":7,"hour":10,"minute":45,"month":5,"second":38,"year":18}},
{"record":9,"sensor":1,"timestamp":26575,"date":{"day":7,"hour":10,"minute":45,"month":5,"second":38,"year":18}}]
I need to push each individual object into a node.js stream that is being consumed by another stream.
Can anyone give an example of how I can push these to a Readable object stream on an ongoing basis (or do I need to use a PassThrough stream)? I can't seem to get my head around how to do this!
Edit: sample code added below. chunk.toString() fails because chunk is undefined.
var Readable = require('stream').Readable;
var util = require('util');
var through2 = require('through2');
var s1 = require('./sensor1.js');
var s2 = require('./sensor2.js');

var minLength = s1.length;
//console.log(s1);

var s1stream = new Readable({ objectMode: true });
var s2stream = new Readable({ objectMode: true });

s1stream._read = function () {
  this.push();
};
s2stream._read = function () {
  this.push();
};

if (s2.length < minLength) {
  minLength = s2.length;
}

var n1 = 0;
setInterval(function() {
  if (n1++ < minLength) {
    console.log(s1[n1].record);
    s1stream.push(s1[n1]);
  } else if (n1++ === minLength) {
    s1stream.push(null);
  }
}, 1000);

var n2 = 0;
setInterval(function() {
  if (n2++ < minLength) {
    s2stream.push(s2[n2]);
  } else if (n2++ === minLength) {
    s2stream.push(null);
  }
}, 1000);

s1stream.pipe(through2.obj(function (chunk, enc, callback) {
  this.push(chunk.toString())
  callback()
})).pipe(process.stdout);
Edit: the code below now works. It looks like the problem was the way I was creating the read functions.
var Readable = require('stream').Readable;
var util = require('util');
var through2 = require('through2');
var Transform = require('stream').Transform;
var s1 = require('./sensor1.js');
var s2 = require('./sensor2.js');

var minLength = s1.length;
//console.log(s1);

var s1stream = new Readable({
  objectMode: true,
  read() {}
})
const s2stream = new Readable({
  objectMode: true,
  read() {}
})

if (s2.length < minLength) {
  minLength = s2.length;
}

var n1 = 0;
setInterval(function() {
  if (n1 < minLength) {
    s1stream.push(s1[n1]);
    n1++;
  } else if (n1 == minLength) {
    s1stream.push(null);
  }
}, 1000);

var n2 = 0;
setInterval(function() {
  if (n2++ < minLength) {
    s2stream.push(s2[n2]);
  } else if (n2++ === minLength) {
    s2stream.push(null);
  }
}, 1000);

var jsonStream = through2.obj(function(file, encoding, cb) {
  this.push(file.record.toString())
  cb()
})

s1stream.pipe(jsonStream).pipe(process.stdout);
You will need to implement a Readable stream that "wraps" your Bluetooth source. When the packet-received event fires, loop through your array of objects and call your Readable stream's push method, passing in each object. A somewhat similar example of this can be seen here.
Note that since you'll be pushing actual JavaScript objects, you'll want to enable the objectMode option when you write your implementation, as described here.
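A minimal sketch of that approach, assuming the Bluetooth library exposes an event emitter; the device object and the 'packet' event name are placeholders for your actual API:
const { Readable } = require('stream');

// objectMode lets us push plain JavaScript objects instead of Buffers/strings.
const packetStream = new Readable({
  objectMode: true,
  read() {} // data is pushed from the event handler below, so read() is a no-op
});

// 'device' and the 'packet' event are placeholders for your Bluetooth API.
device.on('packet', (records) => {
  for (const record of records) {
    packetStream.push(record); // one object per chunk downstream
  }
});

// Example consumer: log each record as it arrives.
packetStream.on('data', (record) => console.log(record.record, record.timestamp));
When the source ends (for example, the device disconnects), call packetStream.push(null) to signal end-of-stream.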
