ArrayBuffer and Float64Array in Swift - javascript

I'm trying to convert one of my JavaScript functions into a Swift 5 function, but even after an extensive search I couldn't find anything on it.
function toArrayBuffer(type, ts, x, y, z, sc) {
  let userId = userID.getUserId();
  //console.log("Found GPS UserID: " + userId);
  if (userId == "Unauthor") {
    //console.log("GPS did not find the userID")
    return null;
  }
  let buffer = new ArrayBuffer(8 * 4);
  // Offset: 0
  let float64Bytes = new Float64Array(buffer);
  float64Bytes[0] = ts;
  // Offset: 8
  let bytes = new Float32Array(buffer);
  bytes[2] = x;
  bytes[3] = y;
  bytes[4] = z;
  bytes[5] = sc;
  // Offset: 24
  let uint8Bytes = new Uint8Array(buffer);
  uint8Bytes[24] = type;
  for (let i = 0; i < 8; i++) {
    if (userId.charCodeAt(i)) {
      uint8Bytes[i + 25] = userId.charCodeAt(i);
    }
  }
  return buffer;
}
Basically, I tried to build the same function with var byteArray = [UInt8](stringVal.utf8), but a UInt8 can only store values up to 255, and I need to store an epoch timestamp, so that doesn't work either. Any help would be appreciated.
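One way to approach this in Swift (a minimal sketch, assuming the same byte layout; toData is my own name) is to build a Foundation Data and write each value's raw bytes at the offsets the JavaScript version uses. Note the JS code allocates 32 bytes but writes userId bytes at indices 25 through 32, so 33 bytes are needed to hold all eight characters; the sketch assumes 33. It also assumes a little-endian host, which is what JavaScript typed arrays effectively give you on current platforms.

import Foundation

// Sketch: same layout as the JS toArrayBuffer, host (little-endian) byte order.
func toData(type: UInt8, ts: Double, x: Float, y: Float, z: Float,
            sc: Float, userId: String) -> Data? {
    guard userId != "Unauthor" else { return nil }

    var data = Data(count: 33) // 8 (Float64) + 4*4 (Float32) + 1 (type) + 8 (userId)
    func put<T>(_ value: T, at offset: Int) {
        withUnsafeBytes(of: value) { raw in
            data.replaceSubrange(offset..<offset + raw.count, with: raw)
        }
    }
    put(ts, at: 0)   // Float64 timestamp at offset 0
    put(x, at: 8)    // Float32s at offsets 8, 12, 16, 20
    put(y, at: 12)
    put(z, at: 16)
    put(sc, at: 20)
    data[24] = type  // single byte at offset 24
    for (i, byte) in userId.utf8.prefix(8).enumerated() {
        data[25 + i] = byte // userId bytes at offsets 25...32
    }
    return data
}

On the JavaScript side, the result can be read back with a DataView: getFloat64(0, true), getFloat32(8, true), and so on.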

Related

Smoothing out voice packets using Web API

Recently I've been working on a voice platform that allows users to talk to each other under certain conditions. However, it only seems to be smooth when your ping is really low.
Here's the structure currently:
Raw PCM Audio From User -> Web Socket -> Sent to all clients that meet a certain condition
There's nothing particularly special about my WebSocket. It's made in Java and just sends the data received directly from clients to other clients (just temporary).
Issue:
For users that have a decent amount of ping (>100 ms), their audio cuts out (gets choppy) at certain parts and doesn't seem to load fast enough, even with the code I have in place. If a video of this would help, let me know!
This is the code I have for recording and playback currently (using AudioWorkletProcessors)
Recording:
buffers = [];
buffersLength = 0;

process(inputs, _, parameters) {
  const [input] = inputs;
  if (parameters.muted[0] == 1) {
    return true;
  }
  // Create one buffer from this input
  const dataLen = input[0].byteLength;
  const channels = input.length;
  const bufferForSocket = new Uint8Array(dataLen * channels + 9);
  bufferForSocket.set(new Uint8Array([channels]), 0);
  bufferForSocket.set(numberToArrayBuffer(sampleRate), 1);
  bufferForSocket.set(numberToArrayBuffer(input[0].byteLength), 5);
  for (let i = 0; i < channels; i++) {
    bufferForSocket.set(new Uint8Array(input[i].buffer), 9 + dataLen * i);
  }
  // Add buffers to a list
  this.buffers.push(bufferForSocket);
  this.buffersLength += bufferForSocket.byteLength;
  // If we have 25 buffers, send them off to websocket
  if (this.buffers.length >= 25) {
    const combinedBuffer = new Uint8Array(
      this.buffersLength + 4 + 4 * this.buffers.length
    );
    combinedBuffer.set(numberToArrayBuffer(this.buffers.length), 0);
    let offset = 4;
    for (let i = 0; i < this.buffers.length; i++) {
      const buffer = this.buffers[i];
      combinedBuffer.set(numberToArrayBuffer(buffer.byteLength), offset);
      combinedBuffer.set(buffer, offset + 4);
      offset += buffer.byteLength + 4;
    }
    this.buffers.length = 0;
    this.buffersLength = 0;
    // This is what sends to WebSocket
    this.port.postMessage(combinedBuffer.buffer);
  }
  return true;
}
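The snippets reference a numberToArrayBuffer helper that isn't shown in the post. Judging by the little-endian getUint32(..., true) reads in onBuffer below, it presumably packs a 32-bit unsigned integer, something like this reconstruction (an assumption, not the asker's code):

// Presumed helper: a 32-bit unsigned integer as 4 little-endian bytes.
function numberToArrayBuffer(n) {
  const bytes = new Uint8Array(4);
  new DataView(bytes.buffer).setUint32(0, n, true);
  return bytes;
}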
Playback:
class extends AudioWorkletProcessor {
  buffers = new LinkedList();
  timeTillNextBuffer = 0;

  process(_, outputs, __) {
    const [output] = outputs;
    const linkedBuffers = this.buffers.last();
    // If we aren't currently playing a buffer and we cannot play a buffer right now, return
    if (
      linkedBuffers == null ||
      (linkedBuffers.buffers.length === 0 && this.timeTillNextBuffer > Date.now())
    )
      return true;
    const buffer = linkedBuffers.buffers.removeLast();
    // Current buffer is finished, remove it
    if (linkedBuffers.index === linkedBuffers.buffers.length) {
      this.buffers.removeLast();
    }
    if (buffer === null) {
      return true;
    }
    const inputData = buffer.channels;
    // put audio to output
    for (let channel = 0; channel < outputs.length; channel++) {
      const channelData = inputData[channel];
      const outputData = output[channel];
      for (let i = 0; i < channelData.length; i++) {
        outputData[i] = channelData[i];
      }
    }
    return true;
  }

  static get parameterDescriptors() {
    return [];
  }

  onBuffer(buffer) {
    // buffer is ArrayBuffer
    const buffersGiven = new DataView(buffer, 0, 4).getUint32(0, true);
    let offset = 4;
    const buffers = new LinkedList();
    const linkedBuffers = { index: 0, buffers };
    // Read buffers from WebSocket (created in the snippet above)
    for (let i = 0; i < buffersGiven; i++) {
      const bufferLength = new DataView(buffer, offset, 4).getUint32(0, true);
      const numberOfChannels = new DataView(buffer, offset + 4, 1).getUint8(0);
      const sampleRate = new DataView(buffer, offset + 5, 4).getUint32(0, true);
      const channelLength = new DataView(buffer, offset + 9, 4).getUint32(0, true);
      const channels = [];
      for (let i = 0; i < numberOfChannels; i++) {
        const start = offset + 13 + i * channelLength;
        channels[i] = new Float32Array(buffer.slice(start), 0, channelLength / 4);
      }
      buffers.push({ channelLength, numberOfChannels, sampleRate, channels });
      offset += bufferLength + 4;
    }
    this.buffers.push(linkedBuffers);
    // Jitter buffer
    this.timeTillNextBuffer = 50 - this.buffers.length * 25 + Date.now();
  }

  constructor() {
    super();
    this.port.onmessage = (e) => this.onBuffer(e.data); // Data directly from WebSocket
  }
}
I heard that using Atomics can help, because of how the AudioContext plays blank audio when process() returns without data. Any tips would be greatly appreciated! Also, if anything is unclear, please let me know!
My code has something of a jitter-buffer system, and it doesn't seem to work at all. The audio that a user receives from me (low ping) is clear. However, what they (high ping) send me is choppy audio. Furthermore, this choppy audio seems to build up, and it gets more delayed the more packets I receive.
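Regarding Atomics: one common structure (a sketch under my own assumptions, not a tested fix) is to decouple packet arrival from playback with a lock-free ring buffer in a SharedArrayBuffer. The main thread writes samples as packets arrive; the worklet's process() pulls only what is available each 128-frame quantum and leaves silence on underrun, so late packets cause a brief dropout instead of accumulating delay. Note that SharedArrayBuffer requires the page to be cross-origin isolated.

// Sketch: single-producer/single-consumer ring buffer (mono; sizes are illustrative).
const FRAMES = 48000;                                    // about 1 s at 48 kHz
const state = new Int32Array(new SharedArrayBuffer(8));  // [readIdx, writeIdx]
const ring = new Float32Array(new SharedArrayBuffer(FRAMES * 4));

// Main thread: push decoded samples as WebSocket packets arrive.
function push(chunk) {
  const w = Atomics.load(state, 1);
  for (let i = 0; i < chunk.length; i++) ring[(w + i) % FRAMES] = chunk[i];
  Atomics.store(state, 1, (w + chunk.length) % FRAMES);
}

// Worklet: fill each render quantum from whatever is buffered.
function pull(out) {
  const r = Atomics.load(state, 0);
  const w = Atomics.load(state, 1);
  const available = (w - r + FRAMES) % FRAMES;
  const n = Math.min(out.length, available);
  for (let i = 0; i < n; i++) out[i] = ring[(r + i) % FRAMES];
  Atomics.store(state, 0, (r + n) % FRAMES); // frames past n stay silent (underrun)
}

The jitter-buffer behavior then reduces to not starting pull() until available first exceeds a threshold (say, two or three packets' worth), rather than computing timeTillNextBuffer from Date.now().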

download result mp3 file after processing in wavesurfer.js and Web Audio API

I did two days of extensive research on the topic, but there is no really well-explained piece that works.
So the flow is the following:
load an mp3 (store-bought, CBR 320) into wavesurfer
apply all the changes you need
download the processed result back to an mp3 file (without using a server)
I've seen online apps that can do that; nothing is transmitted to a server, it all happens in the browser.
When we inspect the wavesurfer instance, we have access to its internal references.
The goal would be to use the references already available from wavesurfer to produce the mp3 download.
From my understanding, this can be done with MediaRecorder, the WebCodecs API, or a library like lamejs.
I've tried to find a working example for the first two methods, but without luck. I also tried lamejs using the example provided in their repo, but I am getting errors from the library that are hard to debug, most likely related to providing the wrong input.
So far I've only managed to download a WAV file, using the following script:
handleCopyRegion = (region, instance) => {
  var segmentDuration = region.end - region.start;
  var originalBuffer = instance.backend.buffer;
  var emptySegment = instance.backend.ac.createBuffer(
    originalBuffer.numberOfChannels,
    Math.ceil(segmentDuration * originalBuffer.sampleRate),
    originalBuffer.sampleRate
  );
  for (var i = 0; i < originalBuffer.numberOfChannels; i++) {
    var chanData = originalBuffer.getChannelData(i);
    var emptySegmentData = emptySegment.getChannelData(i);
    var mid_data = chanData.subarray(
      Math.ceil(region.start * originalBuffer.sampleRate),
      Math.ceil(region.end * originalBuffer.sampleRate)
    );
    emptySegmentData.set(mid_data);
  }
  return emptySegment;
};

bufferToWave = (abuffer, offset, len) => {
  var numOfChan = abuffer.numberOfChannels,
    length = len * numOfChan * 2 + 44,
    buffer = new ArrayBuffer(length),
    view = new DataView(buffer),
    channels = [],
    i,
    sample,
    pos = 0;
  // write WAVE header
  setUint32(0x46464952); // "RIFF"
  setUint32(length - 8); // file length - 8
  setUint32(0x45564157); // "WAVE"
  setUint32(0x20746d66); // "fmt " chunk
  setUint32(16); // length = 16
  setUint16(1); // PCM (uncompressed)
  setUint16(numOfChan);
  setUint32(abuffer.sampleRate);
  setUint32(abuffer.sampleRate * 2 * numOfChan); // avg. bytes/sec
  setUint16(numOfChan * 2); // block-align
  setUint16(16); // 16-bit (hardcoded in this demo)
  setUint32(0x61746164); // "data" - chunk
  setUint32(length - pos - 4); // chunk length
  // write interleaved data
  for (i = 0; i < abuffer.numberOfChannels; i++)
    channels.push(abuffer.getChannelData(i));
  while (pos < length) {
    for (i = 0; i < numOfChan; i++) {
      // interleave channels
      sample = Math.max(-1, Math.min(1, channels[i][offset])); // clamp
      sample = (0.5 + sample < 0 ? sample * 32768 : sample * 32767) | 0; // scale to 16-bit signed int
      view.setInt16(pos, sample, true); // update data chunk
      pos += 2;
    }
    offset++; // next source sample
  }
  // create Blob
  return new Blob([buffer], { type: "audio/wav" });

  function setUint16(data) {
    view.setUint16(pos, data, true);
    pos += 2;
  }
  function setUint32(data) {
    view.setUint32(pos, data, true);
    pos += 4;
  }
};

const cutSelection = this.handleCopyRegion(
  this.wavesurfer.regions.list.cut,
  this.wavesurfer
);
const blob = this.bufferToWave(cutSelection, 0, cutSelection.length);
// you can now download wav from the blob
Is there a way to skip making the WAV and produce the mp3 right away? And if not, can the mp3 be made from that WAV, and how?
I mainly tried to use wavesurfer.backend.buffer as input, because this reference is an AudioBuffer, and accessing .getChannelData(0|1) gives you the left and right channels. But I didn't accomplish anything; maybe I'm thinking about it wrong.
Alright, here are the steps we need to take:
Get the buffer data from the wavesurfer player
Analyze the buffer to get the number of channels (stereo or mono), the channel data, and the sample rate
Use the lamejs library to convert the buffer to an MP3 blob
Then we can get the download link from that blob
Here is a quick DEMO, and also the JS code:
function downloadMp3() {
  var MP3Blob = analyzeAudioBuffer(wavesurfer.backend.buffer);
  console.log('here is your mp3 url:');
  console.log(URL.createObjectURL(MP3Blob));
}

function analyzeAudioBuffer(aBuffer) {
  let numOfChan = aBuffer.numberOfChannels,
    btwLength = aBuffer.length * numOfChan * 2 + 44,
    btwArrBuff = new ArrayBuffer(btwLength),
    btwView = new DataView(btwArrBuff),
    btwChnls = [],
    btwIndex,
    btwSample,
    btwOffset = 0,
    btwPos = 0;
  setUint32(0x46464952); // "RIFF"
  setUint32(btwLength - 8); // file length - 8
  setUint32(0x45564157); // "WAVE"
  setUint32(0x20746d66); // "fmt " chunk
  setUint32(16); // length = 16
  setUint16(1); // PCM (uncompressed)
  setUint16(numOfChan);
  setUint32(aBuffer.sampleRate);
  setUint32(aBuffer.sampleRate * 2 * numOfChan); // avg. bytes/sec
  setUint16(numOfChan * 2); // block-align
  setUint16(16); // 16-bit
  setUint32(0x61746164); // "data" - chunk
  setUint32(btwLength - btwPos - 4); // chunk length

  for (btwIndex = 0; btwIndex < aBuffer.numberOfChannels; btwIndex++)
    btwChnls.push(aBuffer.getChannelData(btwIndex));

  while (btwPos < btwLength) {
    for (btwIndex = 0; btwIndex < numOfChan; btwIndex++) {
      // interleave btwChnls
      btwSample = Math.max(-1, Math.min(1, btwChnls[btwIndex][btwOffset])); // clamp
      btwSample = (0.5 + btwSample < 0 ? btwSample * 32768 : btwSample * 32767) | 0; // scale to 16-bit signed int
      btwView.setInt16(btwPos, btwSample, true); // write 16-bit sample
      btwPos += 2;
    }
    btwOffset++; // next source sample
  }

  let wavHdr = lamejs.WavHeader.readHeader(new DataView(btwArrBuff));

  //Stereo
  let data = new Int16Array(btwArrBuff, wavHdr.dataOffset, wavHdr.dataLen / 2);
  let leftData = [];
  let rightData = [];
  for (let i = 0; i < data.length; i += 2) {
    leftData.push(data[i]);
    rightData.push(data[i + 1]);
  }
  var left = new Int16Array(leftData);
  var right = new Int16Array(rightData);

  //STEREO
  if (wavHdr.channels === 2)
    return bufferToMp3(wavHdr.channels, wavHdr.sampleRate, left, right);
  //MONO
  else if (wavHdr.channels === 1)
    return bufferToMp3(wavHdr.channels, wavHdr.sampleRate, data);

  function setUint16(data) {
    btwView.setUint16(btwPos, data, true);
    btwPos += 2;
  }
  function setUint32(data) {
    btwView.setUint32(btwPos, data, true);
    btwPos += 4;
  }
}

function bufferToMp3(channels, sampleRate, left, right = null) {
  var buffer = [];
  var mp3enc = new lamejs.Mp3Encoder(channels, sampleRate, 128);
  var remaining = left.length;
  var samplesPerFrame = 1152;

  for (var i = 0; remaining >= samplesPerFrame; i += samplesPerFrame) {
    if (!right) {
      var mono = left.subarray(i, i + samplesPerFrame);
      var mp3buf = mp3enc.encodeBuffer(mono);
    } else {
      var leftChunk = left.subarray(i, i + samplesPerFrame);
      var rightChunk = right.subarray(i, i + samplesPerFrame);
      var mp3buf = mp3enc.encodeBuffer(leftChunk, rightChunk);
    }
    if (mp3buf.length > 0) {
      buffer.push(mp3buf); //new Int8Array(mp3buf));
    }
    remaining -= samplesPerFrame;
  }

  var d = mp3enc.flush();
  if (d.length > 0) {
    buffer.push(new Int8Array(d));
  }

  var mp3Blob = new Blob(buffer, { type: 'audio/mpeg' });
  //var bUrl = window.URL.createObjectURL(mp3Blob);
  // send the download link to the console
  //console.log('mp3 download:', bUrl);
  return mp3Blob;
}
Let me know if you have any questions about the code.
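For step 4, here is a small sketch (my addition, not part of the answer above) that turns the returned blob into an actual file download instead of just logging the object URL:

// Hand the MP3 blob to the browser's download machinery.
function saveMp3(mp3Blob, filename = 'result.mp3') {
  const a = document.createElement('a');
  a.href = URL.createObjectURL(mp3Blob);
  a.download = filename; // suggested name for the saved file
  a.click();
  setTimeout(() => URL.revokeObjectURL(a.href), 1000); // free the blob URL after the download starts
}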

How can I align images using opencv.js

I am following this example from Satya Mallick
I hosted a test here https://icollect.money/opencv_align#
Problem: the findHomography() succeeds, but the warpPerspective() fails with an 'unhandled exception'.
I suspect that the homography is wrong, as it looks like it's an empty Mat:
h: Mat {$$: {…}}
  cols: 0
  data: Uint8Array(0)
  data8S: Int8Array(0)
  data16S: Int16Array(0)
  data16U: Uint16Array(0)
  data32F: Float32Array(0)
  data64F: Float64Array(0)
  matSize: Array(0)
  rows: 0
I included the C++ code from the referenced article (above) as inline comments alongside the JavaScript code:
function Align_img() {
  //im2 is the original reference image we are trying to align to
  let im2 = cv.imread(image_A_element);
  //im1 is the image we are trying to line up correctly
  let im1 = cv.imread(image_B_element);

  //17 Convert images to grayscale
  //18 Mat im1Gray, im2Gray;
  //19 cvtColor(im1, im1Gray, CV_BGR2GRAY);
  //20 cvtColor(im2, im2Gray, CV_BGR2GRAY);
  let im1Gray = new cv.Mat();
  let im2Gray = new cv.Mat();
  cv.cvtColor(im1, im1Gray, cv.COLOR_BGRA2GRAY);
  cv.cvtColor(im2, im2Gray, cv.COLOR_BGRA2GRAY);

  //22 Variables to store keypoints and descriptors
  //23 std::vector<KeyPoint> keypoints1, keypoints2;
  //24 Mat descriptors1, descriptors2;
  let keypoints1 = new cv.KeyPointVector();
  let keypoints2 = new cv.KeyPointVector();
  let descriptors1 = new cv.Mat();
  let descriptors2 = new cv.Mat();

  //26 Detect ORB features and compute descriptors.
  //27 Ptr<Feature2D> orb = ORB::create(MAX_FEATURES);
  //28 orb->detectAndCompute(im1Gray, Mat(), keypoints1, descriptors1);
  //29 orb->detectAndCompute(im2Gray, Mat(), keypoints2, descriptors2);
  var orb = new cv.ORB(5000);
  orb.detectAndCompute(im1Gray, new cv.Mat(), keypoints1, descriptors1);
  orb.detectAndCompute(im2Gray, new cv.Mat(), keypoints2, descriptors2);

  //31 Match features.
  //32 std::vector<DMatch> matches;
  //33 Ptr<DescriptorMatcher> matcher = DescriptorMatcher::create("BruteForce-Hamming");
  //34 matcher->match(descriptors1, descriptors2, matches, Mat());
  let bf = new cv.BFMatcher(cv.NORM_HAMMING, true);
  let matches = new cv.DMatchVector();
  bf.match(descriptors1, descriptors2, matches);

  //36 Sort matches by score
  //37 std::sort(matches.begin(), matches.end());
  //39 Remove not so good matches
  //40 const int numGoodMatches = matches.size() * GOOD_MATCH_PERCENT;
  //41 matches.erase(matches.begin()+numGoodMatches, matches.end());
  let good_matches = new cv.DMatchVector();
  for (let i = 0; i < matches.size(); i++) {
    if (matches.get(i).distance < 30) {
      good_matches.push_back(matches.get(i));
    }
  }

  //44 Draw top matches
  //45 Mat imMatches;
  //46 drawMatches(im1, keypoints1, im2, keypoints2, matches, imMatches);
  //47 imwrite("matches.jpg", imMatches);
  let imMatches = new cv.Mat();
  let color = new cv.Scalar(0, 255, 0, 255);
  cv.drawMatches(im1, keypoints1, im2, keypoints2, good_matches, imMatches, color);
  cv.imshow('imageCompareMatches', imMatches);

  //50 Extract location of good matches
  //51 std::vector<Point2f> points1, points2;
  //53 for( size_t i = 0; i < matches.size(); i++ )
  //54 {
  //55   points1.push_back( keypoints1[ matches[i].queryIdx ].pt );
  //56   points2.push_back( keypoints2[ matches[i].trainIdx ].pt );
  //57 }
  let points1 = [];
  let points2 = [];
  for (let i = 0; i < good_matches.size(); i++) {
    points1.push(keypoints1.get(good_matches.get(i).queryIdx).pt);
    points2.push(keypoints2.get(good_matches.get(i).trainIdx).pt);
  }

  //59 Find homography
  //60 h = findHomography( points1, points2, RANSAC );
  //The first 2 arguments to findHomography need to be matArray so you must convert your point1 and point2 to matArray
  //reference: https://docs.opencv.org/3.4/d9/d0c/group__calib3d.html#ga4abc2ece9fab9398f2e560d53c8c9780
  //*********** the issue seems to be here in how mat1 and mat2 are created *****
  let mat1 = cv.matFromArray(points1.length, 2, cv.CV_32F, points1);
  let mat2 = cv.matFromArray(points2.length, 2, cv.CV_32F, points2);
  let h = cv.findHomography(mat1, mat2, cv.RANSAC);

  //62 Use homography to warp image
  //63 warpPerspective(im1, im1Reg, h, im2.size());
  let image_B_final_result = new cv.Mat();
  cv.warpPerspective(im1, image_B_final_result, h, im2.size());
  cv.imshow('imageAligned', image_B_final_result);

  matches.delete();
  bf.delete();
  orb.delete();
  descriptors1.delete();
  descriptors2.delete();
  keypoints1.delete();
  keypoints2.delete();
  im1Gray.delete();
  im2Gray.delete();
  h.delete();
  image_B_final_result.delete();
  mat1.delete();
  mat2.delete();
}
for (let i = 0; i < good_matches.size(); i++) {
  points1.push(keypoints1.get(good_matches.get(i).queryIdx).pt);
  points2.push(keypoints2.get(good_matches.get(i).trainIdx).pt);
}
should be the code below:
for (let i = 0; i < good_matches.size(); i++) {
  points1.push(keypoints1.get(good_matches.get(i).queryIdx).pt.x);
  points1.push(keypoints1.get(good_matches.get(i).queryIdx).pt.y);
  points2.push(keypoints2.get(good_matches.get(i).trainIdx).pt.x);
  points2.push(keypoints2.get(good_matches.get(i).trainIdx).pt.y);
}
I push the x and y values into points[] as a flat, staggered sequence of numbers, and then it works! cv.matFromArray expects a flat numeric array (x0, y0, x1, y1, ...), not an array of Point objects, which is why h ended up empty and warpPerspective threw.
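As an aside (my suggestion, not part of the answer above): with the flat arrays, the point Mats can also be built as single-column, two-channel Mats, the layout findHomography conventionally expects for point sets:

// N x 1 two-channel float Mats from the flat [x0, y0, x1, y1, ...] arrays.
let mat1 = cv.matFromArray(points1.length / 2, 1, cv.CV_32FC2, points1);
let mat2 = cv.matFromArray(points2.length / 2, 1, cv.CV_32FC2, points2);
let h = cv.findHomography(mat1, mat2, cv.RANSAC);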

Convert AudioBuffer to ArrayBuffer / Blob for WAV Download

I'd like to convert an AudioBuffer to a Blob so that I can create an ObjectURL from it and then download the audio file.
let rec = new Recorder(async (chunks) => {
  var blob = new Blob(chunks, {
    type: 'audio/mp3'
  });
  var arrayBuffer = await blob.arrayBuffer();
  const audioContext = new AudioContext();
  await audioContext.decodeAudioData(arrayBuffer, (audioBuffer) => {
    // How do I now convert the AudioBuffer into an ArrayBuffer => Blob ?
  });
});
An AudioBuffer contains non-interleaved Float32Array PCM samples for each decoded audio channel. For a stereo AudioBuffer, it will contain 2 channels. Those channels need to be interleaved first, and then the interleaved PCM needs a WAV header prepended to it so you can download and play it as a WAV.
// Float32Array samples
const [left, right] = [audioBuffer.getChannelData(0), audioBuffer.getChannelData(1)]

// interleaved
const interleaved = new Float32Array(left.length + right.length)
for (let src = 0, dst = 0; src < left.length; src++, dst += 2) {
  interleaved[dst] = left[src]
  interleaved[dst + 1] = right[src]
}

// get WAV file bytes and audio params of your audio source
const wavBytes = getWavBytes(interleaved.buffer, {
  isFloat: true, // floating point or 16-bit integer
  numChannels: 2,
  sampleRate: 48000,
})
const wav = new Blob([wavBytes], { type: 'audio/wav' })

// create download link and append to Dom
const downloadLink = document.createElement('a')
downloadLink.href = URL.createObjectURL(wav)
downloadLink.setAttribute('download', 'my-audio.wav') // name file
supporting functions below:
// Returns Uint8Array of WAV bytes
function getWavBytes(buffer, options) {
  const type = options.isFloat ? Float32Array : Uint16Array
  const numFrames = buffer.byteLength / type.BYTES_PER_ELEMENT
  const headerBytes = getWavHeader(Object.assign({}, options, { numFrames }))
  const wavBytes = new Uint8Array(headerBytes.length + buffer.byteLength);
  // prepend header, then add pcmBytes
  wavBytes.set(headerBytes, 0)
  wavBytes.set(new Uint8Array(buffer), headerBytes.length)
  return wavBytes
}

// adapted from https://gist.github.com/also/900023
// returns Uint8Array of WAV header bytes
function getWavHeader(options) {
  const numFrames = options.numFrames
  const numChannels = options.numChannels || 2
  const sampleRate = options.sampleRate || 44100
  const bytesPerSample = options.isFloat ? 4 : 2
  const format = options.isFloat ? 3 : 1
  const blockAlign = numChannels * bytesPerSample
  const byteRate = sampleRate * blockAlign
  const dataSize = numFrames * blockAlign
  const buffer = new ArrayBuffer(44)
  const dv = new DataView(buffer)
  let p = 0

  function writeString(s) {
    for (let i = 0; i < s.length; i++) {
      dv.setUint8(p + i, s.charCodeAt(i))
    }
    p += s.length
  }
  function writeUint32(d) {
    dv.setUint32(p, d, true)
    p += 4
  }
  function writeUint16(d) {
    dv.setUint16(p, d, true)
    p += 2
  }

  writeString('RIFF')             // ChunkID
  writeUint32(dataSize + 36)      // ChunkSize
  writeString('WAVE')             // Format
  writeString('fmt ')             // Subchunk1ID
  writeUint32(16)                 // Subchunk1Size
  writeUint16(format)             // AudioFormat https://i.stack.imgur.com/BuSmb.png
  writeUint16(numChannels)        // NumChannels
  writeUint32(sampleRate)         // SampleRate
  writeUint32(byteRate)           // ByteRate
  writeUint16(blockAlign)         // BlockAlign
  writeUint16(bytesPerSample * 8) // BitsPerSample
  writeString('data')             // Subchunk2ID
  writeUint32(dataSize)           // Subchunk2Size

  return new Uint8Array(buffer)
}
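The snippet above creates downloadLink but never activates it; to actually start the download you still need to attach and click it (a usage note of mine, not part of the original answer):

document.body.appendChild(downloadLink)
downloadLink.click()
downloadLink.remove()
setTimeout(() => URL.revokeObjectURL(downloadLink.href), 1000) // revoke once the download has started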
A minor edit to AnthumChris's reply above: the following function generalizes the interleaving to handle any number of channels instead of assuming stereo input.
convertAudioBufferToBlob(audioBuffer) {
  var channelData = [],
    totalLength = 0,
    channelLength = 0;
  for (var i = 0; i < audioBuffer.numberOfChannels; i++) {
    channelData.push(audioBuffer.getChannelData(i));
    totalLength += channelData[i].length;
    if (i == 0) channelLength = channelData[i].length;
  }

  // interleave all channels, not just left/right
  const interleaved = new Float32Array(totalLength);
  for (
    let src = 0, dst = 0;
    src < channelLength;
    src++, dst += audioBuffer.numberOfChannels
  ) {
    for (var j = 0; j < audioBuffer.numberOfChannels; j++) {
      interleaved[dst + j] = channelData[j][src];
    }
  }

  // get WAV file bytes and audio params of your audio source
  const wavBytes = this.getWavBytes(interleaved.buffer, {
    isFloat: true, // floating point or 16-bit integer
    numChannels: audioBuffer.numberOfChannels,
    sampleRate: audioBuffer.sampleRate, // use the buffer's own rate rather than a hardcoded 48000
  });
  const wav = new Blob([wavBytes], { type: "audio/wav" });
  return wav;
},

Randomly grab three locations by slicing an array based off of comparison of location

I have an array and I am grabbing 3 random locations out of it. The third location cannot be a restaurant; this will make sense when you see the code.
What I would like to do is make sure each location is at least 1 mile away from the previous location.
CODE:
Here is my existing code. Currently it grabs three locations, but they could be really close to each other.
for (let i = a.length - 1; i > 0; i--) {
  const j = Math.floor(Math.random() * (i + 1));
  [a[i], a[j]] = [a[j], a[i]];
}
let third = a[0];
let [first, second] = a.filter(l => !l.restaurant).slice(1);
let selectedLocations = [first, second, third];

function calculateDistance(lat1, lon1, lat2, lon2) {
  let Rm = 3961; // earth radius in miles
  let Rk = 6373; // earth radius in km
  lat1 = deg2rad(lat1);
  lon1 = deg2rad(lon1);
  lat2 = deg2rad(lat2);
  lon2 = deg2rad(lon2);
  let dlat = lat2 - lat1;
  let dlon = lon2 - lon1;
  let a = Math.pow(Math.sin(dlat / 2), 2) + Math.cos(lat1) * Math.cos(lat2) * Math.pow(Math.sin(dlon / 2), 2);
  let c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a)); // great circle distance in radians
  let dm = c * Rm; // great circle distance in miles
  let dk = c * Rk; // great circle distance in km
  let mi = round(dm);
  let km = round(dk);
  let ft = Math.round(mi * 5280.0);
  return mi;
}

function deg2rad(deg) {
  let rad = deg * Math.PI / 180; // radians = degrees * pi/180
  return rad;
}

function round(x) {
  return Math.round(x * 100) / 100;
}
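A quick sanity check of calculateDistance (my addition): one degree of latitude is roughly 69 miles, so:

console.log(calculateDistance(0, 0, 1, 0));               // ≈ 69.13 miles
console.log(calculateDistance(40.7, -74.0, 40.7, -74.0)); // 0 (same point)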
Remember, the (third) location is grabbed first. Once I have the third location, I would like to go through the array until I find another location at least 1 mile away from it.
This new location becomes the (second) location, and once I have it, I need to go through the array again and find one at least 1 mile away from that. This will be the final (first) location.
UPDATE:
Here is something along the lines of what I am talking about, but I know there has to be a cleaner way of writing it:
for (let i = a.length - 1; i > 0; i--) {
  const j = Math.floor(Math.random() * (i + 1));
  [a[i], a[j]] = [a[j], a[i]];
}

// We get our third location.
let third = a[0];
console.log(third);

// Build a new array removing the third location and
// any other locations that is a restaurant
let newarray = a.filter(l => !l.restaurant).slice(1);

// filter our new array based on the location being at
// least 1 mile away from the third location
let second = newarray.filter(function (e) {
  return calculateDistance(
    third.geolocation.lat,
    third.geolocation.lng,
    e.geolocation.lat,
    e.geolocation.lng
  ) >= 1;
});

// We now have our second location
console.log(second[0]);

// build a new array removing the second location
let thirdarray = second.slice(1);

// filter our new array based on the location being at
// least 1 mile away from the second location
let first = thirdarray.filter(function (e) {
  return calculateDistance(
    second[0].geolocation.lat,
    second[0].geolocation.lng,
    e.geolocation.lat,
    e.geolocation.lng
  ) >= 1;
});

// we now have our first location
console.log(first[0]);
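One cleaner shape for this (a sketch against the same data layout; pickLocations is my own name, and it assumes calculateDistance from above) is to fold the repeated "find the next pick at least one mile from the last one" step into a loop:

// Shuffle, take `third` (may be a restaurant), then repeatedly take the first
// non-restaurant candidate at least `minMiles` from the previous pick.
function pickLocations(locations, count = 3, minMiles = 1) {
  const pool = [...locations];
  for (let i = pool.length - 1; i > 0; i--) { // Fisher-Yates shuffle
    const j = Math.floor(Math.random() * (i + 1));
    [pool[i], pool[j]] = [pool[j], pool[i]];
  }
  const picks = [pool.shift()];
  while (picks.length < count) {
    const prev = picks[picks.length - 1];
    const idx = pool.findIndex(l => !l.restaurant &&
      calculateDistance(prev.geolocation.lat, prev.geolocation.lng,
                        l.geolocation.lat, l.geolocation.lng) >= minMiles);
    if (idx === -1) return null; // no combination satisfies the constraint
    picks.push(pool.splice(idx, 1)[0]);
  }
  return picks.reverse(); // [first, second, third]
}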
Based on the link I posted in comments, I've included a tiny library to accommodate your needs:
//<![CDATA[
let get, post, doc, htm, bod, nav, mobile, M, I, S, Q, rad, distance, withinOneMile, sortCityDists; // for use on other loads
addEventListener('load', function(){
  get = (url, success, context)=>{
    const x = new XMLHttpRequest;
    const c = context || this;
    x.open('GET', url);
    x.onload = ()=>{
      if(success)success.call(c, JSON.parse(x.responseText));
    }
    x.send();
  }
  post = (url, send, success, context)=>{
    const x = new XMLHttpRequest;
    const c = context || this;
    x.open('POST', url);
    x.onload = ()=>{
      if(success)success.call(c, JSON.parse(x.responseText));
    }
    if(typeof send === 'object' && send && !(send instanceof Array)){
      if(typeof FormData !== 'undefined' && send instanceof FormData){
        x.send(send);
      }
      else{
        let s, r = [];
        for(let p in send){
          s = send[p];
          if(typeof s === 'object')s = JSON.stringify(s);
          r.push(encodeURIComponent(p)+'='+encodeURIComponent(s));
        }
        x.setRequestHeader('Content-type', 'application/x-www-form-urlencoded'); x.send(r.join('&'));
      }
    }
    else{
      throw new Error('send argument must be an Object');
    }
    return x;
  }
  doc = document; htm = doc.documentElement; bod = doc.body; nav = navigator; M = tag=>doc.createElement(tag); I = id=>doc.getElementById(id);
  mobile = nav.userAgent.match(/Mobi/i) ? true : false;
  S = (selector, within)=>{
    const w = within || doc;
    return w.querySelector(selector);
  }
  Q = (selector, within)=>{
    const w = within || doc;
    return w.querySelectorAll(selector);
  }
  rad = n=>n*Math.PI/180;
  distance = (latLng, toLatLng)=>{
    let ll = latLng, lL = toLatLng;
    if(ll instanceof Array){
      ll = {lat:ll[0], lng:ll[1]};
    }
    if(lL instanceof Array){
      lL = {lat:lL[0], lng:lL[1]};
    }
    const lng1 = ll.lng === undefined ? +ll.lon : +ll.lng;
    const lng2 = lL.lng === undefined ? +lL.lon : +lL.lng;
    const m = 6371e3, lat1 = +ll.lat, lat2 = +lL.lat, lat1R = rad(lat1);
    const lat2R = rad(lat2), latD = rad(lat2-lat1), lngD = rad(lng2-lng1);
    const n = Math.pow(Math.sin(latD/2), 2)+Math.cos(lat1R)*Math.cos(lat2R)*Math.pow(Math.sin(lngD/2), 2);
    return m*2*Math.atan2(Math.sqrt(n), Math.sqrt(1-n));
  }
  withinOneMile = (latLng, toLatLng)=>{
    if(distance(latLng, toLatLng)*3.2808/5280 <= 1){
      return true;
    }
    else{
      return false;
    }
  }
  sortCityDists = (citiesArrayOfObjs, originIndex = 0)=>{
    const cities = citiesArrayOfObjs.slice(), baseLatLng = cities.splice(originIndex, 1)[0].latLng, dists = [];
    cities.forEach(o=>{
      dists.push({city:o.city, meters:distance(baseLatLng, o.latLng)});
    });
    dists.sort((a, b)=>a.meters-b.meters);
    return dists;
  }
  // you can put the below on another page - besides the end load and beyond
  const cities = [{city:'Seattle', latLng:[47.6038321, -122.3300624]}, {city:'New York', latLng:[40.7127281, -74.0060152]}, {city:'Chicago', latLng:[41.8755616, -87.6244212]}, {city:'San Francisco', latLng:[37.7790262, -122.4199061]}];
  const meterDists = sortCityDists(cities).reverse(), mileDists = [];
  console.log(meterDists);
  meterDists.forEach(o=>{
    mileDists.push({city:o.city, miles:o.meters*3.2808/5280});
  });
  console.log(mileDists);
  const locs = [[47.6038321, -122.3300624], [47.6038321, -122.3500624], [47.6038321, -122.3800624]], res = [];
  for(let i=0,n=1,loc1,loc2,l=locs.length; n<l; i++,n++){
    loc1 = locs[i]; loc2 = locs[n];
    if(withinOneMile(loc1, loc2)){
      res.push(loc1, loc2);
    }
    else{
      break;
    }
  }
  console.log(res);
}); // end load
//]]>
As you can see, you pass an Array of Objects to sortCityDists, and the result is an Array of Objects ordered by distance from least to greatest, measured from the first element of the passed Array. The format can be seen by looking at the cities Array of Objects. Just .reverse() to flip the Array around.
