HTTP video streaming doesn't work with Apple devices - JavaScript

Below is my code that takes a GET request from the client and sends the video:
const { name } = req.params;
// console.log(req.headers)
let range = req.headers.range;
console.log(req.headers);
// if(!range) range = 'bytes=0-'
if (req.headers.referer !== "https://course-client-nine.vercel.app/") {
  return res.json({ message: "No access from another domain" });
}
const myBucket = storage.bucket('coursebuckets');
const file = myBucket.file(name);
if (!file) {
  return res.json({
    message: "File not found",
    code: 401
  });
}
const [metadata] = await file.getMetadata();
const videoSize = metadata.size;
// const videoSize = fs.statSync('123.mp4').size
const chunkSize = 1 * 1e+6;
const start = Number(range.replace(/\D/g, ''));
let end = Math.min(start + chunkSize, videoSize - 1);
let contentLength = end - start + 1;
const headers = {
  "Content-Range": `bytes ${start}-${end}/${videoSize}`,
  "Accept-Ranges": 'bytes',
  "Content-Length": contentLength,
  "Content-Type": 'video/mp4'
};
res.writeHead(206, headers);
const readStream = file.createReadStream({ start, end });
readStream.pipe(res);
readStream.on('error', (error) => {
  console.log(error);
});
It works well in Chrome, but on iOS devices it doesn't play at all, with no error. What should I do?

It doesn't look like you're properly parsing and using the requested byte range, so you respond with a range that is not what Safari asked for. You are stripping out all non-numeric characters, assuming what's left is the start value, and then picking your own large chunk size, but that's not how the Range header should be parsed. You are not honoring what the client asked for, and Safari is pickier than Chrome in that regard.
If you log the request range from the client and then log what you're sending, you will see that they do not match.
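For example, logging both sides right before res.writeHead (using the headers object already built in the question) makes the mismatch obvious; this is just a debugging sketch:
console.log('client requested:', req.headers.range);
console.log('server responding with:', headers['Content-Range']);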
From this article, which is about making streaming work with Safari, here's some range-parsing code that gives you the actual requested start and end:
// Defaults: serve the whole file if no usable Range header is present.
let start = 0;
let end = videoSize - 1;
const options = {};

if (range) {
  const bytesPrefix = "bytes=";
  if (range.startsWith(bytesPrefix)) {
    const bytesRange = range.substring(bytesPrefix.length);
    const parts = bytesRange.split("-");
    if (parts.length === 2) {
      const rangeStart = parts[0] && parts[0].trim();
      if (rangeStart && rangeStart.length > 0) {
        options.start = start = parseInt(rangeStart);
      }
      const rangeEnd = parts[1] && parts[1].trim();
      if (rangeEnd && rangeEnd.length > 0) {
        options.end = end = parseInt(rangeEnd);
      }
    }
  }
}
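Tying that back to the question's Google Cloud Storage code, a minimal sketch (assuming the same file, videoSize and res as in the question) that honors the client's requested range instead of a self-chosen chunk size would be:
const contentLength = end - start + 1;
res.writeHead(206, {
  "Content-Range": `bytes ${start}-${end}/${videoSize}`,
  "Accept-Ranges": "bytes",
  "Content-Length": contentLength,
  "Content-Type": "video/mp4"
});
file.createReadStream({ start, end }).pipe(res);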

Related

"invalid authentication credentials" when using SetInterval with google API

I have a Discord bot that takes every message on a specific channel (you can only type once on it).
For each message, I put the message on a spreadsheet.
To minimize Google API calls on my spreadsheet, I've implemented this:
I check how many users wrote per minute.
If 10 or fewer: I call my updatesheet function = working perfectly.
If 11 or more: I delay the call with a setInterval (I took care not to schedule more than 10 per minute in the future as well) = not working.
I get this error message:
message: 'Request had invalid authentication credentials. Expected OAuth 2 access token, login cookie or other valid authentication credential.'
which I do not understand, since I'm using the exact same updatesheet function:
async function update(placeId, authorId, adressId, pointsNumber, tokensWon) {
  await doc.useServiceAccountAuth({
    client_email: creds.client_email,
    private_key: creds.private_key,
  });
  await doc.loadInfo(); // loads document properties and worksheets
  let sheetdailyPoints = doc.sheetsByIndex[1];
  var date = new Date();
  const year = date.getFullYear();
  const month = `${date.getMonth() + 1}`.padStart(2, "0");
  const day = `${date.getDate()}`.padStart(2, "0");
  const stringDate = [day, month, year].join("/");
  const rowPointsAdded = await sheetdailyPoints.addRow({
    Date: stringDate,
    AuthorId: authorId,
    Place: placeId,
    Adress: adressId,
    Points: pointsNumber
  });
  let sheetTokensXP = doc.sheetsByIndex[3];
  const rows2 = await sheetTokensXP.getRows();
  var addedTokensXP = false;
  let length = rows2.length;
  for (let i = 0; i < length; i++) {
    if (rows2[i].Adress == adressId) {
      if (rows2[i].Points != '')
        rows2[i].Points = parseInt(rows2[i].Points) + parseInt(pointsNumber);
      else
        rows2[i].Points = parseInt(pointsNumber);
      rows2[i].XP = parseInt(rows2[i].XP) + 200;
      rows2[i].Tokens = parseInt(rows2[i].Tokens) + parseInt(tokensWon);
      await rows2[i].save();
      addedTokensXP = true;
      return rows2[i].Tokens;
    }
  }
  if (addedTokensXP == false) {
    const rowAdded3 = await sheetTokensXP.addRow({
      Adress: adressId,
      Points: pointsNumber,
      Tokens: tokensWon,
      XP: 200
    });
    await rowAdded3.save();
    return tokensWon;
  }
}
The main function:
const prevMap = new Map();
for (i = 0; i < 61; i++) {
  prevMap.set(i, 0);
}
async function updateSheets(message, currentMin, ladderNumber, placeId, authorId, adressId, pointsNumber, tokensWon) {
  if (currentMin === prevMin) {
    nbUpdateAtMin = prevMap.get(currentMin);
    nbUpdateAtMin++;
    if (nbUpdateAtMin > 10) {
      let nbMinAdded = 0;
      let foundPlaceAtMin;
      let foundPlace = false;
      for (i = currentMin + 1; foundPlace; i++) {
        if (prevMap.get(i) < 11) {
          foundPlaceAtMin = i;
          foundPlace = true;
        }
      }
      nbMinAdded = foundPlaceAtMin - currentMin;
      setInterval(function() { riddle.update(placeId, authorId, adressId, pointsNumber, tokensWon) }, nbMinAdded * 60000);
      let value = prevMap.get(currentMin + nbMinAdded);
      prevMap.set(currentMin + nbMinAdded, value + 1);
    }
    else {
      let value = prevMap.get(currentMin);
      prevMap.set(currentMin, value + 1);
      riddle.update(placeId, authorId, adressId, pointsNumber, tokensWon);
    }
  }
  else {
    prevMap.set(prevMin, 0);
    let value = prevMap.get(currentMin);
    prevMap.set(currentMin, value + 1);
    prevMin = currentMin;
    riddle.update(placeId, authorId, adressId, pointsNumber, tokensWon);
  }
}
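As an aside on the scheduling code above: setInterval repeats its callback every nbMinAdded * 60000 ms rather than firing once. A one-shot delay is normally written with setTimeout; here is a minimal sketch of that pattern (it does not, by itself, explain the authentication error):
setTimeout(function() {
  riddle.update(placeId, authorId, adressId, pointsNumber, tokensWon);
}, nbMinAdded * 60000);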

Arduino webserial data filtering

I have an Arduino kit and a Web Serial API setup to receive the data into an HTML div with the id of target.
At the moment it is logging all the data into one stream (there are a few dials and switches on the Arduino).
The data looks like this...
RADI 61 RADI 62 RADI 63 RADI 64 WIND 1 WIND 0 WIND 1
...RADI being a dial value and WIND being an on/off switch.
Is there a way to separate this information into RADI and WIND chunks, ideally into separate HTML text boxes, so I can manipulate that data?
Any help would be appreciated.
Here is my current JavaScript code...
document.getElementById('connectButton').addEventListener('click', () => {
  if (navigator.serial) {
    connectSerial();
  } else {
    alert('Web Serial API not supported.');
  }
});

async function connectSerial() {
  const log = document.getElementById('target');
  try {
    const port = await navigator.serial.requestPort();
    await port.open({ baudRate: 9600 });
    const decoder = new TextDecoderStream();
    port.readable.pipeTo(decoder.writable);
    const inputStream = decoder.readable;
    const reader = inputStream.getReader();
    while (true) {
      const { value, done } = await reader.read();
      if (value) {
        log.textContent += value + '\n';
      }
      if (done) {
        console.log('[readLoop] DONE', done);
        reader.releaseLock();
        break;
      }
    }
  } catch (error) {
    log.innerHTML = error;
  }
}
Assuming you have a couple of textboxes:
<input id='textbox1'>
<input id='textbox2'>
You can update your log references to the following:
const log1 = document.getElementById('textbox1');
const log2 = document.getElementById('textbox2');
Then in your loop:
if (value) {
  let parse = String(value).split(' ');
  if ((parse[0] ?? '') === 'WIND') log1.value = parse[1];
  if ((parse[0] ?? '') === 'RADI') log2.value = parse[1];
}
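One caveat: reader.read() hands you arbitrary chunks of text, so a single value may contain several readings or cut one in half. A sketch of a more robust approach (handleSerialChunk is a hypothetical helper; log1 and log2 are the elements from above) buffers the incoming text and only consumes complete label/number pairs:
let buffer = '';

function handleSerialChunk(value) {
  buffer += value;
  // A reading is complete once the separator after it has arrived,
  // so only match pairs that are followed by whitespace.
  const pairRegex = /(RADI|WIND)\s+(\d+)\s/g;
  let match;
  let consumedUpTo = 0;
  while ((match = pairRegex.exec(buffer)) !== null) {
    const [, label, reading] = match;
    if (label === 'WIND') log1.value = reading;
    if (label === 'RADI') log2.value = reading;
    consumedUpTo = pairRegex.lastIndex;
  }
  // Drop what has been parsed; keep the possibly incomplete tail.
  buffer = buffer.slice(consumedUpTo);
}
In the read loop you would then call handleSerialChunk(value) instead of writing the raw text straight into the div.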

Video stream on Node server not working properly on Firefox only?

My goal is to stream a video file in chunks rather than loading and playing the entire file at once. It works great on Google Chrome, and I am able to see the chunk sizes as they are logged to the console. When I try this on Firefox, though, nothing is logged to the console. The video will play, but I am unable to fast forward past the 10-minute or so mark. Just curious why it works flawlessly in Chrome and not Firefox?
router.get("/video", ensureAuthenticated, function(req, res) {
  var id = req.query.id;
  let path = "";
  for (let k in movieLinks) {
    if (k === id) {
      path = movieLinks[k];
    }
  }
  // const path = "public/movies/noes2.mp4";
  const stat = fs.statSync(path);
  const fileSize = stat.size;
  const range = req.headers.range;
  if (range) {
    const parts = range.replace(/bytes=/, "").split("-");
    const start = parseInt(parts[0], 10);
    const end = parts[1] ? parseInt(parts[1], 10) : fileSize - 1;
    const chunkSize = (end - start) + 1;
    console.log('RANGE: ' + start + ' - ' + end + ' = ' + chunkSize);
    const file = fs.createReadStream(path, { start, end });
    const head = {
      'Content-Range': `bytes ${start}-${end}/${fileSize}`,
      'Content-Ranges': 'bytes',
      'Content-Length': chunkSize,
      'Content-Type': 'video/mp4'
    };
    res.writeHead(206, head);
    file.pipe(res);
  } else {
    const head = {
      'Content-Length': fileSize,
      'Content-Type': 'video/mp4'
    };
    res.writeHead(200, head);
    fs.createReadStream(path).pipe(res);
  }
  // res.render('video', {layout: 'videoLayout'});
})
I've done this before by splitting video files with a tool called ffmpeg.
You can use the command below to split your video into 30-second chunks and read them from an S3 bucket or wherever they may live. This will make browser downloads easier.
ffmpeg script example:
ffmpeg -i input.mov -c copy -f segment -segment_time 30 output%03d.mov
I hope this helps.

How to fix "Failed to load resource: The operation couldn't be completed. Protocol Error" in iOS 12 video?

I'm working on a video streaming component and videos seem to be working normally on every platform except for iOS 12. The video works for previous iOS versions (10.3 and 11.0). When I check the network requests, I get the following error. I've tried looking through changes made in iOS 12 related to video streaming but I couldn't find anything specific. Any help would be appreciated! I've also attached the code for the stream endpoint below. Thanks!
exports.DECRYPT = async function(req, res, {
  content_id
}) {
  let result = await FileFactory.Decrypt({
    content_id
  });
  if (result.content_type.includes('video')) {
    let fileSize = result.original_size;
    const range = req.headers.range;
    const parts = range ? range.replace(/bytes=/, "").split("-") : undefined;
    const start = parts ? parseInt(parts[0], 10) : undefined;
    const end = parts && parts[1] ? parseInt(parts[1], 10) : fileSize - 1;
    res.setHeader('Accept-Ranges', 'bytes');
    res.setHeader('Cache-Control', 'no-cache');
    res.setHeader('Content-Type', result.content_type);
    res.setHeader('Content-Length', end - start + 1);
    res.setHeader('Content-Range', `bytes ${start}-${end}/${fileSize}`);
    res.setHeader('Connection', 'Keep-Alive');
    res.setHeader('Content-Encoding', 'identity');
    if (start === 0 && !(parts[1])) {
      res.statusCode = 200;
    } else {
      res.statusCode = 206;
    }
    let stream = request({
      url: result.url
    }).pipe(result.decipher);
    let pointer = 0;
    stream.on('data', (chunk) => {
      pointer += chunk.length;
      if (pointer > start) {
        res.write(chunk.slice(start - pointer, end + 1));
      }
      if (pointer > end) {
        stream.destroy("Chunk loaded");
        res.end();
      }
    });
    stream.on('error', function(e) {
    });
    stream.on('end', () => {
      res.end();
    });
  } else {
    res.setHeader('Content-Type', result.content_type);
    res.setHeader('Accept-Ranges', 'bytes');
    res.statusCode = 200;
    request(result.url).pipe(result.decipher).pipe(res);
  }
};
It appears that Safari has some issue with HTTP/2. After some research, I ended up removing the HTTP/2 configuration from nginx.
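For reference, that nginx change usually amounts to dropping the http2 flag from the listen directives in the relevant server block; a sketch (server name and port are placeholders):
server {
    listen 443 ssl;        # was: listen 443 ssl http2;
    server_name example.com;
    # ... rest of the configuration unchanged
}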

How to convert an audio file into a byte array with JavaScript

I'm trying to convert a .wav file to a byte array string. I need to do this on the back-end, and targeting the file is becoming an issue.
files.forEach(file => {
  let index = files.indexOf(file);
  let reader = new FileReader();
  reader.readAsArrayBuffer(file);
  console.log(reader.result);
  reader.onload = function (event) {
    let byteArray = new Uint8Array(reader.result);
    let FileName = file.name;
    let dataAsByteArrayString = byteArray.toString();
    var listHtml = $list.html();
  };
});
The above code uses npm's filereader package, which says to target a file. I'm having difficulty doing this since this is not a front-end drag-and-drop of the file.
My generated file is called "response.wav"; how would I convert this file using JavaScript and Node extensions? Thanks!
I don't know if this will help, but in the last project I worked on we parsed a .wav file using the Node Buffer API and wrote it using the Node file API.
If you have more questions about the code I can direct you to the person who worked on this file the most. I hope it helps somewhat.
https://github.com/IntelliSound/intelliSound-Server/blob/development/lib/sound-data-parser.js
'use strict';

function ParsedWave(buffer) {
  const RIFF_HEADER_OFFSET = 0;
  const FILE_SIZE_OFFSET = 4;
  const RIFF_FORMAT_OFFSET = 8;
  const SUBCHUNK1_ID_OFFSET = 12;
  const AUDIO_FORMAT_OFFSET = 20;
  const NUMBER_OF_CHANNELS_OFFSET = 22;
  const SAMPLE_RATE_OFFSET = 24;
  const BITS_PER_SAMPLE_OFFSET = 34;
  const SUBCHUNK2_ID_OFFSET = 36;
  const SUBCHUNK2_SIZE_OFFSET = 40;
  const DATA_OFFSET = 44;

  this.buffer = buffer;
  this.riff = buffer.slice(RIFF_HEADER_OFFSET, RIFF_HEADER_OFFSET + 4).toString('utf8');
  this.fileSize = buffer.readUInt32LE(FILE_SIZE_OFFSET);
  this.riffType = buffer.slice(RIFF_FORMAT_OFFSET, RIFF_FORMAT_OFFSET + 4).toString('utf8');
  this.subChunk1Id = buffer.slice(SUBCHUNK1_ID_OFFSET, SUBCHUNK1_ID_OFFSET + 4).toString('utf8');
  this.audioFormat = buffer.readUInt16LE(AUDIO_FORMAT_OFFSET);
  this.numberOfChannels = buffer.readUInt16LE(NUMBER_OF_CHANNELS_OFFSET);
  this.sampleRate = buffer.readUInt32LE(SAMPLE_RATE_OFFSET);
  this.bitsPerSample = buffer.readUInt16LE(BITS_PER_SAMPLE_OFFSET);
  this.subChunk2Id = buffer.slice(SUBCHUNK2_ID_OFFSET, SUBCHUNK2_ID_OFFSET + 4).toString('utf8');
  this.subChunk2Size = buffer.readUInt32LE(SUBCHUNK2_SIZE_OFFSET);
  this.data = buffer.slice(DATA_OFFSET, this.subChunk2Size + DATA_OFFSET);
}

// Andrew - The bufferMapper function is going to accept a parsed wave-file and output
// an array of values corresponding to the data subchunk in a format which can
// be accepted as input to the neural network.
const bufferMapper = parsedWave => {
  const SIXTEEN_BIT_ZERO = 32768;
  const SIXTEEN_BIT_MAX = 65535;
  parsedWave.neuralArray = [];
  for (let i = 0; i < parsedWave.data.length; i += 2) {
    const sample = parsedWave.data.readInt16LE(i);
    const unsignedSample = sample + SIXTEEN_BIT_ZERO;
    const sigmoidSample = unsignedSample / SIXTEEN_BIT_MAX;
    parsedWave.neuralArray.push(sigmoidSample);
  }
  return parsedWave;
};

module.exports = data => {
  const parsedWaveFile = new ParsedWave(data);
  if (parsedWaveFile.riff !== 'RIFF') {
    throw new TypeError('incorrect file type, must be RIFF format');
  }
  if (parsedWaveFile.fileSize > 10000000) {
    throw new TypeError('file too large, please limit file size to less than 10MB');
  }
  if (parsedWaveFile.riffType !== 'WAVE') {
    throw new TypeError('file must be a WAVE');
  }
  if (parsedWaveFile.subChunk1Id !== 'fmt ') {
    throw new TypeError('the first subchunk must be fmt');
  }
  if (parsedWaveFile.audioFormat !== 1) {
    throw new TypeError('wave file must be uncompressed linear PCM');
  }
  if (parsedWaveFile.numberOfChannels > 2) {
    throw new TypeError('wave file must have 2 or less channels');
  }
  if (parsedWaveFile.sampleRate > 48000) {
    throw new TypeError('wave file must have sample rate of less than 48k');
  }
  if (parsedWaveFile.bitsPerSample !== 16) {
    throw new TypeError(`file's bit depth must be 16`);
  }
  if (parsedWaveFile.subChunk2Id !== 'data') {
    throw new TypeError('subchunk 2 must be data');
  }
  const neuralMappedWaveFile = bufferMapper(parsedWaveFile);
  return neuralMappedWaveFile;
};
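A usage sketch, assuming the module above is saved next to your script as sound-data-parser.js and that response.wav is the file mentioned in the question:
const fs = require('fs');
const parseWave = require('./sound-data-parser');

const parsed = parseWave(fs.readFileSync('./response.wav'));
console.log(parsed.sampleRate, parsed.bitsPerSample, parsed.neuralArray.length);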
Using the included fs module, you can read in your wav file like so:
const fs = require('fs');
const path = './path/to/my.wav';
fs.readFile(path, (err, data) => {
// Data is a Buffer object
});
For documentation on working with a Node.js Buffer, see here. Now, if you were more interested in the file-conversion portion, there are a couple of libraries out there. If you just need the conversion functionality and don't want to implement it yourself, node-fluent-ffmpeg may work for you. If you want to implement it yourself, this node-wav file may be a good reference (too much to paste here).
If you need to go from a Buffer to an ArrayBuffer, this SO answer shows some options.
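As a sketch of the question's actual goal (a byte-array string from response.wav) done entirely on the back end, using only the Buffer that fs.readFile returns (variable names mirror the browser snippet in the question):
const fs = require('fs');

fs.readFile('./response.wav', (err, data) => {
  if (err) throw err;
  // A Node Buffer is already a Uint8Array subclass, so a view is enough...
  const byteArray = new Uint8Array(data.buffer, data.byteOffset, data.byteLength);
  // ...and toString() on a plain Uint8Array gives the same comma-separated
  // byte string the FileReader version produced.
  const dataAsByteArrayString = byteArray.toString();
  console.log(dataAsByteArrayString.slice(0, 60));
});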
