I am a student studying programming. I am not good at English, so I wrote this with a translator.
I'm studying MediaPipe:
https://google.github.io/mediapipe/solutions/face_mesh
Do you know how to use a local video instead of the webcam?
let videoElement = document.querySelector(".input_video");
// @mediapipe/camera_utils/camera_utils.js
const camera = new Camera(videoElement, {
  onFrame: async () => {
    await holistic.send({ image: videoElement });
  },
  width: 640,
  height: 480,
});
camera.start();
This is the code that reads from the webcam.
I think I need to change this code, but I don't know how.
I tried to look up '@mediapipe/camera_utils/camera_utils.js', but I couldn't find any documentation.
I also found a CodePen demo that uses a local video:
https://codepen.io/mediapipe/details/KKgVaPJ
But I don't know which part of the code to use.
Please show me the way.
Rather than creating a new Camera, you need to send the frames yourself using requestAnimationFrame(). However, since the send has to happen in an async function, the requestAnimationFrame call needs to be wrapped in a Promise.
You keep the standard MediaPipe setup:
let videoElement = document.querySelector(".input_video");

const config = {
  locateFile: (file) => {
    return 'https://cdn.jsdelivr.net/npm/@mediapipe/face_mesh@' +
      `${mpFaceMesh.VERSION}/${file}`;
  }
};
const solutionOptions = {
  selfieMode: false,
  enableFaceGeometry: false,
  maxNumFaces: 1,
  refineLandmarks: true, // false,
  minDetectionConfidence: 0.5,
  minTrackingConfidence: 0.5
};
const faceMesh = new mpFaceMesh.FaceMesh(config);
faceMesh.setOptions(solutionOptions);
faceMesh.onResults(onResults);
but rather than new Camera() or a SourcePicker(), you need an animation-frame loop:
async function onFrame() {
  if (!videoElement.paused && !videoElement.ended) {
    await faceMesh.send({ image: videoElement });
    // https://stackoverflow.com/questions/65144038/how-to-use-requestanimationframe-with-promise
    await new Promise(requestAnimationFrame);
    onFrame();
  } else {
    // video not playing yet (or finished): check again shortly
    setTimeout(onFrame, 500);
  }
}
Then load the video:
// must be same domain, otherwise it will taint the canvas!
videoElement.src = "./mylocalvideo.mp4";
videoElement.onloadeddata = (evt) => {
  let video = evt.target;
  canvasElement.width = video.videoWidth;
  canvasElement.height = video.videoHeight;
  videoElement.play();
  onFrame();
};
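For reference, the snippets above use an onResults callback and a canvasElement that aren't shown. A minimal sketch of that missing glue, assuming the usual MediaPipe demo markup (a video.input_video element plus a canvas whose class name I'm guessing as output_canvas):

// Assumed markup: <video class="input_video"> and <canvas class="output_canvas">
// (class names follow the MediaPipe demos; adjust for your page)
const canvasElement = document.querySelector(".output_canvas");
const canvasCtx = canvasElement.getContext("2d");

function onResults(results) {
  // draw the processed frame; landmark drawing omitted for brevity
  canvasCtx.save();
  canvasCtx.clearRect(0, 0, canvasElement.width, canvasElement.height);
  canvasCtx.drawImage(results.image, 0, 0, canvasElement.width, canvasElement.height);
  canvasCtx.restore();
}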
I have a project in Laravel 8 with Vue 2, where I implemented PDFTron and save the signatures a user creates into a database as base64 images. So far all good. The problem comes when I load those images from the database: although I can see them in the signature tool, they are extremely low quality, and I can no longer change the image's color, which doesn't happen on the demo sites. I think that when they get exported they become something PDFTron can no longer manipulate as a newly created signature. I'd like to know if someone else has had this problem and found a possible solution. I'll put the code I'm using and the images here.
Above: a saved signature; below: a new signature.
let that = this;
const viewerElement = document.getElementById('webviewer');
WebViewer({
  path: '/js/WebViewer/lib',
  initialDoc: this.initialDoc,
  extension: 'pdf',
}, viewerElement)
  .then((instance) => {
    const { documentViewer, annotationManager } = instance.Core;
    const signatureTool = documentViewer.getTool('AnnotationCreateSignature');
    documentViewer.addEventListener('documentLoaded', () => {
      instance.UI.setLanguage(that.locale);
      let signatures = JSON.parse(that.savedSignatures);
      signatures = signatures.map(a => a.base64_signature);
      signatureTool.importSignatures(["data:image/png;base64, " + signatures]); // base64 images array
      document.getElementById('app').setAttribute('style', 'padding: 0');
      document.getElementById('loader-container').classList.add('d-none');
      document.getElementById('all-pages-content').removeAttribute('style');
      document.getElementById('downloadButton').setAttribute('style', 'visibility: visible');
      document.getElementById('pdf-ui').setAttribute('style', 'visibility: visible');
    });
    documentViewer.addEventListener('annotationsLoaded', async () => {
      annotationManager.addEventListener('annotationDrawn', async (annotationList) => {
        console.log('1');
        annotationList.forEach(annotation => {
          if (annotation.Subject === "Signature")
            that.extractAnnotationSignature(annotation, documentViewer);
        });
      });
    });
    let saveSignedPdf = document.getElementById('downloadButton');
    saveSignedPdf.addEventListener('click', async () => {
      const doc = documentViewer.getDocument();
      const xfdfString = await annotationManager.exportAnnotations();
      const data = await doc.getFileData({
        // saves the document with annotations in it
        xfdfString
      });
      const arr = new Uint8Array(data);
      const blob = new Blob([arr], { type: 'application/pdf' });
      await that.processDocument(blob);
      // Add code for handling Blob here
    });
    // instance.disableElements(['downloadButton', 'printButton']);
    // instance.disableElements(['toolbarGroup-Insert']);
    return instance;
  });
And here is the code I use to export the images, taken from the official docs:
async extractAnnotationSignature(annotation, docViewer) {
  let that = this;
  // Create a new canvas to draw the annotation on
  const canvas = document.createElement('canvas');
  // Reference the annotation from the document
  const pageMatrix = docViewer.getDocument().getPageMatrix(annotation.PageNumber);
  // Set the height & width of the canvas to match the annotation
  canvas.height = annotation.Height;
  canvas.width = annotation.Width;
  const ctx = canvas.getContext('2d');
  // Translate the annotation to the top-left corner of the canvas, i.e. (0, 0)
  ctx.translate(-annotation.X, -annotation.Y);
  // Draw the annotation onto the canvas
  annotation.draw(ctx, pageMatrix);
  // Convert the canvas to a Blob for upload
  canvas.toBlob((blob) => {
    let formData = new FormData();
    formData.append('signature', blob);
    formData.append('customer_id', that.customerId);
    const config = {
      headers: {
        'content-type': 'multipart/form-data',
        'X-CSRF-TOKEN': document.querySelector('meta[name="csrf-token"]').content,
      }
    };
    axios.post(that.saveSignatureUrl, formData, config)
      .then(function (response) {
        if (!response.data.success) {
          console.log("could not save signature for future use");
        } else {
          console.log("saved signature for future use");
        }
      })
      .catch(function (error) {
        that.output = error;
        console.log("could not reach backend");
      });
  });
}
I know you will see several flaws in this code; I'm a rookie, so please bear with me. Thank you for your time, and any help is appreciated.
Here is a test base64 string generated with this code:
iVBORw0KGgoAAAANSUhEUgAAAMgAAAB/CAYAAACql41TAAAS0UlEQVR4Xu1dC5BU1Zk+59yenlGUpz3dvAxgVkAeJUYcEzdoBaOLy3Qju8Nu0GgeCoJAYcwCZqbHm+kZCFJmV0yMsFsxtUnAMAnQ3UMsTCCmjEkQJSwPWZIViDow3S3ER9AZpu85+/UwYwSGmenu29333v5vVVe/7vnP/3//+e55/4czuggBQuCiCHDChhAgBC6OABGESgch8DEEAsGWCsnYC5zxWyMh70tEECoehEAnAnfocZ/LUL9VjNdFQ+U/SP1MBKHiQQh0IuAPtjSBHK9GQ95Hu0AhglDxIASAgL82vpIpNQ7NqtkfB4QIQsWj6BHwV5+oYkKsKtXKbmjUB5wighR9kSAAuhAIVJ8cqURyDzobcyN13l+cjwzVIFRWihqBVL+DKb4rUu8NdQcEEaSoi0dxG++viQUZVxWRkG/mxZAgghR3GSla6ytrWj6HZlWjkCXXhhuGvEkEKdqiQIafj8C8daqkpTm+VxmqPtrg29gTQlSDUPkpOgTQ71iHmfJkOOR9sDfjiSC9IUT/OwoBfzB2Nwxadp1Wfq2uc6wq6fkigvSGEP3vGARmVx//RFJoexWTs6Khob/ui2FEkL6gRPc4AoHUkC5X4nfh+vKGvhpEBOkrUnSfrRHw17Q8wjj/LJaS3JGOIUSQdNCie22JgL86Po0JFTU0NmWb7j2SjhFEkHTQontth0DVJqW17ovt5ZyvRu3xo3QNIIKkixjdbysEKmtiT3POFMixIBPFiSCZoEZpbIEAhnTnMcXmX+cqn9qXId3ujCKC2MLVpGS6CFQGT0zFiNUuwY2pW0PDXk03fdf9RJBMkaN0lkXgFv1Xrv7Ja3Zzrr4bDvn+KxtFiSDZoEdpLYkAFiJ+X3D+YV+WkvRmABGkN4Tof1shgKgkKxQXMyJ15TeboTgRxAwUSYYlEAjUxP9ZcfWES7o/s7lh0J/NUIoIYgaKJKPgCPiD8SlMyZekYJVNdb4dZilEBDELSZJTMAQq9eNXckPDfnLxeCTkWW+mIkQQM9EkWXlHoGr5qQFt7uQvUHtsidT7VpmtABHEbERJXl4RqAzGfon4h7sRCfGRXGRMBMkFqiQzLwig3/EzBF2IIVzPwlxlSATJFbIkN6cIBIKxZxRnJSBHaodgzi4iSM6gJcG5QgBrrFYzpq5BuJ7KXOXRJZcIkmuESb6pCARqY/OlZIuSLtdNz+lD3jNVeDfCiCC5Rpjkm4YAlq7fhqXrm4XB/37ryvK9pgnuQRARJB8oUx5ZI+DX459khnoJBHkgXOfdkrXAPgoggvQRKLqtsAgg4MILiKG7DTF01+RTEyJIPtGmvDJCAOR4EltmB6DmuCcjAVkkIoJkAR4lzT0CHbsCOVtQKt6uaNQnnMl9jufmQATJN+KUX58RmBmMfVow/iJeFVtDnox3BfY5QxrFygYqSptPBKp05T4j469KxR/vOlAzn/l35UU1SCFQpzx7RaCyNv59LtUH6JQv6vXmHN5ABMkhuCQ6MwRwsM0CxdSXovW+iswkmJeKCGIeliTJBAT81cevZ1x7GWFCPxUJlf/BBJFZiSCCZAUfJTYbAYxavQKZ6xHozdSNT5nqSQTJFDlKZzoCIMf3FOMudMrvN114hgKJIBkCR8nMRQChehZzLr54nea5MdMoiOZqdFYaESQXqJLMtBBIHaiJNVap6OsV23TfgbQS5/hmIkiOASbxPSNwhx73lRjq95LxZWhabbIaXkQQq3mkyPRBv+PnijHsKfc+akXTiSBW9EqR6IRts2tAjlEYsaqyqslEEKt6xuF64Ui0+zHXsZS1iRsjj3net6q5RBCresbBelVWx/6JC4ao6/xzVpgM7AlqIoiDC6IVTZulJ26RSblTCPX5rSaGCM2VrUSQXCFLci9AwK/HJmPb7E7GxQJEX2+0A0REEDt4yQE6ztZPjkgayZ2YDHw8XOdZZxeTiCB28ZSN9URw6Uu5IXZixCoSDflW2skUIoidvGVTXQPB+Dal1CHs7fi63UwggtjNYzbTtzIYj+AU5hOY65hvM9U71CWC2NFrNtEZ0UiaUMTezPSMciuYSQSxghccpoOuK7HHiDehcB014yDNQsJDBCkk+g7Mu+MIZmNClCn1eqH3k5sBLxHEDBRJRgcC89apkthb8SaQ43C43rfECbAQQZzgRQvYMGOtKi1JgBxcHYzU+ZZaQCVTVCCCmAJjcQu5U28pNwyxCUcw74nWeb/mJDSIIE7yZgFsqdTfHseSyUbB+WZ0yC25pyMbWIgg2aBX5GkDeuwzymCN6HM8hhNmn3AiHEQQJ3o1DzbNrG2pFBLk4Pw+zHP8KA9ZFiQLIkhBYLd3pv7q+L1MyKe4UlXh+qE/t7c1PWtPBHGyd3NgW6C25SGlxEOciapw6IpdOcjCUiKJIJZyh7WVqaxtqeeKz0TE9aqm+vI/WVtbc7QjgpiDo+OlVAZb/pMzPqZUK6tq1AeccrzBnQYSQYrF0xnaOUM/2d9ltG8EOU6hM/7FDMXYNhkRxLauy73i2Og0jie1jVjz/TzIsTz3OVovByKI9XxiCY3OhgPlGzHHsdKpcxx9AZoI0heUiuweRDu8G/s4nkHNMdcuwRVy5SIiSK6QzUKuf1niclkqx3DFRnMhPNiu6uFKlqPQXoEgz/2wt7sfCm8/JtlleC9FVhpjynX2XaQ+iyyy74e0l+KEp3fR7+juVNlWNLdGZyHfVkmJIAV0V2qRXzLJJiLSxzUo1BMwK30NmjRjodJlIMVRkOEI5hviirMElzKhOL8avz0vJTuN99Ngwl+TmtamktLgzJ1krg8M1W4YblUmMzGrXVN1grMpTMkHNUN7ozsZm1d6TmQi265piCB58Nwt+tGyy42ySYJrE6WUeOcTUAtMBPhlKPypcP8HUShfQ21wkAnX4YjuOZ4HtT7KYt48VdLijW9An4O7hWduo95tzZFPlSyTFxHEZFdU1rwzmonWyVyJSagVJqOZMhHNlatR+Pcz1UGG/ZhsOyBdyQNRfVi3T2mTVepR3Cz9xChliA2ouV5xyiYnM/EjgmSIpl9/Y5iU7vFCifEAcTxqgokoZJPRT3hfMQky8P2Ci33KkAciK8v343dUGta60Bm/CcTdoKR8Klo/dLW1tLOGNkSQi/jhFl25Bre/M7xdJEdxrkYxqfDOR6E2uApJxuOFdr46lHoprr3GpDyYNNT+51YNTVjDtT1rgXA8cxCOZyPuutfJq3Gz9YXjCFK1/NSADy5pH6IxMRgd2yFSqSFC8cvRAS5VzCjDAE+pErwUh9SX4aGO31gZ+gSlUrHLAYanY6SIKQ9+H4jPzSDAMTz7j2Fc6BhqiGNIc5QlxWuRld5YtuAXKn2gNoEFh3IZ4xLDuEN/VSg97JCvrQji/0bMq9xqNDfYKDRhhiOE/nCFdxTi4WgqDEfbfjie8B/iO9YK8ZMw7iS+pz6/h6XZbZLzNvQJWvHwb0Nhb1NMa+WCt2IItU0p9lfUEAmpiYSrzZ3Ysqr/STs4MF0dMQH... (remainder of the base64-encoded PNG truncated for readability)
I also tried exportSignatures. For already-saved signatures I got the same base64 string that looks blurry, but for new ones I got the following, and I don't quite understand what it is or how to use it to reconstruct the images:
[[[{"x":181.15942028985506,"y":1.4492753623188406},{"x":178.2608695652174,"y":1.4492753623188406},{"x":168.1159420289855,"y":7.246376811594203},{"x":150.7246376811594,"y":23.18840579710145},{"x":128.9855072463768,"y":40.57971014492754},{"x":85.5072463768116,"y":66.66666666666667},{"x":56.52173913043478,"y":82.6086956521739},{"x":17.391304347826086,"y":98.55072463768116},{"x":2.898550724637681,"y":104.34782608695652},{"x":1.4492753623188406,"y":104.34782608695652},{"x":1.4492753623188406,"y":102.89855072463769},{"x":4.3478260869565215,"y":89.85507246376811},{"x":17.391304347826086,"y":65.21739130434783},{"x":24.63768115942029,"y":57.971014492753625},{"x":44.927536231884055,"y":43.47826086956522},{"x":50.72463768115942,"y":43.47826086956522},{"x":59.42028985507246,"y":43.47826086956522},{"x":73.91304347826087,"y":50.72463768115942},{"x":79.71014492753623,"y":57.971014492753625},{"x":95.65217391304348,"y":73.91304347826087},{"x":123.18840579710145,"y":97.10144927536231},{"x":149.2753623188406,"y":115.94202898550725},{"x":176.81159420289856,"y":128.9855072463768},{"x":182.6086956521739,"y":130.43478260869566},{"x":194.20289855072463,"y":130.43478260869566},{"x":198.55072463768116,"y":130.43478260869566}]]]
Hello David Gabriel Lopez Duarte,
We were able to reproduce this issue. It is likely because the canvas isn't being adjusted to match the zoom level. This has been reported before and is in our backlog!
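Until a fix ships, one possible workaround (an untested sketch on my part, not an official PDFTron fix) is to render the extracted annotation at a higher resolution by scaling the canvas inside extractAnnotationSignature, rather than using the annotation's on-screen size:

// Replace the sizing/drawing part of extractAnnotationSignature;
// `scale` is a made-up upsampling factor (2-4); tune as needed.
const scale = 4;
canvas.height = annotation.Height * scale;
canvas.width = annotation.Width * scale;
const ctx = canvas.getContext('2d');
ctx.scale(scale, scale); // draw at higher resolution
ctx.translate(-annotation.X, -annotation.Y);
annotation.draw(ctx, pageMatrix);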
TLDR
Imagine I have one video and one image. I want to create another video that overlays the image (e.g., a watermark) at the center for the first 2 seconds of the video, and export it as the final video. I need to do this on the client side only. Is it possible to use MediaRecorder + Canvas, or should I resort to using ffmpeg.js?
Context
I am making a browser-based video editor where the user can upload videos and images and combine them. So far, I implemented this by embedding the video and images inside a canvas element appropriately. The data representation looks somewhat like this:
video: {
  url: 'https://archive.com/video.mp4',
  duration: 34,
},
images: [{
  url: 'https://archive.com/img1.jpg',
  start_time: 0,
  end_time: 2,
  top: 30,
  left: 20,
  width: 50,
  height: 50,
}]
Attempts
I play the video and show/hide the images on a canvas. Then I use MediaRecorder to capture the canvas's stream and export it as a data blob at the end. The final output is as expected, but the problem with this approach is that I need to play the video from beginning to end in order to capture the stream from the canvas. If the video is 60 seconds long, exporting also takes 60 seconds.
function record(canvas) {
  return new Promise(function (res, rej) {
    const stream = canvas.captureStream();
    const mediaRecorder = new MediaRecorder(stream);
    const recordedData = [];
    // Register recorder events
    mediaRecorder.ondataavailable = function (event) {
      recordedData.push(event.data);
    };
    mediaRecorder.onstop = function (event) {
      var blob = new Blob(recordedData, {
        type: "video/webm",
      });
      var url = URL.createObjectURL(blob);
      res(url);
    };
    // Start the video and start recording
    videoRef.current.currentTime = 0;
    videoRef.current.addEventListener(
      "play",
      (e) => {
        mediaRecorder.start();
      },
      { once: true }
    );
    videoRef.current.addEventListener(
      "ended",
      (e) => {
        mediaRecorder.stop();
      },
      { once: true }
    );
    videoRef.current.play();
  });
}
Alternatively, I can use ffmpeg.js to encode the video. I haven't tried this method yet, as I would have to convert my image representation into ffmpeg arguments (I wonder how much work that is).
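For what it's worth, mapping that representation to ffmpeg arguments may be less work than it sounds. A hedged sketch with ffmpeg.wasm (the @ffmpeg/ffmpeg 0.11-style API; the file names are my assumptions, and scaling the image to its width/height is omitted):

import { createFFmpeg, fetchFile } from '@ffmpeg/ffmpeg';

async function exportWithOverlay(video, image) {
  const ffmpeg = createFFmpeg({ log: true });
  await ffmpeg.load();
  // copy the inputs into ffmpeg's in-memory filesystem
  ffmpeg.FS('writeFile', 'video.mp4', await fetchFile(video.url));
  ffmpeg.FS('writeFile', 'img1.jpg', await fetchFile(image.url));
  // overlay=x:y positions the image; enable limits it to [start_time, end_time]
  await ffmpeg.run(
    '-i', 'video.mp4', '-i', 'img1.jpg',
    '-filter_complex',
    `[0:v][1:v] overlay=${image.left}:${image.top}:enable='between(t,${image.start_time},${image.end_time})'`,
    'out.mp4'
  );
  const data = ffmpeg.FS('readFile', 'out.mp4');
  return URL.createObjectURL(new Blob([data.buffer], { type: 'video/mp4' }));
}

Encoding runs as fast as WebAssembly allows rather than in real time, which addresses the 60-seconds-to-export-60-seconds problem, at the cost of shipping the ffmpeg.wasm binary to the client.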
UPDATE
I found an example of a high-pass filter using Web Audio here, and implemented it in my code like this:
function modifyGain(audioTrack, gainValue) {
  var ctx = new AudioContext();
  var src = ctx.createMediaStreamTrackSource(audioTrack);
  var dst = ctx.createMediaStreamDestination();
  // create a filter node
  var filterNode = ctx.createBiquadFilter();
  filterNode.type = 'highpass';
  filterNode.frequency.value = 0;
  // cutoff frequency: for highpass, audio is attenuated below this frequency
  var gainNode = ctx.createGain();
  gainNode.gain.value = 1; // note: the gainValue parameter is never actually applied
  src.connect(filterNode);
  // filterNode.connect(dst);
  filterNode.connect(gainNode);
  gainNode.connect(dst);
  // alert(ctx.dst)
  return dst.stream.getTracks()[0];
}
try {
  webcamStream = await navigator.mediaDevices.getUserMedia(mediaConstraints);
  document.getElementById("local_video").srcObject = webcamStream;
} catch (err) {
  handleGetUserMediaError(err);
  return;
}

// Add the tracks from the stream to the RTCPeerConnection
try {
  webcamStream.getTracks().forEach(function (track) {
    if (track.kind === 'audio') {
      track = modifyGain(track, 0.5); // process only audio tracks
    }
    myPeerConnection.addTrack(track, webcamStream);
  });
  showLocalVideoContainer();
} catch (err) {
  handleGetUserMediaError(err);
}
Before I can actually test whether low sounds are silenced by the high-pass filter, I am facing an issue: using modifyGain mutes the audio completely after a few seconds. I tried cutoff frequencies of 0, 1500, etc.; it always goes silent after a few seconds.
Original POST
I am using the constraints below to try to suppress noise.
var mediaConstraints = {
  audio: {
    advanced: [
      { echoCancellation: { exact: true } },
      { autoGainControl: { exact: true } },
      { noiseSuppression: { exact: true } },
      { highpassFilter: { exact: true } }
    ]
  },
  video: {
    facingMode: "user",
    width: { min: 160, ideal: 320, max: 640 },
    height: { min: 120, ideal: 240, max: 480 },
  }
};
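(As an aside, you can check which of these constraints the browser actually honored via MediaStreamTrack.getSettings(); note that highpassFilter is non-standard and most browsers ignore it. A quick sketch, assuming the webcamStream acquired below:)

// Log the audio processing settings that were actually applied
const settings = webcamStream.getAudioTracks()[0].getSettings();
console.log(settings.echoCancellation, settings.autoGainControl, settings.noiseSuppression);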
But I want to silence some higher frequencies too. Even if I slowly move my phone across a surface, the mic catches the noise and sends it to the other peer. It even catches my breathing and sends it to the other side when I hold the phone near my cheek (like a phone call). I want some control over the frequencies: a threshold below which sounds will not be picked up by the mic.
I have tried searching, but I am not sure what exactly will work for my case or how to do it. I think the following are my choices, but I may be wrong:
1. Change the SDP (codec params?).
2. Use Web Audio and process the mic input before passing it on to the other peer.
3. Use Web Audio and process the audio received from the other peer before directing it to the output device (speaker).
Any help will be appreciated.
Thanks
You can process the audio from your microphone by piping it through Web Audio and using a BiquadFilterNode:
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
const ctx = new AudioContext();
const src = ctx.createMediaStreamSource(stream);
const dst = ctx.createMediaStreamDestination();
const biquad = ctx.createBiquadFilter();
// chain src -> biquad -> dst
[src, biquad, dst].reduce((a, b) => a && a.connect(b));
audio.srcObject = dst.stream;
biquad.type = "highpass";
biquad.gain.value = 25; // gain is an AudioParam; note it has no effect on the highpass type
biquad.frequency.value = 1000;
rangeButton.oninput = () => biquad.frequency.value = rangeButton.value;
Here's a working fiddle.
It goes silent after a few seconds.
This could be a garbage-collection issue. If you're experiencing this, try assigning your stream to a global JS variable.
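A minimal sketch of that workaround applied to the modifyGain function above (the global holder is my own naming; the point is just that something long-lived must keep referencing the AudioContext and its nodes so they aren't collected):

// Long-lived holder so the audio graph can't be garbage-collected
window.audioGraph = window.audioGraph || [];

function modifyGain(audioTrack, cutoffHz) {
  const ctx = new AudioContext();
  // createMediaStreamTrackSource is Firefox-only; elsewhere, use
  // ctx.createMediaStreamSource(new MediaStream([audioTrack]))
  const src = ctx.createMediaStreamTrackSource(audioTrack);
  const dst = ctx.createMediaStreamDestination();
  const filterNode = ctx.createBiquadFilter();
  filterNode.type = 'highpass';
  filterNode.frequency.value = cutoffHz;
  src.connect(filterNode);
  filterNode.connect(dst);
  window.audioGraph.push(ctx, src, filterNode, dst); // retain references
  return dst.stream.getTracks()[0];
}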
I want to create a browser extension that would allow users to add effects to their video/audio streams, without special plugins, on any site that uses the JavaScript web APIs.
Google searching has not been particularly helpful, so I'm starting to wonder whether this is even possible.
I have two primary questions here:
1. Is this possible with JavaScript + Chrome?
2. Any links to additional resources are greatly appreciated.
I am not really into web extensions, so there may be a simpler API available, and I won't go into implementation details, but theoretically you can indeed do it.
All it takes is to override the methods from which you'd get your MediaStream, draw the original MediaStream to an HTML canvas where you can apply your filter, and then return a new MediaStream made of the video track from the canvas element's captureStream(), plus any other tracks from the original MediaStream.
A very basic proof-of-concept implementation for gUM could look like this:
// overrides getUserMedia so it applies an invert filter on the videoTrack
{
  const mediaDevices = navigator.mediaDevices;
  const original_gUM = mediaDevices.getUserMedia.bind(mediaDevices);
  mediaDevices.getUserMedia = async (...args) => {
    const original_stream = await original_gUM(...args);
    // no video track, no filter
    if (!original_stream.getVideoTracks().length) {
      return original_stream;
    }
    // prepare our DOM elements
    const canvas = document.createElement('canvas');
    const ctx = canvas.getContext('2d');
    const video = document.createElement('video');
    // a flag to know if we should keep drawing on the canvas or not
    let should_draw = true;
    // no need for audio there
    video.muted = true;
    // gUM video tracks can change size
    video.onresize = (evt) => {
      canvas.width = video.videoWidth;
      canvas.height = video.videoHeight;
    };
    // in case the user blocks the camera?
    video.onpause = (evt) => {
      should_draw = false;
    };
    video.onplaying = (evt) => {
      should_draw = true;
      drawVideoToCanvas();
    };
    video.srcObject = original_stream;
    await video.play();
    const canvas_track = canvas.captureStream().getVideoTracks()[0];
    const originalStop = canvas_track.stop.bind(canvas_track);
    // override the #stop method so we can revoke the camera stream
    canvas_track.stop = () => {
      originalStop();
      should_draw = false;
      original_stream.getVideoTracks()[0].stop();
    };
    // merge with audio tracks
    return new MediaStream(original_stream.getAudioTracks().concat(canvas_track));

    // the drawing loop
    function drawVideoToCanvas() {
      if (!should_draw) {
        return;
      }
      ctx.filter = "none";
      ctx.clearRect(0, 0, canvas.width, canvas.height);
      ctx.filter = "invert(100%)";
      ctx.drawImage(video, 0, 0);
      requestAnimationFrame(drawVideoToCanvas);
    }
  };
}
Then every script that calls this method will receive a filtered video track.
Outsourced example, since gUM doesn't get along with StackSnippets.
Now, I'm not sure how to override methods from web extensions; you'll have to learn that by yourself. And beware: this script is really just a proof of concept and not ready for production. I didn't put any thought into handling anything other than the demo case.
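For the override-from-an-extension part, the usual pattern (a sketch, assuming the PoC above lives in a hypothetical override.js listed under web_accessible_resources in the manifest) is to inject a script element into the page, since content scripts run in an isolated world and can't patch the page's own navigator.mediaDevices directly:

// content-script.js: inject the override into the page's own context
const s = document.createElement('script');
s.src = chrome.runtime.getURL('override.js'); // hypothetical file containing the PoC above
s.onload = () => s.remove();
(document.head || document.documentElement).prepend(s);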
I am currently working on a project and need to be able to make a recording of my screen and save it locally to my computer.
The recording is saved as a webm, but every one of them has a really bad framerate, usually around 10-15 fps. Is there a way to increase the framerate of the recording?
I am able to increase the quality of the recording by playing around with the MediaRecorder options and codecs, but this doesn't seem to affect the framerate I am getting at all.
Here is the code I am using to make my recording:
const options = {
  mimeType: 'video/webm; codecs="vp9.00.41.8.00.01"',
  videoBitsPerSecond: 800 * Mbps, // `Mbps` is a constant defined elsewhere
  videoMaximizeFrameRate: true, // not a standard MediaRecorder option
};
mediaRecorder = new MediaRecorder(stream, options);
mediaRecorder.ondataavailable = handleDataAvailable;
mediaRecorder.onstop = handleStop;

startBtn.onclick = e => {
  mediaRecorder.start();
  startBtn.innerHTML = 'Recording';
};

stopBtn.onclick = e => {
  mediaRecorder.stop();
  startBtn.innerHTML = 'Start';
};

function handleDataAvailable(e) {
  recordedChunks.push(e.data);
}

async function handleStop() {
  const blob = new Blob(recordedChunks, {
    type: 'video/webm' // Blob options use `type`, not `mimeType`
  });
  const buffer = Buffer.from(await blob.arrayBuffer());
  // dialog (Electron) and writeFile (Node fs) are assumed to be imported elsewhere
  const { filePath } = await dialog.showSaveDialog({
    buttonLabel: 'Save video',
    defaultPath: `vid-${Date.now()}.webm`
  });
  console.log(filePath);
  if (filePath) {
    writeFile(filePath, buffer, () => console.log('video saved successfully'));
  }
}
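(For context, the `stream` above comes from a screen capture. A sketch of how it might be obtained, with a frame-rate hint; note these values are only hints and the actual capture rate still depends on the platform capturer:)

// Assumed acquisition of `stream`
const stream = await navigator.mediaDevices.getDisplayMedia({
  video: { frameRate: { ideal: 60, max: 60 } },
  audio: false,
});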
I have looked through the MDN documentation and haven't found anything about it. I also tried using different codecs with different parameters, but the results are always the same.
The framerate you're getting is typical for any standard screen capture.
The only way to go faster is to utilize the GPU's specific capability to capture and encode. This is out of scope for the web APIs.