Camera not popping up when deployed on BuildFire - JavaScript

I am developing a QR code scanner with https://rawgit.com/sitepoint-editors/jsqrcode/master/src/qr_packed.js on BuildFire. It works fine in development, but when deployed to the marketplace the camera does not pop up.
This is my code:
//const qrcode = window.qrcode;
const video = document.createElement("video");
const canvasElement = document.getElementById("qr-canvas");
const canvas = canvasElement.getContext("2d");
const qrResult = document.getElementById("qr-result");
const outputData = document.getElementById("outputData");
const btnScanQR = document.getElementById("btn-scan-qr");
let scanning = false;

qrcode.callback = res => {
  if (res) {
    outputData.innerText = res;
    scanning = false;
    video.srcObject.getTracks().forEach(track => {
      track.stop();
    });
    qrResult.hidden = false;
    canvasElement.hidden = true;
    btnScanQR.hidden = false;
  }
};

buildfire.services.camera.barcodeScanner.scan(
  {
    preferFrontCamera: true,
    showFlipCameraButton: true,
    formats: "QR_CODE,PDF_417",
  },
  btnScanQR.onclick = () => {
    navigator.mediaDevices
      .getUserMedia({ video: { facingMode: "environment" } })
      .then(function(stream) {
        scanning = true;
        qrResult.hidden = true;
        btnScanQR.hidden = true;
        canvasElement.hidden = false;
        video.setAttribute("playsinline", true); // required to tell iOS Safari we don't want fullscreen
        video.srcObject = stream;
        video.play();
        tick();
        scan();
      });
  }
);

function tick() {
  canvasElement.height = video.videoHeight;
  canvasElement.width = video.videoWidth;
  canvas.drawImage(video, 0, 0, canvasElement.width, canvasElement.height);
  scanning && requestAnimationFrame(tick);
}

function scan() {
  try {
    qrcode.decode();
  } catch (e) {
    setTimeout(scan, 300);
  }
}

Related

Is it possible for a React function to keep calling itself until a state value changes?

I'm trying to replicate in React a metronome built with Vue. Basically, the play function keeps calling itself while playing is true. Here is the Vue code:
data() {
  return {
    playing: false,
    audioContext: null,
    audioBuffer: null,
    currentTime: null
  }
},
mounted() {
  this.initContext();
},
methods: {
  toggle() {
    if (!this.playing) {
      this.currentTime = this.audioContext.currentTime;
      this.playing = true;
      this.play();
    } else {
      this.playing = false;
    }
  },
  play() {
    this.currentTime += 60 / this.$store.state.tempo;
    const source = this.audioContext.createBufferSource();
    source.buffer = this.audioBuffer;
    source.connect(this.audioContext.destination);
    source.onended = this.playing ? this.play : "";
    source.start(this.currentTime);
  },
  async initContext() {
    this.audioContext = new AudioContext();
    this.audioBuffer = await fetch("click.wav")
      .then(res => res.arrayBuffer())
      .then(arrayBuffer =>
        this.audioContext.decodeAudioData(arrayBuffer)
      )
  }
}
This is what I tried. The problem is that React state updates are asynchronous, so the value of playing captured in the closure never changes while the function is running.
const firstRender = useRef(true);
const [bpm, setBpm] = useState<number>(60);
const [playing, setPlaying] = useState<boolean>(false);
const [context, setContext] = useState<AudioContext>(undefined);
const [buffer, setBuffer] = useState<AudioBuffer>(undefined);
const [currentTime, setCurrentTime] = useState<number>(0);

useEffect(() => {
  if (firstRender.current) {
    firstRender.current = false;
    initContext();
    return;
  }
});

const toggle = () => {
  if (!playing) {
    setCurrentTime(context.currentTime);
    setPlaying(true);
    play();
  } else {
    setPlaying(false)
  }
}

const play = () => {
  setCurrentTime(currentTime + 60 / bpm);
  let source = context.createBufferSource();
  source.buffer = buffer;
  source.connect(context.destination);
  source.onended = playing ? play : undefined;
  source.start(currentTime);
}

const initContext = async () => {
  const audioContext = new AudioContext();
  const audioBuffer = await fetch("click.wav")
    .then(res => res.arrayBuffer())
    .then(arrayBuffer =>
      audioContext.decodeAudioData(arrayBuffer)
    )
  setContext(audioContext);
  setBuffer(audioBuffer);
}
I guess the solution should be in useEffect, but I could not manage to recreate it. Any suggestion is appreciated!
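One common way around the stale closure is to keep the playing flag (and the scheduled time) in refs, so the onended callback always reads the current value instead of the value captured at render time. A minimal sketch of that idea, reusing the context, buffer, and bpm state from the code above; this is one workable pattern, not the only fix:

// Track "playing" in a ref so source.onended sees the live value,
// not the value captured when play() was created.
const playingRef = useRef(false);
const timeRef = useRef(0);

const toggle = () => {
  if (!playingRef.current) {
    playingRef.current = true;
    timeRef.current = context.currentTime;
    play();
  } else {
    playingRef.current = false;
  }
};

const play = () => {
  timeRef.current += 60 / bpm;
  const source = context.createBufferSource();
  source.buffer = buffer;
  source.connect(context.destination);
  // Reading the ref here avoids the stale state closure.
  source.onended = () => {
    if (playingRef.current) play();
  };
  source.start(timeRef.current);
};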

Can I use addEventListener('play') inside a function? The function runs, but the addEventListener('play') handler does not

const faceDetection = () => {
  const video = document.getElementById(`video`);
  console.log(video);
  const displaySize = { width: video.width, height: video.height };
  video.addEventListener('click', () => {
    console.log(`run`);
  });
  video.addEventListener('play', () => {
    console.log(`run`);
    const canvas = faceapi.createCanvasFromMedia(video);
    camera.append(canvas);
    faceapi.matchDimensions(canvas, displaySize);
    // interval
    setInterval(async () => {
      console.log(`this run`);
      const detections = await faceapi
        .detectAllFaces(video, new faceapi.TinyFaceDetectorOptions())
        .withFaceLandmarks();
      const resizedDetections = faceapi.resizeResults(detections, displaySize);
      canvas.getContext('2d').clearRect(0, 0, canvas.width, canvas.height);
      faceapi.draw.drawDetections(canvas, resizedDetections);
      faceapi.draw.drawFaceLandmarks(canvas, resizedDetections);
    }, 100);
  });
};
The video.addEventListener('click') handler does run, but video.addEventListener('play') does not. Can someone please give me an idea why it's not working?
My mistake: I was calling vid.play() inside the navigator.mediaDevices.getUserMedia() callback, so playback (and its 'play' event) started before faceDetection attached the listener. Commenting that call out fixed it:
navigator.mediaDevices
  .getUserMedia({ video: true })
  .then((stream) => {
    vid.srcObject = stream;
    // vid.play();
    if (backend === 'webgl') return faceDetection(100);
    if (backend === 'cpu') return faceDetection(1000);
    track = stream.getTracks();
    resetMessages();
  })
  .catch((e) => {
    console.log(e);
  })
  .finally(() => {
    preloader.style.display = 'none';
  });
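In other words, the 'play' event had already fired by the time faceDetection attached its listener, so the listener never ran. The ordering can also be fixed the other way around: attach the listener first, then start playback. A minimal sketch (vid, backend, and faceDetection are the names from the snippets above):

navigator.mediaDevices
  .getUserMedia({ video: true })
  .then((stream) => {
    vid.srcObject = stream;
    // Register the 'play' listener first...
    if (backend === 'webgl') faceDetection(100);
    if (backend === 'cpu') faceDetection(1000);
    // ...then start playback, so the event cannot be missed.
    return vid.play();
  })
  .catch((e) => console.log(e));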
Hi, maybe your component is not loaded yet when you call the function. Can you try adding a timeout, like in this issue?
Stack Overflow - How can I make a waitFor(delay)

How to switch between front camera and rear camera in javascript?

let constraints;
function handleVideo() {
  const constraints = {
    video: {
      facingMode: {
        exact: 'user'
      }
    }
  };
  var video;
  navigator.mediaDevices.getUserMedia(constraints)
    .then((stream) => {
      video = document.createElement("video")
      video.srcObject = stream
      video.play()
      video.onloadeddata = () => {
        ctx.height = video.videoHeight
      }
    })
}
I know that changing exact from 'user' to 'environment' switches between the front and back camera, but I don't know how to do this on click.
Something like this :
function handleVideo(cameraFacing) {
  const constraints = {
    video: {
      facingMode: {
        exact: cameraFacing
      }
    }
  };
  return constraints;
}

function turnVideo(constraints) {
  let video;
  navigator.mediaDevices.getUserMedia(constraints)
    .then((stream) => {
      video = document.createElement("video")
      video.srcObject = stream
      video.play()
      video.onloadeddata = () => {
        ctx.height = video.videoHeight
      }
    })
}

document.querySelector(".frontCamera").addEventListener("click", () => {
  turnVideo(handleVideo("user"));
})
document.querySelector(".backCamera").addEventListener("click", () => {
  turnVideo(handleVideo("environment"));
})
<div class="frontCamera">front</div>
<div class="backCamera">back</div>

Using camera flashlight not allowing to change facingmode - Navigator.mediaDevices

I'm trying to build a web app that takes photos using the webcam or mobile camera, depending on the device. I have already made a button that changes constraints.facingMode so the user can use both cameras ("environment", "user") if the device supports it. The problem appears when I enable flashlight support as well, by creating a button and setting it up as the flashlight toggler like this:
const SUPPORTS_MEDIA_DEVICES = 'mediaDevices' in navigator;
if (SUPPORTS_MEDIA_DEVICES) {
  const track = stream.getVideoTracks()[0];
  const imageCapture = new ImageCapture(track);
  const photoCapabilities = imageCapture.getPhotoCapabilities().then(() => {
    const btn = document.querySelector('.toggleCameraTorch');
    btn.style.visibility = 'visible';
    btn.addEventListener('click', function () {
      try {
        track.applyConstraints({
          advanced: [{ torch: !wheelsfs.videoConstraint.torchState }]
        });
        wheelsfs.videoConstraint.torchState = !wheelsfs.videoConstraint.torchState;
      } catch (e) {
        alert(e.message);
      }
    });
  });
}
After that, the flashlight works perfectly, but I no longer have the option to swap cameras (facingMode). When I try to change the camera I get the error "could not start video source", as if the camera is already being used by something.
This is how I'm changing the camera facingMode:
wheelsfs.videoConstraint.facingMode.exact =
  wheelsfs.videoConstraint.facingMode.exact == "environment" ? "user" : "environment";
var cameraInput = wheelsfs.videoConstraint.facingMode.exact;
wheelsfs.videoTrue.srcObject && wheelsfs.videoTrue.srcObject.getTracks().forEach(t => t.stop());
wheelsfs.videoConstraint = {
  video: {
    width: { ideal: trueWidth },
    height: { ideal: trueHeight },
    facingMode: { ideal: "environment" }
  },
  facingMode: { exact: cameraInput }
};
navigator.mediaDevices.getUserMedia({ video: wheelsfs.videoConstraint }).then(function (stream) {
  wheelsfs.videoTrue.srcObject = stream;
  wheelsfs.videoTrue.play();
  const SUPPORTS_MEDIA_DEVICES = 'mediaDevices' in navigator;
  if (SUPPORTS_MEDIA_DEVICES) {
    const track = stream.getVideoTracks()[0];
    const imageCapture = new ImageCapture(track);
    const photoCapabilities = imageCapture.getPhotoCapabilities().then(() => {
      const btn = document.querySelector('.toggleCameraTorch');
      btn.style.visibility = 'visible';
      btn.addEventListener('click', function () {
        try {
          track.applyConstraints({
            advanced: [{ torch: !wheelsfs.videoConstraint.torchState }]
          });
          wheelsfs.videoConstraint.torchState = !wheelsfs.videoConstraint.torchState;
        } catch (e) {
          alert(e.message);
        }
      });
    });
  }
}).catch((e) => { console.log(e.message); });
Solved it by storing stream.getVideoTracks()[0] in a variable and calling stop() on it before changing the camera (facingMode).
So now I do:
if (SUPPORTS_MEDIA_DEVICES) {
  wheelsfs.track = stream.getVideoTracks()[0];
  const imageCapture = new ImageCapture(wheelsfs.track);
  const photoCapabilities = imageCapture.getPhotoCapabilities().then(() => {
    const btn = document.querySelector('.toggleCameraTorch');
    btn.style.visibility = 'visible';
    btn.addEventListener('click', function () {
      try {
        wheelsfs.track.applyConstraints({
          advanced: [{ torch: !wheelsfs.videoConstraint.torchState }]
        });
        wheelsfs.videoConstraint.torchState = !wheelsfs.videoConstraint.torchState;
      } catch (e) {
        alert(e.message);
      }
    });
  });
}
On the second line I save the track in a public variable, and then, in the function that changes the active camera, I make sure to run "wheelsfs.track.stop();" just before the navigator.mediaDevices.getUserMedia call.

Using the device camera for capturing an image in ReactJS

Can I access the device camera and take a photo in ReactJS? The goal is to create a component that takes a picture with the click of a button. From what I have studied, I should use mediaDevices, but I am looking for sample code in ReactJS. Please provide me with a sample, or if you have experience implementing this, please guide me.
I have prepared sample code that can be used as a component. This snippet also works on devices that have two cameras. If you want to record video instead of taking a photo, you can also enable the audio option in the constraints.
import React from "react";

class App extends React.Component {
  constructor() {
    super();
    this.cameraNumber = 0;
    this.state = {
      imageDataURL: null,
    };
  }

  initializeMedia = async () => {
    this.setState({ imageDataURL: null });

    if (!("mediaDevices" in navigator)) {
      navigator.mediaDevices = {};
    }

    if (!("getUserMedia" in navigator.mediaDevices)) {
      navigator.mediaDevices.getUserMedia = function (constraints) {
        var getUserMedia =
          navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
        if (!getUserMedia) {
          return Promise.reject(new Error("getUserMedia Not Implemented"));
        }
        return new Promise((resolve, reject) => {
          getUserMedia.call(navigator, constraints, resolve, reject);
        });
      };
    }

    // Get the details of the video inputs of the device
    const videoInputs = await this.getListOfVideoInputs();

    // The device has a camera
    if (videoInputs.length) {
      navigator.mediaDevices
        .getUserMedia({
          video: {
            deviceId: {
              exact: videoInputs[this.cameraNumber].deviceId,
            },
          },
        })
        .then((stream) => {
          this.player.srcObject = stream;
        })
        .catch((error) => {
          console.error(error);
        });
    } else {
      alert("The device does not have a camera");
    }
  };

  capturePicture = () => {
    var canvas = document.createElement("canvas");
    canvas.width = this.player.videoWidth;
    canvas.height = this.player.videoHeight;
    var context = canvas.getContext("2d");
    context.drawImage(this.player, 0, 0, canvas.width, canvas.height);
    this.player.srcObject.getVideoTracks().forEach((track) => {
      track.stop();
    });
    console.log(canvas.toDataURL());
    this.setState({ imageDataURL: canvas.toDataURL() });
  };

  switchCamera = async () => {
    const listOfVideoInputs = await this.getListOfVideoInputs();

    // The device has more than one camera
    if (listOfVideoInputs.length > 1) {
      if (this.player.srcObject) {
        this.player.srcObject.getVideoTracks().forEach((track) => {
          track.stop();
        });
      }
      // Toggle between the first and second camera
      this.cameraNumber = this.cameraNumber === 0 ? 1 : 0;
      // Restart based on the new camera input
      this.initializeMedia();
    } else if (listOfVideoInputs.length === 1) {
      alert("The device has only one camera");
    } else {
      alert("The device does not have a camera");
    }
  };

  getListOfVideoInputs = async () => {
    // Get the details of the media devices available
    const enumerateDevices = await navigator.mediaDevices.enumerateDevices();
    // Keep only video inputs (for devices with multiple cameras)
    return enumerateDevices.filter((device) => device.kind === "videoinput");
  };

  render() {
    const playerORImage = this.state.imageDataURL ? (
      <img src={this.state.imageDataURL} alt="cameraPic" />
    ) : (
      <video
        ref={(reference) => {
          this.player = reference;
        }}
        autoPlay
      ></video>
    );

    return (
      <div className="App">
        {playerORImage}
        <button onClick={this.initializeMedia}>Take Photo</button>
        <button onClick={this.capturePicture}>Capture</button>
        <button onClick={this.switchCamera}>Switch</button>
      </div>
    );
  }
}

export default App;
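For completeness, a minimal entry point that mounts the component; this is standard ReactDOM boilerplate assumed here, not part of the original answer:

import React from "react";
import ReactDOM from "react-dom";
import App from "./App";

// Mount the camera component into a #root element.
ReactDOM.render(<App />, document.getElementById("root"));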
