Hi there, I’m trying to integrate opencv.js into a React app. The app needs to detect faces from a live webcam feed, but I’m getting this error:
VM3018 opencv.js:56 Uncaught abort(90) at Error
at jsStackTrace (https://docs.opencv.org/3.4.0/opencv.js:21:17991)
at stackTrace (https://docs.opencv.org/3.4.0/opencv.js:21:18162)
at abort (https://docs.opencv.org/3.4.0/opencv.js:56:19300)
at Array.Efc (https://docs.opencv.org/3.4.0/opencv.js:49:136407)
at Array.du (https://docs.opencv.org/3.4.0/opencv.js:25:212772)
at mdc (https://docs.opencv.org/3.4.0/opencv.js:49:125296)
at dynCall_viii_122 (eval at makeDynCaller (https://docs.opencv.org/3.4.0/opencv.js:21:1703205), <anonymous>:4:12)
at Mat.Mat$copyTo (eval at new_ (https://docs.opencv.org/3.4.0/opencv.js:21:1594131), <anonymous>:9:1)
at Mat.copyTo (https://docs.opencv.org/3.4.0/opencv.js:21:1715548)
at detect (http://localhost:3000/static/js/bundle.js:97:9)
If this abort() is unexpected, build with -s ASSERTIONS=1 which can give more information.
Here is how I’m integrating opencv.js into my React app:
import './App.css';
import { Button, Heading } from '@chakra-ui/react';
import { useEffect, useRef, useState } from 'react';
function App() {
const [isWebcamLoaded, setIsWebcamLoaded] = useState(false);
const videoRef = useRef<HTMLVideoElement>(null);
const [loaded, setLoaded] = useState(false);
const opencvRef = useRef<any>(null);
useEffect(() => {
const scriptTag = document.createElement('script');
scriptTag.src = 'https://docs.opencv.org/3.4.0/opencv.js';
scriptTag.addEventListener('load', () => setLoaded(true));
document.body.appendChild(scriptTag);
}, []);
useEffect(() => {
const initCamera = async () => {
navigator.mediaDevices
.getUserMedia({ video: { width: 300 } })
.then(stream => {
if (!videoRef.current) return;
let video = videoRef.current;
video.width = 300;
video.height = 300;
video.srcObject = stream;
video.play();
setIsWebcamLoaded(true);
})
.catch(err => {
console.error('error:', err);
});
};
if (!isWebcamLoaded) {
initCamera();
}
if (loaded && videoRef.current && isWebcamLoaded) {
const video = videoRef.current;
const opencvObj = {} as any;
//@ts-ignore
const { cv } = window;
opencvObj.src = new cv.Mat(video.height, video.width, cv.CV_8UC4);
opencvObj.dst = new cv.Mat(video.height, video.width, cv.CV_8UC4);
opencvObj.gray = new cv.Mat();
opencvObj.cap = new cv.VideoCapture(video);
opencvObj.faces = new cv.RectVector();
opencvObj.classifier = new cv.CascadeClassifier();
opencvObj.classifier.load('haarcascade_frontalface_default.xml');
opencvRef.current = opencvObj;
}
}, [loaded, isWebcamLoaded]);
const detect = () => {
const { cap, src, dst, gray, faces, classifier } = opencvRef.current;
//@ts-ignore
const { cv } = window;
cap.read(src);
src.copyTo(dst);
cv.cvtColor(dst, gray, cv.COLOR_RGBA2GRAY, 0);
// detect faces.
classifier.detectMultiScale(gray, faces, 1.1, 3, 0);
for (let i = 0; i < faces.size(); ++i) {
let face = faces.get(i);
let point1 = new cv.Point(face.x, face.y);
let point2 = new cv.Point(face.x + face.width, face.y + face.height);
cv.rectangle(dst, point1, point2, [255, 0, 0, 255]);
}
cv.imshow('canvasOutput', dst);
// schedule the next one.
let delay = 50;
setTimeout(detect, delay);
};
return (
<div className='App'>
<video
ref={videoRef}
style={{
clipPath: 'circle(50% at 50% 50%)',
width: '100%',
height: '100%',
}}></video>
<button onClick={detect}>detect</button>
</div>
);
}
export default App;