<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Face Detection Camera Example</title>
<link href="js_example_style.css" rel="stylesheet" type="text/css" />
</head>
<body>
<h2>Face Detection Camera Example</h2>
<p>
    Click the <b>Start/Stop</b> button to start or stop the camera capture.<br>
    The <b>videoInput</b> is a &lt;video&gt; element used as the face detector input.
    The <b>canvasOutput</b> is a &lt;canvas&gt; element used as the face detector output.<br>
    The code in the &lt;textarea&gt; is executed when the video starts. You can modify it to investigate further.
</p>
<div>
<div class="control"><button id="startAndStop" disabled>Start</button></div>
<textarea class="code" rows="29" cols="80" id="codeEditor" spellcheck="false">
</textarea>
</div>
<p class="err" id="errorMessage"></p>
<div>
    <table cellpadding="0" cellspacing="0" width="0" border="0">
        <tr>
            <td>
                <video id="videoInput" width="320" height="240"></video>
            </td>
            <td>
                <canvas id="canvasOutput" width="320" height="240"></canvas>
            </td>
            <td></td>
            <td></td>
        </tr>
        <tr>
            <td>
                <div class="caption">videoInput</div>
            </td>
            <td>
                <div class="caption">canvasOutput</div>
            </td>
            <td></td>
            <td></td>
        </tr>
    </table>
</div>
<script src="https://webrtc.github.io/adapter/adapter-5.0.4.js" type="text/javascript"></script>
<script src="utils.js" type="text/javascript"></script>
<script id="codeSnippet" type="text/code-snippet">
let video = document.getElementById('videoInput');
let src = new cv.Mat(video.height, video.width, cv.CV_8UC4);
let dst = new cv.Mat(video.height, video.width, cv.CV_8UC4);
let gray = new cv.Mat();
let cap = new cv.VideoCapture(video);
let faces = new cv.RectVector();
let classifier = new cv.CascadeClassifier();

// load the pre-trained classifier.
classifier.load('haarcascade_frontalface_default.xml');

const FPS = 30;
function processVideo() {
    try {
        if (!streaming) {
            // clean up and stop.
            src.delete();
            dst.delete();
            gray.delete();
            faces.delete();
            classifier.delete();
            return;
        }
        let begin = Date.now();
        // start processing: grab a frame and convert it to grayscale.
        cap.read(src);
        src.copyTo(dst);
        cv.cvtColor(dst, gray, cv.COLOR_RGBA2GRAY, 0);
        // detect faces.
        classifier.detectMultiScale(gray, faces, 1.1, 3, 0);
        // draw a rectangle around each detected face.
        for (let i = 0; i < faces.size(); ++i) {
            let face = faces.get(i);
            let point1 = new cv.Point(face.x, face.y);
            let point2 = new cv.Point(face.x + face.width, face.y + face.height);
            cv.rectangle(dst, point1, point2, [255, 0, 0, 255]);
        }
        cv.imshow('canvasOutput', dst);
        // schedule the next frame, compensating for processing time to hold FPS.
        let delay = 1000 / FPS - (Date.now() - begin);
        setTimeout(processVideo, delay);
    } catch (err) {
        utils.printError(err);
    }
}
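// Note: detectMultiScale's parameters can be tuned for your scene. Below is a
// minimal sketch, kept commented out so the demo's behavior is unchanged; the
// values are illustrative assumptions, not part of this tutorial. A larger
// minNeighbors rejects more false positives, and a minimum size (a cv.Size)
// skips very small detections:
// let minSize = new cv.Size(30, 30);
// classifier.detectMultiScale(gray, faces, 1.1, 5, 0, minSize);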
// schedule the first call.
setTimeout(processVideo, 0);
</script>
<script type="text/javascript">
let utils = new Utils('errorMessage');

utils.loadCode('codeSnippet', 'codeEditor');

let streaming = false;
let videoInput = document.getElementById('videoInput');
let startAndStop = document.getElementById('startAndStop');
let canvasOutput = document.getElementById('canvasOutput');
let canvasContext = canvasOutput.getContext('2d');

startAndStop.addEventListener('click', () => {
    if (!streaming) {
        utils.clearError();
        utils.startCamera('qvga', onVideoStarted, 'videoInput');
    } else {
        utils.stopCamera();
        onVideoStopped();
    }
});

function onVideoStarted() {
    streaming = true;
    startAndStop.innerText = 'Stop';
    // match the element size to the actual capture resolution.
    videoInput.width = videoInput.videoWidth;
    videoInput.height = videoInput.videoHeight;
    utils.executeCode('codeEditor');
}

function onVideoStopped() {
    streaming = false;
    canvasContext.clearRect(0, 0, canvasOutput.width, canvasOutput.height);
    startAndStop.innerText = 'Start';
}

// fetch the cascade file before enabling the Start button.
utils.loadOpenCv(() => {
    let faceCascadeFile = 'haarcascade_frontalface_default.xml';
    utils.createFileFromUrl(faceCascadeFile, faceCascadeFile, () => {
        startAndStop.removeAttribute('disabled');
    });
});
</script>
</body>
</html>
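<!--
  Note on the cascade file: utils.createFileFromUrl downloads
  haarcascade_frontalface_default.xml and writes it into Emscripten's in-memory
  filesystem, which is why classifier.load() can later open it by name. A
  minimal sketch of the same step without the helper (an illustrative
  assumption about your setup, not code from this tutorial):

    fetch('haarcascade_frontalface_default.xml')
        .then(response => response.arrayBuffer())
        .then(buffer => {
            cv.FS_createDataFile('/', 'haarcascade_frontalface_default.xml',
                                 new Uint8Array(buffer), true, false, false);
            startAndStop.removeAttribute('disabled');
        });
-->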