-
Notifications
You must be signed in to change notification settings - Fork 3
Expand file tree
/
Copy pathscript.js
More file actions
42 lines (39 loc) · 1.21 KB
/
script.js
File metadata and controls
42 lines (39 loc) · 1.21 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
/**
 * Face-detection demo built directly on the global `faceapi` object
 * (face-api.js, loaded via a <script> tag) — no ml5.js wrapper.
 * Tutorial: https://www.youtube.com/watch?v=CVClHLwv-4I&t=183s&ab_channel=WebDevSimplified
 */
const video = document.getElementById("video");
// Directory, served alongside this script, containing the pre-trained model weights.
const URL_MODELS = "./models";
// Load all required neural nets in parallel, then start the webcam feed.
Promise.all([
  faceapi.nets.tinyFaceDetector.loadFromUri(URL_MODELS),
  faceapi.nets.faceRecognitionNet.loadFromUri(URL_MODELS),
  faceapi.nets.faceExpressionNet.loadFromUri(URL_MODELS),
])
  .then(startVideo)
  // Fix: the original chain had no rejection handler, so a failed model
  // fetch died as a silent unhandled promise rejection.
  .catch((err) => console.error("Failed to load face-api models:", err));
/**
 * Starts the user's webcam and pipes the stream into the `video` element.
 *
 * Fix: the original called the callback-style `navigator.getUserMedia`,
 * which is deprecated and removed from modern browsers (undefined without
 * vendor prefixes). The Promise-based `navigator.mediaDevices.getUserMedia`
 * is the standard replacement; behavior is otherwise identical (assign the
 * stream to `video.srcObject`, log any error).
 */
function startVideo() {
  navigator.mediaDevices
    .getUserMedia({ video: {} })
    .then((stream) => {
      video.srcObject = stream;
    })
    .catch((err) => console.error(err));
}
// Latest face-detection results (array of detections with expressions);
// set to [] when detection is disabled.
let detections;
// NOTE(review): `rileva`, `windowWidth` and `windowHeight` are globals defined
// elsewhere (presumably a p5.js sketch that consumes `detections`) — confirm
// they are in scope before this script runs, otherwise `rileva` throws a
// ReferenceError here.
if (rileva) {
  video.addEventListener("play", () => {
    const canvas = faceapi.createCanvasFromMedia(video);
    const displaySize = { width: windowWidth, height: windowHeight };
    faceapi.matchDimensions(canvas, displaySize);
    // Poll the video once per second with the lightweight tiny detector.
    const intervalId = setInterval(async () => {
      detections = await faceapi
        .detectAllFaces(
          video,
          new faceapi.TinyFaceDetectorOptions({ inputSize: 256 })
        )
        .withFaceExpressions();
      // Wipe any previous overlay; nothing is drawn here — the results are
      // only exposed through the shared `detections` variable.
      canvas.getContext("2d").clearRect(0, 0, canvas.width, canvas.height);
    }, 1000);
    // Fix: the original discarded the interval id, so the detection loop
    // kept running (leaking timers and inference work) after the video
    // stopped. Clear it when playback pauses; a later `play` event
    // re-registers everything above.
    video.addEventListener("pause", () => clearInterval(intervalId), {
      once: true,
    });
  });
} else detections = [];