Face Recognition + Age Estimation with JavaScript | Webcam Implementation Using face-api.js
This guide explains how to build a browser‑based application that uses JavaScript and face-api.js to recognize faces in real time from a webcam and estimate age using AI.
The implementation shown here runs entirely on the client side, handling face detection, age estimation, and drawing purely with JavaScript.
It’s ideal for anyone who wants to create a site that displays estimated age from facial recognition or experiment with AI‑powered face age prediction.
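Before diving into the full listing, here is the essential flow in isolation: load the models, attach the webcam, run detection. This is only a sketch (the function name runPipeline is ours, and it omits the canvas drawing and the scan loop that the complete source below fills in):
async function runPipeline(video){
  // 1. Load the detector plus the age/gender and expression models
  await Promise.all([
    faceapi.nets.tinyFaceDetector.loadFromUri('./models'),
    faceapi.nets.ageGenderNet.loadFromUri('./models'),
    faceapi.nets.faceExpressionNet.loadFromUri('./models')
  ]);
  // 2. Attach the webcam stream to a video element and start playback
  video.srcObject=await navigator.mediaDevices.getUserMedia({video:true});
  await video.play();
  // 3. Detect all faces in the current frame, with age, gender, and expressions
  const results=await faceapi.detectAllFaces(video,new faceapi.TinyFaceDetectorOptions())
    .withAgeAndGender().withFaceExpressions();
  results.forEach(r=>console.log(Math.round(r.age),r.gender,r.expressions));
}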
When the page loads, allow camera access and the video feed will appear shortly. Position your face in front of the camera and the page will detect it and overlay the AI-estimated age. Masks interfere with detection, so remove them. (If the feed does not display correctly, try reloading the page.)
Source Code
<div class="wait"><img src="./imgs/loading.gif" width="64" height="64"><p>Loading</p></div>
<div><canvas id="preview"></canvas></div>
<script src="./models/face-api.min.js"></script>
<script>
// Load the three models we need: TinyFaceDetector for detection,
// AgeGender for age estimation, and FaceExpression for expressions.
// loadFromUri() returns a promise, so keep a handle and wait for all
// three before running the first detection.
var modelsReady=Promise.all([
faceapi.nets.tinyFaceDetector.loadFromUri('./models'),
faceapi.nets.ageGenderNet.loadFromUri('./models'),
faceapi.nets.faceExpressionNet.loadFromUri('./models')
]);
window.addEventListener('DOMContentLoaded',function(){
ShowWait();
});
var video,prev,prev_ctx,w,h;
window.addEventListener('load',function(event){
video=document.createElement('video');
video.setAttribute("autoplay","");
video.setAttribute("muted","");
video.setAttribute("playsinline","");
video.onloadedmetadata = function(e){video.play();};
prev=document.getElementById("preview");
prev_ctx=prev.getContext("2d", {willReadFrequently:true});
// Display mirrored horizontally
prev.style.transform="scaleX(-1)";
// Camera permission dialog will appear
navigator.mediaDevices.getUserMedia(
// Microphone off, camera settings: prefer front camera, prefer 640×480
{"audio":false,"video":{"facingMode":"user","width":{"ideal":640},"height":{"ideal":480}}}
).then( // When permission is granted
function(stream){
video.srcObject = stream;
// Start scanning once the models have loaded, with a one-second
// delay so the camera has time to deliver its first frames
modelsReady.then(function(){setTimeout(Scan,1000,true);});
}
).catch(
// When permission is denied
function(){
HideWait(1);
}
);
function Scan(first){
if(first){
// Actual capture resolution the camera settled on
w=video.videoWidth;
h=video.videoHeight;
// Display size on screen
prev.style.width="100%";
prev.style.maxWidth="640px";
// Internal canvas size
prev.setAttribute("width",w);
prev.setAttribute("height",h);
HideWait(1);
}
detect()
.then(
function(result){
prev_ctx.drawImage(video,0,0,w,h);
for(let i=0;i<result.length;i++){
if(result[i].age&&result[i].expressions){
// Draw a red box around the detected face
let box=result[i].detection.box;
prev_ctx.lineWidth=4;
prev_ctx.strokeStyle="#ff0000";
prev_ctx.strokeRect(box.x,box.y,box.width,box.height);
// The canvas is mirrored via CSS, so flip the context horizontally
// before drawing text, otherwise the label would render reversed
prev_ctx.scale(-1, 1);
prev_ctx.font="36px 'MS Gothic', 'Meiryo', 'Yu Gothic', 'Noto Sans JP', sans-serif";
prev_ctx.fillStyle="pink";
let age=Math.round(result[i].age)+" yrs ";
let gender="";
//if(result[i].gender==="male"){gender="male ";}
//if(result[i].gender==="female"){gender="female ";}
// Pick the expression with the highest probability
let exps=result[i].expressions.neutral;
let expstr="Neutral";
for(let name in result[i].expressions){
if(result[i].expressions[name]>exps){
exps=result[i].expressions[name];
expstr=name.charAt(0).toUpperCase()+name.slice(1);
}
}
prev_ctx.fillText(age+gender+expstr,-box.x-box.width,box.y-4);
// Undo the mirror flip before the next drawing pass
prev_ctx.setTransform(1,0,0,1,0,0);
}
}
// Schedule the next scan (~10 ms later, plus however long detection takes)
setTimeout(Scan,10,false);
}
)
}
// Detect all faces in the current video frame and attach
// age/gender and expression estimates to each result
async function detect(){
return faceapi.detectAllFaces(video, new faceapi.TinyFaceDetectorOptions()).withAgeAndGender().withFaceExpressions();
}
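// Note: TinyFaceDetectorOptions accepts tuning parameters, e.g.
// new faceapi.TinyFaceDetectorOptions({inputSize:416,scoreThreshold:0.5})
// (those values are the library defaults). A smaller inputSize such as
// 224 is faster but less accurate; a higher scoreThreshold discards
// low-confidence detections.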
});
function ShowWait(){
document.querySelector(".wait").style.display="block";
}
// Hide the loading indicator after `interval` milliseconds
function HideWait(interval){
setTimeout(function(){
document.querySelector(".wait").style.display="none";
},interval);
}
</script>
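One thing the listing above never does is release the camera. If you adapt it into a larger page, it is good practice to stop the stream's tracks when you are finished. A minimal sketch, assuming you kept a reference to the stream that getUserMedia resolved with (stopCamera is a hypothetical helper, not part of face-api.js):
function stopCamera(stream){
  // Stop every track so the browser's camera-in-use indicator turns off
  stream.getTracks().forEach(function(track){track.stop();});
}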
Libraries Used
We used face-api.js (https://github.com/justadudewhohacks/face-api.js).
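Note that the ./models directory referenced in the source above must contain not only face-api.min.js but also the pretrained weight files for the three models (the tiny_face_detector, age_gender_model, and face_expression files, each a weights manifest JSON plus its shard). These are distributed in the weights directory of the face-api.js repository.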
License information for face-api.js:
MIT License
Copyright (c) 2018 Vincent Mühler

