/**
 * Human demo for browsers
 * @default Human Library
 * @summary <https://github.com/vladmandic/human>
 * @author <https://github.com/vladmandic>
 * @copyright <https://github.com/vladmandic>
 * @license MIT
 */

import * as H from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human
import * as indexDb from './indexdb'; // methods to deal with indexdb

const humanConfig = { // user configuration for human, used to fine-tune behavior
  cacheSensitivity: 0,
  modelBasePath: '../../models',
  filter: { equalization: true }, // run input through histogram equalization
  face: {
    enabled: true,
    detector: { rotation: true, return: true, cropFactor: 1.6, mask: false }, // return tensor is used to get detected face image
    description: { enabled: true }, // default model for face descriptor extraction is faceres
    // mobilefacenet: { enabled: true, modelPath: 'https://vladmandic.github.io/human-models/models/mobilefacenet.json' }, // alternative model
    // insightface: { enabled: true, modelPath: 'https://vladmandic.github.io/insightface/models/insightface-mobilenet-swish.json' }, // alternative model
    iris: { enabled: true }, // needed to determine gaze direction
    emotion: { enabled: false }, // not needed
    antispoof: { enabled: true }, // enable optional antispoof module
    liveness: { enabled: true }, // enable optional liveness module
  },
  body: { enabled: false },
  hand: { enabled: false },
  object: { enabled: false },
  gesture: { enabled: true }, // parses face and iris gestures
};
// const matchOptions = { order: 2, multiplier: 1000, min: 0.0, max: 1.0 }; // for embedding model
const matchOptions = { order: 2, multiplier: 25, min: 0.2, max: 0.8 }; // for faceres model

const options = {
  minConfidence: 0.6, // overall face confidence for box, face, gender, real, live
  minSize: 224, // min input size to face descriptor model before degradation
  maxTime: 30000, // max time before giving up
  blinkMin: 10, // minimum duration of a valid blink
  blinkMax: 800, // maximum duration of a valid blink
  threshold: 0.5, // minimum similarity required to consider a match
  mask: humanConfig.face.detector.mask,
  rotation: humanConfig.face.detector.rotation,
  cropFactor: humanConfig.face.detector.cropFactor,
  ...matchOptions,
};
const ok: Record<string, { status: boolean | undefined, val: number }> = { // must meet all rules
  faceCount: { status: false, val: 0 },
  faceConfidence: { status: false, val: 0 },
  facingCenter: { status: false, val: 0 },
  lookingCenter: { status: false, val: 0 },
  blinkDetected: { status: false, val: 0 },
  faceSize: { status: false, val: 0 },
  antispoofCheck: { status: false, val: 0 },
  livenessCheck: { status: false, val: 0 },
  age: { status: false, val: 0 },
  gender: { status: false, val: 0 },
  timeout: { status: true, val: 0 },
  descriptor: { status: false, val: 0 },
  elapsedMs: { status: undefined, val: 0 }, // total time while waiting for valid face
  detectFPS: { status: undefined, val: 0 }, // mark detection fps performance
  drawFPS: { status: undefined, val: 0 }, // mark redraw fps performance
};
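// helper that reports whether every required validation rule above has passed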
const allOk = () => ok.faceCount.status
  && ok.faceSize.status
  && ok.blinkDetected.status
  && ok.facingCenter.status
  && ok.lookingCenter.status
  && ok.faceConfidence.status
  && ok.antispoofCheck.status
  && ok.livenessCheck.status
  && ok.descriptor.status
  && ok.age.status
  && ok.gender.status;
const current: { face: H.FaceResult | null, record: indexDb.FaceRecord | null } = { face: null, record: null }; // current face record and matched database record
const blink = { // internal timers for blink start/end/duration
  start: 0,
  end: 0,
  time: 0,
};
// let db: Array<{ name: string, source: string, embedding: number[] }> = []; // holds loaded face descriptor database
const human = new H.Human(humanConfig); // create instance of human with overrides from user configuration

human.env.perfadd = false; // controls whether performance data shows instant or cumulative values
human.draw.options.font = 'small-caps 18px "Lato"'; // set font used to draw labels when using draw methods
human.draw.options.lineHeight = 20;
const dom = { // grab instances of dom objects so we don't have to look them up later
  video: document.getElementById('video') as HTMLVideoElement,
  canvas: document.getElementById('canvas') as HTMLCanvasElement,
  log: document.getElementById('log') as HTMLPreElement,
  fps: document.getElementById('fps') as HTMLPreElement,
  match: document.getElementById('match') as HTMLDivElement,
  name: document.getElementById('name') as HTMLInputElement,
  save: document.getElementById('save') as HTMLSpanElement,
  delete: document.getElementById('delete') as HTMLSpanElement,
  retry: document.getElementById('retry') as HTMLDivElement,
  source: document.getElementById('source') as HTMLCanvasElement,
  ok: document.getElementById('ok') as HTMLDivElement,
};
const timestamp = { detect: 0, draw: 0 }; // holds information used to calculate performance and possible memory leaks
let startTime = 0; // timestamp when validation starts, used to enforce options.maxTime
const log = (...msg) => { // helper method to output messages
  dom.log.innerText += msg.join(' ') + '\n';
  console.log(...msg); // eslint-disable-line no-console
};
async function webCam() { // initialize webcam
  // @ts-ignore resizeMode is not yet defined in tslib
  const cameraOptions: MediaStreamConstraints = { audio: false, video: { facingMode: 'user', resizeMode: 'none', width: { ideal: document.body.clientWidth } } };
  const stream: MediaStream = await navigator.mediaDevices.getUserMedia(cameraOptions);
  const ready = new Promise((resolve) => { dom.video.onloadeddata = () => resolve(true); });
  dom.video.srcObject = stream;
  void dom.video.play();
  await ready;
  dom.canvas.width = dom.video.videoWidth;
  dom.canvas.height = dom.video.videoHeight;
  dom.canvas.style.width = '50%';
  dom.canvas.style.height = '50%';
  if (human.env.initial) log('video:', dom.video.videoWidth, dom.video.videoHeight, '|', stream.getVideoTracks()[0].label);
  dom.canvas.onclick = () => { // pause when clicked on screen and resume on next click
    if (dom.video.paused) void dom.video.play();
    else dom.video.pause();
  };
}
async function detectionLoop() { // main detection loop
  if (!dom.video.paused) {
    if (current.face?.tensor) human.tf.dispose(current.face.tensor); // dispose previous tensor
    await human.detect(dom.video); // actual detection; we're not capturing output in a local variable as it can also be reached via human.result
    const now = human.now();
    ok.detectFPS.val = Math.round(10000 / (now - timestamp.detect)) / 10;
    timestamp.detect = now;
    requestAnimationFrame(detectionLoop); // start new frame immediately
  }
}
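// draw or update one status line per validation rule and color it green/red based on pass/fail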
function drawValidationTests() {
  let y = 32;
  for (const [key, val] of Object.entries(ok)) {
    let el = document.getElementById(`ok-${key}`);
    if (!el) {
      el = document.createElement('div');
      el.id = `ok-${key}`;
      el.innerText = key;
      el.className = 'ok';
      el.style.top = `${y}px`;
      dom.ok.appendChild(el);
    }
    if (typeof val.status === 'boolean') el.style.backgroundColor = val.status ? 'lightgreen' : 'lightcoral';
    const status = val.status ? 'ok' : 'fail';
    el.innerText = `${key}: ${val.val === 0 ? status : val.val}`;
    y += 28;
  }
}
async function validationLoop(): Promise<H.FaceResult> { // main screen refresh loop
  const interpolated = human.next(human.result); // smooth results using last-known results
  human.draw.canvas(dom.video, dom.canvas); // draw canvas to screen
  await human.draw.all(dom.canvas, interpolated); // draw labels, boxes, lines, etc.
  const now = human.now();
  ok.drawFPS.val = Math.round(10000 / (now - timestamp.draw)) / 10;
  timestamp.draw = now;
  ok.faceCount.val = human.result.face.length;
  ok.faceCount.status = ok.faceCount.val === 1; // must be exactly one detected face
  if (ok.faceCount.status) { // skip the rest if there is no face
    const gestures: string[] = Object.values(human.result.gesture).map((gesture: H.GestureResult) => gesture.gesture); // flatten all gestures
    if (gestures.includes('blink left eye') || gestures.includes('blink right eye')) blink.start = human.now(); // blink starts when eyes get closed
    if (blink.start > 0 && !gestures.includes('blink left eye') && !gestures.includes('blink right eye')) blink.end = human.now(); // if blink started, measure how long until eyes are back open
    ok.blinkDetected.status = ok.blinkDetected.status || (Math.abs(blink.end - blink.start) > options.blinkMin && Math.abs(blink.end - blink.start) < options.blinkMax);
    if (ok.blinkDetected.status && blink.time === 0) blink.time = Math.trunc(blink.end - blink.start);
    ok.facingCenter.status = gestures.includes('facing center');
    ok.lookingCenter.status = gestures.includes('looking center'); // must face camera and look at camera
    ok.faceConfidence.val = human.result.face[0].faceScore || human.result.face[0].boxScore || 0;
    ok.faceConfidence.status = ok.faceConfidence.val >= options.minConfidence;
    ok.antispoofCheck.val = human.result.face[0].real || 0;
    ok.antispoofCheck.status = ok.antispoofCheck.val >= options.minConfidence;
    ok.livenessCheck.val = human.result.face[0].live || 0;
    ok.livenessCheck.status = ok.livenessCheck.val >= options.minConfidence;
    ok.faceSize.val = Math.min(human.result.face[0].box[2], human.result.face[0].box[3]);
    ok.faceSize.status = ok.faceSize.val >= options.minSize;
    ok.descriptor.val = human.result.face[0].embedding?.length || 0;
    ok.descriptor.status = ok.descriptor.val > 0;
    ok.age.val = human.result.face[0].age || 0;
    ok.age.status = ok.age.val > 0;
    ok.gender.val = human.result.face[0].genderScore || 0;
    ok.gender.status = ok.gender.val >= options.minConfidence;
  }
  // check timeout and either finish or schedule another pass
  ok.timeout.status = ok.elapsedMs.val <= options.maxTime;
  drawValidationTests();
  if (allOk() || !ok.timeout.status) { // all criteria met or timed out
    dom.video.pause();
    return human.result.face[0];
  }
  ok.elapsedMs.val = Math.trunc(human.now() - startTime);
  return new Promise((resolve) => {
    setTimeout(async () => {
      await validationLoop(); // run validation loop until conditions are met
      resolve(human.result.face[0]); // recursive promise resolve
    }, 30); // used to slow down refresh from max refresh rate to a target of 30 fps
  });
}
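// save current face image and descriptor as a new record in indexdb under the entered name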
async function saveRecords() {
  if (dom.name.value.length > 0) {
    const image = dom.canvas.getContext('2d')?.getImageData(0, 0, dom.canvas.width, dom.canvas.height) as ImageData;
    const rec = { id: 0, name: dom.name.value, descriptor: current.face?.embedding as number[], image };
    await indexDb.save(rec);
    log('saved face record:', rec.name, 'descriptor length:', current.face?.embedding?.length);
    log('known face records:', await indexDb.count());
  } else {
    log('invalid name');
  }
}
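// delete the currently matched record from indexdb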
async function deleteRecord() {
  if (current.record && current.record.id > 0) {
    await indexDb.remove(current.record);
  }
}
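// display the detected face crop and match its descriptor against all records stored in indexdb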
async function detectFace() {
  dom.canvas.style.height = '';
  dom.canvas.getContext('2d')?.clearRect(0, 0, options.minSize, options.minSize);
  if (!current?.face?.tensor || !current?.face?.embedding) return false;
  console.log('face record:', current.face); // eslint-disable-line no-console
  log(`detected face: ${current.face.gender} ${current.face.age || 0}y distance ${current.face.iris || 0}cm/${Math.round(100 * (current.face.iris || 0) / 2.54) / 100}in`);
  human.tf.browser.toPixels(current.face.tensor as unknown as H.TensorLike, dom.canvas);
  if (await indexDb.count() === 0) {
    log('face database is empty: nothing to compare face with');
    document.body.style.background = 'black';
    dom.delete.style.display = 'none';
    return false;
  }
  const db = await indexDb.load();
  const descriptors = db.map((rec) => rec.descriptor).filter((desc) => desc.length > 0);
  const res = human.match(current.face.embedding, descriptors, matchOptions);
  current.record = db[res.index] || null;
  if (current.record) {
    log(`best match: ${current.record.name} | id: ${current.record.id} | similarity: ${Math.round(1000 * res.similarity) / 10}%`);
    dom.name.value = current.record.name;
    dom.source.style.display = '';
    dom.source.getContext('2d')?.putImageData(current.record.image, 0, 0);
  }
  document.body.style.background = res.similarity > options.threshold ? 'darkgreen' : 'maroon';
  return res.similarity > options.threshold;
}
async function main() { // main entry point
  // reset all validation rules before a new run
  ok.faceCount.status = false;
  ok.faceConfidence.status = false;
  ok.facingCenter.status = false;
  ok.blinkDetected.status = false;
  ok.faceSize.status = false;
  ok.antispoofCheck.status = false;
  ok.livenessCheck.status = false;
  ok.age.status = false;
  ok.gender.status = false;
  ok.elapsedMs.val = 0;
  dom.match.style.display = 'none';
  dom.retry.style.display = 'none';
  dom.source.style.display = 'none';
  dom.canvas.style.height = '50%';
  document.body.style.background = 'black';
  await webCam();
  await detectionLoop(); // start detection loop
  startTime = human.now();
  current.face = await validationLoop(); // start validation loop
  dom.canvas.width = current.face.tensor?.shape[1] || options.minSize;
  dom.canvas.height = current.face.tensor?.shape[0] || options.minSize;
  dom.source.width = dom.canvas.width;
  dom.source.height = dom.canvas.height;
  dom.canvas.style.width = '';
  dom.match.style.display = 'flex';
  dom.save.style.display = 'flex';
  dom.delete.style.display = 'flex';
  dom.retry.style.display = 'block';
  if (!allOk()) { // were all criteria met?
    log('did not find valid face');
    return false;
  }
  return detectFace();
}
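// one-time initialization: start webcam, load models, report database state, and wire up ui events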
async function init() {
  log('human version:', human.version, '| tfjs version:', human.tf.version['tfjs-core']);
  log('options:', JSON.stringify(options).replace(/{|}|"|\[|\]/g, '').replace(/,/g, ' '));
  log('initializing webcam...');
  await webCam(); // start webcam
  log('loading human models...');
  await human.load(); // preload all models
  log('initializing human...');
  log('face embedding model:', humanConfig.face.description.enabled ? 'faceres' : '', humanConfig.face['mobilefacenet']?.enabled ? 'mobilefacenet' : '', humanConfig.face['insightface']?.enabled ? 'insightface' : '');
  log('loading face database...');
  log('known face records:', await indexDb.count());
  dom.retry.addEventListener('click', main);
  dom.save.addEventListener('click', saveRecords);
  dom.delete.addEventListener('click', deleteRecord);
  await human.warmup(); // warmup function to initialize backend for future faster detection
  await main();
}
window.onload = init;