/**
 * Human demo for browsers
 * @default Human Library
 * @summary <https://github.com/vladmandic/human>
 * @author <https://github.com/vladmandic>
 * @copyright <https://github.com/vladmandic>
 * @license MIT
 */
2021-11-11 17:30:55 +01:00
import { Human , TensorLike , FaceResult } from '../../dist/human.esm.js' ; // equivalent of @vladmandic/Human
import * as indexDb from './indexdb' ; // methods to deal with indexdb
let db : Array < indexDb.FaceRecord > = [ ] ; // face descriptor database stored in indexdb
let face : FaceResult ; // face result from human.detect
let current : indexDb.FaceRecord ; // currently matched db record
2021-11-09 16:39:23 +01:00
const humanConfig = { // user configuration for human, used to fine-tune behavior
modelBasePath : '../../models' ,
filter : { equalization : true } , // lets run with histogram equilizer
face : {
enabled : true ,
2021-11-11 17:30:55 +01:00
detector : { rotation : true , return : true } , // return tensor is used to get detected face image
2021-11-09 16:39:23 +01:00
description : { enabled : true } ,
iris : { enabled : true } , // needed to determine gaze direction
emotion : { enabled : false } , // not needed
2021-11-09 20:37:50 +01:00
antispoof : { enabled : true } , // enable optional antispoof module
liveness : { enabled : true } , // enable optional liveness module
2021-11-09 16:39:23 +01:00
} ,
body : { enabled : false } ,
hand : { enabled : false } ,
object : { enabled : false } ,
2021-11-11 17:30:55 +01:00
gesture : { enabled : true } , // parses face and iris gestures
2021-11-09 16:39:23 +01:00
} ;
const options = {
2021-11-11 17:30:55 +01:00
minConfidence : 0.6 , // overal face confidence for box, face, gender, real, live
2021-11-09 16:39:23 +01:00
minSize : 224 , // min input to face descriptor model before degradation
maxTime : 10000 , // max time before giving up
2021-11-09 20:37:50 +01:00
blinkMin : 10 , // minimum duration of a valid blink
blinkMax : 800 , // maximum duration of a valid blink
2021-11-11 17:30:55 +01:00
threshold : 0.5 , // minimum similarity
2021-11-09 16:39:23 +01:00
} ;
2021-11-09 20:37:50 +01:00
const ok = { // must meet all rules
faceCount : false ,
faceConfidence : false ,
facingCenter : false ,
blinkDetected : false ,
faceSize : false ,
antispoofCheck : false ,
livenessCheck : false ,
elapsedMs : 0 , // total time while waiting for valid face
} ;
const allOk = ( ) = > ok . faceCount && ok . faceSize && ok . blinkDetected && ok . facingCenter && ok . faceConfidence && ok . antispoofCheck && ok . livenessCheck ;
const blink = { // internal timers for blink start/end/duration
start : 0 ,
end : 0 ,
time : 0 ,
} ;
2021-11-11 17:30:55 +01:00
// let db: Array<{ name: string, source: string, embedding: number[] }> = []; // holds loaded face descriptor database
2021-11-09 16:39:23 +01:00
const human = new Human ( humanConfig ) ; // create instance of human with overrides from user configuration
human . env [ 'perfadd' ] = false ; // is performance data showing instant or total values
human . draw . options . font = 'small-caps 18px "Lato"' ; // set font used to draw labels when using draw methods
human . draw . options . lineHeight = 20 ;
const dom = { // grab instances of dom objects so we dont have to look them up later
video : document.getElementById ( 'video' ) as HTMLVideoElement ,
canvas : document.getElementById ( 'canvas' ) as HTMLCanvasElement ,
log : document.getElementById ( 'log' ) as HTMLPreElement ,
fps : document.getElementById ( 'fps' ) as HTMLPreElement ,
status : document.getElementById ( 'status' ) as HTMLPreElement ,
2021-11-11 17:30:55 +01:00
match : document.getElementById ( 'match' ) as HTMLDivElement ,
name : document.getElementById ( 'name' ) as HTMLInputElement ,
save : document.getElementById ( 'save' ) as HTMLSpanElement ,
delete : document . getElementById ( 'delete' ) as HTMLSpanElement ,
retry : document.getElementById ( 'retry' ) as HTMLDivElement ,
source : document.getElementById ( 'source' ) as HTMLCanvasElement ,
2021-11-09 16:39:23 +01:00
} ;
const timestamp = { detect : 0 , draw : 0 } ; // holds information used to calculate performance and possible memory leaks
const fps = { detect : 0 , draw : 0 } ; // holds calculated fps information for both detect and screen refresh
let startTime = 0 ;
const log = ( . . . msg ) = > { // helper method to output messages
dom . log . innerText += msg . join ( ' ' ) + '\n' ;
// eslint-disable-next-line no-console
console . log ( . . . msg ) ;
} ;
const printFPS = ( msg ) = > dom . fps . innerText = msg ; // print status element
const printStatus = ( msg ) = > dom . status . innerText = 'status: ' + JSON . stringify ( msg ) . replace ( /"|{|}/g , '' ) . replace ( /,/g , ' | ' ) ; // print status element
async function webCam() { // initialize webcam
printFPS ( 'starting webcam...' ) ;
// @ts-ignore resizeMode is not yet defined in tslib
const cameraOptions : MediaStreamConstraints = { audio : false , video : { facingMode : 'user' , resizeMode : 'none' , width : { ideal : document.body.clientWidth } } } ;
const stream : MediaStream = await navigator . mediaDevices . getUserMedia ( cameraOptions ) ;
const ready = new Promise ( ( resolve ) = > { dom . video . onloadeddata = ( ) = > resolve ( true ) ; } ) ;
dom . video . srcObject = stream ;
dom . video . play ( ) ;
await ready ;
dom . canvas . width = dom . video . videoWidth ;
dom . canvas . height = dom . video . videoHeight ;
2021-11-11 17:30:55 +01:00
if ( human . env . initial ) log ( 'video:' , dom . video . videoWidth , dom . video . videoHeight , '|' , stream . getVideoTracks ( ) [ 0 ] . label ) ;
2021-11-09 16:39:23 +01:00
dom . canvas . onclick = ( ) = > { // pause when clicked on screen and resume on next click
if ( dom . video . paused ) dom . video . play ( ) ;
else dom . video . pause ( ) ;
} ;
}
async function detectionLoop() { // main detection loop
if ( ! dom . video . paused ) {
2021-11-11 17:30:55 +01:00
if ( face && face . tensor ) human . tf . dispose ( face . tensor ) ; // dispose previous tensor
2021-11-09 16:39:23 +01:00
await human . detect ( dom . video ) ; // actual detection; were not capturing output in a local variable as it can also be reached via human.result
const now = human . now ( ) ;
fps . detect = 1000 / ( now - timestamp . detect ) ;
timestamp . detect = now ;
requestAnimationFrame ( detectionLoop ) ; // start new frame immediately
}
}
2021-11-11 17:30:55 +01:00
async function validationLoop ( ) : Promise < FaceResult > { // main screen refresh loop
2021-11-09 16:39:23 +01:00
const interpolated = await human . next ( human . result ) ; // smoothen result using last-known results
await human . draw . canvas ( dom . video , dom . canvas ) ; // draw canvas to screen
await human . draw . all ( dom . canvas , interpolated ) ; // draw labels, boxes, lines, etc.
const now = human . now ( ) ;
fps . draw = 1000 / ( now - timestamp . draw ) ;
timestamp . draw = now ;
printFPS ( ` fps: ${ fps . detect . toFixed ( 1 ) . padStart ( 5 , ' ' ) } detect | ${ fps . draw . toFixed ( 1 ) . padStart ( 5 , ' ' ) } draw ` ) ; // write status
ok . faceCount = human . result . face . length === 1 ; // must be exactly detected face
2021-11-09 20:37:50 +01:00
if ( ok . faceCount ) { // skip the rest if no face
const gestures : string [ ] = Object . values ( human . result . gesture ) . map ( ( gesture ) = > gesture . gesture ) ; // flatten all gestures
if ( gestures . includes ( 'blink left eye' ) || gestures . includes ( 'blink right eye' ) ) blink . start = human . now ( ) ; // blink starts when eyes get closed
if ( blink . start > 0 && ! gestures . includes ( 'blink left eye' ) && ! gestures . includes ( 'blink right eye' ) ) blink . end = human . now ( ) ; // if blink started how long until eyes are back open
ok . blinkDetected = ok . blinkDetected || ( blink . end - blink . start > options . blinkMin && blink . end - blink . start < options . blinkMax ) ;
if ( ok . blinkDetected && blink . time === 0 ) blink . time = Math . trunc ( blink . end - blink . start ) ;
ok . facingCenter = gestures . includes ( 'facing center' ) && gestures . includes ( 'looking center' ) ; // must face camera and look at camera
ok . faceConfidence = ( human . result . face [ 0 ] . boxScore || 0 ) > options . minConfidence && ( human . result . face [ 0 ] . faceScore || 0 ) > options . minConfidence && ( human . result . face [ 0 ] . genderScore || 0 ) > options . minConfidence ;
ok . antispoofCheck = ( human . result . face [ 0 ] . real || 0 ) > options . minConfidence ;
ok . livenessCheck = ( human . result . face [ 0 ] . live || 0 ) > options . minConfidence ;
ok . faceSize = human . result . face [ 0 ] . box [ 2 ] >= options . minSize && human . result . face [ 0 ] . box [ 3 ] >= options . minSize ;
}
2021-11-09 16:39:23 +01:00
printStatus ( ok ) ;
if ( allOk ( ) ) { // all criteria met
dom . video . pause ( ) ;
2021-11-11 17:30:55 +01:00
return human . result . face [ 0 ] ;
2021-11-09 16:39:23 +01:00
}
if ( ok . elapsedMs > options . maxTime ) { // give up
dom . video . pause ( ) ;
2021-11-11 17:30:55 +01:00
return human . result . face [ 0 ] ;
2021-11-09 16:39:23 +01:00
} else { // run again
ok . elapsedMs = Math . trunc ( human . now ( ) - startTime ) ;
return new Promise ( ( resolve ) = > {
setTimeout ( async ( ) = > {
const res = await validationLoop ( ) ; // run validation loop until conditions are met
2021-11-11 17:30:55 +01:00
if ( res ) resolve ( human . result . face [ 0 ] ) ; // recursive promise resolve
2021-11-09 16:39:23 +01:00
} , 30 ) ; // use to slow down refresh from max refresh rate to target of 30 fps
} ) ;
}
}
2021-11-11 17:30:55 +01:00
async function saveRecords() {
if ( dom . name . value . length > 0 ) {
const image = dom . canvas . getContext ( '2d' ) ? . getImageData ( 0 , 0 , dom . canvas . width , dom . canvas . height ) as ImageData ;
const rec = { id : 0 , name : dom.name.value , descriptor : face.embedding as number [ ] , image } ;
await indexDb . save ( rec ) ;
log ( 'saved face record:' , rec . name ) ;
db . push ( rec ) ;
} else {
log ( 'invalid name' ) ;
}
}
2021-11-09 16:39:23 +01:00
2021-11-11 17:30:55 +01:00
async function deleteRecord() {
if ( current . id > 0 ) {
await indexDb . remove ( current ) ;
}
2021-11-09 20:37:50 +01:00
}
2021-11-11 17:30:55 +01:00
async function detectFace() {
2021-11-11 23:01:10 +01:00
dom . canvas . getContext ( '2d' ) ? . clearRect ( 0 , 0 , options . minSize , options . minSize ) ;
2021-11-11 17:30:55 +01:00
if ( ! face || ! face . tensor || ! face . embedding ) return 0 ;
human . tf . browser . toPixels ( face . tensor as unknown as TensorLike , dom . canvas ) ;
const descriptors = db . map ( ( rec ) = > rec . descriptor ) ;
const res = await human . match ( face . embedding , descriptors ) ;
if ( res . index === - 1 ) {
log ( 'no matches' ) ;
dom . delete . style . display = 'none' ;
dom . source . style . display = 'none' ;
} else {
current = db [ res . index ] ;
log ( ` best match: ${ current . name } | id: ${ current . id } | similarity: ${ Math . round ( 1000 * res . similarity ) / 10 } % ` ) ;
dom . delete . style . display = '' ;
dom . name . value = current . name ;
dom . source . style . display = '' ;
dom . source . getContext ( '2d' ) ? . putImageData ( current . image , 0 , 0 ) ;
}
return res . similarity > options . threshold ;
2021-11-09 16:39:23 +01:00
}
async function main() { // main entry point
2021-11-11 17:30:55 +01:00
ok . faceCount = false ;
ok . faceConfidence = false ;
ok . facingCenter = false ;
ok . blinkDetected = false ;
ok . faceSize = false ;
ok . antispoofCheck = false ;
ok . livenessCheck = false ;
ok . elapsedMs = 0 ;
dom . match . style . display = 'none' ;
dom . retry . style . display = 'none' ;
document . body . style . background = 'black' ;
await webCam ( ) ;
await detectionLoop ( ) ; // start detection loop
startTime = human . now ( ) ;
face = await validationLoop ( ) ; // start validation loop
dom . fps . style . display = 'none' ;
2021-11-11 23:01:10 +01:00
dom . canvas . width = face ? . tensor ? . shape [ 1 ] || options . minSize ;
dom . canvas . height = face ? . tensor ? . shape [ 0 ] || options . minSize ;
dom . source . width = dom . canvas . width ;
dom . source . height = dom . canvas . height ;
dom . canvas . style . width = '' ;
dom . match . style . display = 'flex' ;
dom . retry . style . display = 'block' ;
2021-11-11 17:30:55 +01:00
if ( ! allOk ( ) ) {
2021-11-11 23:01:10 +01:00
log ( 'did not find valid face' ) ;
return false ;
2021-11-11 17:30:55 +01:00
} else {
// log('found valid face');
const res = await detectFace ( ) ;
document . body . style . background = res ? 'darkgreen' : 'maroon' ;
return res ;
}
}
async function init() {
2021-11-09 16:39:23 +01:00
log ( 'human version:' , human . version , '| tfjs version:' , human . tf . version_core ) ;
2021-11-11 17:30:55 +01:00
log ( 'options:' , JSON . stringify ( options ) . replace ( /{|}|"|\[|\]/g , '' ) . replace ( /,/g , ' ' ) ) ;
2021-11-09 16:39:23 +01:00
printFPS ( 'loading...' ) ;
2021-11-11 17:30:55 +01:00
db = await indexDb . load ( ) ; // load face database from indexdb
log ( 'loaded face records:' , db . length ) ;
await webCam ( ) ; // start webcam
2021-11-09 16:39:23 +01:00
await human . load ( ) ; // preload all models
printFPS ( 'initializing...' ) ;
2021-11-11 17:30:55 +01:00
dom . retry . addEventListener ( 'click' , main ) ;
dom . save . addEventListener ( 'click' , saveRecords ) ;
dom . delete . addEventListener ( 'click' , deleteRecord ) ;
2021-11-09 16:39:23 +01:00
await human . warmup ( ) ; // warmup function to initialize backend for future faster detection
2021-11-11 17:30:55 +01:00
await main ( ) ;
2021-11-09 16:39:23 +01:00
}
2021-11-11 17:30:55 +01:00
window . onload = init ;