// @ts-nocheck
/* global tf */
import Human from '../dist/human.esm.js'; // equivalent of @vladmandic/human
// import Human from '../dist/human.esm-nobundle.js'; // this requires that tf is loaded manually and bundled before human can be used
import Menu from './helpers/menu.js';
import GLBench from './helpers/gl-bench.js';
import webRTC from './helpers/webrtc.js';

// const userConfig = {};

let human;

const userConfig = {
  backend: 'humangl',
  async: false,
  profile: false,
  warmup: 'full',
  videoOptimized: false,
  filter: {
    enabled: false,
    flip: false,
  },
  face: {
    enabled: true,
    mesh: { enabled: true },
    iris: { enabled: true },
    description: { enabled: false },
    emotion: { enabled: false },
  },
  hand: { enabled: false },
  gesture: { enabled: false },
  body: { enabled: false, modelPath: 'posenet.json' },
  // body: { enabled: true, modelPath: 'blazepose.json' },
  // object: { enabled: true },
};
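
// note: userConfig is passed both to the Human constructor (new Human(userConfig)) and to each
// human.detect(input, userConfig) call below; it is expected to be merged with the library defaults,
// so only values that differ from the defaults need to be listed here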

// ui options
const ui = {
  baseBackground: 'rgba(50, 50, 50, 1)', // 'grey'
  crop: true, // in video mode, crop camera input to fit or use the full frame
  columns: 2, // when processing sample images create this many columns
  facing: true, // camera facing front or back
  useWorker: false, // use web workers for processing
  worker: 'index-worker.js',
  samples: ['../assets/sample6.jpg', '../assets/sample1.jpg', '../assets/sample4.jpg', '../assets/sample5.jpg', '../assets/sample3.jpg', '../assets/sample2.jpg'],
  compare: '../assets/sample-me.jpg',
  useWebRTC: false, // use webrtc as camera source instead of local webcam
  webRTCServer: 'http://localhost:8002',
  webRTCStream: 'reowhite',
  console: true, // log messages to browser console
  maxFPSframes: 10, // number of frames to keep in fps history
  modelsPreload: true, // preload human models on startup
  modelsWarmup: true, // warmup human models on startup
  busy: false, // internal camera busy flag
  menuWidth: 0, // internal
  menuHeight: 0, // internal
  camera: {}, // internal, holds details of the active webcam
  detectFPS: [], // internal, holds fps values for detection performance
  drawFPS: [], // internal, holds fps values for draw performance
  buffered: true, // should output be buffered between frames
  drawWarmup: false, // debug only, should warmup image processing be displayed on startup
  drawThread: null, // internal, perform draw operations in a separate thread
  detectThread: null, // internal, perform detect operations in a separate thread
  framesDraw: 0, // internal, statistics on frames drawn
  framesDetect: 0, // internal, statistics on frames detected
  bench: true, // show gl fps benchmark window
  lastFrame: 0, // time of last frame processing
  viewportSet: false, // internal, has custom viewport been set
};

// global variables
const menu = {};
let worker;
let bench;
let lastDetectedResult = {};

// helper function: translates json to human readable string
function str(...msg) {
  if (!Array.isArray(msg)) return msg;
  let line = '';
  for (const entry of msg) {
    if (typeof entry === 'object') line += JSON.stringify(entry).replace(/{|}|"|\[|\]/g, '').replace(/,/g, ', ');
    else line += entry;
  }
  return line;
}
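// for example, str('process:', { total: 12, face: 5 }) returns 'process:total:12, face:5'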

// helper function: wrapper around console output
function log(...msg) {
  const dt = new Date();
  const ts = `${dt.getHours().toString().padStart(2, '0')}:${dt.getMinutes().toString().padStart(2, '0')}:${dt.getSeconds().toString().padStart(2, '0')}.${dt.getMilliseconds().toString().padStart(3, '0')}`;
  // eslint-disable-next-line no-console
  if (ui.console) console.log(ts, ...msg);
}

// helper function: updates the status line in the page header
function status(msg) {
  const div = document.getElementById('status');
  if (div) div.innerText = msg;
}

const compare = { enabled: false, original: null };
async function calcSimilarity(result) {
  document.getElementById('compare-container').style.display = compare.enabled ? 'block' : 'none';
  if (!compare.enabled) return;
  if (!result || !result.face || result.face.length === 0 || !result.face[0].embedding) return;
  if (result.face[0].embedding.length <= 64) return;
  if (!compare.original) {
    compare.original = result;
    log('setting face compare baseline:', result.face[0]);
    if (result.face[0].tensor) {
      const enhanced = human.enhance(result.face[0]);
      if (enhanced) {
        const c = document.getElementById('orig');
        const squeeze = enhanced.squeeze();
        const norm = squeeze.div(255);
        human.tf.browser.toPixels(norm, c);
        enhanced.dispose();
        squeeze.dispose();
        norm.dispose();
      }
    } else {
      document.getElementById('compare-canvas').getContext('2d').drawImage(compare.original.canvas, 0, 0, 200, 200);
    }
  }
  const similarity = human.similarity(compare.original.face[0].embedding, result.face[0].embedding);
  document.getElementById('similarity').innerText = `similarity: ${Math.trunc(1000 * similarity) / 10}%`;
}
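// illustrative usage, assuming two detection results with face embeddings are available:
//   const score = human.similarity(resultA.face[0].embedding, resultB.face[0].embedding); // higher score means more similar faces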

// draws processed results and starts processing of the next frame
let lastDraw = performance.now();
async function drawResults(input) {
  const result = lastDetectedResult;
  const canvas = document.getElementById('canvas');

  // update draw fps data
  ui.drawFPS.push(1000 / (performance.now() - lastDraw));
  if (ui.drawFPS.length > ui.maxFPSframes) ui.drawFPS.shift();
  lastDraw = performance.now();

  // draw fps chart
  await menu.process.updateChart('FPS', ui.detectFPS);

  // get updated canvas
  if (ui.buffered || !result.canvas) {
    const image = await human.image(input);
    result.canvas = image.canvas;
    human.tf.dispose(image.tensor);
  }

  // draw image from video
  const ctx = canvas.getContext('2d');
  ctx.fillStyle = ui.baseBackground;
  ctx.fillRect(0, 0, canvas.width, canvas.height);
  if (result.canvas) {
    if (result.canvas.width !== canvas.width) canvas.width = result.canvas.width;
    if (result.canvas.height !== canvas.height) canvas.height = result.canvas.height;
    ctx.drawImage(result.canvas, 0, 0, result.canvas.width, result.canvas.height, 0, 0, result.canvas.width, result.canvas.height);
  } else {
    ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);
  }

  // draw all results
  human.draw.face(canvas, result.face);
  human.draw.body(canvas, result.body);
  human.draw.hand(canvas, result.hand);
  human.draw.object(canvas, result.object);
  human.draw.gesture(canvas, result.gesture);
  await calcSimilarity(result);

  // update log
  const engine = human.tf.engine();
  const gpu = engine.backendInstance ? `gpu: ${(engine.backendInstance.numBytesInGPU ? engine.backendInstance.numBytesInGPU : 0).toLocaleString()} bytes` : '';
  const memory = `system: ${engine.state.numBytes.toLocaleString()} bytes ${gpu} | tensors: ${engine.state.numTensors.toLocaleString()}`;
  const processing = result.canvas ? `processing: ${result.canvas.width} x ${result.canvas.height}` : '';
  const avgDetect = Math.trunc(10 * ui.detectFPS.reduce((a, b) => a + b, 0) / ui.detectFPS.length) / 10;
  const avgDraw = Math.trunc(10 * ui.drawFPS.reduce((a, b) => a + b, 0) / ui.drawFPS.length) / 10;
  const warning = (ui.detectFPS.length > 5) && (avgDetect < 5) ? '<font color="lightcoral">warning: your performance is low: try switching to a higher performance backend, lowering resolution or disabling some models</font>' : '';
  document.getElementById('log').innerHTML = `
    video: ${ui.camera.name} | facing: ${ui.camera.facing} | screen: ${window.innerWidth} x ${window.innerHeight} camera: ${ui.camera.width} x ${ui.camera.height} ${processing}<br>
    backend: ${human.tf.getBackend()} | ${memory}<br>
    performance: ${str(result.performance)}ms FPS process:${avgDetect} refresh:${avgDraw}<br>
    ${warning}<br>
  `;

  ui.framesDraw++;
  ui.lastFrame = performance.now();
  // if buffered, schedule the next draw immediately; frame rate is still limited since JS is single-threaded
  if (ui.buffered) {
    ui.drawThread = requestAnimationFrame(() => drawResults(input));
  } else if (!ui.buffered && ui.drawThread) {
    log('stopping buffered refresh');
    cancelAnimationFrame(ui.drawThread);
    ui.drawThread = null;
  }
}

// setup webcam
async function setupCamera() {
  if (ui.busy) return null;
  ui.busy = true;
  const video = document.getElementById('video');
  const canvas = document.getElementById('canvas');
  const output = document.getElementById('log');
  if (ui.useWebRTC) {
    status('setting up webrtc connection');
    try {
      video.onloadeddata = () => ui.camera = { name: ui.webRTCStream, width: video.videoWidth, height: video.videoHeight, facing: 'default' };
      await webRTC(ui.webRTCServer, ui.webRTCStream, video);
    } catch (err) {
      log(err);
    } finally {
      status('');
    }
    return '';
  }
  const live = video.srcObject ? ((video.srcObject.getVideoTracks()[0].readyState === 'live') && (video.readyState > 2) && (!video.paused)) : false;
  let msg = '';
  status('setting up camera');
  // setup webcam; note that navigator.mediaDevices requires that the page is accessed via https
  if (!navigator.mediaDevices) {
    msg = 'camera access not supported';
    output.innerText += `\n${msg}`;
    log(msg);
    status(msg);
    ui.busy = false;
    return msg;
  }
  let stream;
  const constraints = {
    audio: false,
    video: { facingMode: ui.facing ? 'user' : 'environment', resizeMode: ui.crop ? 'crop-and-scale' : 'none' },
  };
  if (window.innerWidth > window.innerHeight) constraints.video.width = { ideal: window.innerWidth };
  else constraints.video.height = { ideal: (window.innerHeight - document.getElementById('menubar').offsetHeight) };
  try {
    stream = await navigator.mediaDevices.getUserMedia(constraints);
  } catch (err) {
    if (err.name === 'PermissionDeniedError' || err.name === 'NotAllowedError') msg = 'camera permission denied';
    else if (err.name === 'SourceUnavailableError') msg = 'camera not available';
    else msg = `camera error: ${err.message || err}`;
    output.innerText += `\n${msg}`;
    status(msg);
    log('camera error:', err);
    ui.busy = false;
    return msg;
  }
  if (stream) video.srcObject = stream;
  else {
    ui.busy = false;
    return 'camera stream empty';
  }
  const track = stream.getVideoTracks()[0];
  const settings = track.getSettings();
  // log('camera constraints:', constraints, 'window:', { width: window.innerWidth, height: window.innerHeight }, 'settings:', settings, 'track:', track);
  ui.camera = { name: track.label.toLowerCase(), width: settings.width, height: settings.height, facing: settings.facingMode === 'user' ? 'front' : 'back' };
  return new Promise((resolve) => {
    video.onloadeddata = async () => {
      video.width = video.videoWidth;
      video.height = video.videoHeight;
      canvas.width = video.width;
      canvas.height = video.height;
      canvas.style.width = canvas.width > canvas.height ? '100vw' : '';
      canvas.style.height = canvas.width > canvas.height ? '' : '100vh';
      ui.menuWidth.input.setAttribute('value', video.width);
      ui.menuHeight.input.setAttribute('value', video.height);
      // silly font resizing for paint-on-canvas since viewport can be zoomed
      if (live) video.play();
      // eslint-disable-next-line no-use-before-define
      if (live && !ui.detectThread) runHumanDetect(video, canvas);
      ui.busy = false;
      // do once more because onresize events can be delayed or skipped
      // if (video.width > window.innerWidth) await setupCamera();
      status('');
      resolve();
    };
  });
}

function initPerfMonitor() {
  if (!bench) {
    const gl = null;
    // const gl = human.tf.engine().backend.gpgpu.gl;
    // if (!gl) log('bench cannot get tensorflow webgl context');
    bench = new GLBench(gl, {
      trackGPU: false, // this is really slow
      chartHz: 20,
      chartLen: 20,
    });
    bench.begin();
  }
}

// wrapper for worker.postMessage that creates worker if one does not exist
function webWorker(input, image, canvas, timestamp) {
  if (!worker) {
    // create new webworker and add event handler only once
    log('creating worker thread');
    worker = new Worker(ui.worker, { type: 'module' });
    // after receiving message from webworker, parse & draw results and send new frame for processing
    worker.addEventListener('message', (msg) => {
      if (msg.data.result.performance && msg.data.result.performance.total) ui.detectFPS.push(1000 / msg.data.result.performance.total);
      if (ui.detectFPS.length > ui.maxFPSframes) ui.detectFPS.shift();
      if (ui.bench) {
        if (!bench) initPerfMonitor();
        bench.nextFrame(timestamp);
      }
      if (document.getElementById('gl-bench')) document.getElementById('gl-bench').style.display = ui.bench ? 'block' : 'none';
      lastDetectedResult = msg.data.result;
      ui.framesDetect++;
      if (!ui.drawThread) drawResults(input);
      // eslint-disable-next-line no-use-before-define
      ui.detectThread = requestAnimationFrame((now) => runHumanDetect(input, canvas, now));
    });
  }
  // pass image data as arraybuffer to worker by reference to avoid copy
  worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, userConfig }, [image.data.buffer]);
}
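// the message protocol above implies a worker roughly like the following; this is only an illustrative
// sketch, the actual implementation lives in index-worker.js and may differ in details:
//   import Human from '../dist/human.esm.js';
//   const human = new Human();
//   onmessage = async (msg) => {
//     const image = new ImageData(new Uint8ClampedArray(msg.data.image), msg.data.width, msg.data.height);
//     const result = await human.detect(image, msg.data.userConfig);
//     postMessage({ result }); // any non-transferable fields such as result.canvas would need to be stripped first
//   };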

// main processing function when input is webcam, can use direct invocation or web worker
function runHumanDetect(input, canvas, timestamp) {
  // if live video
  const live = input.srcObject && (input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState > 2) && (!input.paused);
  if (!live && input.srcObject) {
    // stop ui refresh
    if (ui.drawThread) cancelAnimationFrame(ui.drawThread);
    if (ui.detectThread) cancelAnimationFrame(ui.detectThread);
    ui.drawThread = null;
    ui.detectThread = null;
    // if we want to continue and camera is not ready, retry in 0.5sec, else just give up
    if (input.paused) log('camera paused');
    else if ((input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState <= 2)) setTimeout(() => runHumanDetect(input, canvas), 500);
    else log(`camera not ready: track state: ${input.srcObject.getVideoTracks()[0].readyState} stream state: ${input.readyState}`);
    clearTimeout(ui.drawThread);
    ui.drawThread = null;
    log('frame statistics: process:', ui.framesDetect, 'refresh:', ui.framesDraw);
    log('memory', human.tf.engine().memory());
    return;
  }
  status('');
  if (ui.useWorker) {
    // get image data from video as we cannot send html objects to webworker
    const offscreen = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(canvas.width, canvas.height) : document.createElement('canvas');
    offscreen.width = canvas.width;
    offscreen.height = canvas.height;
    const ctx = offscreen.getContext('2d');
    ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);
    const data = ctx.getImageData(0, 0, canvas.width, canvas.height);
    // perform detection in worker
    webWorker(input, data, canvas, timestamp);
  } else {
    human.detect(input, userConfig).then((result) => {
      if (result.performance && result.performance.total) ui.detectFPS.push(1000 / result.performance.total);
      if (ui.detectFPS.length > ui.maxFPSframes) ui.detectFPS.shift();
      if (ui.bench) {
        if (!bench) initPerfMonitor();
        bench.nextFrame(timestamp);
      }
      if (document.getElementById('gl-bench')) document.getElementById('gl-bench').style.display = ui.bench ? 'block' : 'none';
      if (result.error) {
        log(result.error);
        document.getElementById('log').innerText += `\nHuman error: ${result.error}`;
      } else {
        lastDetectedResult = result;
        if (!ui.drawThread) drawResults(input);
        ui.framesDetect++;
        ui.detectThread = requestAnimationFrame((now) => runHumanDetect(input, canvas, now));
      }
    });
  }
}
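// note on threading: detection and drawing are decoupled; runHumanDetect reschedules itself via
// requestAnimationFrame (ui.detectThread) after every result, while drawResults keeps its own
// requestAnimationFrame loop (ui.drawThread) whenever ui.buffered is enabled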

// main processing function when input is image, can use direct invocation or web worker
async function processImage(input) {
  return new Promise((resolve) => {
    const image = new Image();
    image.onload = async () => {
      log('processing image:', encodeURI(image.src));
      const canvas = document.getElementById('canvas');
      image.width = image.naturalWidth;
      image.height = image.naturalHeight;
      canvas.width = human.config.filter.width && human.config.filter.width > 0 ? human.config.filter.width : image.naturalWidth;
      canvas.height = human.config.filter.height && human.config.filter.height > 0 ? human.config.filter.height : image.naturalHeight;
      const result = await human.detect(image, userConfig);
      lastDetectedResult = result;
      await drawResults(image);
      const thumb = document.createElement('canvas');
      thumb.className = 'thumbnail';
      thumb.width = window.innerWidth / (ui.columns + 0.1);
      thumb.height = thumb.width * canvas.height / canvas.width;
      if (result.face && result.face.length > 0) {
        thumb.title = result.face.map((a, i) => `#${i} face: ${Math.trunc(100 * a.faceConfidence)}% box: ${Math.trunc(100 * a.boxConfidence)}% age: ${Math.trunc(a.age)} gender: ${Math.trunc(100 * a.genderConfidence)}% ${a.gender}`).join(' | ');
      } else {
        thumb.title = 'no face detected';
      }
      const ctx = thumb.getContext('2d');
      ctx.drawImage(canvas, 0, 0, canvas.width, canvas.height, 0, 0, thumb.width, thumb.height);
      document.getElementById('samples-container').appendChild(thumb);
      image.src = '';
      resolve(true);
    };
    image.src = input;
  });
}

// initialize camera and start or pause live video detection
async function detectVideo() {
  document.getElementById('samples-container').style.display = 'none';
  document.getElementById('canvas').style.display = 'block';
  const video = document.getElementById('video');
  const canvas = document.getElementById('canvas');
  if ((video.srcObject !== null) && !video.paused) {
    document.getElementById('play').style.display = 'block';
    document.getElementById('btnStartText').innerHTML = 'start video';
    status('paused');
    video.pause();
  } else {
    const cameraError = await setupCamera();
    if (!cameraError) {
      document.getElementById('play').style.display = 'none';
      for (const m of Object.values(menu)) m.hide();
      status('');
      document.getElementById('btnStartText').innerHTML = 'pause video';
      await video.play();
      if (!ui.detectThread) runHumanDetect(video, canvas);
    } else {
      status(cameraError);
    }
  }
}

// run detection on all sample images
async function detectSampleImages() {
  userConfig.videoOptimized = false; // force disable video optimizations
  document.getElementById('play').style.display = 'none';
  document.getElementById('canvas').style.display = 'none';
  document.getElementById('samples-container').style.display = 'block';
  log('running detection of sample images');
  status('processing images');
  document.getElementById('samples-container').innerHTML = '';
  for (const m of Object.values(menu)) m.hide();
  for (const image of ui.samples) await processImage(image);
  status('');
}

function setupMenu() {
  const x = [`${document.getElementById('btnDisplay').offsetLeft}px`, `${document.getElementById('btnImage').offsetLeft}px`, `${document.getElementById('btnProcess').offsetLeft}px`, `${document.getElementById('btnModel').offsetLeft}px`];
  const top = `${document.getElementById('menubar').clientHeight}px`;

  menu.display = new Menu(document.body, '', { top, left: x[0] });
  menu.display.addBool('perf monitor', ui, 'bench', (val) => ui.bench = val);
  menu.display.addBool('buffer output', ui, 'buffered', (val) => ui.buffered = val);
  menu.display.addBool('crop & scale', ui, 'crop', (val) => {
    ui.crop = val;
    setupCamera();
  });
  menu.display.addBool('camera facing', ui, 'facing', (val) => {
    ui.facing = val;
    setupCamera();
  });
  menu.display.addHTML('<hr style="border-style: inset; border-color: dimgray">');
  menu.display.addBool('use depth', human.draw.options, 'useDepth');
  menu.display.addBool('use curves', human.draw.options, 'useCurves');
  menu.display.addBool('print labels', human.draw.options, 'drawLabels');
  menu.display.addBool('draw points', human.draw.options, 'drawPoints');
  menu.display.addBool('draw boxes', human.draw.options, 'drawBoxes');
  menu.display.addBool('draw polygons', human.draw.options, 'drawPolygons');
  menu.display.addBool('fill polygons', human.draw.options, 'fillPolygons');

  menu.image = new Menu(document.body, '', { top, left: x[1] });
  menu.image.addBool('enabled', human.config.filter, 'enabled', (val) => human.config.filter.enabled = val);
  ui.menuWidth = menu.image.addRange('image width', human.config.filter, 'width', 0, 3840, 10, (val) => human.config.filter.width = parseInt(val));
  ui.menuHeight = menu.image.addRange('image height', human.config.filter, 'height', 0, 2160, 10, (val) => human.config.filter.height = parseInt(val));
  menu.image.addHTML('<hr style="border-style: inset; border-color: dimgray">');
  menu.image.addRange('brightness', human.config.filter, 'brightness', -1.0, 1.0, 0.05, (val) => human.config.filter.brightness = parseFloat(val));
  menu.image.addRange('contrast', human.config.filter, 'contrast', -1.0, 1.0, 0.05, (val) => human.config.filter.contrast = parseFloat(val));
  menu.image.addRange('sharpness', human.config.filter, 'sharpness', 0, 1.0, 0.05, (val) => human.config.filter.sharpness = parseFloat(val));
  menu.image.addRange('blur', human.config.filter, 'blur', 0, 20, 1, (val) => human.config.filter.blur = parseInt(val));
  menu.image.addRange('saturation', human.config.filter, 'saturation', -1.0, 1.0, 0.05, (val) => human.config.filter.saturation = parseFloat(val));
  menu.image.addRange('hue', human.config.filter, 'hue', 0, 360, 5, (val) => human.config.filter.hue = parseInt(val));
  menu.image.addRange('pixelate', human.config.filter, 'pixelate', 0, 32, 1, (val) => human.config.filter.pixelate = parseInt(val));
  menu.image.addHTML('<hr style="border-style: inset; border-color: dimgray">');
  menu.image.addBool('negative', human.config.filter, 'negative', (val) => human.config.filter.negative = val);
  menu.image.addBool('sepia', human.config.filter, 'sepia', (val) => human.config.filter.sepia = val);
  menu.image.addBool('vintage', human.config.filter, 'vintage', (val) => human.config.filter.vintage = val);
  menu.image.addBool('kodachrome', human.config.filter, 'kodachrome', (val) => human.config.filter.kodachrome = val);
  menu.image.addBool('technicolor', human.config.filter, 'technicolor', (val) => human.config.filter.technicolor = val);
  menu.image.addBool('polaroid', human.config.filter, 'polaroid', (val) => human.config.filter.polaroid = val);

  menu.process = new Menu(document.body, '', { top, left: x[2] });
  menu.process.addList('backend', ['cpu', 'webgl', 'wasm', 'humangl'], human.config.backend, (val) => human.config.backend = val);
  menu.process.addBool('async operations', human.config, 'async', (val) => human.config.async = val);
  menu.process.addBool('use web worker', ui, 'useWorker');
  menu.process.addHTML('<hr style="border-style: inset; border-color: dimgray">');
  menu.process.addLabel('model parameters');
  menu.process.addRange('max objects', human.config.face.detector, 'maxDetected', 1, 50, 1, (val) => {
    human.config.face.detector.maxDetected = parseInt(val);
    human.config.body.maxDetected = parseInt(val);
    human.config.hand.maxDetected = parseInt(val);
  });
  menu.process.addRange('skip frames', human.config.face.detector, 'skipFrames', 0, 50, 1, (val) => {
    human.config.face.detector.skipFrames = parseInt(val);
    human.config.face.emotion.skipFrames = parseInt(val);
    human.config.hand.skipFrames = parseInt(val);
  });
  menu.process.addRange('min confidence', human.config.face.detector, 'minConfidence', 0.0, 1.0, 0.05, (val) => {
    human.config.face.detector.minConfidence = parseFloat(val);
    human.config.face.emotion.minConfidence = parseFloat(val);
    human.config.hand.minConfidence = parseFloat(val);
  });
  menu.process.addRange('overlap', human.config.face.detector, 'iouThreshold', 0.1, 1.0, 0.05, (val) => {
    human.config.face.detector.iouThreshold = parseFloat(val);
    human.config.hand.iouThreshold = parseFloat(val);
  });
  menu.process.addBool('rotation detection', human.config.face.detector, 'rotation', (val) => {
    human.config.face.detector.rotation = val;
    human.config.hand.rotation = val;
  });
  menu.process.addHTML('<hr style="border-style: inset; border-color: dimgray">');
  menu.process.addButton('process sample images', 'process images', () => detectSampleImages());
  menu.process.addHTML('<hr style="border-style: inset; border-color: dimgray">');
  menu.process.addChart('FPS', 'FPS');

  menu.models = new Menu(document.body, '', { top, left: x[3] });
  menu.models.addBool('face detect', human.config.face, 'enabled', (val) => human.config.face.enabled = val);
  menu.models.addBool('face mesh', human.config.face.mesh, 'enabled', (val) => human.config.face.mesh.enabled = val);
  menu.models.addBool('face iris', human.config.face.iris, 'enabled', (val) => human.config.face.iris.enabled = val);
  menu.models.addBool('face description', human.config.face.description, 'enabled', (val) => human.config.face.description.enabled = val);
  menu.models.addBool('face emotion', human.config.face.emotion, 'enabled', (val) => human.config.face.emotion.enabled = val);
  menu.models.addHTML('<hr style="border-style: inset; border-color: dimgray">');
  menu.models.addBool('body pose', human.config.body, 'enabled', (val) => human.config.body.enabled = val);
  menu.models.addBool('hand pose', human.config.hand, 'enabled', (val) => human.config.hand.enabled = val);
  menu.models.addHTML('<hr style="border-style: inset; border-color: dimgray">');
  menu.models.addBool('gestures', human.config.gesture, 'enabled', (val) => human.config.gesture.enabled = val);
  menu.models.addHTML('<hr style="border-style: inset; border-color: dimgray">');
  menu.models.addBool('object detection', human.config.object, 'enabled', (val) => human.config.object.enabled = val);
  menu.models.addHTML('<hr style="border-style: inset; border-color: dimgray">');
  menu.models.addBool('face compare', compare, 'enabled', (val) => {
    compare.enabled = val;
    compare.original = null;
  });

  document.getElementById('btnDisplay').addEventListener('click', (evt) => menu.display.toggle(evt));
  document.getElementById('btnImage').addEventListener('click', (evt) => menu.image.toggle(evt));
  document.getElementById('btnProcess').addEventListener('click', (evt) => menu.process.toggle(evt));
  document.getElementById('btnModel').addEventListener('click', (evt) => menu.models.toggle(evt));
  document.getElementById('btnStart').addEventListener('click', () => detectVideo());
  document.getElementById('play').addEventListener('click', () => detectVideo());
}

async function resize() {
  const viewportScale = Math.min(1, Math.round(100 * window.innerWidth / 960) / 100);
  if (!ui.viewportSet) {
    const viewport = document.querySelector('meta[name=viewport]');
    viewport.setAttribute('content', `width=device-width, shrink-to-fit=yes, minimum-scale=0.2, maximum-scale=2.0, user-scalable=yes, initial-scale=${viewportScale}`);
    ui.viewportSet = true;
  }
  const x = [`${document.getElementById('btnDisplay').offsetLeft}px`, `${document.getElementById('btnImage').offsetLeft}px`, `${document.getElementById('btnProcess').offsetLeft}px`, `${document.getElementById('btnModel').offsetLeft}px`];
  const top = `${document.getElementById('menubar').clientHeight - 3}px`;
  menu.display.menu.style.top = top;
  menu.image.menu.style.top = top;
  menu.process.menu.style.top = top;
  menu.models.menu.style.top = top;
  menu.display.menu.style.left = x[0];
  menu.image.menu.style.left = x[1];
  menu.process.menu.style.left = x[2];
  menu.models.menu.style.left = x[3];
  const fontSize = Math.trunc(10 * (1 - viewportScale)) + 16;
  document.documentElement.style.fontSize = `${fontSize}px`;
  human.draw.options.font = `small-caps ${fontSize + 4}px "Segoe UI"`;
  setupCamera();
}

async function drawWarmup(res) {
  const canvas = document.getElementById('canvas');
  canvas.width = res.canvas.width;
  canvas.height = res.canvas.height;
  const ctx = canvas.getContext('2d');
  ctx.drawImage(res.canvas, 0, 0, res.canvas.width, res.canvas.height, 0, 0, canvas.width, canvas.height);
  await human.draw.all(canvas, res);
}

async function main() {
  window.addEventListener('unhandledrejection', (evt) => {
    // eslint-disable-next-line no-console
    console.error(evt.reason || evt);
    document.getElementById('log').innerHTML = evt?.reason?.message || evt?.reason || evt;
    status('exception error');
    evt.preventDefault();
  });

  log('demo starting ...');

  // parse url search params
  const params = new URLSearchParams(location.search);
  log('url options:', params.toString());
  if (params.has('worker')) {
    ui.useWorker = JSON.parse(params.get('worker'));
    log('overriding worker:', ui.useWorker);
  }
  if (params.has('backend')) {
    userConfig.backend = params.get('backend');
    log('overriding backend:', userConfig.backend);
  }
  if (params.has('preload')) {
    ui.modelsPreload = JSON.parse(params.get('preload'));
    log('overriding preload:', ui.modelsPreload);
  }
  if (params.has('warmup')) {
    ui.modelsWarmup = JSON.parse(params.get('warmup'));
    log('overriding warmup:', ui.modelsWarmup);
  }
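  // for example, opening the page as index.html?worker=true&backend=wasm&preload=false&warmup=false
  // (the exact host path depends on how the demo is served) enables the web worker, switches the
  // backend to wasm and skips model preload and warmup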
  // create instance of human
  human = new Human(userConfig);
  if (typeof tf !== 'undefined') {
    log('TensorFlow external version:', tf.version);
    human.tf = tf; // use externally loaded version of tfjs
  }
  // setup main menu
  await setupMenu();
  await resize();
  document.getElementById('log').innerText = `Human: version ${human.version}`;
  // preload models
  if (ui.modelsPreload && !ui.useWorker) {
    status('loading');
    await human.load(userConfig); // this is not required, just pre-loads all models
    const loaded = Object.keys(human.models).filter((a) => human.models[a]);
    log('demo loaded models:', loaded);
  }
  // warmup models
  if (ui.modelsWarmup && !ui.useWorker) {
    status('initializing');
    const res = await human.warmup(userConfig); // this is not required, just pre-warms all models for faster initial inference
    if (res && res.canvas && ui.drawWarmup) await drawWarmup(res);
  }
  // setup camera
  await setupCamera();
  // ready
  status('human: ready');
  document.getElementById('loader').style.display = 'none';
  document.getElementById('play').style.display = 'block';
  log('demo ready...');
  for (const m of Object.values(menu)) m.hide();
}

window.onload = main;
window.onresize = resize;